BertMaskedLM Task Model and Preprocessor (#774)
* bert_masked_lm init

* Merge branch 'master' into BertMaskedLM

* WIP : BERT MASKED LM

* Added Tests

* Black Formatting

* Fixed Format

* Fixed formatting

* black + lint.sh

* Reformat code

* Updated Docstring for bert_tokenizer

* Updated masked_lm_generator.py

* fixed linting

* Changed Boolean Variables to Numeric

* Formatted using shell/format.sh

* Updated bert_masked_lm.py

* typo fix

---------

Co-authored-by: Matt Watson <[email protected]>
Cyber-Machine and mattdangerw authored Mar 3, 2023
1 parent a995dd6 commit 91fe6bd
Showing 6 changed files with 579 additions and 6 deletions.
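
Taken together, the new task model and preprocessor can be used end to end roughly as follows. This is a minimal sketch assembled from the docstring examples in this commit (the `"bert_base_en"` preset name comes from those examples), not additional code from the PR:

```python
import keras_nlp
from tensorflow import keras

# Raw string features; the attached preprocessor tokenizes and dynamically
# masks them during `fit()`.
features = ["The quick brown fox jumped.", "I forgot my homework."]

masked_lm = keras_nlp.models.BertMaskedLM.from_preset("bert_base_en")
masked_lm.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)
masked_lm.fit(x=features, batch_size=2)
```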
4 changes: 4 additions & 0 deletions keras_nlp/models/__init__.py
@@ -24,6 +24,10 @@
from keras_nlp.models.bart.bart_tokenizer import BartTokenizer
from keras_nlp.models.bert.bert_backbone import BertBackbone
from keras_nlp.models.bert.bert_classifier import BertClassifier
from keras_nlp.models.bert.bert_masked_lm import BertMaskedLM
from keras_nlp.models.bert.bert_masked_lm_preprocessor import (
    BertMaskedLMPreprocessor,
)
from keras_nlp.models.bert.bert_preprocessor import BertPreprocessor
from keras_nlp.models.bert.bert_tokenizer import BertTokenizer
from keras_nlp.models.deberta_v3.deberta_v3_backbone import DebertaV3Backbone
152 changes: 152 additions & 0 deletions keras_nlp/models/bert/bert_masked_lm.py
@@ -0,0 +1,152 @@
# Copyright 2022 The KerasNLP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BERT masked LM model."""

import copy

from tensorflow import keras

from keras_nlp.layers.masked_lm_head import MaskedLMHead
from keras_nlp.models.bert.bert_backbone import BertBackbone
from keras_nlp.models.bert.bert_backbone import bert_kernel_initializer
from keras_nlp.models.bert.bert_masked_lm_preprocessor import (
    BertMaskedLMPreprocessor,
)
from keras_nlp.models.bert.bert_presets import backbone_presets
from keras_nlp.models.task import Task
from keras_nlp.utils.python_utils import classproperty


@keras.utils.register_keras_serializable(package="keras_nlp")
class BertMaskedLM(Task):
    """An end-to-end BERT model for the masked language modeling task.

    This model will train BERT on a masked language modeling task.
    The model will predict labels for a number of masked tokens in the
    input data. For usage of this model with pre-trained weights, see the
    `from_preset()` method.

    This model can optionally be configured with a `preprocessor` layer, in
    which case inputs can be raw string features during `fit()`, `predict()`,
    and `evaluate()`. Inputs will be tokenized and dynamically masked during
    training and evaluation. This is done by default when creating the model
    with `from_preset()`.

    Disclaimer: Pre-trained models are provided on an "as is" basis, without
    warranties or conditions of any kind.

    Args:
        backbone: A `keras_nlp.models.BertBackbone` instance.
        preprocessor: A `keras_nlp.models.BertMaskedLMPreprocessor` or
            `None`. If `None`, this model will not apply preprocessing, and
            inputs should be preprocessed before calling the model.

    Example usage:

    Raw string inputs and pretrained backbone.
    ```python
    # Create a dataset with raw string features. Labels are inferred.
    features = ["The quick brown fox jumped.", "I forgot my homework."]

    # Create a BertMaskedLM with a pretrained backbone and further train
    # on an MLM task.
    masked_lm = keras_nlp.models.BertMaskedLM.from_preset(
        "bert_base_en",
    )
    masked_lm.compile(
        loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    )
    masked_lm.fit(x=features, batch_size=2)
    ```

    Preprocessed inputs and custom backbone.
    ```python
    # Create a preprocessed dataset where 0 is the mask token.
    preprocessed_features = {
        "token_ids": tf.constant(
            [[1, 2, 0, 4, 0, 6, 7, 8]] * 2, shape=(2, 8)
        ),
        "padding_mask": tf.constant(
            [[1, 1, 1, 1, 1, 1, 1, 1]] * 2, shape=(2, 8)
        ),
        "mask_positions": tf.constant([[2, 4]] * 2, shape=(2, 2)),
        "segment_ids": tf.constant(
            [[0, 0, 0, 0, 0, 0, 0, 0]] * 2, shape=(2, 8)
        ),
    }
    # Labels are the original masked values.
    labels = [[3, 5]] * 2

    # Randomly initialize a BERT encoder.
    backbone = keras_nlp.models.BertBackbone(
        vocabulary_size=50265,
        num_layers=12,
        num_heads=12,
        hidden_dim=768,
        intermediate_dim=3072,
        max_sequence_length=12,
    )
    # Create a BERT masked LM model and fit the data.
    masked_lm = keras_nlp.models.BertMaskedLM(
        backbone,
        preprocessor=None,
    )
    masked_lm.compile(
        loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    )
    masked_lm.fit(x=preprocessed_features, y=labels, batch_size=2)
    ```
    """

    def __init__(
        self,
        backbone,
        preprocessor=None,
        **kwargs,
    ):
        inputs = {
            **backbone.input,
            "mask_positions": keras.Input(
                shape=(None,), dtype="int32", name="mask_positions"
            ),
        }
        backbone_outputs = backbone(backbone.input)
        outputs = MaskedLMHead(
            vocabulary_size=backbone.vocabulary_size,
            embedding_weights=backbone.token_embedding.embeddings,
            intermediate_activation="gelu",
            kernel_initializer=bert_kernel_initializer(),
            name="mlm_head",
        )(backbone_outputs["sequence_output"], inputs["mask_positions"])

        # Instantiate using the Functional API Model constructor.
        super().__init__(
            inputs=inputs,
            outputs=outputs,
            include_preprocessing=preprocessor is not None,
            **kwargs,
        )
        # All references to `self` below this line.
        self.backbone = backbone
        self.preprocessor = preprocessor

    @classproperty
    def backbone_cls(cls):
        return BertBackbone

    @classproperty
    def preprocessor_cls(cls):
        return BertMaskedLMPreprocessor

    @classproperty
    def presets(cls):
        return copy.deepcopy(backbone_presets)
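
For orientation, the wiring in `__init__` gathers the encoder's `sequence_output` at `mask_positions` and projects it to vocabulary logits via `MaskedLMHead` (which also ties the output projection to the token embeddings via `embedding_weights` and applies an intermediate `gelu` transform). The rough standalone sketch below illustrates only the gather-and-project idea with made-up shapes; it is not the actual `MaskedLMHead` implementation:

```python
import tensorflow as tf

# Illustrative shapes only.
batch, seq_len, hidden_dim, vocab_size = 2, 8, 16, 30
sequence_output = tf.random.normal((batch, seq_len, hidden_dim))
mask_positions = tf.constant([[2, 4], [1, 6]])

# Gather the hidden states at the masked positions: (batch, num_masks, hidden_dim).
masked_hidden = tf.gather(sequence_output, mask_positions, batch_dims=1)

# Project each gathered state to vocabulary logits: (batch, num_masks, vocab_size).
logits = tf.keras.layers.Dense(vocab_size)(masked_hidden)
print(logits.shape)  # (2, 2, 30)
```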
138 changes: 138 additions & 0 deletions keras_nlp/models/bert/bert_masked_lm_preprocessor.py
@@ -0,0 +1,138 @@
# Copyright 2022 The KerasNLP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""BERT masked language model preprocessor layer."""

from absl import logging
from tensorflow import keras

from keras_nlp.layers.masked_lm_mask_generator import MaskedLMMaskGenerator
from keras_nlp.models.bert.bert_preprocessor import BertPreprocessor
from keras_nlp.utils.keras_utils import pack_x_y_sample_weight


@keras.utils.register_keras_serializable(package="keras_nlp")
class BertMaskedLMPreprocessor(BertPreprocessor):
    """BERT preprocessing for the masked language modeling task.

    This preprocessing layer will prepare inputs for a masked language modeling
    task. It is primarily intended for use with the
    `keras_nlp.models.BertMaskedLM` task model. Preprocessing will occur in
    multiple steps.

    - Tokenize any number of input segments using the `tokenizer`.
    - Pack the inputs together using a `keras_nlp.layers.MultiSegmentPacker`
      with the appropriate `"[CLS]"`, `"[SEP]"`, and `"[PAD]"` tokens.
    - Randomly select non-special tokens to mask, controlled by
      `mask_selection_rate`.
    - Construct a `(x, y, sample_weight)` tuple suitable for training with a
      `keras_nlp.models.BertMaskedLM` task model.

    Examples:
    ```python
    # Load the preprocessor from a preset.
    preprocessor = keras_nlp.models.BertMaskedLMPreprocessor.from_preset(
        "bert_base_en"
    )

    # Tokenize and mask a single sentence.
    sentence = tf.constant("The quick brown fox jumped.")
    preprocessor(sentence)

    # Tokenize and mask a batch of sentences.
    sentences = tf.constant(
        ["The quick brown fox jumped.", "Call me Ishmael."]
    )
    preprocessor(sentences)

    # Tokenize and mask a dataset of sentences.
    features = tf.constant(
        ["The quick brown fox jumped.", "Call me Ishmael."]
    )
    ds = tf.data.Dataset.from_tensor_slices((features))
    ds = ds.map(preprocessor, num_parallel_calls=tf.data.AUTOTUNE)

    # Alternatively, you can create a preprocessor from your own vocabulary.
    # The usage is exactly the same as above.
    vocab = ["[PAD]", "[UNK]", "[CLS]", "[SEP]", "[MASK]"]
    vocab += ["THE", "QUICK", "BROWN", "FOX"]
    vocab += ["Call", "me", "Ishmael"]
    tokenizer = keras_nlp.models.BertTokenizer(vocabulary=vocab)
    preprocessor = keras_nlp.models.BertMaskedLMPreprocessor(tokenizer)
    ```
    """

    def __init__(
        self,
        tokenizer,
        sequence_length=512,
        truncate="round_robin",
        mask_selection_rate=0.15,
        mask_selection_length=96,
        mask_token_rate=0.8,
        random_token_rate=0.1,
        **kwargs,
    ):
        super().__init__(
            tokenizer,
            sequence_length=sequence_length,
            truncate=truncate,
            **kwargs,
        )

        self.masker = MaskedLMMaskGenerator(
            mask_selection_rate=mask_selection_rate,
            mask_selection_length=mask_selection_length,
            mask_token_rate=mask_token_rate,
            random_token_rate=random_token_rate,
            vocabulary_size=tokenizer.vocabulary_size(),
            mask_token_id=tokenizer.mask_token_id,
            unselectable_token_ids=[
                tokenizer.cls_token_id,
                tokenizer.sep_token_id,
                tokenizer.pad_token_id,
            ],
        )

    def get_config(self):
        config = super().get_config()
        config.update(
            {
                "mask_selection_rate": self.masker.mask_selection_rate,
                "mask_selection_length": self.masker.mask_selection_length,
                "mask_token_rate": self.masker.mask_token_rate,
                "random_token_rate": self.masker.random_token_rate,
            }
        )
        return config

    def call(self, x, y=None, sample_weight=None):
        if y is not None or sample_weight is not None:
            logging.warning(
                f"{self.__class__.__name__} generates `y` and `sample_weight` "
                "based on your input data, but your data already contains `y` "
                "or `sample_weight`. Your `y` and `sample_weight` will be "
                "ignored."
            )

        x = super().call(x)

        token_ids, padding_mask, segment_ids = (
            x["token_ids"],
            x["padding_mask"],
            x["segment_ids"],
        )
        masker_outputs = self.masker(token_ids)
        x = {
            "token_ids": masker_outputs["token_ids"],
            "padding_mask": padding_mask,
            "segment_ids": segment_ids,
            "mask_positions": masker_outputs["mask_positions"],
        }
        y = masker_outputs["mask_ids"]
        sample_weight = masker_outputs["mask_weights"]
        return pack_x_y_sample_weight(x, y, sample_weight)
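
To make the `(x, y, sample_weight)` packing in `call()` concrete, here is a small sketch using the toy vocabulary from the docstring above; the shortened `sequence_length` and `mask_selection_length` are arbitrary choices, and the selected mask positions are random:

```python
import tensorflow as tf
import keras_nlp

vocab = ["[PAD]", "[UNK]", "[CLS]", "[SEP]", "[MASK]"]
vocab += ["THE", "QUICK", "BROWN", "FOX"]
vocab += ["Call", "me", "Ishmael"]
tokenizer = keras_nlp.models.BertTokenizer(vocabulary=vocab)
preprocessor = keras_nlp.models.BertMaskedLMPreprocessor(
    tokenizer,
    sequence_length=12,
    mask_selection_length=4,
)

x, y, sample_weight = preprocessor(tf.constant(["THE QUICK BROWN FOX"]))
# x carries "token_ids", "padding_mask", "segment_ids" and "mask_positions";
# y carries the original ids of the masked tokens; sample_weight is 1.0 for
# real mask slots and 0.0 for padded ones.
print(sorted(x.keys()))
```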
