Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable pytest and bazel tests. #1243

Merged
merged 10 commits into from
Mar 9, 2020
Merged
2 changes: 1 addition & 1 deletion .dockerignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
.git
.github
*.Dockerfile

.coverage*
# C extensions
*.so
16 changes: 15 additions & 1 deletion .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,20 @@ env:
OLDEST_PY_VERSION: '3.5'

jobs:
test-with-bazel:
name: Test with bazel
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v1
with:
python-version: ${{ env.OLDEST_PY_VERSION }}
- name: Build wheels
run: |
pip install -r tools/tests_dependencies/pytest.txt -r build_deps/build-requirements-cpu.txt -r requirements.txt
bash tools/tests_dependencies/bazel_linux.sh
python configure.py --no-deps
bazel test -c opt -k --test_timeout 300,450,1200,3600 --test_output=errors //tensorflow_addons/...
release-wheel:
name: Build release wheels
runs-on: ${{ matrix.os }}
Expand Down Expand Up @@ -73,7 +87,7 @@ jobs:

upload-wheels:
name: Publish wheels to PyPi
needs: [release-wheel, test-release-wheel]
needs: [release-wheel, test-release-wheel, test-with-bazel]
runs-on: ubuntu-18.04
strategy:
matrix:
Expand Down
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -37,3 +37,6 @@ wheels/
/bazel-*
/artifacts
.bazelrc

.coverage*
htmlcov
6 changes: 4 additions & 2 deletions tensorflow_addons/layers/wrappers_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@
# limitations under the License.
# =============================================================================

import os
import tempfile
from absl.testing import parameterized

import numpy as np
Expand Down Expand Up @@ -124,12 +126,12 @@ def test_model_build(self, base_layer_fn, input_shape):
["LSTM", lambda: tf.keras.layers.LSTM(1), [10, 10]],
)
def test_save_file_h5(self, base_layer, input_shape):
self.create_tempfile("wrapper_test_model.h5")
base_layer = base_layer()
wn_conv = wrappers.WeightNormalization(base_layer)
model = tf.keras.Sequential(layers=[wn_conv])
model.build([None] + input_shape)
model.save_weights("wrapper_test_model.h5")
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_weights(os.path.join(tmp_dir, "wrapper_test_model.h5"))

@parameterized.named_parameters(
["Dense", lambda: tf.keras.layers.Dense(1), [1]],
Expand Down
2 changes: 1 addition & 1 deletion tensorflow_addons/optimizers/conditional_gradient_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import tensorflow as tf
from tensorflow_addons.utils import test_utils
import numpy as np
import conditional_gradient as cg_lib
from tensorflow_addons.optimizers import conditional_gradient as cg_lib


@test_utils.run_all_in_graph_and_eager_modes
Expand Down
6 changes: 5 additions & 1 deletion tensorflow_addons/optimizers/cyclical_learning_rate_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from tensorflow_addons.utils import test_utils
import numpy as np

import cyclical_learning_rate
from tensorflow_addons.optimizers import cyclical_learning_rate


def _maybe_serialized(lr_decay, serialize_and_deserialize):
Expand All @@ -35,6 +35,7 @@ def _maybe_serialized(lr_decay, serialize_and_deserialize):
@parameterized.named_parameters(("NotSerialized", False), ("Serialized", True))
class CyclicalLearningRateTest(tf.test.TestCase, parameterized.TestCase):
def testTriangularCyclicalLearningRate(self, serialize):
self.skipTest("Failing. See https://github.com/tensorflow/addons/issues/1203")
initial_learning_rate = 0.1
maximal_learning_rate = 1
step_size = 4000
Expand All @@ -61,6 +62,7 @@ def testTriangularCyclicalLearningRate(self, serialize):
self.evaluate(step.assign_add(1))

def testTriangular2CyclicalLearningRate(self, serialize):
self.skipTest("Failing. See https://github.com/tensorflow/addons/issues/1203")
initial_learning_rate = 0.1
maximal_learning_rate = 1
step_size = 4000
Expand Down Expand Up @@ -90,6 +92,7 @@ def testTriangular2CyclicalLearningRate(self, serialize):
self.evaluate(step.assign_add(1))

def testExponentialCyclicalLearningRate(self, serialize):
self.skipTest("Failing. See https://github.com/tensorflow/addons/issues/1203")
initial_learning_rate = 0.1
maximal_learning_rate = 1
step_size = 4000
Expand Down Expand Up @@ -119,6 +122,7 @@ def testExponentialCyclicalLearningRate(self, serialize):
self.evaluate(step.assign_add(1))

def testCustomCyclicalLearningRate(self, serialize):
self.skipTest("Failing. See https://github.com/tensorflow/addons/issues/1203")
initial_learning_rate = 0.1
maximal_learning_rate = 1
step_size = 4000
Expand Down
24 changes: 13 additions & 11 deletions tensorflow_addons/register_test.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,22 @@
import unittest
import sys

import pytest
import tensorflow as tf
from tensorflow_addons.register import register_all, _get_all_shared_objects


class AssertRNNCellTest(unittest.TestCase):
def test_multiple_register(self):
register_all()
register_all()
def test_multiple_register():
register_all()
register_all()


def test_get_all_shared_objects(self):
all_shared_objects = _get_all_shared_objects()
self.assertTrue(len(all_shared_objects) >= 4)
def test_get_all_shared_objects():
all_shared_objects = _get_all_shared_objects()
assert len(all_shared_objects) >= 4

for file in all_shared_objects:
tf.load_op_library(file)
for file in all_shared_objects:
tf.load_op_library(file)


if __name__ == "__main__":
unittest.main()
sys.exit(pytest.main([__file__]))
12 changes: 7 additions & 5 deletions tensorflow_addons/seq2seq/basic_decoder_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
# ==============================================================================
"""Tests for tfa.seq2seq.basic_decoder."""

import sys
import pytest
from absl.testing import parameterized
import numpy as np

Expand Down Expand Up @@ -195,7 +197,7 @@ def testStepWithTrainingHelperMaskedInput(self, use_mask):

self.assertLen(first_state, 2)
self.assertLen(step_state, 2)
self.assertIsInstance(step_outputs, basic_decoder.BasicDecoderOutput)
assert isinstance(step_outputs, basic_decoder.BasicDecoderOutput)
self.assertEqual(
(batch_size, expected_output_depth), step_outputs[0].get_shape()
)
Expand Down Expand Up @@ -805,15 +807,15 @@ def end_fn(sample_ids):
) = my_decoder.step(tf.constant(0), first_inputs, first_state)
batch_size_t = my_decoder.batch_size

self.assertLen(first_state, 2)
assert len(first_state) == 2
self.assertLen(step_state, 2)
self.assertTrue(isinstance(step_outputs, basic_decoder.BasicDecoderOutput))
assert isinstance(step_outputs, basic_decoder.BasicDecoderOutput)
self.assertEqual((batch_size, cell_depth), step_outputs[0].get_shape())
self.assertEqual((batch_size, cell_depth), step_outputs[1].get_shape())
self.assertEqual((batch_size, cell_depth), first_state[0].get_shape())
self.assertEqual((batch_size, cell_depth), first_state[1].get_shape())
self.assertEqual((batch_size, cell_depth), step_state[0].get_shape())
self.assertEqual((batch_size, cell_depth), step_state[1].get_shape())
assert (batch_size, cell_depth) == step_state[1].get_shape()

self.evaluate(tf.compat.v1.global_variables_initializer())
eval_result = self.evaluate(
Expand Down Expand Up @@ -866,4 +868,4 @@ def testRightPaddedSequenceAssertion(self):


if __name__ == "__main__":
tf.test.main()
sys.exit(pytest.main([__file__]))
59 changes: 38 additions & 21 deletions tensorflow_addons/text/skip_gram_ops_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

import csv
import os
import tempfile

import tensorflow as tf

from tensorflow_addons import text
Expand Down Expand Up @@ -387,8 +389,8 @@ def test_filter_input_subsample_vocab(self):
self.assertAllEqual([b"the", b"to", b"life", b"and"], output)

@staticmethod
def _make_text_vocab_freq_file():
filepath = os.path.join(tf.compat.v1.test.get_temp_dir(), "vocab_freq.txt")
def _make_text_vocab_freq_file(tmp_dir):
filepath = os.path.join(tmp_dir, "vocab_freq.txt")
with open(filepath, "w") as f:
writer = csv.writer(f)
writer.writerows(
Expand All @@ -397,10 +399,8 @@ def _make_text_vocab_freq_file():
return filepath

@staticmethod
def _make_text_vocab_float_file():
filepath = os.path.join(
tf.compat.v1.test.get_temp_dir(), "vocab_freq_float.txt"
)
def _make_text_vocab_float_file(tmp_dir):
filepath = os.path.join(tmp_dir, "vocab_freq_float.txt")
with open(filepath, "w") as f:
writer = csv.writer(f)
writer.writerows(
Expand Down Expand Up @@ -430,17 +430,18 @@ def test_skip_gram_sample_with_text_vocab_filter_vocab(self):

# b"answer" is not in vocab file, and b"universe"'s frequency is below
# threshold of 3.
vocab_freq_file = self._make_text_vocab_freq_file()

tokens, labels = text.skip_gram_sample_with_text_vocab(
input_tensor=input_tensor,
vocab_freq_file=vocab_freq_file,
vocab_token_index=0,
vocab_freq_index=1,
vocab_min_count=3,
min_skips=1,
max_skips=1,
)
with tempfile.TemporaryDirectory() as tmp_dir:
vocab_freq_file = self._make_text_vocab_freq_file(tmp_dir)

tokens, labels = text.skip_gram_sample_with_text_vocab(
input_tensor=input_tensor,
vocab_freq_file=vocab_freq_file,
vocab_token_index=0,
vocab_freq_index=1,
vocab_min_count=3,
min_skips=1,
max_skips=1,
)

expected_tokens, expected_labels = self._split_tokens_labels(
[
Expand Down Expand Up @@ -510,7 +511,11 @@ def test_skip_gram_sample_with_text_vocab_subsample_vocab(self):
# universe: 2
#
# corpus_size for the above vocab is 40+8+30+20+2 = 100.
text_vocab_freq_file = self._make_text_vocab_freq_file()
with tempfile.TemporaryDirectory() as tmp_dir:
text_vocab_freq_file = self._make_text_vocab_freq_file(tmp_dir)
self._skip_gram_sample_with_text_vocab_subsample_vocab(text_vocab_freq_file)

def _skip_gram_sample_with_text_vocab_subsample_vocab(self, text_vocab_freq_file):
self._text_vocab_subsample_vocab_helper(
vocab_freq_file=text_vocab_freq_file,
vocab_min_count=3,
Expand Down Expand Up @@ -544,7 +549,15 @@ def test_skip_gram_sample_with_text_vocab_subsample_vocab_float(self):
# universe: 0.02
#
# corpus_size for the above vocab is 0.4+0.08+0.3+0.2+0.02 = 1.
text_vocab_float_file = self._make_text_vocab_float_file()
with tempfile.TemporaryDirectory() as tmp_dir:
text_vocab_float_file = self._make_text_vocab_float_file(tmp_dir)
self._skip_gram_sample_with_text_vocab_subsample_vocab_float(
text_vocab_float_file
)

def _skip_gram_sample_with_text_vocab_subsample_vocab_float(
self, text_vocab_float_file
):
self._text_vocab_subsample_vocab_helper(
vocab_freq_file=text_vocab_float_file,
vocab_min_count=0.03,
Expand All @@ -570,9 +583,13 @@ def test_skip_gram_sample_with_text_vocab_subsample_vocab_float(self):
def test_skip_gram_sample_with_text_vocab_errors(self):
"""Tests various errors raised by
skip_gram_sample_with_text_vocab()."""
dummy_input = tf.constant([""])
vocab_freq_file = self._make_text_vocab_freq_file()

with tempfile.TemporaryDirectory() as tmp_dir:
vocab_freq_file = self._make_text_vocab_freq_file(tmp_dir)
self._skip_gram_sample_with_text_vocab_errors(vocab_freq_file)

def _skip_gram_sample_with_text_vocab_errors(self, vocab_freq_file):
dummy_input = tf.constant([""])
invalid_indices = (
# vocab_token_index can't be negative.
(-1, 0),
Expand Down
8 changes: 2 additions & 6 deletions tools/ci_build/builds/release_linux.sh
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,8 @@ python3 -m pip install --upgrade setuptools
python3 --version
python3 ./configure.py

## Run bazel test command. Double test timeouts to avoid flakes.
bazel test -c opt -k \
--jobs=auto --test_timeout 300,450,1200,3600 \
--test_output=errors --local_test_jobs=8 \
--crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.1:toolchain \
//tensorflow_addons/...
bash tools/ci_testing/addons_cpu.sh
Copy link
Member

@seanpmorgan seanpmorgan Mar 9, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would think this is an issue. addons_cpu will compile the custom-ops without any particular toolchain:
https://github.com/tensorflow/addons/blob/master/tools/ci_testing/addons_cpu.sh#L60-L64

I thought the compile at the subsequent build would be skipped since they're already created, but they won't be compatible with tf from pypi. However the test-release-wheel is passing, so I wanted to verify with you that this isn't reverting to python_op fallback?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could we add a bazel clean --expunge after this just for sanity.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sure thing. I'll make sure the right toolchaine is used.

bazel clean --expunge

# Build
bazel build \
Expand Down
39 changes: 7 additions & 32 deletions tools/ci_testing/addons_cpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,43 +24,18 @@ if [ "$1" != "--no-deps" ] && [ "$1" != "" ]; then
exit 1
fi

# Make sure we're in the project root path.
SCRIPT_DIR=$( cd ${0%/*} && pwd -P )
ROOT_DIR=$( cd "$SCRIPT_DIR/../.." && pwd -P )
cd $ROOT_DIR
if [[ ! -d "tensorflow_addons" ]]; then
echo "ERROR: PWD: $PWD is not project root"
exit 1
fi

PLATFORM="$(uname -s | tr 'A-Z' 'a-z')"

if [[ ${PLATFORM} == "darwin" ]]; then
N_JOBS=$(sysctl -n hw.ncpu)
else
N_JOBS=$(grep -c ^processor /proc/cpuinfo)
fi

echo ""
echo "Bazel will use ${N_JOBS} concurrent job(s)."
echo ""

export CC_OPT_FLAGS='-mavx'
export TF_NEED_CUDA=0

# Check if python3 is available. On Windows VM it is not.
if [ -x "$(command -v python3)" ]; then
python3 ./configure.py $1
else
python ./configure.py $1
PYTHON_BINARY=python3
else
PYTHON_BINARY=python
fi

$PYTHON_BINARY -m pip install -r tools/tests_dependencies/pytest.txt
$PYTHON_BINARY ./configure.py $1
cat ./.bazelrc

## Run bazel test command. Double test timeouts to avoid flakes.
${BAZEL_PATH:=bazel} test -c opt -k \
--jobs=${N_JOBS} --test_timeout 300,450,1200,3600 \
--test_output=errors --local_test_jobs=8 \
//tensorflow_addons/...

exit $?
bash tools/install_so_files.sh
$PYTHON_BINARY -m pytest --cov=tensorflow_addons -v --durations=25 -n auto ./tensorflow_addons
Loading