From 4e75f9850c22828f1aa2606f086b4d2703a3d2ce Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 17 Apr 2022 16:58:26 -0700 Subject: [PATCH 01/38] Build with TF 2.9 --- .github/workflows/release.yml | 14 +++++++------- CONTRIBUTING.md | 2 +- README.md | 3 ++- tensorflow_addons/utils/resource_loader.py | 4 ++-- tensorflow_addons/version.py | 4 ++-- tools/build_dev_container.sh | 2 +- tools/docker/cpu_tests.Dockerfile | 2 +- tools/install_deps/tensorflow-cpu.txt | 2 +- tools/install_deps/tensorflow.txt | 2 +- tools/run_gpu_tests.sh | 2 +- 10 files changed, 19 insertions(+), 18 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c76dfd1d4b..9443960158 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -42,19 +42,19 @@ jobs: # https://github.com/bazelbuild/bazel/issues/14232#issuecomment-1011247429 os: ['macos-10.15', 'windows-2019', 'ubuntu-18.04'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.6.3', '2.8.0'] + tf-version: ['2.7.1', '2.9.0rc0'] cpu: ['x86'] exclude: - py-version: '3.10' - tf-version: '2.6.3' + tf-version: '2.7.1' include: - os: 'macos-11' cpu: 'arm64' - tf-version: '2.8.0' + tf-version: '2.9.0rc0' py-version: '3.8' - os: 'macos-11' cpu: 'arm64' - tf-version: '2.8.0' + tf-version: '2.9.0rc0' py-version: '3.9' fail-fast: false steps: @@ -99,16 +99,16 @@ jobs: matrix: os: ['macOS', 'Windows', 'Linux'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.8.0'] + tf-version: ['2.9.0rc0'] cpu: ['x86'] include: - os: 'macOS' cpu: 'arm64' - tf-version: '2.8.0' + tf-version: '2.9.0rc0' py-version: '3.8' - os: 'macOS' cpu: 'arm64' - tf-version: '2.8.0' + tf-version: '2.9.0rc0' py-version: '3.9' fail-fast: false if: (github.event_name == 'push' && github.ref == 'refs/heads/master') || github.event_name == 'release' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 679973fe64..2fc7ef68b3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -174,7 +174,7 @@ Just run from the root: ```bash pip install tensorflow==2.8 -# you can use "pip install tensorflow-cpu==2.8.0" too if you're not testing on gpu. +# you can use "pip install tensorflow-cpu==2.9.0rc0" too if you're not testing on gpu. pip install -e ./ ``` diff --git a/README.md b/README.md index 6a766454f1..4ac31ab6ac 100644 --- a/README.md +++ b/README.md @@ -80,7 +80,8 @@ what it was tested against. 
#### Python Op Compatibility Matrix | TensorFlow Addons | TensorFlow | Python | |:----------------------- |:---|:---------- | -| tfa-nightly | 2.6, 2.7, 2.8 | 3.7, 3.8, 3.9, 3.10 | +| tfa-nightly | 2.7, 2.8, 2.9 | 3.7, 3.8, 3.9, 3.10 | +| tensorflow-addons-0.16.1 | 2.7, 2.8, 2.9 |3.7, 3.8, 3.9, 3.10 | | tensorflow-addons-0.16.1 | 2.6, 2.7, 2.8 |3.7, 3.8, 3.9, 3.10 | | tensorflow-addons-0.15.0 | 2.5, 2.6, 2.7 |3.7, 3.8, 3.9 | | tensorflow-addons-0.14.0 | 2.4, 2.5, 2.6 |3.6, 3.7, 3.8, 3.9 | diff --git a/tensorflow_addons/utils/resource_loader.py b/tensorflow_addons/utils/resource_loader.py index 0465754652..7e53842e26 100644 --- a/tensorflow_addons/utils/resource_loader.py +++ b/tensorflow_addons/utils/resource_loader.py @@ -20,8 +20,8 @@ import tensorflow as tf -INCLUSIVE_MIN_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.8.0" -EXCLUSIVE_MAX_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.9.0" +INCLUSIVE_MIN_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.9.0rc0" +EXCLUSIVE_MAX_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.10.0" abi_warning_already_raised = False SKIP_CUSTOM_OPS = False diff --git a/tensorflow_addons/version.py b/tensorflow_addons/version.py index 634677fb78..102024e5cd 100644 --- a/tensorflow_addons/version.py +++ b/tensorflow_addons/version.py @@ -15,8 +15,8 @@ """Define TensorFlow Addons version information.""" # Required TensorFlow version [min, max) -INCLUSIVE_MIN_TF_VERSION = "2.6.0" -EXCLUSIVE_MAX_TF_VERSION = "2.9.0" +INCLUSIVE_MIN_TF_VERSION = "2.7.0" +EXCLUSIVE_MAX_TF_VERSION = "2.10.0" # We follow Semantic Versioning (https://semver.org/) _MAJOR_VERSION = "0" diff --git a/tools/build_dev_container.sh b/tools/build_dev_container.sh index 806da116c5..93128e4031 100755 --- a/tools/build_dev_container.sh +++ b/tools/build_dev_container.sh @@ -4,7 +4,7 @@ set -x -e docker build \ -f tools/docker/dev_container.Dockerfile \ - --build-arg TF_VERSION=2.8.0 \ + --build-arg TF_VERSION=2.9.0rc0 \ --build-arg TF_PACKAGE=tensorflow \ --build-arg PY_VERSION=$PY_VERSION \ --no-cache \ diff --git a/tools/docker/cpu_tests.Dockerfile b/tools/docker/cpu_tests.Dockerfile index d31f7d1748..7ee17d308a 100644 --- a/tools/docker/cpu_tests.Dockerfile +++ b/tools/docker/cpu_tests.Dockerfile @@ -1,7 +1,7 @@ #syntax=docker/dockerfile:1.1.5-experimental FROM python:3.7 as build_wheel -ARG TF_VERSION=2.8.0 +ARG TF_VERSION=2.9.0rc0 RUN pip install --default-timeout=1000 tensorflow-cpu==$TF_VERSION RUN apt-get update && apt-get install -y sudo rsync diff --git a/tools/install_deps/tensorflow-cpu.txt b/tools/install_deps/tensorflow-cpu.txt index bd0ef8c655..c44dc3fc1c 100644 --- a/tools/install_deps/tensorflow-cpu.txt +++ b/tools/install_deps/tensorflow-cpu.txt @@ -1 +1 @@ -tensorflow-cpu~=2.8.0 +tensorflow-cpu~=2.9.0rc0 diff --git a/tools/install_deps/tensorflow.txt b/tools/install_deps/tensorflow.txt index 6fe6f46997..5cae589afd 100644 --- a/tools/install_deps/tensorflow.txt +++ b/tools/install_deps/tensorflow.txt @@ -1 +1 @@ -tensorflow~=2.8.0 \ No newline at end of file +tensorflow~=2.9.0rc0 \ No newline at end of file diff --git a/tools/run_gpu_tests.sh b/tools/run_gpu_tests.sh index a1039deb1d..f5cc92ca20 100644 --- a/tools/run_gpu_tests.sh +++ b/tools/run_gpu_tests.sh @@ -6,7 +6,7 @@ export DOCKER_BUILDKIT=1 docker build \ -f tools/docker/build_wheel.Dockerfile \ --target tfa_gpu_tests \ - --build-arg TF_VERSION=2.8.0 \ + --build-arg TF_VERSION=2.9.0rc0 \ --build-arg PY_VERSION=3.7 \ -t tfa_gpu_tests ./ docker run --rm -t --gpus=all tfa_gpu_tests From 95248d66a6bb750401e173aafec9625d6f929d0b Mon Sep 17 00:00:00 2001 From: 
Sean Morgan Date: Sun, 17 Apr 2022 17:02:13 -0700 Subject: [PATCH 02/38] Remove exclusion --- .github/workflows/release.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9443960158..3d88d17cd4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -44,9 +44,6 @@ jobs: py-version: ['3.7', '3.8', '3.9', '3.10'] tf-version: ['2.7.1', '2.9.0rc0'] cpu: ['x86'] - exclude: - - py-version: '3.10' - tf-version: '2.7.1' include: - os: 'macos-11' cpu: 'arm64' From 7c871d08e176ac885ce0b4337a805f96637aa82d Mon Sep 17 00:00:00 2001 From: bhack Date: Tue, 19 Apr 2022 12:29:02 +0000 Subject: [PATCH 03/38] Adapt to refactored test_utils namespace --- tensorflow_addons/utils/test_utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tensorflow_addons/utils/test_utils.py b/tensorflow_addons/utils/test_utils.py index c376d901e6..2d298e75d2 100644 --- a/tensorflow_addons/utils/test_utils.py +++ b/tensorflow_addons/utils/test_utils.py @@ -26,6 +26,8 @@ from tensorflow_addons.utils import resource_loader # TODO: copy the layer_test implementation in Addons. +if tf.__version__[:3] > "2.8": + from keras.testing_infra.test_utils import layer_test # noqa: F401 if tf.__version__[:3] > "2.5": from keras.testing_utils import layer_test # noqa: F401 else: From 263391b9e860b087fef9f25675998aa4016d9fa8 Mon Sep 17 00:00:00 2001 From: bhack Date: Tue, 19 Apr 2022 12:31:52 +0000 Subject: [PATCH 04/38] Fix condition --- tensorflow_addons/utils/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tensorflow_addons/utils/test_utils.py b/tensorflow_addons/utils/test_utils.py index 2d298e75d2..cf3d0aacda 100644 --- a/tensorflow_addons/utils/test_utils.py +++ b/tensorflow_addons/utils/test_utils.py @@ -28,7 +28,7 @@ # TODO: copy the layer_test implementation in Addons. 
if tf.__version__[:3] > "2.8": from keras.testing_infra.test_utils import layer_test # noqa: F401 -if tf.__version__[:3] > "2.5": +elif tf.__version__[:3] > "2.5": from keras.testing_utils import layer_test # noqa: F401 else: from tensorflow.python.keras.testing_utils import layer_test # noqa: F401 From 54d69d0bd96f0a535df50a7d0d2dc4f0a6a82db5 Mon Sep 17 00:00:00 2001 From: bhack Date: Tue, 19 Apr 2022 12:40:25 +0000 Subject: [PATCH 05/38] Exclude TF 2.7.1 python 10 --- .github/workflows/release.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3d88d17cd4..9443960158 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -44,6 +44,9 @@ jobs: py-version: ['3.7', '3.8', '3.9', '3.10'] tf-version: ['2.7.1', '2.9.0rc0'] cpu: ['x86'] + exclude: + - py-version: '3.10' + tf-version: '2.7.1' include: - os: 'macos-11' cpu: 'arm64' From 55700f9f1a9659b61f1124f174a14d704c5868a3 Mon Sep 17 00:00:00 2001 From: bhack Date: Tue, 19 Apr 2022 13:07:37 +0000 Subject: [PATCH 06/38] Update cuda toolchain for manylinux2014 --- tools/install_so_files.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/install_so_files.sh b/tools/install_so_files.sh index b4cc013a05..f0d30d8cd1 100644 --- a/tools/install_so_files.sh +++ b/tools/install_so_files.sh @@ -1,7 +1,7 @@ set -e -x if [ "$TF_NEED_CUDA" == "1" ]; then - CUDA_FLAG="--crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11:toolchain" + CUDA_FLAG="--crosstool_top=@ubuntu20.04-gcc9_manylinux2014-cuda11.2-cudnn8.1-tensorrt7.2_config_cuda//crosstool:toolchain" fi bazel build $CUDA_FLAG //tensorflow_addons/... From 31540e19b1419c519c0d30e441a67c78494a8c84 Mon Sep 17 00:00:00 2001 From: bhack Date: Tue, 19 Apr 2022 21:02:26 +0000 Subject: [PATCH 07/38] Add TF workspace --- WORKSPACE | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/WORKSPACE b/WORKSPACE index 42da54b7c7..b3b3a0e470 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -18,3 +18,45 @@ tf_configure( ) cuda_configure(name = "local_config_cuda") + +http_archive( + name = "org_tensorflow", + sha256 = "7d736fa5ff3868516359d8370e7b57251b8080243fc38e8089ee0ceb8ee90264", + strip_prefix = "tensorflow-2.9.0-rc0", + urls = [ + "https://github.com/tensorflow/tensorflow/archive/refs/tags/v2.9.0-rc0.tar.gz", + ], +) + + +load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3") + + +tf_workspace3() + + +load("@org_tensorflow//tensorflow:workspace2.bzl", "tf_workspace2") + + +tf_workspace2() + + +load("@org_tensorflow//tensorflow:workspace1.bzl", "tf_workspace1") + + +tf_workspace1() + + +load("@org_tensorflow//tensorflow:workspace0.bzl", "tf_workspace0") + + +tf_workspace0() + + +load("//third_party/toolchains/tf:tf_configure.bzl", "tf_configure") + + +tf_configure(name = "local_config_tf") + + +load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") \ No newline at end of file From 821cd3a417e341c785a11bc474f9508a453e58e7 Mon Sep 17 00:00:00 2001 From: bhack Date: Tue, 19 Apr 2022 21:04:18 +0000 Subject: [PATCH 08/38] Fix format --- WORKSPACE | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index b3b3a0e470..a5f7b64ae2 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -28,35 +28,24 @@ http_archive( ], ) - load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3") - tf_workspace3() - load("@org_tensorflow//tensorflow:workspace2.bzl", "tf_workspace2") - 
tf_workspace2() - load("@org_tensorflow//tensorflow:workspace1.bzl", "tf_workspace1") - tf_workspace1() - load("@org_tensorflow//tensorflow:workspace0.bzl", "tf_workspace0") - tf_workspace0() - load("//third_party/toolchains/tf:tf_configure.bzl", "tf_configure") - tf_configure(name = "local_config_tf") - -load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") \ No newline at end of file +load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") From 76a04e17ed7778f9eafd0dea36d93c49ccc2a876 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 16:08:59 -0700 Subject: [PATCH 09/38] Debug --- .github/workflows/release.yml | 12 ++++++------ CONTRIBUTING.md | 4 ++-- configure.py | 5 ++++- tensorflow_addons/utils/resource_loader.py | 2 +- tools/build_dev_container.sh | 2 +- tools/docker/build_wheel.Dockerfile | 2 +- tools/docker/cpu_tests.Dockerfile | 2 +- tools/install_deps/tensorflow-cpu.txt | 2 +- tools/install_deps/tensorflow.txt | 2 +- tools/run_gpu_tests.sh | 2 +- 10 files changed, 19 insertions(+), 16 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9443960158..ff325a0ff9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -42,7 +42,7 @@ jobs: # https://github.com/bazelbuild/bazel/issues/14232#issuecomment-1011247429 os: ['macos-10.15', 'windows-2019', 'ubuntu-18.04'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.7.1', '2.9.0rc0'] + tf-version: ['2.7.1', '2.9.0rc1'] cpu: ['x86'] exclude: - py-version: '3.10' @@ -50,11 +50,11 @@ jobs: include: - os: 'macos-11' cpu: 'arm64' - tf-version: '2.9.0rc0' + tf-version: '2.9.0rc1' py-version: '3.8' - os: 'macos-11' cpu: 'arm64' - tf-version: '2.9.0rc0' + tf-version: '2.9.0rc1' py-version: '3.9' fail-fast: false steps: @@ -99,16 +99,16 @@ jobs: matrix: os: ['macOS', 'Windows', 'Linux'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.9.0rc0'] + tf-version: ['2.9.0rc1'] cpu: ['x86'] include: - os: 'macOS' cpu: 'arm64' - tf-version: '2.9.0rc0' + tf-version: '2.9.0rc1' py-version: '3.8' - os: 'macOS' cpu: 'arm64' - tf-version: '2.9.0rc0' + tf-version: '2.9.0rc1' py-version: '3.9' fail-fast: false if: (github.event_name == 'push' && github.ref == 'refs/heads/master') || github.event_name == 'release' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2fc7ef68b3..b3e37e4ee6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -174,7 +174,7 @@ Just run from the root: ```bash pip install tensorflow==2.8 -# you can use "pip install tensorflow-cpu==2.9.0rc0" too if you're not testing on gpu. +# you can use "pip install tensorflow-cpu==2.9.0rc1" too if you're not testing on gpu. pip install -e ./ ``` @@ -329,7 +329,7 @@ quickly, as Bazel has great support for caching and distributed testing. 
To test with Bazel: ```bash -python3 -m pip install tensorflow==2.8 +python3 -m pip install tensorflow==2.9rc1 python3 configure.py python3 -m pip install -r tools/install_deps/pytest.txt bazel test -c opt -k \ diff --git a/configure.py b/configure.py index a08c938baa..0994417772 100644 --- a/configure.py +++ b/configure.py @@ -182,8 +182,11 @@ def configure_cuda(): write("test --config=cuda") write("build --config=cuda") + write("build --experimental_repo_remote_exec") write("build:cuda --define=using_cuda=true --define=using_cuda_nvcc=true") - write("build:cuda --crosstool_top=@local_config_cuda//crosstool:toolchain") + write( + "build:cuda --crosstool_top=@ubuntu20.04-gcc9_manylinux2014-cuda11.2-cudnn8.1-tensorrt7.2_config_cuda//crosstool:toolchain" + ) if __name__ == "__main__": diff --git a/tensorflow_addons/utils/resource_loader.py b/tensorflow_addons/utils/resource_loader.py index 7e53842e26..0db98249ea 100644 --- a/tensorflow_addons/utils/resource_loader.py +++ b/tensorflow_addons/utils/resource_loader.py @@ -20,7 +20,7 @@ import tensorflow as tf -INCLUSIVE_MIN_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.9.0rc0" +INCLUSIVE_MIN_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.9.0rc1" EXCLUSIVE_MAX_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.10.0" abi_warning_already_raised = False SKIP_CUSTOM_OPS = False diff --git a/tools/build_dev_container.sh b/tools/build_dev_container.sh index 93128e4031..b4811e5740 100755 --- a/tools/build_dev_container.sh +++ b/tools/build_dev_container.sh @@ -4,7 +4,7 @@ set -x -e docker build \ -f tools/docker/dev_container.Dockerfile \ - --build-arg TF_VERSION=2.9.0rc0 \ + --build-arg TF_VERSION=2.9.0rc1 \ --build-arg TF_PACKAGE=tensorflow \ --build-arg PY_VERSION=$PY_VERSION \ --no-cache \ diff --git a/tools/docker/build_wheel.Dockerfile b/tools/docker/build_wheel.Dockerfile index de402d6290..4f9ef5ce12 100644 --- a/tools/docker/build_wheel.Dockerfile +++ b/tools/docker/build_wheel.Dockerfile @@ -40,7 +40,7 @@ RUN bazel build \ --noshow_loading_progress \ --verbose_failures \ --test_output=errors \ - --crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11:toolchain \ + --crosstool_top=@ubuntu20.04-gcc9_manylinux2014-cuda11.2-cudnn8.1-tensorrt7.2_config_cuda//crosstool:toolchain \ build_pip_pkg && \ # Package Whl bazel-bin/build_pip_pkg artifacts $NIGHTLY_FLAG diff --git a/tools/docker/cpu_tests.Dockerfile b/tools/docker/cpu_tests.Dockerfile index 7ee17d308a..6509d67649 100644 --- a/tools/docker/cpu_tests.Dockerfile +++ b/tools/docker/cpu_tests.Dockerfile @@ -1,7 +1,7 @@ #syntax=docker/dockerfile:1.1.5-experimental FROM python:3.7 as build_wheel -ARG TF_VERSION=2.9.0rc0 +ARG TF_VERSION=2.9.0rc1 RUN pip install --default-timeout=1000 tensorflow-cpu==$TF_VERSION RUN apt-get update && apt-get install -y sudo rsync diff --git a/tools/install_deps/tensorflow-cpu.txt b/tools/install_deps/tensorflow-cpu.txt index c44dc3fc1c..cb5eb189e6 100644 --- a/tools/install_deps/tensorflow-cpu.txt +++ b/tools/install_deps/tensorflow-cpu.txt @@ -1 +1 @@ -tensorflow-cpu~=2.9.0rc0 +tensorflow-cpu~=2.9.0rc1 diff --git a/tools/install_deps/tensorflow.txt b/tools/install_deps/tensorflow.txt index 5cae589afd..62948623f1 100644 --- a/tools/install_deps/tensorflow.txt +++ b/tools/install_deps/tensorflow.txt @@ -1 +1 @@ -tensorflow~=2.9.0rc0 \ No newline at end of file +tensorflow~=2.9.0rc1 \ No newline at end of file diff --git a/tools/run_gpu_tests.sh b/tools/run_gpu_tests.sh index f5cc92ca20..9b521590d9 100644 --- a/tools/run_gpu_tests.sh +++ b/tools/run_gpu_tests.sh @@ -6,7 +6,7 @@ export 
DOCKER_BUILDKIT=1 docker build \ -f tools/docker/build_wheel.Dockerfile \ --target tfa_gpu_tests \ - --build-arg TF_VERSION=2.9.0rc0 \ + --build-arg TF_VERSION=2.9.0rc1 \ --build-arg PY_VERSION=3.7 \ -t tfa_gpu_tests ./ docker run --rm -t --gpus=all tfa_gpu_tests From 025670f14ccf81be85f02681e39ede26fd5e0a66 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 16:14:07 -0700 Subject: [PATCH 10/38] Debug --- configure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.py b/configure.py index 0994417772..1c49a5a154 100644 --- a/configure.py +++ b/configure.py @@ -134,6 +134,7 @@ def create_build_configuration(): write("build --spawn_strategy=standalone") write("build --strategy=Genrule=standalone") + write("build --experimental_repo_remote_exec") write("build -c opt") if Version(tf.__version__) >= Version("2.9.0"): @@ -182,7 +183,6 @@ def configure_cuda(): write("test --config=cuda") write("build --config=cuda") - write("build --experimental_repo_remote_exec") write("build:cuda --define=using_cuda=true --define=using_cuda_nvcc=true") write( "build:cuda --crosstool_top=@ubuntu20.04-gcc9_manylinux2014-cuda11.2-cudnn8.1-tensorrt7.2_config_cuda//crosstool:toolchain" From 7c231fefd6e007a20cea1dda5d80fc55e1453ed3 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 16:18:15 -0700 Subject: [PATCH 11/38] Debug --- WORKSPACE | 6 ------ 1 file changed, 6 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index a5f7b64ae2..28e6328b0c 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -43,9 +43,3 @@ tf_workspace1() load("@org_tensorflow//tensorflow:workspace0.bzl", "tf_workspace0") tf_workspace0() - -load("//third_party/toolchains/tf:tf_configure.bzl", "tf_configure") - -tf_configure(name = "local_config_tf") - -load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") From 228e20e3ed3d33392f9112faf7b0c4845b32bd33 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 16:38:15 -0700 Subject: [PATCH 12/38] Debug --- configure.py | 4 ++++ tensorflow_addons/utils/test_utils.py | 5 +---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/configure.py b/configure.py index 1c49a5a154..295adff92c 100644 --- a/configure.py +++ b/configure.py @@ -132,6 +132,10 @@ def create_build_configuration(): write_action_env("TF_SHARED_LIBRARY_NAME", get_shared_lib_name()) write_action_env("TF_CXX11_ABI_FLAG", tf.sysconfig.CXX11_ABI_FLAG) + print("!!!!!!!!!!!!!!!!!!!!!DEBUG - CX11 FLAG !!!!!!!!!!!!!!!!!!!!!!") + print(tf.sysconfig.CXX11_ABI_FLAG) + print("!!!!!!!!!!!!!!!!!!!!!DEBUG - CX11 FLAG !!!!!!!!!!!!!!!!!!!!!!") + write("build --spawn_strategy=standalone") write("build --strategy=Genrule=standalone") write("build --experimental_repo_remote_exec") diff --git a/tensorflow_addons/utils/test_utils.py b/tensorflow_addons/utils/test_utils.py index cf3d0aacda..fde88c6a26 100644 --- a/tensorflow_addons/utils/test_utils.py +++ b/tensorflow_addons/utils/test_utils.py @@ -25,13 +25,10 @@ from tensorflow_addons import options from tensorflow_addons.utils import resource_loader -# TODO: copy the layer_test implementation in Addons. 
if tf.__version__[:3] > "2.8": from keras.testing_infra.test_utils import layer_test # noqa: F401 -elif tf.__version__[:3] > "2.5": - from keras.testing_utils import layer_test # noqa: F401 else: - from tensorflow.python.keras.testing_utils import layer_test # noqa: F401 + from keras.testing_utils import layer_test # noqa: F401 NUMBER_OF_WORKERS = int(os.environ.get("PYTEST_XDIST_WORKER_COUNT", "1")) WORKER_ID = int(os.environ.get("PYTEST_XDIST_WORKER", "gw0")[2]) From b54cd4f1f462555ad73b2b6dd3a116c80d866298 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 17:11:01 -0700 Subject: [PATCH 13/38] Debug --- configure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.py b/configure.py index 295adff92c..f950be1335 100644 --- a/configure.py +++ b/configure.py @@ -141,7 +141,7 @@ def create_build_configuration(): write("build --experimental_repo_remote_exec") write("build -c opt") - if Version(tf.__version__) >= Version("2.9.0"): + if Version(tf.__version__) >= Version("2.9.0rc1"): glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' else: glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' From 48b7fc1736387368b26cc43621d6908557fb82f4 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 17:35:21 -0700 Subject: [PATCH 14/38] Update auditwheel plat --- configure.py | 4 ---- tools/docker/build_wheel.Dockerfile | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/configure.py b/configure.py index f950be1335..06110ce0ce 100644 --- a/configure.py +++ b/configure.py @@ -132,10 +132,6 @@ def create_build_configuration(): write_action_env("TF_SHARED_LIBRARY_NAME", get_shared_lib_name()) write_action_env("TF_CXX11_ABI_FLAG", tf.sysconfig.CXX11_ABI_FLAG) - print("!!!!!!!!!!!!!!!!!!!!!DEBUG - CX11 FLAG !!!!!!!!!!!!!!!!!!!!!!") - print(tf.sysconfig.CXX11_ABI_FLAG) - print("!!!!!!!!!!!!!!!!!!!!!DEBUG - CX11 FLAG !!!!!!!!!!!!!!!!!!!!!!") - write("build --spawn_strategy=standalone") write("build --strategy=Genrule=standalone") write("build --experimental_repo_remote_exec") diff --git a/tools/docker/build_wheel.Dockerfile b/tools/docker/build_wheel.Dockerfile index 4f9ef5ce12..9147abb1c9 100644 --- a/tools/docker/build_wheel.Dockerfile +++ b/tools/docker/build_wheel.Dockerfile @@ -46,7 +46,7 @@ RUN bazel build \ bazel-bin/build_pip_pkg artifacts $NIGHTLY_FLAG RUN bash tools/releases/tf_auditwheel_patch.sh -RUN python -m auditwheel repair --plat manylinux2010_x86_64 artifacts/*.whl +RUN python -m auditwheel repair --plat manylinux2014_x86_64 artifacts/*.whl RUN ls -al wheelhouse/ # ------------------------------------------------------------------- From 11be2a521012912dd1ee82d4c566fd9d2bf44f36 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 18:12:42 -0700 Subject: [PATCH 15/38] Remove outdated toolchain --- .../gcc7_manylinux2010-nvcc-cuda11/BUILD | 118 -- .../cc_toolchain_config.bzl | 1493 ----------------- .../bin/crosstool_wrapper_driver_is_not_gcc | 281 ---- 3 files changed, 1892 deletions(-) delete mode 100644 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/BUILD delete mode 100644 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/cc_toolchain_config.bzl delete mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/clang/bin/crosstool_wrapper_driver_is_not_gcc diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/BUILD b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/BUILD deleted file mode 100644 index 44172e9f50..0000000000 --- a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/BUILD +++ /dev/null 
@@ -1,118 +0,0 @@ -# This file is expanded from a template by cuda_configure.bzl -# Update cuda_configure.bzl#verify_build_defines when adding new variables. - -load(":cc_toolchain_config.bzl", "cc_toolchain_config") - -licenses(["restricted"]) - -package(default_visibility = ["//visibility:public"]) - -toolchain( - name = "toolchain-linux-x86_64", - exec_compatible_with = [ - "@bazel_tools//platforms:linux", - "@bazel_tools//platforms:x86_64", - ], - target_compatible_with = [ - "@bazel_tools//platforms:linux", - "@bazel_tools//platforms:x86_64", - ], - toolchain = ":cc-compiler-local", - toolchain_type = "@bazel_tools//tools/cpp:toolchain_type", -) - -cc_toolchain_suite( - name = "toolchain", - toolchains = { - "local|compiler": ":cc-compiler-local", - "darwin|compiler": ":cc-compiler-darwin", - "k8": ":cc-compiler-local", - "darwin": ":cc-compiler-darwin", - }, -) - -cc_toolchain( - name = "cc-compiler-local", - all_files = ":crosstool_wrapper_driver_is_not_gcc", - compiler_files = ":empty", - dwp_files = ":empty", - linker_files = ":crosstool_wrapper_driver_is_not_gcc", - objcopy_files = ":empty", - strip_files = ":empty", - # To support linker flags that need to go to the start of command line - # we need the toolchain to support parameter files. Parameter files are - # last on the command line and contain all shared libraries to link, so all - # regular options will be left of them. - supports_param_files = 1, - toolchain_config = ":cc-compiler-local-config", - toolchain_identifier = "local_linux", -) - -cc_toolchain_config( - name = "cc-compiler-local-config", - builtin_include_directories = [ - "/dt7/usr/include/c++/7", - "/dt7/usr/include/c++/7/x86_64-pc-linux-gnu", - "/dt7/usr/include/c++/7/backward", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include-fixed", - "/dt7/usr/include", - "/usr/local/cuda-11.2/targets/x86_64-linux/include", - "/usr/local/cuda-11.2/include", - "/usr/local/cuda-11.2/extras/CUPTI/include", - "/usr/include", - ], - cpu = "local", - extra_no_canonical_prefixes_flags = ["-fno-canonical-system-headers"], - host_compiler_path = "clang/bin/crosstool_wrapper_driver_is_not_gcc", - host_compiler_prefix = "/usr/bin", - host_compiler_warnings = [], - host_unfiltered_compile_flags = [], - linker_bin_path = "/usr/bin", -) - -cc_toolchain( - name = "cc-compiler-darwin", - all_files = ":crosstool_wrapper_driver_is_not_gcc", - compiler_files = ":empty", - dwp_files = ":empty", - linker_files = ":crosstool_wrapper_driver_is_not_gcc", - objcopy_files = ":empty", - strip_files = ":empty", - supports_param_files = 0, - toolchain_config = ":cc-compiler-local-darwin", - toolchain_identifier = "local_darwin", -) - -cc_toolchain_config( - name = "cc-compiler-local-darwin", - builtin_include_directories = [ - "/dt7/usr/include/c++/7", - "/dt7/usr/include/c++/7/x86_64-pc-linux-gnu", - "/dt7/usr/include/c++/7/backward", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include-fixed", - "/dt7/usr/include", - "/usr/local/cuda-11.2/targets/x86_64-linux/include", - "/usr/local/cuda-11.2/include", - "/usr/local/cuda-11.2/extras/CUPTI/include", - "/usr/include", - ], - cpu = "darwin", - extra_no_canonical_prefixes_flags = ["-fno-canonical-system-headers"], - host_compiler_path = "clang/bin/crosstool_wrapper_driver_is_not_gcc", - host_compiler_prefix = "/usr/bin", - host_compiler_warnings = [], - host_unfiltered_compile_flags = [], - linker_bin_path = "/usr/bin", -) - -filegroup( - name = 
"empty", - srcs = [], -) - -filegroup( - name = "crosstool_wrapper_driver_is_not_gcc", - srcs = ["clang/bin/crosstool_wrapper_driver_is_not_gcc"], -) diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/cc_toolchain_config.bzl b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/cc_toolchain_config.bzl deleted file mode 100644 index ba002b4543..0000000000 --- a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/cc_toolchain_config.bzl +++ /dev/null @@ -1,1493 +0,0 @@ -"""cc_toolchain_config rule for configuring CUDA toolchains on Linux, Mac, and Windows.""" - -load( - "@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl", - "action_config", - "env_entry", - "env_set", - "feature", - "feature_set", - "flag_group", - "flag_set", - "tool", - "tool_path", - "variable_with_value", -) -load( - "@bazel_tools//tools/build_defs/cc:action_names.bzl", - "ASSEMBLE_ACTION_NAME", - "CC_FLAGS_MAKE_VARIABLE_ACTION_NAME", - "CLIF_MATCH_ACTION_NAME", - "CPP_COMPILE_ACTION_NAME", - "CPP_HEADER_PARSING_ACTION_NAME", - "CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME", - "CPP_LINK_EXECUTABLE_ACTION_NAME", - "CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME", - "CPP_LINK_STATIC_LIBRARY_ACTION_NAME", - "CPP_MODULE_CODEGEN_ACTION_NAME", - "CPP_MODULE_COMPILE_ACTION_NAME", - "C_COMPILE_ACTION_NAME", - "LINKSTAMP_COMPILE_ACTION_NAME", - "LTO_BACKEND_ACTION_NAME", - "LTO_INDEXING_ACTION_NAME", - "OBJCPP_COMPILE_ACTION_NAME", - "OBJCPP_EXECUTABLE_ACTION_NAME", - "OBJC_ARCHIVE_ACTION_NAME", - "OBJC_COMPILE_ACTION_NAME", - "OBJC_EXECUTABLE_ACTION_NAME", - "OBJC_FULLY_LINK_ACTION_NAME", - "PREPROCESS_ASSEMBLE_ACTION_NAME", - "STRIP_ACTION_NAME", -) - -ACTION_NAMES = struct( - assemble = ASSEMBLE_ACTION_NAME, - c_compile = C_COMPILE_ACTION_NAME, - cc_flags_make_variable = CC_FLAGS_MAKE_VARIABLE_ACTION_NAME, - clif_match = CLIF_MATCH_ACTION_NAME, - cpp_compile = CPP_COMPILE_ACTION_NAME, - cpp_header_parsing = CPP_HEADER_PARSING_ACTION_NAME, - cpp_link_dynamic_library = CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME, - cpp_link_executable = CPP_LINK_EXECUTABLE_ACTION_NAME, - cpp_link_nodeps_dynamic_library = CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME, - cpp_link_static_library = CPP_LINK_STATIC_LIBRARY_ACTION_NAME, - cpp_module_codegen = CPP_MODULE_CODEGEN_ACTION_NAME, - cpp_module_compile = CPP_MODULE_COMPILE_ACTION_NAME, - ld_embed_data = "ld_embed_data", - linkstamp_compile = LINKSTAMP_COMPILE_ACTION_NAME, - lto_backend = LTO_BACKEND_ACTION_NAME, - lto_indexing = LTO_INDEXING_ACTION_NAME, - objc_archive = OBJC_ARCHIVE_ACTION_NAME, - objc_compile = OBJC_COMPILE_ACTION_NAME, - objc_executable = OBJC_EXECUTABLE_ACTION_NAME, - objc_fully_link = OBJC_FULLY_LINK_ACTION_NAME, - objcopy_embed_data = "objcopy_embed_data", - objcpp_compile = OBJCPP_COMPILE_ACTION_NAME, - objcpp_executable = OBJCPP_EXECUTABLE_ACTION_NAME, - preprocess_assemble = PREPROCESS_ASSEMBLE_ACTION_NAME, - strip = STRIP_ACTION_NAME, -) - -def _impl(ctx): - if (ctx.attr.cpu == "darwin"): - toolchain_identifier = "local_darwin" - elif (ctx.attr.cpu == "local"): - toolchain_identifier = "local_linux" - elif (ctx.attr.cpu == "x64_windows"): - toolchain_identifier = "local_windows" - else: - fail("Unreachable") - - host_system_name = "local" - - target_system_name = "local" - - if (ctx.attr.cpu == "darwin"): - target_cpu = "darwin" - elif (ctx.attr.cpu == "local"): - target_cpu = "local" - elif (ctx.attr.cpu == "x64_windows"): - target_cpu = "x64_windows" - else: - fail("Unreachable") - - if (ctx.attr.cpu == "local"): - target_libc = "local" - elif (ctx.attr.cpu == 
"darwin"): - target_libc = "macosx" - elif (ctx.attr.cpu == "x64_windows"): - target_libc = "msvcrt" - else: - fail("Unreachable") - - if (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - compiler = "compiler" - elif (ctx.attr.cpu == "x64_windows"): - compiler = "msvc-cl" - else: - fail("Unreachable") - - abi_version = "local" - - abi_libc_version = "local" - - cc_target_os = None - - builtin_sysroot = None - - all_link_actions = [ - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ] - - cpp_link_dynamic_library_action = action_config( - action_name = ACTION_NAMES.cpp_link_dynamic_library, - implies = [ - "nologo", - "shared_flag", - "linkstamps", - "output_execpath_flags", - "input_param_flags", - "user_link_flags", - "linker_subsystem_flag", - "linker_param_file", - "msvc_env", - "no_stripping", - "has_configured_linker_path", - "def_file", - ], - tools = [tool(path = ctx.attr.msvc_link_path)], - ) - - cpp_link_nodeps_dynamic_library_action = action_config( - action_name = ACTION_NAMES.cpp_link_nodeps_dynamic_library, - implies = [ - "nologo", - "shared_flag", - "linkstamps", - "output_execpath_flags", - "input_param_flags", - "user_link_flags", - "linker_subsystem_flag", - "linker_param_file", - "msvc_env", - "no_stripping", - "has_configured_linker_path", - "def_file", - ], - tools = [tool(path = ctx.attr.msvc_link_path)], - ) - - cpp_link_static_library_action = action_config( - action_name = ACTION_NAMES.cpp_link_static_library, - implies = [ - "nologo", - "archiver_flags", - "input_param_flags", - "linker_param_file", - "msvc_env", - ], - tools = [tool(path = ctx.attr.msvc_lib_path)], - ) - - assemble_action = action_config( - action_name = ACTION_NAMES.assemble, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "sysroot", - ], - tools = [tool(path = ctx.attr.msvc_ml_path)], - ) - - preprocess_assemble_action = action_config( - action_name = ACTION_NAMES.preprocess_assemble, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "sysroot", - ], - tools = [tool(path = ctx.attr.msvc_ml_path)], - ) - - c_compile_action = action_config( - action_name = ACTION_NAMES.c_compile, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "parse_showincludes", - "user_compile_flags", - "sysroot", - "unfiltered_compile_flags", - ], - tools = [tool(path = ctx.attr.msvc_cl_path)], - ) - - cpp_compile_action = action_config( - action_name = ACTION_NAMES.cpp_compile, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "parse_showincludes", - "user_compile_flags", - "sysroot", - "unfiltered_compile_flags", - ], - tools = [tool(path = ctx.attr.msvc_cl_path)], - ) - - cpp_link_executable_action = action_config( - action_name = ACTION_NAMES.cpp_link_executable, - implies = [ - "nologo", - "linkstamps", - "output_execpath_flags", - "input_param_flags", - "user_link_flags", - "linker_subsystem_flag", - "linker_param_file", - "msvc_env", - "no_stripping", - ], - tools = [tool(path = ctx.attr.msvc_link_path)], - ) - - if (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - action_configs = [] - elif (ctx.attr.cpu == "x64_windows"): - action_configs = [ - assemble_action, - preprocess_assemble_action, - c_compile_action, - cpp_compile_action, - cpp_link_executable_action, - cpp_link_dynamic_library_action, - cpp_link_nodeps_dynamic_library_action, - 
cpp_link_static_library_action, - ] - else: - fail("Unreachable") - - no_windows_export_all_symbols_feature = feature(name = "no_windows_export_all_symbols") - - pic_feature = feature( - name = "pic", - enabled = True, - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group(flags = ["-fPIC"], expand_if_available = "pic"), - flag_group( - flags = ["-fPIE"], - expand_if_not_available = "pic", - ), - ], - ), - ], - ) - - preprocessor_defines_feature = feature( - name = "preprocessor_defines", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ], - flag_groups = [ - flag_group( - flags = ["/D%{preprocessor_defines}"], - iterate_over = "preprocessor_defines", - ), - ], - ), - ], - ) - - generate_pdb_file_feature = feature( - name = "generate_pdb_file", - requires = [ - feature_set(features = ["dbg"]), - feature_set(features = ["fastbuild"]), - ], - ) - - linkstamps_feature = feature( - name = "linkstamps", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["%{linkstamp_paths}"], - iterate_over = "linkstamp_paths", - expand_if_available = "linkstamp_paths", - ), - ], - ), - ], - ) - - unfiltered_compile_flags_feature = feature( - name = "unfiltered_compile_flags", - flag_sets = ([ - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flags = ctx.attr.host_unfiltered_compile_flags, - ), - ], - ), - ] if ctx.attr.host_unfiltered_compile_flags else []), - ) - - determinism_feature = feature( - name = "determinism", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = [ - "-Wno-builtin-macro-redefined", - "-D__DATE__=\"redacted\"", - "-D__TIMESTAMP__=\"redacted\"", - "-D__TIME__=\"redacted\"", - ], - ), - ], - ), - ], - ) - - nologo_feature = feature( - name = "nologo", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_static_library, - ], - flag_groups = [flag_group(flags = ["/nologo"])], - ), - ], - ) - - supports_pic_feature = feature(name = "supports_pic", enabled = True) - - output_execpath_flags_feature = feature( - name = "output_execpath_flags", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["/OUT:%{output_execpath}"], - expand_if_available = "output_execpath", - ), - ], - ), - ], - ) - - default_link_flags_feature = feature( - name = "default_link_flags", - enabled = True, - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/MACHINE:X64"])], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - hardening_feature = feature( - name = "hardening", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - 
flag_group( - flags = [ - "-U_FORTIFY_SOURCE", - "-D_FORTIFY_SOURCE=1", - "-fstack-protector", - ], - ), - ], - ), - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [flag_group(flags = ["-Wl,-z,relro,-z,now"])], - ), - flag_set( - actions = [ACTION_NAMES.cpp_link_executable], - flag_groups = [flag_group(flags = ["-pie", "-Wl,-z,relro,-z,now"])], - ), - ], - ) - elif (ctx.attr.cpu == "darwin"): - hardening_feature = feature( - name = "hardening", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = [ - "-U_FORTIFY_SOURCE", - "-D_FORTIFY_SOURCE=1", - "-fstack-protector", - ], - ), - ], - ), - flag_set( - actions = [ACTION_NAMES.cpp_link_executable], - flag_groups = [flag_group(flags = ["-pie"])], - ), - ], - ) - else: - hardening_feature = None - - supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True) - - targets_windows_feature = feature( - name = "targets_windows", - enabled = True, - implies = ["copy_dynamic_libraries_to_binary"], - ) - - msvc_env_feature = feature( - name = "msvc_env", - env_sets = [ - env_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_static_library, - ], - env_entries = [ - env_entry(key = "PATH", value = ctx.attr.msvc_env_path), - env_entry( - key = "INCLUDE", - value = ctx.attr.msvc_env_include, - ), - env_entry(key = "LIB", value = ctx.attr.msvc_env_lib), - env_entry(key = "TMP", value = ctx.attr.msvc_env_tmp), - env_entry(key = "TEMP", value = ctx.attr.msvc_env_tmp), - ], - ), - ], - ) - - linker_subsystem_flag_feature = feature( - name = "linker_subsystem_flag", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/SUBSYSTEM:CONSOLE"])], - ), - ], - ) - - dynamic_link_msvcrt_no_debug_feature = feature( - name = "dynamic_link_msvcrt_no_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MD"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrt.lib"])], - ), - ], - requires = [ - feature_set(features = ["fastbuild"]), - feature_set(features = ["opt"]), - ], - ) - - warnings_feature = feature( - name = "warnings", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = ["-Wall"] + ctx.attr.host_compiler_warnings, - ), - ], - ), - ], - ) - - dynamic_link_msvcrt_debug_feature = feature( - name = "dynamic_link_msvcrt_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MDd"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrtd.lib"])], - ), - ], - requires = [feature_set(features = ["dbg"])], - ) - - compiler_output_flags_feature = feature( - name = "compiler_output_flags", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.assemble], - flag_groups = [ - flag_group( - flag_groups = [ - flag_group( - flags = ["/Fo%{output_file}", "/Zi"], 
- expand_if_not_available = "output_preprocess_file", - ), - ], - expand_if_available = "output_file", - expand_if_not_available = "output_assembly_file", - ), - ], - ), - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flag_groups = [ - flag_group( - flags = ["/Fo%{output_file}"], - expand_if_not_available = "output_preprocess_file", - ), - ], - expand_if_available = "output_file", - expand_if_not_available = "output_assembly_file", - ), - flag_group( - flag_groups = [ - flag_group( - flags = ["/Fa%{output_file}"], - expand_if_available = "output_assembly_file", - ), - ], - expand_if_available = "output_file", - ), - flag_group( - flag_groups = [ - flag_group( - flags = ["/P", "/Fi%{output_file}"], - expand_if_available = "output_preprocess_file", - ), - ], - expand_if_available = "output_file", - ), - ], - ), - ], - ) - - default_compile_flags_feature = feature( - name = "default_compile_flags", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.linkstamp_compile, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.lto_backend, - ACTION_NAMES.clif_match, - ], - flag_groups = [ - flag_group( - flags = [ - "/DCOMPILER_MSVC", - "/DNOMINMAX", - "/D_WIN32_WINNT=0x0600", - "/D_CRT_SECURE_NO_DEPRECATE", - "/D_CRT_SECURE_NO_WARNINGS", - "/D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS", - "/bigobj", - "/Zm500", - "/J", - "/Gy", - "/GF", - "/EHsc", - "/wd4351", - "/wd4291", - "/wd4250", - "/wd4996", - ], - ), - ], - ), - ], - ) - - static_link_msvcrt_debug_feature = feature( - name = "static_link_msvcrt_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MTd"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmtd.lib"])], - ), - ], - requires = [feature_set(features = ["dbg"])], - ) - - static_link_msvcrt_feature = feature(name = "static_link_msvcrt") - - if (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - dbg_feature = feature( - name = "dbg", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-g"])], - ), - ], - implies = ["common"], - ) - elif (ctx.attr.cpu == "x64_windows"): - dbg_feature = feature( - name = "dbg", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/Od", "/Z7", "/DDEBUG"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEBUG:FULL", "/INCREMENTAL:NO"])], - ), - ], - implies = ["generate_pdb_file"], - ) - else: - dbg_feature = None - - undefined_dynamic_feature = feature( - name = "undefined-dynamic", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_executable, - ], - flag_groups = [flag_group(flags = ["-undefined", "dynamic_lookup"])], - ), - ], - ) - - parse_showincludes_feature = feature( - name = "parse_showincludes", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - 
ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_header_parsing, - ], - flag_groups = [flag_group(flags = ["/showIncludes"])], - ), - ], - ) - - linker_param_file_feature = feature( - name = "linker_param_file", - flag_sets = [ - flag_set( - actions = all_link_actions + - [ACTION_NAMES.cpp_link_static_library], - flag_groups = [ - flag_group( - flags = ["@%{linker_param_file}"], - expand_if_available = "linker_param_file", - ), - ], - ), - ], - ) - - static_link_msvcrt_no_debug_feature = feature( - name = "static_link_msvcrt_no_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MT"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmt.lib"])], - ), - ], - requires = [ - feature_set(features = ["fastbuild"]), - feature_set(features = ["opt"]), - ], - ) - - supports_interface_shared_libraries_feature = feature( - name = "supports_interface_shared_libraries", - enabled = True, - ) - - disable_assertions_feature = feature( - name = "disable-assertions", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-DNDEBUG"])], - ), - ], - ) - - if (ctx.attr.cpu == "x64_windows"): - fastbuild_feature = feature( - name = "fastbuild", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/Od", "/Z7", "/DDEBUG"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group(flags = ["/DEBUG:FASTLINK", "/INCREMENTAL:NO"]), - ], - ), - ], - implies = ["generate_pdb_file"], - ) - elif (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - fastbuild_feature = feature(name = "fastbuild", implies = ["common"]) - else: - fastbuild_feature = None - - user_compile_flags_feature = feature( - name = "user_compile_flags", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flags = ["%{user_compile_flags}"], - iterate_over = "user_compile_flags", - expand_if_available = "user_compile_flags", - ), - ], - ), - ], - ) - - compiler_input_flags_feature = feature( - name = "compiler_input_flags", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flags = ["/c", "%{source_file}"], - expand_if_available = "source_file", - ), - ], - ), - ], - ) - - no_legacy_features_feature = feature(name = "no_legacy_features") - - archiver_flags_feature = feature( - name = "archiver_flags", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.cpp_link_static_library], - flag_groups = [ - flag_group( - flags = ["/OUT:%{output_execpath}"], - expand_if_available = "output_execpath", - ), - ], - ), - ], - ) - - redirector_feature = feature( - name = "redirector", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, 
- ], - flag_groups = [ - flag_group( - flags = [ - "-B", - "external/local_config_cuda/crosstool/windows/msvc_wrapper_for_nvcc.py", - ], - ), - ], - ), - ], - ) - - linker_bin_path_feature = feature( - name = "linker-bin-path", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["-B" + ctx.attr.linker_bin_path])], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - opt_feature = feature( - name = "opt", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = ["-g0", "-O2", "-ffunction-sections", "-fdata-sections"], - ), - ], - ), - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_executable, - ], - flag_groups = [flag_group(flags = ["-Wl,--gc-sections"])], - ), - ], - implies = ["common", "disable-assertions"], - ) - elif (ctx.attr.cpu == "darwin"): - opt_feature = feature( - name = "opt", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = ["-g0", "-O2", "-ffunction-sections", "-fdata-sections"], - ), - ], - ), - ], - implies = ["common", "disable-assertions"], - ) - elif (ctx.attr.cpu == "x64_windows"): - opt_feature = feature( - name = "opt", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/O2", "/DNDEBUG"])], - ), - ], - ) - else: - opt_feature = None - - include_paths_feature = feature( - name = "include_paths", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ], - flag_groups = [ - flag_group( - flags = ["/I%{quote_include_paths}"], - iterate_over = "quote_include_paths", - ), - flag_group( - flags = ["/I%{include_paths}"], - iterate_over = "include_paths", - ), - flag_group( - flags = ["/I%{system_include_paths}"], - iterate_over = "system_include_paths", - ), - ], - ), - ], - ) - - shared_flag_feature = feature( - name = "shared_flag", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [flag_group(flags = ["/DLL"])], - ), - ], - ) - - windows_export_all_symbols_feature = feature(name = "windows_export_all_symbols") - - frame_pointer_feature = feature( - name = "frame-pointer", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-fno-omit-frame-pointer"])], - ), - ], - ) - - build_id_feature = feature( - name = "build-id", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["-Wl,--build-id=md5", "-Wl,--hash-style=gnu"], - ), - ], - ), - ], - ) - - sysroot_feature = feature( - name = "sysroot", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [ - flag_group( - flags = ["--sysroot=%{sysroot}"], - iterate_over = "sysroot", - expand_if_available = 
"sysroot", - ), - ], - ), - ], - ) - - def_file_feature = feature( - name = "def_file", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["/DEF:%{def_file_path}", "/ignore:4070"], - expand_if_available = "def_file_path", - ), - ], - ), - ], - ) - - if (ctx.attr.cpu == "darwin"): - stdlib_feature = feature( - name = "stdlib", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["-lc++"])], - ), - ], - ) - elif (ctx.attr.cpu == "local"): - stdlib_feature = feature( - name = "stdlib", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["-lstdc++"])], - ), - ], - ) - else: - stdlib_feature = None - - no_stripping_feature = feature(name = "no_stripping") - - alwayslink_feature = feature( - name = "alwayslink", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_executable, - ], - flag_groups = [flag_group(flags = ["-Wl,-no-as-needed"])], - ), - ], - ) - - input_param_flags_feature = feature( - name = "input_param_flags", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [ - flag_group( - flags = ["/IMPLIB:%{interface_library_output_path}"], - expand_if_available = "interface_library_output_path", - ), - ], - ), - flag_set( - actions = all_link_actions + - [ACTION_NAMES.cpp_link_static_library], - flag_groups = [ - flag_group( - iterate_over = "libraries_to_link", - flag_groups = [ - flag_group( - iterate_over = "libraries_to_link.object_files", - flag_groups = [flag_group(flags = ["%{libraries_to_link.object_files}"])], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "object_file_group", - ), - ), - flag_group( - flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "object_file", - ), - ), - flag_group( - flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "interface_library", - ), - ), - flag_group( - flag_groups = [ - flag_group( - flags = ["%{libraries_to_link.name}"], - expand_if_false = "libraries_to_link.is_whole_archive", - ), - flag_group( - flags = ["/WHOLEARCHIVE:%{libraries_to_link.name}"], - expand_if_true = "libraries_to_link.is_whole_archive", - ), - ], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "static_library", - ), - ), - ], - expand_if_available = "libraries_to_link", - ), - ], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - no_canonical_prefixes_feature = feature( - name = "no-canonical-prefixes", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [ - flag_group( - flags = [ - "-no-canonical-prefixes", - ] + ctx.attr.extra_no_canonical_prefixes_flags, - ), - ], - ), - ], - ) - elif (ctx.attr.cpu == "darwin"): - no_canonical_prefixes_feature = feature( - name = "no-canonical-prefixes", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - 
ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [flag_group(flags = ["-no-canonical-prefixes"])], - ), - ], - ) - else: - no_canonical_prefixes_feature = None - - has_configured_linker_path_feature = feature(name = "has_configured_linker_path") - - copy_dynamic_libraries_to_binary_feature = feature(name = "copy_dynamic_libraries_to_binary") - - user_link_flags_feature = feature( - name = "user_link_flags", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["%{user_link_flags}"], - iterate_over = "user_link_flags", - expand_if_available = "user_link_flags", - ), - ], - ), - ], - ) - - cpp11_feature = feature( - name = "c++11", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-std=c++11"])], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - common_feature = feature( - name = "common", - implies = [ - "stdlib", - "c++11", - "determinism", - "alwayslink", - "hardening", - "warnings", - "frame-pointer", - "build-id", - "no-canonical-prefixes", - "linker-bin-path", - ], - ) - elif (ctx.attr.cpu == "darwin"): - common_feature = feature( - name = "common", - implies = [ - "stdlib", - "c++11", - "determinism", - "hardening", - "warnings", - "frame-pointer", - "no-canonical-prefixes", - "linker-bin-path", - "undefined-dynamic", - ], - ) - else: - common_feature = None - - if (ctx.attr.cpu == "local"): - features = [ - cpp11_feature, - stdlib_feature, - determinism_feature, - alwayslink_feature, - pic_feature, - hardening_feature, - warnings_feature, - frame_pointer_feature, - build_id_feature, - no_canonical_prefixes_feature, - disable_assertions_feature, - linker_bin_path_feature, - common_feature, - opt_feature, - fastbuild_feature, - dbg_feature, - supports_dynamic_linker_feature, - supports_pic_feature, - ] - elif (ctx.attr.cpu == "darwin"): - features = [ - cpp11_feature, - stdlib_feature, - determinism_feature, - pic_feature, - hardening_feature, - warnings_feature, - frame_pointer_feature, - no_canonical_prefixes_feature, - disable_assertions_feature, - linker_bin_path_feature, - undefined_dynamic_feature, - common_feature, - opt_feature, - fastbuild_feature, - dbg_feature, - supports_dynamic_linker_feature, - supports_pic_feature, - ] - elif (ctx.attr.cpu == "x64_windows"): - features = [ - no_legacy_features_feature, - redirector_feature, - nologo_feature, - has_configured_linker_path_feature, - no_stripping_feature, - targets_windows_feature, - copy_dynamic_libraries_to_binary_feature, - default_compile_flags_feature, - msvc_env_feature, - include_paths_feature, - preprocessor_defines_feature, - parse_showincludes_feature, - generate_pdb_file_feature, - shared_flag_feature, - linkstamps_feature, - output_execpath_flags_feature, - archiver_flags_feature, - input_param_flags_feature, - linker_subsystem_flag_feature, - user_link_flags_feature, - default_link_flags_feature, - linker_param_file_feature, - static_link_msvcrt_feature, - static_link_msvcrt_no_debug_feature, - dynamic_link_msvcrt_no_debug_feature, - static_link_msvcrt_debug_feature, - dynamic_link_msvcrt_debug_feature, - dbg_feature, - fastbuild_feature, - opt_feature, - user_compile_flags_feature, - sysroot_feature, - unfiltered_compile_flags_feature, - compiler_output_flags_feature, - compiler_input_flags_feature, - def_file_feature, - windows_export_all_symbols_feature, - no_windows_export_all_symbols_feature, - supports_dynamic_linker_feature, - supports_interface_shared_libraries_feature, - ] - else: - 
fail("Unreachable") - - cxx_builtin_include_directories = ctx.attr.builtin_include_directories - - if (ctx.attr.cpu == "x64_windows"): - tool_paths = [ - tool_path(name = "ar", path = ctx.attr.msvc_lib_path), - tool_path(name = "ml", path = ctx.attr.msvc_ml_path), - tool_path(name = "cpp", path = ctx.attr.msvc_cl_path), - tool_path(name = "gcc", path = ctx.attr.msvc_cl_path), - tool_path(name = "gcov", path = "wrapper/bin/msvc_nop.bat"), - tool_path(name = "ld", path = ctx.attr.msvc_link_path), - tool_path(name = "nm", path = "wrapper/bin/msvc_nop.bat"), - tool_path( - name = "objcopy", - path = "wrapper/bin/msvc_nop.bat", - ), - tool_path( - name = "objdump", - path = "wrapper/bin/msvc_nop.bat", - ), - tool_path( - name = "strip", - path = "wrapper/bin/msvc_nop.bat", - ), - ] - elif (ctx.attr.cpu == "local"): - tool_paths = [ - tool_path(name = "gcc", path = ctx.attr.host_compiler_path), - tool_path(name = "ar", path = ctx.attr.host_compiler_prefix + "/ar"), - tool_path(name = "compat-ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "cpp", path = ctx.attr.host_compiler_prefix + "/cpp"), - tool_path(name = "dwp", path = ctx.attr.host_compiler_prefix + "/dwp"), - tool_path(name = "gcov", path = ctx.attr.host_compiler_prefix + "/gcov"), - tool_path(name = "ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "nm", path = ctx.attr.host_compiler_prefix + "/nm"), - tool_path(name = "objcopy", path = ctx.attr.host_compiler_prefix + "/objcopy"), - tool_path(name = "objdump", path = ctx.attr.host_compiler_prefix + "/objdump"), - tool_path(name = "strip", path = ctx.attr.host_compiler_prefix + "/strip"), - ] - elif (ctx.attr.cpu == "darwin"): - tool_paths = [ - tool_path(name = "gcc", path = ctx.attr.host_compiler_path), - tool_path(name = "ar", path = ctx.attr.host_compiler_prefix + "/libtool"), - tool_path(name = "compat-ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "cpp", path = ctx.attr.host_compiler_prefix + "/cpp"), - tool_path(name = "dwp", path = ctx.attr.host_compiler_prefix + "/dwp"), - tool_path(name = "gcov", path = ctx.attr.host_compiler_prefix + "/gcov"), - tool_path(name = "ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "nm", path = ctx.attr.host_compiler_prefix + "/nm"), - tool_path(name = "objcopy", path = ctx.attr.host_compiler_prefix + "/objcopy"), - tool_path(name = "objdump", path = ctx.attr.host_compiler_prefix + "/objdump"), - tool_path(name = "strip", path = ctx.attr.host_compiler_prefix + "/strip"), - ] - else: - fail("Unreachable") - - out = ctx.actions.declare_file(ctx.label.name) - ctx.actions.write(out, "Fake executable") - return [ - cc_common.create_cc_toolchain_config_info( - ctx = ctx, - features = features, - action_configs = action_configs, - artifact_name_patterns = [], - cxx_builtin_include_directories = cxx_builtin_include_directories, - toolchain_identifier = toolchain_identifier, - host_system_name = host_system_name, - target_system_name = target_system_name, - target_cpu = target_cpu, - target_libc = target_libc, - compiler = compiler, - abi_version = abi_version, - abi_libc_version = abi_libc_version, - tool_paths = tool_paths, - make_variables = [], - builtin_sysroot = builtin_sysroot, - cc_target_os = cc_target_os, - ), - DefaultInfo( - executable = out, - ), - ] - -cc_toolchain_config = rule( - attrs = { - "cpu": attr.string( - mandatory = True, - values = [ - "darwin", - "local", - "x64_windows", - ], - ), - "builtin_include_directories": attr.string_list(), - 
"extra_no_canonical_prefixes_flags": attr.string_list(), - "host_compiler_path": attr.string(), - "host_compiler_prefix": attr.string(), - "host_compiler_warnings": attr.string_list(), - "host_unfiltered_compile_flags": attr.string_list(), - "linker_bin_path": attr.string(), - "msvc_cl_path": attr.string(default = "msvc_not_used"), - "msvc_env_include": attr.string(default = "msvc_not_used"), - "msvc_env_lib": attr.string(default = "msvc_not_used"), - "msvc_env_path": attr.string(default = "msvc_not_used"), - "msvc_env_tmp": attr.string(default = "msvc_not_used"), - "msvc_lib_path": attr.string(default = "msvc_not_used"), - "msvc_link_path": attr.string(default = "msvc_not_used"), - "msvc_ml_path": attr.string(default = "msvc_not_used"), - }, - executable = True, - provides = [CcToolchainConfigInfo], - implementation = _impl, -) diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/clang/bin/crosstool_wrapper_driver_is_not_gcc b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/clang/bin/crosstool_wrapper_driver_is_not_gcc deleted file mode 100755 index 01b454807a..0000000000 --- a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda11/clang/bin/crosstool_wrapper_driver_is_not_gcc +++ /dev/null @@ -1,281 +0,0 @@ -#!/usr/bin/env python -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Crosstool wrapper for compiling CUDA programs. - -SYNOPSIS: - crosstool_wrapper_is_not_gcc [options passed in by cc_library() - or cc_binary() rule] - -DESCRIPTION: - This script is expected to be called by the cc_library() or cc_binary() bazel - rules. When the option "-x cuda" is present in the list of arguments passed - to this script, it invokes the nvcc CUDA compiler. Most arguments are passed - as is as a string to --compiler-options of nvcc. When "-x cuda" is not - present, this wrapper invokes hybrid_driver_is_not_gcc with the input - arguments as is. -""" - -from __future__ import print_function - -from argparse import ArgumentParser -import os -import subprocess -import re -import sys -import pipes - -# Template values set by cuda_autoconf. -CPU_COMPILER = ('/dt7/usr/bin/gcc') -GCC_HOST_COMPILER_PATH = ('/dt7/usr/bin/gcc') - -NVCC_PATH = '/usr/local/cuda-11.2/bin/nvcc' -PREFIX_DIR = os.path.dirname(GCC_HOST_COMPILER_PATH) -NVCC_VERSION = '11.2' - -def Log(s): - print('gpus/crosstool: {0}'.format(s)) - - -def GetOptionValue(argv, option): - """Extract the list of values for option from the argv list. - - Args: - argv: A list of strings, possibly the argv passed to main(). - option: The option whose value to extract, with the leading '-'. - - Returns: - A list of values, either directly following the option, - (eg., -opt val1 val2) or values collected from multiple occurrences of - the option (eg., -opt val1 -opt val2). 
- """ - - parser = ArgumentParser() - parser.add_argument(option, nargs='*', action='append') - option = option.lstrip('-').replace('-', '_') - args, _ = parser.parse_known_args(argv) - if not args or not vars(args)[option]: - return [] - else: - return sum(vars(args)[option], []) - - -def GetHostCompilerOptions(argv): - """Collect the -isystem, -iquote, and --sysroot option values from argv. - - Args: - argv: A list of strings, possibly the argv passed to main(). - - Returns: - The string that can be used as the --compiler-options to nvcc. - """ - - parser = ArgumentParser() - parser.add_argument('-isystem', nargs='*', action='append') - parser.add_argument('-iquote', nargs='*', action='append') - parser.add_argument('--sysroot', nargs=1) - parser.add_argument('-g', nargs='*', action='append') - parser.add_argument('-fno-canonical-system-headers', action='store_true') - parser.add_argument('-no-canonical-prefixes', action='store_true') - - args, _ = parser.parse_known_args(argv) - - opts = '' - - if args.isystem: - opts += ' -isystem ' + ' -isystem '.join(sum(args.isystem, [])) - if args.iquote: - opts += ' -iquote ' + ' -iquote '.join(sum(args.iquote, [])) - if args.g: - opts += ' -g' + ' -g'.join(sum(args.g, [])) - if args.fno_canonical_system_headers: - opts += ' -fno-canonical-system-headers' - if args.no_canonical_prefixes: - opts += ' -no-canonical-prefixes' - if args.sysroot: - opts += ' --sysroot ' + args.sysroot[0] - - return opts - -def _update_options(nvcc_options): - if NVCC_VERSION in ("7.0",): - return nvcc_options - - update_options = { "relaxed-constexpr" : "expt-relaxed-constexpr" } - return [ update_options[opt] if opt in update_options else opt - for opt in nvcc_options ] - -def GetNvccOptions(argv): - """Collect the -nvcc_options values from argv. - - Args: - argv: A list of strings, possibly the argv passed to main(). - - Returns: - The string that can be passed directly to nvcc. - """ - - parser = ArgumentParser() - parser.add_argument('-nvcc_options', nargs='*', action='append') - - args, _ = parser.parse_known_args(argv) - - if args.nvcc_options: - options = _update_options(sum(args.nvcc_options, [])) - return ' '.join(['--'+a for a in options]) - return '' - -def system(cmd): - """Invokes cmd with os.system(). - - Args: - cmd: The command. - - Returns: - The exit code if the process exited with exit() or -signal - if the process was terminated by a signal. - """ - retv = os.system(cmd) - if os.WIFEXITED(retv): - return os.WEXITSTATUS(retv) - else: - return -os.WTERMSIG(retv) - -def InvokeNvcc(argv, log=False): - """Call nvcc with arguments assembled from argv. - - Args: - argv: A list of strings, possibly the argv passed to main(). - log: True if logging is requested. - - Returns: - The return value of calling system('nvcc ' + args) - """ - - host_compiler_options = GetHostCompilerOptions(argv) - nvcc_compiler_options = GetNvccOptions(argv) - opt_option = GetOptionValue(argv, '-O') - m_options = GetOptionValue(argv, '-m') - m_options = ''.join([' -m' + m for m in m_options if m in ['32', '64']]) - include_options = GetOptionValue(argv, '-I') - out_file = GetOptionValue(argv, '-o') - depfiles = GetOptionValue(argv, '-MF') - defines = GetOptionValue(argv, '-D') - defines = ''.join([' -D' + define for define in defines]) - undefines = GetOptionValue(argv, '-U') - undefines = ''.join([' -U' + define for define in undefines]) - std_options = GetOptionValue(argv, '-std') - # Supported -std flags as of CUDA 9.0. Only keep last to mimic gcc/clang. 
- nvcc_allowed_std_options = ["c++03", "c++11", "c++14"] - std_options = ''.join([' -std=' + define - for define in std_options if define in nvcc_allowed_std_options][-1:]) - fatbin_options = ''.join([' --fatbin-options=' + option - for option in GetOptionValue(argv, '-Xcuda-fatbinary')]) - - # The list of source files get passed after the -c option. I don't know of - # any other reliable way to just get the list of source files to be compiled. - src_files = GetOptionValue(argv, '-c') - - # Pass -w through from host to nvcc, but don't do anything fancier with - # warnings-related flags, since they're not necessarily the same across - # compilers. - warning_options = ' -w' if '-w' in argv else '' - - if len(src_files) == 0: - return 1 - if len(out_file) != 1: - return 1 - - opt = (' -O2' if (len(opt_option) > 0 and int(opt_option[0]) > 0) - else ' -g') - - includes = (' -I ' + ' -I '.join(include_options) - if len(include_options) > 0 - else '') - - # Unfortunately, there are other options that have -c prefix too. - # So allowing only those look like C/C++ files. - src_files = [f for f in src_files if - re.search('\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)] - srcs = ' '.join(src_files) - out = ' -o ' + out_file[0] - - nvccopts = '-D_FORCE_INLINES ' - for capability in GetOptionValue(argv, "--cuda-gpu-arch"): - capability = capability[len('sm_'):] - nvccopts += r'-gencode=arch=compute_%s,\"code=sm_%s\" ' % (capability, - capability) - for capability in GetOptionValue(argv, '--cuda-include-ptx'): - capability = capability[len('sm_'):] - nvccopts += r'-gencode=arch=compute_%s,\"code=compute_%s\" ' % (capability, - capability) - nvccopts += nvcc_compiler_options - nvccopts += undefines - nvccopts += defines - nvccopts += std_options - nvccopts += m_options - nvccopts += warning_options - nvccopts += fatbin_options - - if depfiles: - # Generate the dependency file - depfile = depfiles[0] - cmd = (NVCC_PATH + ' ' + nvccopts + - ' --compiler-options "' + host_compiler_options + '"' + - ' --compiler-bindir=' + GCC_HOST_COMPILER_PATH + - ' -I .' + - ' -x cu ' + opt + includes + ' ' + srcs + ' -M -o ' + depfile) - if log: Log(cmd) - exit_status = system(cmd) - if exit_status != 0: - return exit_status - - cmd = (NVCC_PATH + ' ' + nvccopts + - ' --compiler-options "' + host_compiler_options + ' -fPIC"' + - ' --compiler-bindir=' + GCC_HOST_COMPILER_PATH + - ' -I .' + - ' -x cu ' + opt + includes + ' -c ' + srcs + out) - - # TODO(zhengxq): for some reason, 'gcc' needs this help to find 'as'. - # Need to investigate and fix. - cmd = 'PATH=' + PREFIX_DIR + ':$PATH ' + cmd - if log: Log(cmd) - return system(cmd) - - -def main(): - parser = ArgumentParser() - parser.add_argument('-x', nargs=1) - parser.add_argument('--cuda_log', action='store_true') - args, leftover = parser.parse_known_args(sys.argv[1:]) - - if args.x and args.x[0] == 'cuda': - if args.cuda_log: Log('-x cuda') - leftover = [pipes.quote(s) for s in leftover] - if args.cuda_log: Log('using nvcc') - return InvokeNvcc(leftover, log=args.cuda_log) - - # Strip our flags before passing through to the CPU compiler for files which - # are not -x cuda. We can't just pass 'leftover' because it also strips -x. - # We not only want to pass -x to the CPU compiler, but also keep it in its - # relative location in the argv list (the compiler is actually sensitive to - # this). 
- cpu_compiler_flags = [flag for flag in sys.argv[1:] - if not flag.startswith(('--cuda_log'))] - - return subprocess.call([CPU_COMPILER] + cpu_compiler_flags) - -if __name__ == '__main__': - sys.exit(main()) From 5a57e501ed7159d6e09bcb3ba5dae35eb670491f Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 18:35:09 -0700 Subject: [PATCH 16/38] Remove outdated conditions --- .github/workflows/release.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ff325a0ff9..454775d838 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -76,8 +76,7 @@ jobs: name: Setup Bazel # Ubuntu bazel is run inside of the docker image run: bash tools/install_deps/install_bazelisk.sh ./ - - if: matrix.py-version != '3.9' || matrix.tf-version != '2.4.2' - name: Build wheels + - name: Build wheels env: OS: ${{ runner.os }} PY_VERSION: ${{ matrix.py-version }} @@ -86,8 +85,7 @@ jobs: CPU: ${{ matrix.cpu }} shell: bash run: bash .github/workflows/make_wheel_${OS}_${CPU}.sh - - if: matrix.py-version != '3.9' || matrix.tf-version != '2.4.2' - uses: actions/upload-artifact@v1 + - uses: actions/upload-artifact@v1 with: name: ${{ runner.os }}-${{ matrix.py-version }}-tf${{ matrix.tf-version }}-${{ matrix.cpu }}-wheel path: wheelhouse From 1f2034a07f61f3e1fb5e00d3653c290e9aadd4f4 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 18:48:36 -0700 Subject: [PATCH 17/38] Update docs --- CONTRIBUTING.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b3e37e4ee6..e7046c0c1c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -173,7 +173,7 @@ conda activate my_dev_environement Just run from the root: ```bash -pip install tensorflow==2.8 +pip install tensorflow==2.9 # you can use "pip install tensorflow-cpu==2.9.0rc1" too if you're not testing on gpu. 
pip install -e ./ ``` @@ -262,7 +262,7 @@ If you need a custom C++/Cuda op for your test, compile your ops with ```bash python configure.py -pip install tensorflow==2.8 -e ./ -r tools/install_deps/pytest.txt +pip install tensorflow==2.9 -e ./ -r tools/install_deps/pytest.txt bash tools/install_so_files.sh # Linux/macos/WSL2 sh tools/install_so_files.sh # PowerShell ``` @@ -290,7 +290,7 @@ docker run --gpus all --rm -it -v ${PWD}:/addons -w /addons gcr.io/tensorflow-te Configure: ```bash -python3 -m pip install tensorflow==2.8 +python3 -m pip install tensorflow==2.9 python3 ./configure.py # Links project with TensorFlow dependency ``` From 2f2b473c8f84edf0450c8293e82870a058071141 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 19:35:21 -0700 Subject: [PATCH 18/38] debug --- .github/workflows/make_wheel_Windows_x86.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/make_wheel_Windows_x86.sh b/.github/workflows/make_wheel_Windows_x86.sh index ff3953a643..b55d348fe1 100644 --- a/.github/workflows/make_wheel_Windows_x86.sh +++ b/.github/workflows/make_wheel_Windows_x86.sh @@ -7,6 +7,8 @@ export BAZEL_VC="C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise/ python --version python -m pip install --default-timeout=1000 wheel setuptools tensorflow==$TF_VERSION +export PYTHON_BIN_PATH=C:\\hostedtoolcache\\windows\\Python\\3.8.10\\x64\\python3.exe + # Test bash ./tools/testing/build_and_run_tests.sh From 887b6915c654facce153c4ebfc197ffcca63cae4 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 19:53:20 -0700 Subject: [PATCH 19/38] debug --- .github/workflows/make_wheel_Windows_x86.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/make_wheel_Windows_x86.sh b/.github/workflows/make_wheel_Windows_x86.sh index b55d348fe1..27ef1da79c 100644 --- a/.github/workflows/make_wheel_Windows_x86.sh +++ b/.github/workflows/make_wheel_Windows_x86.sh @@ -7,7 +7,8 @@ export BAZEL_VC="C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise/ python --version python -m pip install --default-timeout=1000 wheel setuptools tensorflow==$TF_VERSION -export PYTHON_BIN_PATH=C:\\hostedtoolcache\\windows\\Python\\3.8.10\\x64\\python3.exe +export PYTHON_BIN_PATH=`which python` +export _PYTHON_BIN_PATH=`which python` # Test bash ./tools/testing/build_and_run_tests.sh From ceeb44c6709d55c63a480b0bc772c035b902a92b Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 20:10:24 -0700 Subject: [PATCH 20/38] debug --- .github/workflows/make_wheel_Windows_x86.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/make_wheel_Windows_x86.sh b/.github/workflows/make_wheel_Windows_x86.sh index 27ef1da79c..3ab35bb0be 100644 --- a/.github/workflows/make_wheel_Windows_x86.sh +++ b/.github/workflows/make_wheel_Windows_x86.sh @@ -7,8 +7,7 @@ export BAZEL_VC="C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise/ python --version python -m pip install --default-timeout=1000 wheel setuptools tensorflow==$TF_VERSION -export PYTHON_BIN_PATH=`which python` -export _PYTHON_BIN_PATH=`which python` +export _PYTHON_BIN_PATH=$(which python) # Test bash ./tools/testing/build_and_run_tests.sh From f377162b531ccfdcf0be6c617a136440988d5ec0 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 20:16:12 -0700 Subject: [PATCH 21/38] Add github issue to dockerfile --- tools/docker/build_wheel.Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/docker/build_wheel.Dockerfile 
b/tools/docker/build_wheel.Dockerfile index 9147abb1c9..73d228d321 100644 --- a/tools/docker/build_wheel.Dockerfile +++ b/tools/docker/build_wheel.Dockerfile @@ -7,6 +7,7 @@ ARG PY_VERSION ARG TF_VERSION # TODO: Remove this if tensorflow/build container removes their keras-nightly install +# https://github.com/tensorflow/build/issues/78 RUN python -m pip uninstall -y keras-nightly RUN python -m pip install --default-timeout=1000 tensorflow==$TF_VERSION From d5683c536707e9bba6ec0b6954ed4c63fee37d3c Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 20:52:48 -0700 Subject: [PATCH 22/38] Rename variable --- .github/workflows/make_wheel_Windows_x86.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/make_wheel_Windows_x86.sh b/.github/workflows/make_wheel_Windows_x86.sh index 3ab35bb0be..82497c3838 100644 --- a/.github/workflows/make_wheel_Windows_x86.sh +++ b/.github/workflows/make_wheel_Windows_x86.sh @@ -7,7 +7,7 @@ export BAZEL_VC="C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise/ python --version python -m pip install --default-timeout=1000 wheel setuptools tensorflow==$TF_VERSION -export _PYTHON_BIN_PATH=$(which python) +export PYTHON_BIN_PATH=$(which python) # Test bash ./tools/testing/build_and_run_tests.sh From 07903908b77d0e5d96842175ae8ee80a661d4eb8 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Sun, 24 Apr 2022 22:00:20 -0700 Subject: [PATCH 23/38] Better diff --- .github/workflows/make_wheel_Windows_x86.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/make_wheel_Windows_x86.sh b/.github/workflows/make_wheel_Windows_x86.sh index 82497c3838..438e5967a4 100644 --- a/.github/workflows/make_wheel_Windows_x86.sh +++ b/.github/workflows/make_wheel_Windows_x86.sh @@ -1,14 +1,13 @@ set -e -x export TF_NEED_CUDA=0 +export PYTHON_BIN_PATH=$(which python) export BAZEL_VC="C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise/VC/" # Install Deps python --version python -m pip install --default-timeout=1000 wheel setuptools tensorflow==$TF_VERSION -export PYTHON_BIN_PATH=$(which python) - # Test bash ./tools/testing/build_and_run_tests.sh From 24565d1f63e19814f022fa0768639e6ab5227be3 Mon Sep 17 00:00:00 2001 From: Sean Morgan Date: Mon, 25 Apr 2022 20:34:09 -0700 Subject: [PATCH 24/38] Fix compat --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 4ac31ab6ac..4b27d5cdf3 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,6 @@ what it was tested against. 
| TensorFlow Addons | TensorFlow | Python | |:----------------------- |:---|:---------- | | tfa-nightly | 2.7, 2.8, 2.9 | 3.7, 3.8, 3.9, 3.10 | -| tensorflow-addons-0.16.1 | 2.7, 2.8, 2.9 |3.7, 3.8, 3.9, 3.10 | | tensorflow-addons-0.16.1 | 2.6, 2.7, 2.8 |3.7, 3.8, 3.9, 3.10 | | tensorflow-addons-0.15.0 | 2.5, 2.6, 2.7 |3.7, 3.8, 3.9 | | tensorflow-addons-0.14.0 | 2.4, 2.5, 2.6 |3.6, 3.7, 3.8, 3.9 | From 002008190d54763aeed4f85b72f215688daeb1e8 Mon Sep 17 00:00:00 2001 From: bhack Date: Thu, 19 May 2022 13:16:25 +0200 Subject: [PATCH 25/38] Upgrate to 2.9 final --- .devcontainer/devcontainer.json | 2 +- .github/workflows/release.yml | 12 ++++++------ CONTRIBUTING.md | 4 ++-- WORKSPACE | 6 +++--- tensorflow_addons/utils/resource_loader.py | 2 +- tools/build_dev_container.sh | 2 +- tools/docker/cpu_tests.Dockerfile | 2 +- tools/install_deps/tensorflow-cpu.txt | 2 +- tools/install_deps/tensorflow.txt | 2 +- tools/run_gpu_tests.sh | 2 +- 10 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 290f2fd94b..202fdd31c3 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -55,5 +55,5 @@ "seccomp=unconfined" ], // Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/pytest ./tensorflow_addons/layerscontainers/non-root. - "remoteUser": "vscode" + //"remoteUser": "vscode" } \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 454775d838..53599cbb1e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -42,7 +42,7 @@ jobs: # https://github.com/bazelbuild/bazel/issues/14232#issuecomment-1011247429 os: ['macos-10.15', 'windows-2019', 'ubuntu-18.04'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.7.1', '2.9.0rc1'] + tf-version: ['2.7.1', '2.9.0'] cpu: ['x86'] exclude: - py-version: '3.10' @@ -50,11 +50,11 @@ jobs: include: - os: 'macos-11' cpu: 'arm64' - tf-version: '2.9.0rc1' + tf-version: '2.9.0' py-version: '3.8' - os: 'macos-11' cpu: 'arm64' - tf-version: '2.9.0rc1' + tf-version: '2.9.0' py-version: '3.9' fail-fast: false steps: @@ -97,16 +97,16 @@ jobs: matrix: os: ['macOS', 'Windows', 'Linux'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.9.0rc1'] + tf-version: ['2.9.0'] cpu: ['x86'] include: - os: 'macOS' cpu: 'arm64' - tf-version: '2.9.0rc1' + tf-version: '2.9.0' py-version: '3.8' - os: 'macOS' cpu: 'arm64' - tf-version: '2.9.0rc1' + tf-version: '2.9.0' py-version: '3.9' fail-fast: false if: (github.event_name == 'push' && github.ref == 'refs/heads/master') || github.event_name == 'release' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e7046c0c1c..e850767755 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -173,8 +173,8 @@ conda activate my_dev_environement Just run from the root: ```bash -pip install tensorflow==2.9 -# you can use "pip install tensorflow-cpu==2.9.0rc1" too if you're not testing on gpu. +pip install tensorflow==2.8 +# you can use "pip install tensorflow-cpu==2.9.0" too if you're not testing on gpu. 
pip install -e ./ ``` diff --git a/WORKSPACE b/WORKSPACE index 28e6328b0c..343e50fe1a 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -21,10 +21,10 @@ cuda_configure(name = "local_config_cuda") http_archive( name = "org_tensorflow", - sha256 = "7d736fa5ff3868516359d8370e7b57251b8080243fc38e8089ee0ceb8ee90264", - strip_prefix = "tensorflow-2.9.0-rc0", + sha256 = "8087cb0c529f04a4bfe480e49925cd64a904ad16d8ec66b98e2aacdfd53c80ff", + strip_prefix = "tensorflow-2.9.0", urls = [ - "https://github.com/tensorflow/tensorflow/archive/refs/tags/v2.9.0-rc0.tar.gz", + "https://github.com/tensorflow/tensorflow/archive/refs/tags/v2.9.0.tar.gz", ], ) diff --git a/tensorflow_addons/utils/resource_loader.py b/tensorflow_addons/utils/resource_loader.py index 0db98249ea..9276d25997 100644 --- a/tensorflow_addons/utils/resource_loader.py +++ b/tensorflow_addons/utils/resource_loader.py @@ -20,7 +20,7 @@ import tensorflow as tf -INCLUSIVE_MIN_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.9.0rc1" +INCLUSIVE_MIN_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.9.0" EXCLUSIVE_MAX_TF_VERSION_FOR_ABI_COMPATIBILITY = "2.10.0" abi_warning_already_raised = False SKIP_CUSTOM_OPS = False diff --git a/tools/build_dev_container.sh b/tools/build_dev_container.sh index b4811e5740..cecf807f26 100755 --- a/tools/build_dev_container.sh +++ b/tools/build_dev_container.sh @@ -4,7 +4,7 @@ set -x -e docker build \ -f tools/docker/dev_container.Dockerfile \ - --build-arg TF_VERSION=2.9.0rc1 \ + --build-arg TF_VERSION=2.9.0 \ --build-arg TF_PACKAGE=tensorflow \ --build-arg PY_VERSION=$PY_VERSION \ --no-cache \ diff --git a/tools/docker/cpu_tests.Dockerfile b/tools/docker/cpu_tests.Dockerfile index 6509d67649..5d8b6a173b 100644 --- a/tools/docker/cpu_tests.Dockerfile +++ b/tools/docker/cpu_tests.Dockerfile @@ -1,7 +1,7 @@ #syntax=docker/dockerfile:1.1.5-experimental FROM python:3.7 as build_wheel -ARG TF_VERSION=2.9.0rc1 +ARG TF_VERSION=2.9.0 RUN pip install --default-timeout=1000 tensorflow-cpu==$TF_VERSION RUN apt-get update && apt-get install -y sudo rsync diff --git a/tools/install_deps/tensorflow-cpu.txt b/tools/install_deps/tensorflow-cpu.txt index cb5eb189e6..694692e49a 100644 --- a/tools/install_deps/tensorflow-cpu.txt +++ b/tools/install_deps/tensorflow-cpu.txt @@ -1 +1 @@ -tensorflow-cpu~=2.9.0rc1 +tensorflow-cpu~=2.9.0 diff --git a/tools/install_deps/tensorflow.txt b/tools/install_deps/tensorflow.txt index 62948623f1..9473276a8b 100644 --- a/tools/install_deps/tensorflow.txt +++ b/tools/install_deps/tensorflow.txt @@ -1 +1 @@ -tensorflow~=2.9.0rc1 \ No newline at end of file +tensorflow~=2.9.0 \ No newline at end of file diff --git a/tools/run_gpu_tests.sh b/tools/run_gpu_tests.sh index 9b521590d9..caffd747b2 100644 --- a/tools/run_gpu_tests.sh +++ b/tools/run_gpu_tests.sh @@ -6,7 +6,7 @@ export DOCKER_BUILDKIT=1 docker build \ -f tools/docker/build_wheel.Dockerfile \ --target tfa_gpu_tests \ - --build-arg TF_VERSION=2.9.0rc1 \ + --build-arg TF_VERSION=2.9.0 \ --build-arg PY_VERSION=3.7 \ -t tfa_gpu_tests ./ docker run --rm -t --gpus=all tfa_gpu_tests From 791c104fad28497a80510dba484290884016ef64 Mon Sep 17 00:00:00 2001 From: bhack Date: Thu, 19 May 2022 13:19:50 +0200 Subject: [PATCH 26/38] Adapt to rootless container --- .devcontainer/Dockerfile | 32 -------------------------------- 1 file changed, 32 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index dad42be6fe..0a0df10099 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,34 +1,2 @@ ARG IMAGE_TYPE=latest-cpu FROM 
tfaddons/dev_container:$IMAGE_TYPE - -ARG USERNAME=vscode -ARG USER_UID=1000 -ARG USER_GID=$USER_UID - -# Options for common package install script - SHA updated on release -ARG INSTALL_ZSH="false" -ARG UPGRADE_PACKAGES="false" -ARG COMMON_SCRIPT_SOURCE="https://raw.githubusercontent.com/microsoft/vscode-dev-containers/master/script-library/common-debian.sh" -ARG COMMON_SCRIPT_SHA="dev-mode" - -# Configure apt and install packages -RUN apt-get update \ - && export DEBIAN_FRONTEND=noninteractive \ - # - # Verify git, common tools / libs installed, add/modify non-root user, optionally install zsh - && apt-get -y install --no-install-recommends curl ca-certificates 2>&1 \ - && curl -sSL ${COMMON_SCRIPT_SOURCE} -o /tmp/common-setup.sh \ - && ([ "${COMMON_SCRIPT_SHA}" = "dev-mode" ] || (echo "${COMMON_SCRIPT_SHA} */tmp/common-setup.sh" | sha256sum -c -)) \ - && /bin/bash /tmp/common-setup.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" \ - && rm /tmp/common-setup.sh \ - # - # ********************************************************************* - # * Uncomment this section to use RUN to install other dependencies. * - # * See https://aka.ms/vscode-remote/containers/dockerfile-run * - # ********************************************************************* - # && apt-get -y install --no-install-recommends - # - # Clean up - && apt-get autoremove -y \ - && apt-get clean -y \ - && rm -rf /var/lib/apt/lists/* From 48bc03fa2880f56335ee6a832d421605fa59df72 Mon Sep 17 00:00:00 2001 From: bhack Date: Thu, 19 May 2022 13:35:38 +0200 Subject: [PATCH 27/38] Update configure.py --- configure.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/configure.py b/configure.py index 044130acb3..eb452ad5ab 100644 --- a/configure.py +++ b/configure.py @@ -23,6 +23,11 @@ import tensorflow as tf +try: + from packaging.version import Version +except ImportError: + from distutils.version import LooseVersion as Version + _TFA_BAZELRC = ".bazelrc" @@ -145,6 +150,7 @@ def create_build_configuration(): write("build:windows --copt=/arch=AVX") write("build:windows --cxxopt=/std:c++14") write("build:windows --host_cxxopt=/std:c++14") + write("build --cxxopt=" + glibcxx) if is_macos() or is_linux(): if not is_linux_ppc64le() and not is_linux_arm() and not is_linux_aarch64(): From ec5607b86f6168efbac7f41ab12f7548f0f6590d Mon Sep 17 00:00:00 2001 From: bhack Date: Thu, 19 May 2022 16:30:56 +0200 Subject: [PATCH 28/38] Update configure.py --- configure.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/configure.py b/configure.py index eb452ad5ab..ab683faf5d 100644 --- a/configure.py +++ b/configure.py @@ -23,10 +23,6 @@ import tensorflow as tf -try: - from packaging.version import Version -except ImportError: - from distutils.version import LooseVersion as Version _TFA_BAZELRC = ".bazelrc" @@ -137,11 +133,6 @@ def create_build_configuration(): write("build --experimental_repo_remote_exec") write("build -c opt") - if Version(tf.__version__) >= Version("2.9.0"): - glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' - else: - glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' - if is_windows(): write("build --config=windows") write("build:windows --enable_runfiles") From 2d7c04b9f2cfa524207cd85fc0aef2e98fbff6eb Mon Sep 17 00:00:00 2001 From: bhack Date: Thu, 19 May 2022 16:32:16 +0200 Subject: [PATCH 29/38] Update CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e850767755..d31cc15c7b 100644 --- 
a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -329,7 +329,7 @@ quickly, as Bazel has great support for caching and distributed testing. To test with Bazel: ```bash -python3 -m pip install tensorflow==2.9rc1 +python3 -m pip install tensorflow==2.9 python3 configure.py python3 -m pip install -r tools/install_deps/pytest.txt bazel test -c opt -k \ From e68b863ca38aa0e9024bcd948d2fce61bad5d674 Mon Sep 17 00:00:00 2001 From: bhack Date: Thu, 19 May 2022 16:34:31 +0200 Subject: [PATCH 30/38] Update configure.py --- configure.py | 1 - 1 file changed, 1 deletion(-) diff --git a/configure.py b/configure.py index ab683faf5d..4259631592 100644 --- a/configure.py +++ b/configure.py @@ -141,7 +141,6 @@ def create_build_configuration(): write("build:windows --copt=/arch=AVX") write("build:windows --cxxopt=/std:c++14") write("build:windows --host_cxxopt=/std:c++14") - write("build --cxxopt=" + glibcxx) if is_macos() or is_linux(): if not is_linux_ppc64le() and not is_linux_arm() and not is_linux_aarch64(): From 8f646dcb61817c0e9187e161599f70c41ca67e92 Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 20:55:13 +0200 Subject: [PATCH 31/38] Upgrade to 2.7.2 --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 53599cbb1e..a0a37b4d02 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -42,11 +42,11 @@ jobs: # https://github.com/bazelbuild/bazel/issues/14232#issuecomment-1011247429 os: ['macos-10.15', 'windows-2019', 'ubuntu-18.04'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.7.1', '2.9.0'] + tf-version: ['2.7.2', '2.9.0'] cpu: ['x86'] exclude: - py-version: '3.10' - tf-version: '2.7.1' + tf-version: '2.7.2' include: - os: 'macos-11' cpu: 'arm64' From 997bb700f159032a05d66c0b5442399c918521b7 Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 20:57:12 +0200 Subject: [PATCH 32/38] Add 2.8.1 --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a0a37b4d02..fd9b52fb72 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -42,7 +42,7 @@ jobs: # https://github.com/bazelbuild/bazel/issues/14232#issuecomment-1011247429 os: ['macos-10.15', 'windows-2019', 'ubuntu-18.04'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.7.2', '2.9.0'] + tf-version: ['2.7.2', '2.8.1' '2.9.0'] cpu: ['x86'] exclude: - py-version: '3.10' From 96be309b5a3cf722fd3f944088fbed814d87bf50 Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 20:58:53 +0200 Subject: [PATCH 33/38] Fix typo --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fd9b52fb72..d2d56a880a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -42,7 +42,7 @@ jobs: # https://github.com/bazelbuild/bazel/issues/14232#issuecomment-1011247429 os: ['macos-10.15', 'windows-2019', 'ubuntu-18.04'] py-version: ['3.7', '3.8', '3.9', '3.10'] - tf-version: ['2.7.2', '2.8.1' '2.9.0'] + tf-version: ['2.7.2', '2.8.1', '2.9.0'] cpu: ['x86'] exclude: - py-version: '3.10' From 68978d4dd371c22fb0b4808743bddd4f414fc64e Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 21:26:23 +0200 Subject: [PATCH 34/38] Ovveride the cxx11 abi flag for old releases --- configure.py | 10 ++++++++++ 1 file changed, 10 
insertions(+) diff --git a/configure.py b/configure.py index 4259631592..1026470512 100644 --- a/configure.py +++ b/configure.py @@ -23,6 +23,10 @@ import tensorflow as tf +try: + from packaging.version import Version +except ImportError: + from distutils.version import LooseVersion as Version _TFA_BAZELRC = ".bazelrc" @@ -132,6 +136,12 @@ def create_build_configuration(): write("build --strategy=Genrule=standalone") write("build --experimental_repo_remote_exec") write("build -c opt") + + + if Version(tf.__version__) >= Version("2.9.0"): + glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' + else: + glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' if is_windows(): write("build --config=windows") From ff43c84b4d3ef3baa932ac056cc3131e0c41547a Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 21:28:28 +0200 Subject: [PATCH 35/38] Update configure.py --- configure.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/configure.py b/configure.py index 1026470512..ceb5243053 100644 --- a/configure.py +++ b/configure.py @@ -138,9 +138,7 @@ def create_build_configuration(): write("build -c opt") - if Version(tf.__version__) >= Version("2.9.0"): - glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' - else: + if Version(tf.__version__) < Version("2.9.0"): glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' if is_windows(): From f46837501220b59da3f3204251ebf7dd0a6eb97b Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 21:32:43 +0200 Subject: [PATCH 36/38] Update configure.py --- configure.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/configure.py b/configure.py index ceb5243053..f471257327 100644 --- a/configure.py +++ b/configure.py @@ -136,9 +136,11 @@ def create_build_configuration(): write("build --strategy=Genrule=standalone") write("build --experimental_repo_remote_exec") write("build -c opt") + write("build --cxxopt=" + glibcxx) - - if Version(tf.__version__) < Version("2.9.0"): + if Version(tf.__version__) >= Version("2.9.0"): + glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' + else: glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' if is_windows(): From 9bc429d74cb4f8942237a1ca150e319288113d1d Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 21:50:20 +0200 Subject: [PATCH 37/38] Update configure.py --- configure.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/configure.py b/configure.py index f471257327..4b27a92552 100644 --- a/configure.py +++ b/configure.py @@ -132,17 +132,17 @@ def create_build_configuration(): write_action_env("TF_SHARED_LIBRARY_NAME", get_shared_lib_name()) write_action_env("TF_CXX11_ABI_FLAG", tf.sysconfig.CXX11_ABI_FLAG) + if Version(tf.__version__) >= Version("2.9.0"): + glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' + else: + glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' + write("build --spawn_strategy=standalone") write("build --strategy=Genrule=standalone") write("build --experimental_repo_remote_exec") write("build -c opt") write("build --cxxopt=" + glibcxx) - if Version(tf.__version__) >= Version("2.9.0"): - glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' - else: - glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' - if is_windows(): write("build --config=windows") write("build:windows --enable_runfiles") From 7711a1196a1209210186b22d69a7a10754e27f19 Mon Sep 17 00:00:00 2001 From: bhack Date: Fri, 20 May 2022 22:21:33 +0200 Subject: [PATCH 38/38] Update configure.py --- configure.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/configure.py b/configure.py index 4b27a92552..0d65e88a98 100644 --- a/configure.py +++ 
b/configure.py @@ -136,13 +136,13 @@ def create_build_configuration(): glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"' else: glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"' - + write("build --spawn_strategy=standalone") write("build --strategy=Genrule=standalone") write("build --experimental_repo_remote_exec") write("build -c opt") write("build --cxxopt=" + glibcxx) - + if is_windows(): write("build --config=windows") write("build:windows --enable_runfiles")
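
The configure.py churn in the last few patches boils down to one decision: the series assumes TensorFlow 2.9.0 and newer wheels are built with the new libstdc++ ABI, so the generated `.bazelrc` must pass `-D_GLIBCXX_USE_CXX11_ABI=1`, while older wheels still need `0`. The sketch below isolates that logic; the `packaging`/`distutils` fallback mirrors the patch, but the standalone helper name and the `__main__` demo are illustrative only and are not part of the actual configure.py.

```python
# Standalone sketch of the CXX11-ABI flag selection added to configure.py.
# Assumption: illustrative helper and demo only; the real script writes the
# resulting line into .bazelrc through its own write() helper.
import tensorflow as tf

try:
    # PEP 440-aware comparison, preferred when `packaging` is available.
    from packaging.version import Version
except ImportError:
    # Fallback used by the patch for environments without `packaging`.
    from distutils.version import LooseVersion as Version


def cxx11_abi_bazel_flag(tf_version=tf.__version__):
    """Return the --cxxopt line matching the installed TF wheel's ABI."""
    if Version(tf_version) >= Version("2.9.0"):
        glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=1"'  # TF 2.9+ uses the new ABI
    else:
        glibcxx = '"-D_GLIBCXX_USE_CXX11_ABI=0"'  # older wheels keep the old ABI
    return "build --cxxopt=" + glibcxx


if __name__ == "__main__":
    print(cxx11_abi_bazel_flag())          # for the installed TF version
    print(cxx11_abi_bazel_flag("2.8.1"))   # -> build --cxxopt="-D_GLIBCXX_USE_CXX11_ABI=0"
```

The ordering also matters: `glibcxx` has to be assigned before the `write()` call that uses it, which is why the second-to-last patch moves the version check above the block of `write()` lines. The same information could likely be derived from `tf.sysconfig.CXX11_ABI_FLAG`, which configure.py already exports as `TF_CXX11_ABI_FLAG`.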
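
Earlier in the series, the CUDA 11 crosstool wrapper (`crosstool_wrapper_driver_is_not_gcc`) is deleted. Its core trick is worth noting when reading similar wrappers: it reuses argparse to pull every occurrence of a repeated flag such as `-I`, `-D`, or `--cuda-gpu-arch` out of an arbitrary compiler command line while ignoring everything else. A minimal re-creation of that `GetOptionValue()` helper is sketched below; the function name, snake_case spelling, and example invocation are illustrative rather than copied verbatim from the deleted file.

```python
# Minimal re-creation of the option-extraction helper from the deleted
# crosstool wrapper: argparse collects every occurrence of one option and
# parse_known_args() silently skips the rest of the compiler command line.
from argparse import ArgumentParser


def get_option_values(argv, option):
    """Return all values passed for `option`, e.g. every -I include path."""
    parser = ArgumentParser()
    parser.add_argument(option, nargs='*', action='append')
    dest = option.lstrip('-').replace('-', '_')
    args, _ = parser.parse_known_args(argv)
    values = vars(args).get(dest)
    # Each occurrence yields its own sub-list; flatten them into one list.
    return sum(values, []) if values else []


if __name__ == "__main__":
    argv = ['-I', 'include', '-I', 'third_party', '-O2', '-c', 'op.cc', '-o', 'op.o']
    print(get_option_values(argv, '-I'))  # ['include', 'third_party']
    print(get_option_values(argv, '-c'))  # ['op.cc']
    print(get_option_values(argv, '-o'))  # ['op.o']
```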