diff --git a/ports/libtorch/add-include-chrono.patch b/ports/libtorch/add-include-chrono.patch
new file mode 100644
index 00000000000000..0375ba860eb956
--- /dev/null
+++ b/ports/libtorch/add-include-chrono.patch
@@ -0,0 +1,11 @@
+diff --git a/torch/csrc/jit/runtime/logging.h b/torch/csrc/jit/runtime/logging.h
+index 5499ecf..269168e 100644
+--- a/torch/csrc/jit/runtime/logging.h
++++ b/torch/csrc/jit/runtime/logging.h
+@@ -1,5 +1,6 @@
+ #pragma once
+ 
++#include <chrono>
+ #include
+ #include
+ #include
diff --git a/ports/libtorch/portfile.cmake b/ports/libtorch/portfile.cmake
index 4dc05e8dc48706..24d5130f233d33 100644
--- a/ports/libtorch/portfile.cmake
+++ b/ports/libtorch/portfile.cmake
@@ -39,6 +39,7 @@ vcpkg_from_github(
         fix-aten-cutlass.patch
         fix-build-error-with-fmt11.patch
         no-abs-path.patch
+        add-include-chrono.patch
 )
 
 file(REMOVE_RECURSE "${SOURCE_PATH}/caffe2/core/macros.h") # We must use generated header files
diff --git a/ports/libtorch/vcpkg.json b/ports/libtorch/vcpkg.json
index 1d2b9770749453..eda740b804489d 100644
--- a/ports/libtorch/vcpkg.json
+++ b/ports/libtorch/vcpkg.json
@@ -1,7 +1,7 @@
 {
   "name": "libtorch",
   "version": "2.1.2",
-  "port-version": 9,
+  "port-version": 10,
   "description": "Tensors and Dynamic neural networks in Python with strong GPU acceleration",
   "homepage": "https://pytorch.org/",
   "license": null,
diff --git a/versions/baseline.json b/versions/baseline.json
index ae7edec66224fa..55cabc73e21681 100644
--- a/versions/baseline.json
+++ b/versions/baseline.json
@@ -5242,7 +5242,7 @@
     },
     "libtorch": {
       "baseline": "2.1.2",
-      "port-version": 9
+      "port-version": 10
     },
     "libtorrent": {
       "baseline": "2.0.10",
diff --git a/versions/l-/libtorch.json b/versions/l-/libtorch.json
index 3a1b3878fd5081..bf8ec57c1eb7d2 100644
--- a/versions/l-/libtorch.json
+++ b/versions/l-/libtorch.json
@@ -1,5 +1,10 @@
 {
   "versions": [
+    {
+      "git-tree": "7307f8c37b266aeb61d83e069740e75cff863bf6",
+      "version": "2.1.2",
+      "port-version": 10
+    },
     {
       "git-tree": "0a4d67bf0f603494ff7ce3e51587513a1df295fe",
       "version": "2.1.2",