upgrade to version 2.0.1

parent efa58f0d7c
commit 9a900a357e

0001-Remove-breakpad-dependency.patch (deleted file)
@@ -1,617 +0,0 @@
From d862908738d31f3d5e5dc2caabe51930e7cf020e Mon Sep 17 00:00:00 2001
From: Nikita Shulga <nshulga@fb.com>
Date: Tue, 3 May 2022 20:21:55 +0000
Subject: [PATCH] Remove breakpad dependency

This functionality does not seem to be used
and there are some requests to update dependency.

Add `third_party` to torch_cpu include directories if compiling with
Caffe2 support, as `caffe2/quantization/server/conv_dnnlowp_op.cc` depends on `third_party/fbgemm/src/RefImplementations.h`

Pull Request resolved: https://github.com/pytorch/pytorch/pull/75394
Approved by: https://github.com/janeyx99, https://github.com/seemethere
---
 CMakeLists.txt | 5 -
 caffe2/CMakeLists.txt | 8 +-
 cmake/Dependencies.cmake | 4 -
 cmake/Summary.cmake | 1 -
 test/test_cpp_extensions_jit.py | 79 +----------
 test/test_utils.py | 30 +----
 tools/build_variables.bzl | 1 -
 torch/csrc/Module.cpp | 7 -
 torch/csrc/api/include/torch/utils.h | 1 -
 torch/csrc/utils/crash_handler.cpp | 169 ------------------------
 torch/csrc/utils/crash_handler.h | 35 -----
 torch/csrc/utils/init.cpp | 14 --
 torch/csrc/utils/init.h | 5 -
 torch/testing/_internal/common_utils.py | 15 ---
 15 files changed, 5 insertions(+), 372 deletions(-)
 delete mode 100644 torch/csrc/utils/crash_handler.cpp
 delete mode 100644 torch/csrc/utils/crash_handler.h

diff --git a/CMakeLists.txt b/CMakeLists.txt
index e51760b5..7f62e1c6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -209,7 +209,6 @@ cmake_dependent_option(
   "USE_CUDNN" OFF)
 option(USE_FBGEMM "Use FBGEMM (quantized 8-bit server operators)" ON)
 option(USE_KINETO "Use Kineto profiling library" ON)
-option(USE_BREAKPAD "Use breakpad crash dump library" ON)
 option(USE_CUPTI_SO "Use CUPTI as a shared library" OFF)
 option(USE_FAKELOWP "Use FakeLowp operators" OFF)
 option(USE_FFMPEG "Use ffmpeg" OFF)
@@ -273,10 +272,6 @@ if(NOT DEFINED USE_VULKAN)
   "ANDROID" OFF)
 endif()
 
-if(IOS)
-  set(USE_BREAKPAD OFF)
-endif()
-
 option(USE_SOURCE_DEBUG_ON_MOBILE "Enable " ON)
 option(USE_LITE_INTERPRETER_PROFILER "Enable " ON)
 option(USE_VULKAN_FP16_INFERENCE "Vulkan - Use fp16 inference" OFF)
diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt
index d57d7ebb..cfcc1ae2 100644
--- a/caffe2/CMakeLists.txt
+++ b/caffe2/CMakeLists.txt
@@ -709,7 +709,6 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
     ${TORCH_SRC_DIR}/csrc/api/src/optim/schedulers/step_lr.cpp
     ${TORCH_SRC_DIR}/csrc/api/src/serialize/input-archive.cpp
     ${TORCH_SRC_DIR}/csrc/api/src/serialize/output-archive.cpp
-    ${TORCH_SRC_DIR}/csrc/utils/crash_handler.cpp
   )
 endif()
 
@@ -1066,10 +1065,9 @@ if(USE_TBB)
   target_link_libraries(torch_cpu PUBLIC TBB::tbb)
 endif()
 
-if(USE_BREAKPAD)
-  target_compile_definitions(torch_cpu PRIVATE ADD_BREAKPAD_SIGNAL_HANDLER)
-  target_include_directories(torch_cpu PRIVATE ${CMAKE_CURRENT_LIST_DIR}/../third_party ${CMAKE_CURRENT_LIST_DIR}/../third_party/breakpad/src)
-  target_link_libraries(torch_cpu PRIVATE breakpad)
+if(BUILD_CAFFE2 AND BUILD_CAFFE2_OPS AND USE_FBGEMM)
+  # FIXME: quantization/server/conv_dnnlowp_op.cc depends on fbgemm/src/RefImplementations.h
+  target_include_directories(torch_cpu PRIVATE ${CMAKE_CURRENT_LIST_DIR}/../third_party)
 endif()
 
 target_include_directories(torch_cpu PRIVATE ${ATen_CPU_INCLUDE})
diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
index 557ab649..525fea7a 100644
--- a/cmake/Dependencies.cmake
+++ b/cmake/Dependencies.cmake
@@ -1824,10 +1824,6 @@ set_target_properties(fmt-header-only PROPERTIES INTERFACE_COMPILE_FEATURES "")
 list(APPEND Caffe2_DEPENDENCY_LIBS fmt::fmt-header-only)
 set(BUILD_SHARED_LIBS ${TEMP_BUILD_SHARED_LIBS} CACHE BOOL "Build shared libs" FORCE)
 
-if(USE_BREAKPAD)
-  add_subdirectory(${CMAKE_CURRENT_LIST_DIR}/../third_party/breakpad)
-endif()
-
 # ---[ Kineto
 # edge profiler depends on KinetoProfiler but it only does cpu
 # profiling. Thus we dont need USE_CUDA/USE_ROCM
diff --git a/cmake/Summary.cmake b/cmake/Summary.cmake
index 20401207..1b15ec86 100644
--- a/cmake/Summary.cmake
+++ b/cmake/Summary.cmake
@@ -185,7 +185,6 @@ function(caffe2_print_configuration_summary)
     message(STATUS " SELECTED_OP_LIST : ${SELECTED_OP_LIST}")
   endif()
   message(STATUS " USE_DEPLOY : ${USE_DEPLOY}")
-  message(STATUS " USE_BREAKPAD : ${USE_BREAKPAD}")
   message(STATUS " Public Dependencies : ${Caffe2_PUBLIC_DEPENDENCY_LIBS}")
   message(STATUS " Private Dependencies : ${Caffe2_DEPENDENCY_LIBS}")
   # coreml
diff --git a/test/test_cpp_extensions_jit.py b/test/test_cpp_extensions_jit.py
index 06b1133d..9875f4ee 100644
--- a/test/test_cpp_extensions_jit.py
+++ b/test/test_cpp_extensions_jit.py
@@ -10,15 +10,12 @@ import tempfile
 import subprocess
 import glob
 
-import textwrap
-from multiprocessing import Process
-
 import torch.testing._internal.common_utils as common
 import torch
 import torch.backends.cudnn
 import torch.utils.cpp_extension
 from torch.utils.cpp_extension import CUDA_HOME, ROCM_HOME
-from torch.testing._internal.common_utils import gradcheck, TEST_WITH_ASAN, has_breakpad
+from torch.testing._internal.common_utils import gradcheck
 
 
 TEST_CUDA = torch.cuda.is_available() and CUDA_HOME is not None
@@ -869,80 +866,6 @@ class TestCppExtensionJIT(common.TestCase):
 
         gradcheck(torch.ops.my.add, [a, b], eps=1e-2)
 
-    @staticmethod
-    def _crash_handler_test_process(stderr_file, destination):
-        # Code to enable dumps and trigger a segfault
-        if sys.platform == "win32":
-            destination = destination.replace("\\", "\\\\")
-            csrc = textwrap.dedent(f"""
-            #include <torch/torch.h>
-            #include <locale>
-            #include <iostream>
-            #include <codecvt>
-            #include <string>
-
-            int fail() {{
-                std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
-                std::string narrow("{destination}");
-                std::wstring wide = converter.from_bytes(narrow);
-                torch::crash_handler::enable_minidumps(wide.c_str());
-
-                volatile int* bad = nullptr;
-                return *bad;
-            }}
-            """)
-        else:
-            csrc = textwrap.dedent(f"""
-            #include <torch/torch.h>
-
-            int fail() {{
-                torch::crash_handler::enable_minidumps("{destination}");
-
-                volatile int* bad = nullptr;
-                return *bad;
-            }}
-            """)
-
-        # Some special stuff to overwrite stderr for a C++ extension
-        # Copied from: https://stackoverflow.com/questions/8804893/redirect-stdout-from-python-for-c-calls
-        sys.stdout.flush()
-        newstdout = os.dup(2)
-        devnull = os.open(stderr_file, os.O_WRONLY)
-        os.dup2(devnull, 2)
-        os.close(devnull)
-        sys.stdout = os.fdopen(newstdout, 'w')
-
-        module = torch.utils.cpp_extension.load_inline(
-            name="segfault",
-            cpp_sources=csrc,
-            functions=["fail"],
-        )
-        module.fail()
-
-    @unittest.skipIf(TEST_WITH_ASAN, "ASAN disables the crash handler's signal handler")
-    @unittest.skipIf(not has_breakpad(), "Built without breakpad")
-    @unittest.skipIf(os.environ.get("TEST_CONFIG") == "force_on_cpu", "fails on force_on_cpu config, tracked w/ #65253")
-    def test_crash_handler(self):
-        with tempfile.TemporaryDirectory() as temp_dir, tempfile.NamedTemporaryFile(delete=not sys.platform == "win32") as stderr:
-            # Use multiprocessing to spin up a separate process to make catching
-            # the segfault easier
-            p = Process(target=self._crash_handler_test_process, args=(stderr.name, temp_dir))
-            p.start()
-            p.join()
-
-            with open(stderr.name) as f:
-                result = f.read().strip()
-
-            # Check that the signal handler was called
-            self.assertTrue(result.startswith(f"Wrote minidump to {temp_dir}"))
-
-            with open(result.replace("Wrote minidump to ", ""), "rb") as dump_file:
-                dump_bytes = dump_file.read()
-
-            # Check that the file has the correct magic number
-            self.assertEqual(b"MDMP", dump_bytes[0:4])
-
-
 
 if __name__ == "__main__":
     common.run_tests()
diff --git a/test/test_utils.py b/test/test_utils.py
index c8f4e3aa..5dd34b72 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -2,8 +2,6 @@
 
 import sys
 import os
-import contextlib
-import io
 import re
 import shutil
 import random
@@ -21,7 +19,7 @@ import torch.utils.cpp_extension
 import torch.hub as hub
 from torch.autograd._functions.utils import check_onnx_broadcast
 from torch.onnx.symbolic_opset9 import _prepare_onnx_paddings
-from torch.testing._internal.common_utils import has_breakpad, load_tests, retry, IS_SANDCASTLE, IS_WINDOWS, TEST_WITH_ASAN
+from torch.testing._internal.common_utils import load_tests, retry, IS_SANDCASTLE, IS_WINDOWS
 
 # load_tests from torch.testing._internal.common_utils is used to automatically filter tests for
 # sharding on sandcastle. This line silences flake warnings
@@ -757,32 +755,6 @@ class TestAssert(TestCase):
         ms(torch.tensor([False], dtype=torch.bool))
 
 
-class TestCrashHandler(TestCase):
-    @unittest.skipIf(TEST_WITH_ASAN, "ASAN disables the crash handler's signal handler")
-    @unittest.skipIf(not has_breakpad(), "Built without breakpad")
-    def test_python_exception_writing(self):
-        with tempfile.TemporaryDirectory() as temp_dir:
-            torch.utils._crash_handler.enable_minidumps(temp_dir)
-            torch.utils._crash_handler.enable_minidumps_on_exceptions()
-
-            files = os.listdir(temp_dir)
-            self.assertEqual(len(files), 0)
-
-            f = io.StringIO()
-            with contextlib.redirect_stderr(f):
-                try:
-                    @torch.jit.script
-                    def x(i: int):
-                        return i + "2"  # type: ignore[operator]
-                except RuntimeError as e:
-                    pass
-
-            files = os.listdir(temp_dir)
-            self.assertEqual(len(files), 1)
-            self.assertTrue(files[0].endswith(".dmp"))
-            torch.utils._crash_handler.disable_minidumps()
-
-
 @unittest.skipIf(IS_SANDCASTLE, "cpp_extension is OSS only")
 class TestStandaloneCPPJIT(TestCase):
     def test_load_standalone(self):
diff --git a/tools/build_variables.bzl b/tools/build_variables.bzl
index f78b7cb0..dc705d3e 100644
--- a/tools/build_variables.bzl
+++ b/tools/build_variables.bzl
@@ -772,7 +772,6 @@ torch_cpp_srcs = [
     "torch/csrc/api/src/optim/schedulers/step_lr.cpp",
     "torch/csrc/api/src/serialize/input-archive.cpp",
     "torch/csrc/api/src/serialize/output-archive.cpp",
-    "torch/csrc/utils/crash_handler.cpp",
 ]
 
 libtorch_python_cuda_core_sources = [
diff --git a/torch/csrc/Module.cpp b/torch/csrc/Module.cpp
index 49f51790..86aea308 100644
--- a/torch/csrc/Module.cpp
+++ b/torch/csrc/Module.cpp
@@ -55,7 +55,6 @@
 #include <torch/csrc/utils/tensor_new.h>
 #include <torch/csrc/utils/tensor_numpy.h>
 #include <torch/csrc/utils/python_dispatch.h>
-#include <torch/csrc/utils/crash_handler.h>
 #include <torch/csrc/utils/python_arg_parser.h>
 #include <torch/csrc/utils/pycfunction_helpers.h>
 #include <torch/csrc/jit/python/python_tracer.h>
@@ -64,7 +63,6 @@
 #include <torch/csrc/monitor/python_init.h>
 #include <torch/csrc/onnx/init.h>
 #include <torch/csrc/utils/init.h>
-#include <torch/csrc/utils/crash_handler.h>
 #include <torch/csrc/api/include/torch/python/init.h>
 
 #ifdef USE_DISTRIBUTED
@@ -839,7 +837,6 @@ PyObject* initModule() {
   torch::monitor::initMonitorBindings(module);
   torch::impl::dispatch::initDispatchBindings(module);
   torch::throughput_benchmark::initThroughputBenchmarkBindings(module);
-  torch::crash_handler::initCrashHandlerBindings(module);
   torch::autograd::initReturnTypes(module);
   torch::autograd::initNNFunctions(module);
   torch::autograd::initFFTFunctions(module);
@@ -895,10 +892,6 @@ PyObject* initModule() {
 
   // Automatically translate errors thrown from pybind11 functions
   py::register_exception_translator([](std::exception_ptr e) { // NOLINT
-    if (torch::crash_handler::is_enabled_on_exceptions()) {
-      torch::crash_handler::write_minidump();
-    }
-
     try {
       if (e) {
         std::rethrow_exception(e);
diff --git a/torch/csrc/api/include/torch/utils.h b/torch/csrc/api/include/torch/utils.h
index f664074d..3bb6363a 100644
--- a/torch/csrc/api/include/torch/utils.h
+++ b/torch/csrc/api/include/torch/utils.h
@@ -5,7 +5,6 @@
 #include <torch/csrc/autograd/grad_mode.h>
 #include <torch/csrc/autograd/profiler.h>
 #include <torch/csrc/api/include/torch/types.h>
-#include <torch/csrc/utils/crash_handler.h>
 #include <cstdint>
 
 namespace torch {
diff --git a/torch/csrc/utils/crash_handler.cpp b/torch/csrc/utils/crash_handler.cpp
deleted file mode 100644
index 8fb318b2..00000000
--- a/torch/csrc/utils/crash_handler.cpp
+++ /dev/null
@@ -1,169 +0,0 @@
-#include <cstdlib>
-#include <cstring>
-#include <iostream>
-
-#ifdef ADD_BREAKPAD_SIGNAL_HANDLER
-#ifdef __linux__
-#include <breakpad/src/client/linux/handler/exception_handler.h>
-#include <csignal>
-#elif __APPLE__
-#include <breakpad/src/client/mac/handler/exception_handler.h>
-#elif _WIN32
-#include <breakpad/src/client/windows/handler/exception_handler.h>
-#else
-#error unsupported platform
-#endif
-#endif
-
-#include <c10/util/Exception.h>
-#include <torch/csrc/utils/crash_handler.h>
-
-namespace torch {
-namespace crash_handler {
-
-#ifdef ADD_BREAKPAD_SIGNAL_HANDLER
-
-static std::unique_ptr<google_breakpad::ExceptionHandler> handler; // NOLINT
-static STRING_TYPE minidump_directory; // NOLINT
-static bool enabled_for_exceptions = false; // NOLINT
-
-#if __linux__
-bool dump_callback(
-    const google_breakpad::MinidumpDescriptor& descriptor,
-    void* context,
-    bool succeeded) {
-  if (succeeded) {
-    std::cerr << "Wrote minidump to " << descriptor.path() << std::endl;
-  }
-  return succeeded;
-}
-#elif __APPLE__
-
-bool dump_callback(
-    const char* dump_dir,
-    const char* minidump_id,
-    void* context,
-    bool succeeded) {
-  if (succeeded) {
-    std::cerr << "Wrote minidump to " << dump_dir << "/" << minidump_id
-              << ".dmp" << std::endl;
-  }
-  return succeeded;
-}
-#elif _WIN32
-bool dump_callback(
-    const wchar_t* dump_path,
-    const wchar_t* minidump_id,
-    void* context,
-    EXCEPTION_POINTERS* exinfo,
-    MDRawAssertionInfo* assertion,
-    bool succeeded) {
-  if (succeeded) {
-    // Printing with wcerr inserts spaces between all the characters for some
-    // reason. If someone figures that out then we can get rid of the std::string
-    // conversions here.
-    std::wstring dump_path_ws(dump_path);
-    std::string dump_path_string(dump_path_ws.begin(), dump_path_ws.end());
-    std::wstring minidump_id_ws(minidump_id);
-    std::string minidump_id_string(minidump_id_ws.begin(), minidump_id_ws.end());
-    std::cerr << "Wrote minidump to " << dump_path_string << "\\" << minidump_id_string << ".dmp" << std::endl;
-  }
-  return succeeded;
-}
-#endif
-
-void enable_minidumps(const STRING_TYPE& dir) {
-  minidump_directory = dir;
-// The constructor here registers the actual signal handler
-#ifdef __linux__
-  handler = std::make_unique<google_breakpad::ExceptionHandler>(
-      google_breakpad::MinidumpDescriptor(minidump_directory),
-      nullptr,
-      dump_callback,
-      nullptr,
-      true,
-      -1);
-#elif __APPLE__
-  handler = std::make_unique<google_breakpad::ExceptionHandler>(
-      /*dump_path=*/minidump_directory.c_str(),
-      /*filter=*/nullptr,
-      /*callback=*/dump_callback,
-      /*callback_context=*/nullptr,
-      /*install_handler=*/true,
-      /*port_name=*/nullptr);
-#elif _WIN32
-  handler = std::make_unique<google_breakpad::ExceptionHandler>(
-      /*dump_path=*/minidump_directory.c_str(),
-      /*filter=*/nullptr,
-      /*callback=*/dump_callback,
-      /*callback_context=*/nullptr,
-      /*handler_types=*/
-      google_breakpad::ExceptionHandler::HandlerType::HANDLER_ALL);
-#endif
-}
-
-void disable_minidumps() {
-  handler.reset();
-}
-
-const STRING_TYPE& get_minidump_directory() {
-  if (handler == nullptr) {
-    AT_ERROR(
-        "Minidump handler is uninintialized, make sure to call enable_minidumps first");
-  }
-  return minidump_directory;
-}
-
-bool is_enabled_on_exceptions() {
-  if (handler == nullptr) {
-    return false;
-  }
-
-  return enabled_for_exceptions;
-}
-
-void write_minidump() {
-  TORCH_CHECK(
-      handler != nullptr,
-      "Minidump handler is uninintialized, make sure to call enable_minidumps first");
-  handler->WriteMinidump();
-}
-
-void enable_minidumps_on_exceptions() {
-  if (handler == nullptr) {
-    AT_ERROR(
-        "Minidump handler is uninintialized, make sure to call enable_minidumps first");
-  }
-  enabled_for_exceptions = true;
-}
-
-#else
-// On unspported systems we can't do anything, so stub out everything.
-void enable_minidumps(const STRING_TYPE& dir) {
-  AT_ERROR("Compiled without minidump support");
-}
-
-void disable_minidumps() {
-  // Purposefully do nothing
-}
-
-const STRING_TYPE& get_minidump_directory() {
-  AT_ERROR("Compiled without minidump support");
-}
-
-bool is_enabled_on_exceptions() {
-  return false;
-}
-
-void write_minidump() {
-  AT_ERROR("Compiled without minidump support");
-}
-
-void enable_minidumps_on_exceptions() {
-  AT_ERROR("Compiled without minidump support");
-}
-
-#endif
-
-} // namespace crash_handler
-} // namespace torch
diff --git a/torch/csrc/utils/crash_handler.h b/torch/csrc/utils/crash_handler.h
deleted file mode 100644
index 93f03380..00000000
--- a/torch/csrc/utils/crash_handler.h
+++ /dev/null
@@ -1,35 +0,0 @@
-#pragma once
-#include <torch/csrc/Export.h>
-#include <string>
-
-namespace torch {
-namespace crash_handler {
-
-#ifdef _WIN32
-typedef std::wstring STRING_TYPE;
-#else
-typedef std::string STRING_TYPE;
-#endif
-
-// Set up a handler that writes minidumps to 'dir' on signals. This is not
-// necessary to call unless you want to change 'dir' to something other than
-// the default '/tmp/pytorch_crashes'.
-TORCH_API void enable_minidumps(const STRING_TYPE& dir);
-
-// Enable minidumps when passing exceptions up to Python. By default these don't
-// do anything special, but it can be useful to write out a minidump on
-// exceptions for debugging purposes. This has no effect in C++.
-TORCH_API void enable_minidumps_on_exceptions();
-
-// Disable all minidump writing and un-register the signal handler
-TORCH_API void disable_minidumps();
-
-// Get the directory that minidumps will be written to
-TORCH_API const STRING_TYPE& get_minidump_directory();
-
-// These are TORCH_API'ed since they are used from libtorch_python.so
-TORCH_API bool is_enabled_on_exceptions();
-TORCH_API void write_minidump();
-
-} // namespace crash_handler
-} // namespace torch
diff --git a/torch/csrc/utils/init.cpp b/torch/csrc/utils/init.cpp
index 8472f208..04a5d1b3 100644
--- a/torch/csrc/utils/init.cpp
+++ b/torch/csrc/utils/init.cpp
@@ -1,7 +1,6 @@
 #include <ATen/core/ivalue.h>
 #include <torch/csrc/utils/init.h>
 #include <torch/csrc/utils/throughput_benchmark.h>
-#include <torch/csrc/utils/crash_handler.h>
 
 #include <pybind11/functional.h>
 
@@ -51,17 +50,4 @@ void initThroughputBenchmarkBindings(PyObject* module) {
 }
 
 } // namespace throughput_benchmark
-
-namespace crash_handler {
-
-void initCrashHandlerBindings(PyObject* module) {
-  auto m = pybind11::handle(module).cast<pybind11::module>();
-
-  m.def("_enable_minidumps", enable_minidumps)
-      .def("_is_enabled_on_exceptions", is_enabled_on_exceptions)
-      .def("_enable_minidumps_on_exceptions", enable_minidumps_on_exceptions)
-      .def("_disable_minidumps", disable_minidumps)
-      .def("_get_minidump_directory", get_minidump_directory);
-}
-} // namespace crash_handler
 } // namespace torch
diff --git a/torch/csrc/utils/init.h b/torch/csrc/utils/init.h
index 45a1a3a7..bf6dd216 100644
--- a/torch/csrc/utils/init.h
+++ b/torch/csrc/utils/init.h
@@ -8,9 +8,4 @@ namespace throughput_benchmark {
 void initThroughputBenchmarkBindings(PyObject* module);
 
 } // namespace throughput_benchmark
-
-namespace crash_handler {
-void initCrashHandlerBindings(PyObject* module);
-
-} // namespace crash_handler
 } // namespace torch
diff --git a/torch/testing/_internal/common_utils.py b/torch/testing/_internal/common_utils.py
index 6e67b776..81656500 100644
--- a/torch/testing/_internal/common_utils.py
+++ b/torch/testing/_internal/common_utils.py
@@ -3061,21 +3061,6 @@ def bytes_to_scalar(byte_list: List[int], dtype: torch.dtype, device: torch.devi
     return torch.tensor(res, device=device, dtype=dtype)
 
 
-def has_breakpad():
-    # We always build with breakpad in CI
-    if IS_IN_CI:
-        return True
-
-    # If not on a special build, check that the library was actually linked in
-    try:
-        torch._C._get_minidump_directory()  # type: ignore[attr-defined]
-        return True
-    except RuntimeError as e:
-        if "Minidump handler is uninintialized" in str(e):
-            return True
-        return False
-
-
 def sandcastle_skip_if(condition, reason):
     """
     Similar to unittest.skipIf, however in the sandcastle environment it just
--
2.27.0

0002-Update-gloo-for-gcc11-compile.patch (deleted file)
@@ -1,55 +0,0 @@
diff -Naur a/third_party/gloo/gloo/common/linux.cc b/third_party/gloo/gloo/common/linux.cc
--- a/third_party/gloo/gloo/common/linux.cc	2022-03-11 01:01:52.000000000 +0800
+++ b/third_party/gloo/gloo/common/linux.cc	2023-02-16 11:09:11.493631531 +0800
@@ -187,29 +187,28 @@
 static int getInterfaceSpeedGLinkSettings(int sock, struct ifreq* ifr) {
 #if LINUX_VERSION_CODE >= KERNEL_VERSION(4,6,0)
   constexpr auto link_mode_data_nwords = 3 * 127;
-  struct {
-    struct ethtool_link_settings req;
-    __u32 link_mode_data[link_mode_data_nwords];
-  } ecmd;
+  constexpr auto bufsize = sizeof(struct ethtool_link_settings) + sizeof(__u32)*link_mode_data_nwords;
+  char buf[bufsize];
+  struct ethtool_link_settings* ecmd = (struct ethtool_link_settings*)buf;
   int rv;
 
-  ifr->ifr_data = (__caddr_t)&ecmd;
-  memset(&ecmd, 0, sizeof(ecmd));
-  ecmd.req.cmd = ETHTOOL_GLINKSETTINGS;
+  ifr->ifr_data = (__caddr_t)buf;
+  memset(buf, 0, bufsize);
+  ecmd->cmd = ETHTOOL_GLINKSETTINGS;
 
   rv = ioctl(sock, SIOCETHTOOL, ifr);
-  if (rv < 0 || ecmd.req.link_mode_masks_nwords >= 0) {
+  if (rv < 0 || ecmd->link_mode_masks_nwords >= 0) {
     return SPEED_UNKNOWN;
   }
 
-  ecmd.req.cmd = ETHTOOL_GLINKSETTINGS;
-  ecmd.req.link_mode_masks_nwords = -ecmd.req.link_mode_masks_nwords;
+  ecmd->cmd = ETHTOOL_GLINKSETTINGS;
+  ecmd->link_mode_masks_nwords = - (ecmd->link_mode_masks_nwords);
   rv = ioctl(sock, SIOCETHTOOL, ifr);
   if (rv < 0) {
     return SPEED_UNKNOWN;
   }
 
-  return ecmd.req.speed;
+  return ecmd->speed;
 #else
   (void)sock;
   (void)ifr;
diff -Naur a/third_party/gloo/gloo/test/tls_tcp_test.cc b/third_party/gloo/gloo/test/tls_tcp_test.cc
--- a/third_party/gloo/gloo/test/tls_tcp_test.cc	2022-03-11 01:01:52.000000000 +0800
+++ b/third_party/gloo/gloo/test/tls_tcp_test.cc	2023-02-16 11:12:33.779302224 +0800
@@ -72,7 +72,7 @@
   }
 } catch (::gloo::IoException e) {
   exception_thrown = true;
-  ASSERT_THAT(e.what(), ::testing::ContainsRegex("unknown ca"));
+  ASSERT_THAT(e.what(), ::testing::ContainsRegex("[unknown ca|Connect timeout|Connection refused]"));
 } catch (::gloo::EnforceNotMet e) {
   exception_thrown = true;
   ASSERT_THAT(e.what(),
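
Background on the linux.cc hunk above: struct ethtool_link_settings from the kernel headers ends in a flexible array member, and GCC 11 rejects embedding a struct of that shape as a non-final member of a wrapper struct, which is exactly what the old `ecmd` did (the changelog's "flexible array member not at end of struct" error). A minimal C++ sketch of the failure and of the flat-buffer workaround the patch uses, with a stand-in struct rather than the real kernel header:

    // Stand-in for ethtool_link_settings: a fixed header ending in a
    // flexible array member (a C idiom GCC also accepts in C++).
    struct link_settings {
      unsigned cmd;
      int link_mode_masks_nwords;
      unsigned link_mode_masks[];  // must remain the last member
    };

    int main() {
      constexpr auto nwords = 3 * 127;

      // GCC 11 rejects the old wrapper: putting another member after `req`
      // leaves the flexible array in the middle of the enclosing struct.
      //
      //   struct {
      //     link_settings req;
      //     unsigned link_mode_data[nwords];  // error: flexible array member
      //   } ecmd;                             // not at end of struct
      //
      // Workaround from the patch: one flat buffer sized for header plus
      // data, viewed through a pointer to the header type.
      alignas(link_settings) char buf[sizeof(link_settings) +
                                      sizeof(unsigned) * nwords] = {};
      auto* ecmd = reinterpret_cast<link_settings*>(buf);
      ecmd->cmd = 0;  // ETHTOOL_GLINKSETTINGS in the real code
      return ecmd->link_mode_masks_nwords;
    }
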
pytorch-v2.0.1.tar.gz (new binary file, not shown)
pytorch.spec (48 lines changed)
@@ -1,15 +1,12 @@
 %global _empty_manifest_terminate_build 0
 Name: pytorch
-Version: 1.11.0
-Release: 2
+Version: 2.0.1
+Release: 1
 Summary: Tensors and Dynamic neural networks in Python with strong GPU acceleration
 License: BSD-3
 URL: https://pytorch.org/
 Source0: https://github.com/pytorch/pytorch/releases/download/v%{version}/pytorch-v%{version}.tar.gz
 
-Patch0001: 0001-Remove-breakpad-dependency.patch
-Patch0002: 0002-Update-gloo-for-gcc11-compile.patch
-
 BuildRequires: g++
 Requires: python3-future
 Requires: python3-numpy
@@ -25,10 +22,18 @@ Summary: Tensors and Dynamic neural networks in Python with strong GPU accelerat
 Provides: python-torch
 BuildRequires: python3-devel
 BuildRequires: python3-setuptools
+BuildRequires: python3-setuptools_scm
+BuildRequires: python3-pbr
+BuildRequires: python3-pip
+BuildRequires: python3-wheel
+BuildRequires: python3-hatchling
+
+BuildRequires: python3-astunparse
+BuildRequires: python3-numpy
 BuildRequires: python3-pyyaml
 BuildRequires: cmake
-BuildRequires: python3-numpy
 BuildRequires: python3-typing-extensions
+BuildRequires: python3-requests
 
 %description -n python3-pytorch
 PyTorch is a Python package that provides two high-level features:
@@ -49,43 +54,40 @@ You can reuse your favorite Python packages such as NumPy, SciPy and Cython to e
 %autosetup -p1 -n %{name}-v%{version}
 
 %build
-%py3_build
+export CFLAGS+=" -Wno-error=maybe-uninitialized -Wno-error=uninitialized -Wno-error=restrict"
+export CXXFLAGS+=" -Wno-error=maybe-uninitialized -Wno-error=uninitialized -Wno-error=restrict"
+%pyproject_build
 
 %install
-%py3_install
+%pyproject_install
 install -d -m755 %{buildroot}/%{_pkgdocdir}
 if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
 if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
 if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
 if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
 pushd %{buildroot}
-if [ -d usr/lib ]; then
-find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
-fi
-if [ -d usr/lib64 ]; then
-find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
-fi
-if [ -d usr/bin ]; then
-find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
-fi
-if [ -d usr/sbin ]; then
-find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
-fi
 touch doclist.lst
 if [ -d usr/share/man ]; then
 find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
 fi
 popd
-mv %{buildroot}/filelist.lst .
 mv %{buildroot}/doclist.lst .
 
-%files -n python3-pytorch -f filelist.lst
-%dir %{python3_sitearch}/*
+%files -n python3-pytorch
+%doc *.md
+%license LICENSE
+%{_bindir}/convert-caffe2-to-onnx
+%{_bindir}/convert-onnx-to-caffe2
+%{_bindir}/torchrun
+%{python3_sitearch}/*
 
 %files help -f doclist.lst
 %{_docdir}/*
 
 %changelog
+* Mon Jul 24 2023 Dongxing Wang <dxwangk@isoftstone.com> - 2.0.1-1
+- upgrade to 2.0.1
+
 * Thu Feb 16 2023 Dongxing Wang <dxwangk@isoftstone.com> - 1.11.0-2
 - fixes: error: the CXX 11 compiler error (linux.cc flexible array member not at end of struct)
 
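
Packaging note on the %build/%install changes above: %py3_build and %py3_install drive the legacy setup.py path, while %pyproject_build and %pyproject_install (provided by pyproject-rpm-macros on distros that ship it) build and install a PEP 517 wheel. That is why the new spec can drop the hand-rolled filelist.lst and instead list the %{_bindir} entry points and %{python3_sitearch} contents directly. A rough, hedged sketch of the difference (exact macro expansions vary by distro):

    # Old flow (setup.py based):
    #   %py3_build    ->  roughly %{__python3} setup.py build ...
    #   %py3_install  ->  roughly %{__python3} setup.py install --root %{buildroot} ...
    #
    # New flow (PEP 517 based):
    #   %pyproject_build    ->  builds a wheel from the source tree
    #   %pyproject_install  ->  installs that wheel under %{buildroot}
    #
    # The exported -Wno-error=* CFLAGS/CXXFLAGS presumably keep newer GCC's
    # maybe-uninitialized/uninitialized/restrict diagnostics from failing
    # the build where the PyTorch build promotes them to errors.
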