Merge branch 'master' into master
diff --git a/BUILD.bazel b/BUILD.bazel
index 6d82829..41a0985 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -38,7 +38,7 @@
 
 config_setting(
     name = "windows",
-    values = { "cpu": "x64_windows" },
+    values = {"cpu": "x64_windows"},
 )
 
 config_setting(
@@ -51,7 +51,6 @@
     values = {"define": "absl=1"},
 )
 
-
 # Google Test including Google Mock
 cc_library(
     name = "gtest",
@@ -70,7 +69,7 @@
             "googlemock/src/gmock_main.cc",
         ],
     ),
-    hdrs =glob([
+    hdrs = glob([
         "googletest/include/gtest/*.h",
         "googlemock/include/gmock/*.h",
     ]),
@@ -81,6 +80,14 @@
             "//conditions:default": ["-pthread"],
         },
     ),
+    defines = select(
+        {
+            ":has_absl": [
+                "GTEST_HAS_ABSL=1",
+            ],
+            "//conditions:default": [],
+        },
+    ),
     includes = [
         "googlemock",
         "googlemock/include",
@@ -94,21 +101,19 @@
             "-pthread",
         ],
     }),
-    defines = select ({
-        ":has_absl": [
-        "GTEST_HAS_ABSL=1",
-        ],
-        "//conditions:default": [],
-    }
+    deps = select(
+        {
+            ":has_absl": [
+                "@com_google_absl//absl/debugging:failure_signal_handler",
+                "@com_google_absl//absl/debugging:stacktrace",
+                "@com_google_absl//absl/debugging:symbolize",
+                "@com_google_absl//absl/strings",
+                "@com_google_absl//absl/types:optional",
+                "@com_google_absl//absl/types:variant",
+            ],
+            "//conditions:default": [],
+        },
     ),
-    deps = select ({
-        ":has_absl": [
-        "@com_google_absl//absl/types:optional",
-        "@com_google_absl//absl/strings"
-        ],
-        "//conditions:default": [],
-    }
-    )
 )
 
 cc_library(
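The reworked cc_library above routes Abseil support through a single --define: building with `bazel test --define absl=1` satisfies the `:has_absl` setting, which adds GTEST_HAS_ABSL=1 to `defines` and links the Abseil deps listed there. A minimal sketch of what that enables (hypothetical test file, not part of this patch):

```cpp
// Hypothetical test file; assumes the build sets GTEST_HAS_ABSL=1 via
// `--define absl=1`, as wired up in the BUILD.bazel change above.
#include <string>

#include "gtest/gtest.h"

#if GTEST_HAS_ABSL
#include "absl/types/optional.h"

TEST(AbslIntegrationDemo, OptionalPrintsItsContents) {
  absl::optional<std::string> name("gtest");
  // With GTEST_HAS_ABSL, a failure here reports the optional's value
  // instead of an opaque byte dump.
  EXPECT_TRUE(name.has_value());
}
#endif  // GTEST_HAS_ABSL
```
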
diff --git a/googletest/CMakeLists.txt b/googletest/CMakeLists.txt
index b09c46e..2e412d7 100644
--- a/googletest/CMakeLists.txt
+++ b/googletest/CMakeLists.txt
@@ -284,7 +284,7 @@
   py_test(gtest_list_tests_unittest)
 
   cxx_executable(gtest_output_test_ test gtest)
-  py_test(gtest_output_test)
+  py_test(gtest_output_test --no_stacktrace_support)
 
   cxx_executable(gtest_shuffle_test_ test gtest)
   py_test(gtest_shuffle_test)
@@ -307,6 +307,6 @@
   py_test(gtest_json_outfiles_test)
 
   cxx_executable(gtest_xml_output_unittest_ test gtest)
-  py_test(gtest_xml_output_unittest)
+  py_test(gtest_xml_output_unittest --no_stacktrace_support)
   py_test(gtest_json_output_unittest)
 endif()
diff --git a/googletest/cmake/internal_utils.cmake b/googletest/cmake/internal_utils.cmake
index 6448918..be7af38 100644
--- a/googletest/cmake/internal_utils.cmake
+++ b/googletest/cmake/internal_utils.cmake
@@ -257,14 +257,14 @@
         add_test(
           NAME ${name}
           COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
-              --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>)
+              --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
       else (CMAKE_CONFIGURATION_TYPES)
 	# Single-configuration build generators like Makefile generators
 	# don't have subdirs below CMAKE_CURRENT_BINARY_DIR.
         add_test(
           NAME ${name}
           COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
-              --build_dir=${CMAKE_CURRENT_BINARY_DIR})
+              --build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
       endif (CMAKE_CONFIGURATION_TYPES)
     else (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
       # ${CMAKE_CURRENT_BINARY_DIR} is known at configuration time, so we can
@@ -274,7 +274,7 @@
       add_test(
         ${name}
         ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
-          --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE})
+          --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
     endif (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
   endif(PYTHONINTERP_FOUND)
 endfunction()
diff --git a/googletest/include/gtest/gtest-printers.h b/googletest/include/gtest/gtest-printers.h
index 373946b..66d54b9 100644
--- a/googletest/include/gtest/gtest-printers.h
+++ b/googletest/include/gtest/gtest-printers.h
@@ -114,6 +114,7 @@
 #if GTEST_HAS_ABSL
 #include "absl/strings/string_view.h"
 #include "absl/types/optional.h"
+#include "absl/types/variant.h"
 #endif  // GTEST_HAS_ABSL
 
 namespace testing {
@@ -787,6 +788,28 @@
   }
 };
 
+// Printer for absl::variant
+
+template <typename... T>
+class UniversalPrinter<::absl::variant<T...>> {
+ public:
+  static void Print(const ::absl::variant<T...>& value, ::std::ostream* os) {
+    *os << '(';
+    absl::visit(Visitor{os}, value);
+    *os << ')';
+  }
+
+ private:
+  struct Visitor {
+    template <typename U>
+    void operator()(const U& u) const {
+      *os << "'" << GetTypeName<U>() << "' with value ";
+      UniversalPrint(u, os);
+    }
+    ::std::ostream* os;
+  };
+};
+
 #endif  // GTEST_HAS_ABSL
 
 // UniversalPrintArray(begin, len, os) prints an array of 'len'
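The new UniversalPrinter specialization above makes absl::variant values readable in assertion output: the visitor prints the active alternative's type name and its value. A minimal sketch of the resulting behavior (hypothetical test, assumes GTEST_HAS_ABSL is defined):

```cpp
// Hypothetical example; assumes a build with GTEST_HAS_ABSL=1.
#include <string>

#include "absl/types/variant.h"
#include "gtest/gtest-printers.h"
#include "gtest/gtest.h"

TEST(VariantPrinterDemo, ShowsTheActiveAlternative) {
  absl::variant<int, std::string> v = std::string("hello");
  // PrintToString() routes through UniversalPrinter, producing something like
  //   ('std::string' with value "hello")
  // The exact type name depends on the platform's demangler.
  EXPECT_FALSE(testing::PrintToString(v).empty());
}
```
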
diff --git a/googletest/src/gtest-internal-inl.h b/googletest/src/gtest-internal-inl.h
index e77c8b6..c5a4265 100644
--- a/googletest/src/gtest-internal-inl.h
+++ b/googletest/src/gtest-internal-inl.h
@@ -446,6 +446,16 @@
   virtual void UponLeavingGTest();
 
  private:
+#if GTEST_HAS_ABSL
+  Mutex mutex_;  // Protects all internal state.
+
+  // We save the stack frame below the frame that calls user code.
+  // We do this because the address of the frame immediately below
+  // the user code changes between the call to UponLeavingGTest()
+  // and any calls to the stack trace code from within the user code.
+  void* caller_frame_ = nullptr;
+#endif  // GTEST_HAS_ABSL
+
   GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetter);
 };
 
diff --git a/googletest/src/gtest.cc b/googletest/src/gtest.cc
index ce6c07f..9c25c99 100644
--- a/googletest/src/gtest.cc
+++ b/googletest/src/gtest.cc
@@ -139,6 +139,13 @@
 # define vsnprintf _vsnprintf
 #endif  // GTEST_OS_WINDOWS
 
+#if GTEST_HAS_ABSL
+#include "absl/debugging/failure_signal_handler.h"
+#include "absl/debugging/stacktrace.h"
+#include "absl/debugging/symbolize.h"
+#include "absl/strings/str_cat.h"
+#endif  // GTEST_HAS_ABSL
+
 namespace testing {
 
 using internal::CountIf;
@@ -228,6 +235,13 @@
     "exclude).  A test is run if it matches one of the positive "
     "patterns and does not match any of the negative patterns.");
 
+GTEST_DEFINE_bool_(
+    install_failure_signal_handler,
+    internal::BoolFromGTestEnv("install_failure_signal_handler", false),
+    "If true and supported on the current platform, " GTEST_NAME_ " should "
+    "install a signal handler that dumps debugging information when fatal "
+    "signals are raised.");
+
 GTEST_DEFINE_bool_(list_tests, false,
                    "List all tests without running them.");
 
@@ -4243,12 +4257,67 @@
 const char* const OsStackTraceGetterInterface::kElidedFramesMarker =
     "... " GTEST_NAME_ " internal frames ...";
 
-std::string OsStackTraceGetter::CurrentStackTrace(int /*max_depth*/,
-                                                  int /*skip_count*/) {
+std::string OsStackTraceGetter::CurrentStackTrace(int max_depth, int skip_count)
+    GTEST_LOCK_EXCLUDED_(mutex_) {
+#if GTEST_HAS_ABSL
+  std::string result;
+
+  if (max_depth <= 0) {
+    return result;
+  }
+
+  max_depth = std::min(max_depth, kMaxStackTraceDepth);
+
+  std::vector<void*> raw_stack(max_depth);
+  // Skips the frames requested by the caller, plus this function.
+  const int raw_stack_size =
+      absl::GetStackTrace(&raw_stack[0], max_depth, skip_count + 1);
+
+  void* caller_frame = nullptr;
+  {
+    MutexLock lock(&mutex_);
+    caller_frame = caller_frame_;
+  }
+
+  for (int i = 0; i < raw_stack_size; ++i) {
+    if (raw_stack[i] == caller_frame &&
+        !GTEST_FLAG(show_internal_stack_frames)) {
+      // Add a marker to the trace and stop adding frames.
+      absl::StrAppend(&result, kElidedFramesMarker, "\n");
+      break;
+    }
+
+    char tmp[1024];
+    const char* symbol = "(unknown)";
+    if (absl::Symbolize(raw_stack[i], tmp, sizeof(tmp))) {
+      symbol = tmp;
+    }
+
+    char line[1024];
+    snprintf(line, sizeof(line), "  %p: %s\n", raw_stack[i], symbol);
+    result += line;
+  }
+
+  return result;
+
+#else  // !GTEST_HAS_ABSL
+  static_cast<void>(max_depth);
+  static_cast<void>(skip_count);
   return "";
+#endif  // GTEST_HAS_ABSL
 }
 
-void OsStackTraceGetter::UponLeavingGTest() {}
+void OsStackTraceGetter::UponLeavingGTest() GTEST_LOCK_EXCLUDED_(mutex_) {
+#if GTEST_HAS_ABSL
+  void* caller_frame = nullptr;
+  if (absl::GetStackTrace(&caller_frame, 1, 3) <= 0) {
+    caller_frame = nullptr;
+  }
+
+  MutexLock lock(&mutex_);
+  caller_frame_ = caller_frame;
+#endif  // GTEST_HAS_ABSL
+}
 
 // A helper class that creates the premature-exit file in its
 // constructor and deletes the file in its destructor.
@@ -4865,6 +4934,13 @@
     // Configures listeners for streaming test results to the specified server.
     ConfigureStreamingOutput();
 #endif  // GTEST_CAN_STREAM_RESULTS_
+
+#if GTEST_HAS_ABSL
+    if (GTEST_FLAG(install_failure_signal_handler)) {
+      absl::FailureSignalHandlerOptions options;
+      absl::InstallFailureSignalHandler(options);
+    }
+#endif  // GTEST_HAS_ABSL
   }
 }
 
@@ -5769,6 +5845,10 @@
     g_argvs.push_back(StreamableToString(argv[i]));
   }
 
+#if GTEST_HAS_ABSL
+  absl::InitializeSymbolizer(g_argvs[0].c_str());
+#endif  // GTEST_HAS_ABSL
+
   ParseGoogleTestFlagsOnly(argc, argv);
   GetUnitTestImpl()->PostFlagParsingInit();
 }
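Taken together, the gtest.cc changes above mean an Abseil-enabled build symbolizes a stack trace for every assertion failure and can optionally install Abseil's failure signal handler. A minimal usage sketch (hypothetical test program, assumes GTEST_HAS_ABSL=1):

```cpp
// Hypothetical demo; assumes googletest was built with GTEST_HAS_ABSL=1.
#include "gtest/gtest.h"

TEST(StackTraceDemo, FailsWithTrace) {
  // In an Abseil-enabled build this failure message ends with a symbolized
  // "Stack trace:" section produced by OsStackTraceGetter::CurrentStackTrace.
  EXPECT_EQ(1, 2);
}

int main(int argc, char** argv) {
  // InitGoogleTest() already calls absl::InitializeSymbolizer(argv[0]) when
  // GTEST_HAS_ABSL is defined, so no extra setup is needed here. Passing
  // --gtest_install_failure_signal_handler additionally installs Abseil's
  // handler that dumps debugging information on fatal signals.
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
```
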
diff --git a/googletest/test/BUILD.bazel b/googletest/test/BUILD.bazel
index 365a855..405feee 100644
--- a/googletest/test/BUILD.bazel
+++ b/googletest/test/BUILD.bazel
@@ -34,35 +34,48 @@
 
 licenses(["notice"])
 
-""" gtest own tests """
+config_setting(
+    name = "windows",
+    values = {"cpu": "x64_windows"},
+)
+
+config_setting(
+    name = "windows_msvc",
+    values = {"cpu": "x64_windows_msvc"},
+)
+
+config_setting(
+    name = "has_absl",
+    values = {"define": "absl=1"},
+)
 
 #on windows exclude gtest-tuple.h and gtest-tuple_test.cc
 cc_test(
     name = "gtest_all_test",
     size = "small",
-    srcs =  glob(
-                    include = [
-                        "gtest-*.cc",
-                        "*.h",
-                        "googletest/include/gtest/**/*.h",
-                    ],
-                    exclude = [
-                        "gtest-unittest-api_test.cc",
-                        "gtest-tuple_test.cc",
-                        "googletest/src/gtest-all.cc",
-                        "gtest_all_test.cc",
-                        "gtest-death-test_ex_test.cc",
-                        "gtest-listener_test.cc",
-                        "gtest-unittest-api_test.cc",
-                        "gtest-param-test_test.cc",
-                    ],
-                ) + select({
+    srcs = glob(
+        include = [
+            "gtest-*.cc",
+            "*.h",
+            "googletest/include/gtest/**/*.h",
+        ],
+        exclude = [
+            "gtest-unittest-api_test.cc",
+            "gtest-tuple_test.cc",
+            "googletest/src/gtest-all.cc",
+            "gtest_all_test.cc",
+            "gtest-death-test_ex_test.cc",
+            "gtest-listener_test.cc",
+            "gtest-unittest-api_test.cc",
+            "gtest-param-test_test.cc",
+        ],
+    ) + select({
         "//:windows": [],
         "//:windows_msvc": [],
         "//conditions:default": [
             "gtest-tuple_test.cc",
         ],
-        }),
+    }),
     copts = select({
         "//:windows": ["-DGTEST_USE_OWN_TR1_TUPLE=0"],
         "//:windows_msvc": ["-DGTEST_USE_OWN_TR1_TUPLE=0"],
@@ -135,7 +148,6 @@
     name = "gtest_test_utils",
     testonly = 1,
     srcs = ["gtest_test_utils.py"],
-
 )
 
 cc_binary(
@@ -144,6 +156,7 @@
     srcs = ["gtest_help_test_.cc"],
     deps = ["//:gtest_main"],
 )
+
 py_test(
     name = "gtest_help_test",
     size = "small",
@@ -163,6 +176,10 @@
     name = "gtest_output_test",
     size = "small",
     srcs = ["gtest_output_test.py"],
+    args = select({
+        ":has_absl": [],
+        "//conditions:default": ["--no_stacktrace_support"],
+    }),
     data = [
         "gtest_output_test_golden_lin.txt",
         ":gtest_output_test_",
@@ -176,6 +193,7 @@
     srcs = ["gtest_color_test_.cc"],
     deps = ["//:gtest"],
 )
+
 py_test(
     name = "gtest_color_test",
     size = "small",
@@ -327,6 +345,10 @@
         "gtest_xml_output_unittest.py",
         "gtest_xml_test_utils.py",
     ],
+    args = select({
+        ":has_absl": [],
+        "//conditions:default": ["--no_stacktrace_support"],
+    }),
     data = [
         # We invoke gtest_no_test_unittest to verify the XML output
         # when the test program contains no test definition.
diff --git a/googletest/test/gtest_output_test.py b/googletest/test/gtest_output_test.py
index f83d3be..63763b9 100755
--- a/googletest/test/gtest_output_test.py
+++ b/googletest/test/gtest_output_test.py
@@ -52,6 +52,9 @@
 GENGOLDEN_FLAG = '--gengolden'
 CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
 
+# The flag indicating stacktraces are not supported
+NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'
+
 IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
 IS_WINDOWS = os.name == 'nt'
 
@@ -252,13 +255,12 @@
 SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
 SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
 SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
-SUPPORTS_STACK_TRACES = IS_LINUX
+SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
 
 CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
                             SUPPORTS_TYPED_TESTS and
                             SUPPORTS_THREADS and
-                            SUPPORTS_STACK_TRACES and
-                            not IS_WINDOWS)
+                            SUPPORTS_STACK_TRACES)
 
 class GTestOutputTest(gtest_test_utils.TestCase):
   def RemoveUnsupportedTests(self, test_output):
@@ -325,7 +327,11 @@
 
 
 if __name__ == '__main__':
-  if sys.argv[1:] == [GENGOLDEN_FLAG]:
+  if NO_STACKTRACE_SUPPORT_FLAG in sys.argv:
+    # unittest.main() can't handle unknown flags
+    sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)
+
+  if GENGOLDEN_FLAG in sys.argv:
     if CAN_GENERATE_GOLDEN_FILE:
       output = GetOutputOfAllCommands()
       golden_file = open(GOLDEN_PATH, 'wb')
diff --git a/googletest/test/gtest_output_test_golden_lin.txt b/googletest/test/gtest_output_test_golden_lin.txt
index cbcb720..02a77a8 100644
--- a/googletest/test/gtest_output_test_golden_lin.txt
+++ b/googletest/test/gtest_output_test_golden_lin.txt
@@ -4,10 +4,14 @@
 Value of: false
   Actual: false
 Expected: true
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Expected equality of these values:
   2
   3
+Stack trace: (omitted)
+
 [==========] Running 68 tests from 30 test cases.
 [----------] Global test environment set-up.
 FooEnvironment::SetUp() called.
@@ -40,12 +44,16 @@
     Which is: "\"Line"
   actual
     Which is: "actual \"string\""
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Expected equality of these values:
   golden
     Which is: "\"Line"
   actual
     Which is: "actual \"string\""
+Stack trace: (omitted)
+
 [  FAILED  ] NonfatalFailureTest.EscapesStringOperands
 [ RUN      ] NonfatalFailureTest.DiffForLongStrings
 gtest_output_test_.cc:#: Failure
@@ -58,6 +66,8 @@
 -\"Line\0 1\"
  Line 2
 
+Stack trace: (omitted)
+
 [  FAILED  ] NonfatalFailureTest.DiffForLongStrings
 [----------] 3 tests from FatalFailureTest
 [ RUN      ] FatalFailureTest.FatalFailureInSubroutine
@@ -67,6 +77,8 @@
   1
   x
     Which is: 2
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
 [ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
 (expecting a failure that x should be 1)
@@ -75,6 +87,8 @@
   1
   x
     Which is: 2
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
 [ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
 (expecting a failure on false)
@@ -82,6 +96,8 @@
 Value of: false
   Actual: false
 Expected: true
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
 [----------] 1 test from LoggingTest
 [ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
@@ -90,10 +106,14 @@
 i == 1
 gtest_output_test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 9
+Stack trace: (omitted)
+
 i == 2
 i == 3
 gtest_output_test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 6
+Stack trace: (omitted)
+
 [  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
 [----------] 7 tests from SCOPED_TRACETest
 [ RUN      ] SCOPED_TRACETest.AcceptedValues
@@ -105,20 +125,28 @@
 gtest_output_test_.cc:#: 1337
 gtest_output_test_.cc:#: std::string
 gtest_output_test_.cc:#: literal string
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.AcceptedValues
 [ RUN      ] SCOPED_TRACETest.ObeysScopes
 (expected to fail)
 gtest_output_test_.cc:#: Failure
 Failed
 This failure is expected, and shouldn't have a trace.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 This failure is expected, and should have a trace.
 Google Test trace:
 gtest_output_test_.cc:#: Expected trace
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 This failure is expected, and shouldn't have a trace.
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.ObeysScopes
 [ RUN      ] SCOPED_TRACETest.WorksInLoop
 (expected to fail)
@@ -129,6 +157,8 @@
     Which is: 1
 Google Test trace:
 gtest_output_test_.cc:#: i = 1
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Expected equality of these values:
   1
@@ -136,6 +166,8 @@
     Which is: 2
 Google Test trace:
 gtest_output_test_.cc:#: i = 2
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.WorksInLoop
 [ RUN      ] SCOPED_TRACETest.WorksInSubroutine
 (expected to fail)
@@ -146,6 +178,8 @@
     Which is: 1
 Google Test trace:
 gtest_output_test_.cc:#: n = 1
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Expected equality of these values:
   1
@@ -153,6 +187,8 @@
     Which is: 2
 Google Test trace:
 gtest_output_test_.cc:#: n = 2
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.WorksInSubroutine
 [ RUN      ] SCOPED_TRACETest.CanBeNested
 (expected to fail)
@@ -164,6 +200,8 @@
 Google Test trace:
 gtest_output_test_.cc:#: n = 2
 gtest_output_test_.cc:#: 
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.CanBeNested
 [ RUN      ] SCOPED_TRACETest.CanBeRepeated
 (expected to fail)
@@ -172,12 +210,16 @@
 This failure is expected, and should contain trace point A.
 Google Test trace:
 gtest_output_test_.cc:#: A
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A and B.
 Google Test trace:
 gtest_output_test_.cc:#: B
 gtest_output_test_.cc:#: A
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A, B, and C.
@@ -185,6 +227,8 @@
 gtest_output_test_.cc:#: C
 gtest_output_test_.cc:#: B
 gtest_output_test_.cc:#: A
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A, B, and D.
@@ -192,6 +236,8 @@
 gtest_output_test_.cc:#: D
 gtest_output_test_.cc:#: B
 gtest_output_test_.cc:#: A
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.CanBeRepeated
 [ RUN      ] SCOPED_TRACETest.WorksConcurrently
 (expecting 6 failures)
@@ -200,27 +246,39 @@
 Expected failure #1 (in thread B, only trace B alive).
 Google Test trace:
 gtest_output_test_.cc:#: Trace B
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #2 (in thread A, trace A & B both alive).
 Google Test trace:
 gtest_output_test_.cc:#: Trace A
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #3 (in thread B, trace A & B both alive).
 Google Test trace:
 gtest_output_test_.cc:#: Trace B
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #4 (in thread B, only trace A alive).
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #5 (in thread A, only trace A alive).
 Google Test trace:
 gtest_output_test_.cc:#: Trace A
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #6 (in thread A, no trace alive).
+Stack trace: (omitted)
+
 [  FAILED  ] SCOPED_TRACETest.WorksConcurrently
 [----------] 1 test from ScopedTraceTest
 [ RUN      ] ScopedTraceTest.WithExplicitFileAndLine
@@ -229,6 +287,8 @@
 Check that the trace is attached to a particular location.
 Google Test trace:
 explicit_file.cc:123: expected trace message
+Stack trace: (omitted)
+
 [  FAILED  ] ScopedTraceTest.WithExplicitFileAndLine
 [----------] 1 test from NonFatalFailureInFixtureConstructorTest
 [ RUN      ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
@@ -236,18 +296,28 @@
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #1, in the test fixture c'tor.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #2, in SetUp().
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #3, in the test body.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #4, in TearDown.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #5, in the test fixture d'tor.
+Stack trace: (omitted)
+
 [  FAILED  ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
 [----------] 1 test from FatalFailureInFixtureConstructorTest
 [ RUN      ] FatalFailureInFixtureConstructorTest.FailureInConstructor
@@ -255,9 +325,13 @@
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #1, in the test fixture c'tor.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #2, in the test fixture d'tor.
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureInFixtureConstructorTest.FailureInConstructor
 [----------] 1 test from NonFatalFailureInSetUpTest
 [ RUN      ] NonFatalFailureInSetUpTest.FailureInSetUp
@@ -265,15 +339,23 @@
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #1, in SetUp().
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #2, in the test function.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #3, in TearDown().
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #4, in the test fixture d'tor.
+Stack trace: (omitted)
+
 [  FAILED  ] NonFatalFailureInSetUpTest.FailureInSetUp
 [----------] 1 test from FatalFailureInSetUpTest
 [ RUN      ] FatalFailureInSetUpTest.FailureInSetUp
@@ -281,18 +363,26 @@
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #1, in SetUp().
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #2, in TearDown().
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected failure #3, in the test fixture d'tor.
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureInSetUpTest.FailureInSetUp
 [----------] 1 test from AddFailureAtTest
 [ RUN      ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
 foo.cc:42: Failure
 Failed
 Expected failure in foo.cc
+Stack trace: (omitted)
+
 [  FAILED  ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
 [----------] 4 tests from MixedUpTestCaseTest
 [ RUN      ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
@@ -309,6 +399,8 @@
 the two classes are from different namespaces or translation
 units and have the same name.  You should probably rename one
 of the classes to put the tests into different test cases.
+Stack trace: (omitted)
+
 [  FAILED  ] MixedUpTestCaseTest.ThisShouldFail
 [ RUN      ] MixedUpTestCaseTest.ThisShouldFailToo
 gtest.cc:#: Failure
@@ -320,6 +412,8 @@
 the two classes are from different namespaces or translation
 units and have the same name.  You should probably rename one
 of the classes to put the tests into different test cases.
+Stack trace: (omitted)
+
 [  FAILED  ] MixedUpTestCaseTest.ThisShouldFailToo
 [----------] 2 tests from MixedUpTestCaseWithSameTestNameTest
 [ RUN      ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
@@ -334,6 +428,8 @@
 the two classes are from different namespaces or translation
 units and have the same name.  You should probably rename one
 of the classes to put the tests into different test cases.
+Stack trace: (omitted)
+
 [  FAILED  ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
 [----------] 2 tests from TEST_F_before_TEST_in_same_test_case
 [ RUN      ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
@@ -348,6 +444,8 @@
 test DefinedUsingTESTAndShouldFail is defined using TEST.  You probably
 want to change the TEST to TEST_F or move it to another test
 case.
+Stack trace: (omitted)
+
 [  FAILED  ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
 [----------] 2 tests from TEST_before_TEST_F_in_same_test_case
 [ RUN      ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
@@ -362,6 +460,8 @@
 test DefinedUsingTEST is defined using TEST.  You probably
 want to change the TEST to TEST_F or move it to another test
 case.
+Stack trace: (omitted)
+
 [  FAILED  ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
 [----------] 8 tests from ExpectNonfatalFailureTest
 [ RUN      ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
@@ -375,6 +475,8 @@
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
 (expecting a failure)
@@ -384,10 +486,16 @@
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure 1.
+Stack trace: (omitted)
+
 
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure 2.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
@@ -398,6 +506,10 @@
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
@@ -405,12 +517,16 @@
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
 [----------] 8 tests from ExpectFatalFailureTest
 [ RUN      ] ExpectFatalFailureTest.CanReferenceGlobalVariables
@@ -424,6 +540,8 @@
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
 [ RUN      ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
 (expecting a failure)
@@ -433,10 +551,16 @@
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
 
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
 [ RUN      ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
@@ -447,6 +571,10 @@
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
 [ RUN      ] ExpectFatalFailureTest.FailsWhenStatementReturns
@@ -454,12 +582,16 @@
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementReturns
 [ RUN      ] ExpectFatalFailureTest.FailsWhenStatementThrows
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementThrows
 [----------] 2 tests from TypedTest/0, where TypeParam = int
 [ RUN      ] TypedTest/0.Success
@@ -471,6 +603,8 @@
   TypeParam()
     Which is: 0
 Expected failure
+Stack trace: (omitted)
+
 [  FAILED  ] TypedTest/0.Failure, where TypeParam = int
 [----------] 2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char
 [ RUN      ] Unsigned/TypedTestP/0.Success
@@ -483,8 +617,10 @@
   TypeParam()
     Which is: '\0'
 Expected failure
+Stack trace: (omitted)
+
 [  FAILED  ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
-[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned
+[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
 [ RUN      ] Unsigned/TypedTestP/1.Success
 [       OK ] Unsigned/TypedTestP/1.Success
 [ RUN      ] Unsigned/TypedTestP/1.Failure
@@ -495,7 +631,9 @@
   TypeParam()
     Which is: 0
 Expected failure
-[  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned
+Stack trace: (omitted)
+
+[  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
 [----------] 4 tests from ExpectFailureTest
 [ RUN      ] ExpectFailureTest.ExpectFatalFailure
 (expecting 1 failure)
@@ -504,6 +642,10 @@
   Actual:
 gtest_output_test_.cc:#: Success:
 Succeeded
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -512,6 +654,10 @@
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -520,6 +666,10 @@
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailure
 [ RUN      ] ExpectFailureTest.ExpectNonFatalFailure
@@ -529,6 +679,10 @@
   Actual:
 gtest_output_test_.cc:#: Success:
 Succeeded
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -537,6 +691,10 @@
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -545,6 +703,10 @@
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailure
 [ RUN      ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
@@ -554,6 +716,10 @@
   Actual:
 gtest_output_test_.cc:#: Success:
 Succeeded
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -562,6 +728,10 @@
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -570,6 +740,10 @@
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
 [ RUN      ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
@@ -579,6 +753,10 @@
   Actual:
 gtest_output_test_.cc:#: Success:
 Succeeded
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -587,6 +765,10 @@
 gtest_output_test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
@@ -595,6 +777,10 @@
 gtest_output_test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
+
+Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
 [----------] 2 tests from ExpectFailureWithThreadsTest
@@ -603,18 +789,26 @@
 gtest_output_test_.cc:#: Failure
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectFailureWithThreadsTest.ExpectFatalFailure
 [ RUN      ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
 (expecting 2 failures)
 gtest_output_test_.cc:#: Failure
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
+Stack trace: (omitted)
+
 [  FAILED  ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
 [----------] 1 test from ScopedFakeTestPartResultReporterTest
 [ RUN      ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
@@ -622,9 +816,13 @@
 gtest_output_test_.cc:#: Failure
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
 gtest_output_test_.cc:#: Failure
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
 [  FAILED  ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
 [----------] 1 test from PrintingFailingParams/FailingParamTest
 [ RUN      ] PrintingFailingParams/FailingParamTest.Fails/0
@@ -633,6 +831,8 @@
   1
   GetParam()
     Which is: 2
+Stack trace: (omitted)
+
 [  FAILED  ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
 [----------] 2 tests from PrintingStrings/ParamTest
 [ RUN      ] PrintingStrings/ParamTest.Success/a
@@ -644,16 +844,22 @@
   GetParam()
     Which is: "a"
 Expected failure
+Stack trace: (omitted)
+
 [  FAILED  ] PrintingStrings/ParamTest.Failure/a, where GetParam() = "a"
 [----------] Global test environment tear-down
 BarEnvironment::TearDown() called.
 gtest_output_test_.cc:#: Failure
 Failed
 Expected non-fatal failure.
+Stack trace: (omitted)
+
 FooEnvironment::TearDown() called.
 gtest_output_test_.cc:#: Failure
 Failed
 Expected fatal failure.
+Stack trace: (omitted)
+
 [==========] 68 tests from 30 test cases ran.
 [  PASSED  ] 22 tests.
 [  FAILED  ] 46 tests, listed below:
@@ -693,7 +899,7 @@
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementThrows
 [  FAILED  ] TypedTest/0.Failure, where TypeParam = int
 [  FAILED  ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
-[  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned
+[  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailure
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailure
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
@@ -718,6 +924,8 @@
   1
   x
     Which is: 2
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureTest.FatalFailureInSubroutine (? ms)
 [ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
 (expecting a failure that x should be 1)
@@ -726,6 +934,8 @@
   1
   x
     Which is: 2
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
 [ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
 (expecting a failure on false)
@@ -733,6 +943,8 @@
 Value of: false
   Actual: false
 Expected: true
+Stack trace: (omitted)
+
 [  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
 [----------] 3 tests from FatalFailureTest (? ms total)
 
@@ -743,10 +955,14 @@
 i == 1
 gtest_output_test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 9
+Stack trace: (omitted)
+
 i == 2
 i == 3
 gtest_output_test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 6
+Stack trace: (omitted)
+
 [  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
 [----------] 1 test from LoggingTest (? ms total)
 
diff --git a/googletest/test/gtest_xml_output_unittest.py b/googletest/test/gtest_xml_output_unittest.py
index 6ffb6e3..faedd4e 100755
--- a/googletest/test/gtest_xml_output_unittest.py
+++ b/googletest/test/gtest_xml_output_unittest.py
@@ -47,17 +47,22 @@
 GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'
 GTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_'
 
+# The flag indicating stacktraces are not supported
+NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'
+
 # The environment variables for test sharding.
 TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
 SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
 SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
 
-SUPPORTS_STACK_TRACES = False
+SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
 
 if SUPPORTS_STACK_TRACES:
   STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
 else:
   STACK_TRACE_TEMPLATE = ''
+  # unittest.main() can't handle unknown flags
+  sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)
 
 EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
 <testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">