diff --git a/BUILD.bazel b/BUILD.bazel
index 41a09857..4e5bb0f7 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -144,9 +144,9 @@ cc_library(
 cc_test(
     name = "gtest_samples",
     size = "small",
-    #All Samples except:
-    #sample9 ( main )
-    #sample10 (main and takes a command line option and needs to be separate)
+    # All Samples except:
+    # sample9 (main)
+    # sample10 (main and takes a command line option and needs to be separate)
     srcs = [
         "googletest/samples/sample1_unittest.cc",
         "googletest/samples/sample2_unittest.cc",
diff --git a/appveyor.yml b/appveyor.yml
index 55744664..f734a0c4 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -23,6 +23,7 @@ environment:
   - compiler: gcc-6.3.0-posix
     generator: "MinGW Makefiles"
     cxx_path: 'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin'
+    enabled_on_pr: yes

 configuration:
   - Debug
diff --git a/googlemock/CMakeLists.txt b/googlemock/CMakeLists.txt
index 2a913080..78762b0c 100644
--- a/googlemock/CMakeLists.txt
+++ b/googlemock/CMakeLists.txt
@@ -145,6 +145,20 @@ if (gmock_build_tests)
   # 'make test' or ctest.
   enable_testing()

+  if (WIN32)
+    file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1"
+         CONTENT
+"$project_bin = \"${CMAKE_BINARY_DIR}/bin/$<CONFIG>\"
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+  elseif (MINGW)
+    file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1"
+         CONTENT
+"$project_bin = (cygpath --windows ${CMAKE_BINARY_DIR}/bin)
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+  endif()
+
   ############################################################
   # C++ tests built with standard compiler flags.
diff --git a/googlemock/include/gmock/gmock-generated-function-mockers.h b/googlemock/include/gmock/gmock-generated-function-mockers.h
index 5792d3d5..aab45820 100644
--- a/googlemock/include/gmock/gmock-generated-function-mockers.h
+++ b/googlemock/include/gmock/gmock-generated-function-mockers.h
@@ -1175,9 +1175,7 @@ class MockFunction<R(A0)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0)> AsStdFunction() {
-    return [this](A0 a0) -> R {
-      return this->Call(::std::move(a0));
-    };
+    return [this](A0 a0) -> R { return this->Call(::std::forward<A0>(a0)); };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1195,7 +1193,7 @@ class MockFunction<R(A0, A1)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1)> AsStdFunction() {
     return [this](A0 a0, A1 a1) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1214,7 +1212,8 @@ class MockFunction<R(A0, A1, A2)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1233,8 +1232,8 @@ class MockFunction<R(A0, A1, A2, A3)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2, A3 a3) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1254,8 +1253,9 @@ class MockFunction<R(A0, A1, A2, A3, A4)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3, A4)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3), ::std::move(a4));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3),
+                        ::std::forward<A4>(a4));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1275,8 +1275,9 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3, A4, A5)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3), ::std::move(a4), ::std::move(a5));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3),
+                        ::std::forward<A4>(a4), ::std::forward<A5>(a5));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1296,8 +1297,10 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3, A4, A5, A6)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3),
+                        ::std::forward<A4>(a4), ::std::forward<A5>(a5),
+                        ::std::forward<A6>(a6));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1317,9 +1320,10 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6, A7)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3, A4, A5, A6, A7)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6),
-          ::std::move(a7));
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3),
+                        ::std::forward<A4>(a4), ::std::forward<A5>(a5),
+                        ::std::forward<A6>(a6), ::std::forward<A7>(a7));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1339,10 +1343,12 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6, A7, A8)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3, A4, A5, A6, A7, A8)> AsStdFunction() {
     return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7,
-        A8 a8) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6),
-          ::std::move(a7), ::std::move(a8));
+                  A8 a8) -> R {
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3),
+                        ::std::forward<A4>(a4), ::std::forward<A5>(a5),
+                        ::std::forward<A6>(a6), ::std::forward<A7>(a7),
+                        ::std::forward<A8>(a8));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
@@ -1362,11 +1368,13 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9)> {
 #if GTEST_HAS_STD_FUNCTION_
   ::std::function<R(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9)> AsStdFunction() {
-    return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7,
-        A8 a8, A9 a9) -> R {
-      return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-          ::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6),
-          ::std::move(a7), ::std::move(a8), ::std::move(a9));
+    return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, A8 a8,
+                  A9 a9) -> R {
+      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+                        ::std::forward<A2>(a2), ::std::forward<A3>(a3),
+                        ::std::forward<A4>(a4), ::std::forward<A5>(a5),
+                        ::std::forward<A6>(a6), ::std::forward<A7>(a7),
+                        ::std::forward<A8>(a8), ::std::forward<A9>(a9));
     };
   }
 #endif  // GTEST_HAS_STD_FUNCTION_
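The move-to-forward change above matters once MockFunction is instantiated with reference parameters, as the new test further down exercises. A minimal stand-alone sketch of why, not part of the patch; Invoke and Call here are hypothetical stand-ins for the generated lambda and the mocked method:

#include <utility>

int Call(int& i) { return i; }  // stands in for MockFunction<int(int&)>::Call

template <typename A0>
int Invoke(A0 a0) {  // stands in for the generated lambda; A0 is int& below
  // std::forward<int&>(a0) collapses to an lvalue reference, so Call(int&)
  // binds; std::move(a0) would yield an int&& that cannot bind to int&.
  return Call(::std::forward<A0>(a0));
}

int main() {
  int i = 42;
  return Invoke<int&>(i) == 42 ? 0 : 1;  // exits 0
}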
diff --git a/googlemock/include/gmock/gmock-generated-function-mockers.h.pump b/googlemock/include/gmock/gmock-generated-function-mockers.h.pump
index 82f9512f..106abe84 100644
--- a/googlemock/include/gmock/gmock-generated-function-mockers.h.pump
+++ b/googlemock/include/gmock/gmock-generated-function-mockers.h.pump
@@ -320,7 +320,7 @@ class MockFunction;
 $for i [[
 $range j 0..i-1
 $var ArgTypes = [[$for j, [[A$j]]]]
-$var ArgValues = [[$for j, [[::std::move(a$j)]]]]
+$var ArgValues = [[$for j, [[::std::forward<A$j>(a$j)]]]]
 $var ArgDecls = [[$for j, [[A$j a$j]]]]
 template <typename R$for j [[, typename A$j]]>
 class MockFunction<R($ArgTypes)> {
diff --git a/googlemock/test/gmock-generated-function-mockers_test.cc b/googlemock/test/gmock-generated-function-mockers_test.cc
index f16833b2..4c490694 100644
--- a/googlemock/test/gmock-generated-function-mockers_test.cc
+++ b/googlemock/test/gmock-generated-function-mockers_test.cc
@@ -617,6 +617,17 @@ TEST(MockFunctionTest, AsStdFunctionReturnsReference) {
   value = 2;
   EXPECT_EQ(2, ref);
 }
+
+TEST(MockFunctionTest, AsStdFunctionWithReferenceParameter) {
+  MockFunction<int(int &)> foo;
+  auto call = [](const std::function<int(int &)> &f, int &i) {
+    return f(i);
+  };
+  int i = 42;
+  EXPECT_CALL(foo, Call(i)).WillOnce(Return(-1));
+  EXPECT_EQ(-1, call(foo.AsStdFunction(), i));
+}
+
 #endif  // GTEST_HAS_STD_FUNCTION_

 struct MockMethodSizes0 {
diff --git a/googletest/CMakeLists.txt b/googletest/CMakeLists.txt
index 9ee79408..925f9c20 100644
--- a/googletest/CMakeLists.txt
+++ b/googletest/CMakeLists.txt
@@ -187,6 +187,20 @@ if (gtest_build_tests)
   # 'make test' or ctest.
   enable_testing()

+  if (WIN32)
+    file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1"
+         CONTENT
+"$project_bin = \"${CMAKE_BINARY_DIR}/bin/$<CONFIG>\"
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+  elseif (MINGW)
+    file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1"
+         CONTENT
+"$project_bin = (cygpath --windows ${CMAKE_BINARY_DIR}/bin)
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+  endif()
+
   ############################################################
   # C++ tests built with standard compiler flags.

@@ -217,6 +231,7 @@ if (gtest_build_tests)
     test/gtest-typed-test2_test.cc)
   cxx_test(gtest_unittest gtest_main)
   cxx_test(gtest-unittest-api_test gtest)
+  cxx_test(gtest_skip_test gtest_main)

   ############################################################
   # C++ tests built with non-standard compiler flags.
diff --git a/googletest/cmake/internal_utils.cmake b/googletest/cmake/internal_utils.cmake
index 8c1f9ba9..d1883b2c 100644
--- a/googletest/cmake/internal_utils.cmake
+++ b/googletest/cmake/internal_utils.cmake
@@ -167,6 +167,22 @@ function(cxx_library_with_type name type cxx_flags)
   set_target_properties(${name}
     PROPERTIES
     DEBUG_POSTFIX "d")
+  # Set the output directory for build artifacts
+  set_target_properties(${name}
+    PROPERTIES
+    RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin"
+    LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
+    ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
+    PDB_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
+  # make PDBs match library name
+  get_target_property(pdb_debug_postfix ${name} DEBUG_POSTFIX)
+  set_target_properties(${name}
+    PROPERTIES
+    PDB_NAME "${name}"
+    PDB_NAME_DEBUG "${name}${pdb_debug_postfix}"
+    COMPILE_PDB_NAME "${name}"
+    COMPILE_PDB_NAME_DEBUG "${name}${pdb_debug_postfix}")
   if (BUILD_SHARED_LIBS OR type STREQUAL "SHARED")
     set_target_properties(${name}
       PROPERTIES
@@ -244,7 +260,13 @@ find_package(PythonInterp)
 # from the given source files with the given compiler flags.
 function(cxx_test_with_flags name cxx_flags libs)
   cxx_executable_with_flags(${name} "${cxx_flags}" "${libs}" ${ARGN})
-  add_test(NAME ${name} COMMAND ${name})
+  if (WIN32 OR MINGW)
+    add_test(NAME ${name}
+      COMMAND "powershell" "-Command" "${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1" "$<TARGET_FILE:${name}>")
+  else()
+    add_test(NAME ${name}
+      COMMAND "$<TARGET_FILE:${name}>")
+  endif()
 endfunction()

 # cxx_test(name libs srcs...)
@@ -263,33 +285,51 @@ endfunction()
 # test/name.py.  It does nothing if Python is not installed.
 function(py_test name)
   if (PYTHONINTERP_FOUND)
-    if (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
+    if ("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" VERSION_GREATER 3.1)
       if (CMAKE_CONFIGURATION_TYPES)
-        # Multi-configuration build generators as for Visual Studio save
-        # output in a subdirectory of CMAKE_CURRENT_BINARY_DIR (Debug,
-        # Release etc.), so we have to provide it here.
-        add_test(
-          NAME ${name}
-          COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+        # Multi-configuration build generators as for Visual Studio save
+        # output in a subdirectory of CMAKE_CURRENT_BINARY_DIR (Debug,
+        # Release etc.), so we have to provide it here.
+        if (WIN32 OR MINGW)
+          add_test(NAME ${name}
+            COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1
+            ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
             --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
+        else()
+          add_test(NAME ${name}
+            COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+            --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
+        endif()
       else (CMAKE_CONFIGURATION_TYPES)
-        # Single-configuration build generators like Makefile generators
-        # don't have subdirs below CMAKE_CURRENT_BINARY_DIR.
-        add_test(
-          NAME ${name}
-          COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+        # Single-configuration build generators like Makefile generators
+        # don't have subdirs below CMAKE_CURRENT_BINARY_DIR.
+        if (WIN32 OR MINGW)
+          add_test(NAME ${name}
+            COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1
+            ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
             --build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
+        else()
+          add_test(NAME ${name}
+            COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+            --build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
+        endif()
       endif (CMAKE_CONFIGURATION_TYPES)
-    else (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
+    else()
       # ${CMAKE_CURRENT_BINARY_DIR} is known at configuration time, so we can
       # directly bind it from cmake. ${CTEST_CONFIGURATION_TYPE} is known
       # only at ctest runtime (by calling ctest -c <Configuration>), so
       # we have to escape $ to delay variable substitution here.
-      add_test(
-        ${name}
-        ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
-          --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
-    endif (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
+      if (WIN32 OR MINGW)
+        add_test(NAME ${name}
+          COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1
+          ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+          --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
+      else()
+        add_test(NAME ${name}
+          COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+          --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
+      endif()
+    endif()
   endif(PYTHONINTERP_FOUND)
 endfunction()
@@ -306,6 +346,18 @@ function(install_project)
     RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}"
     ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
     LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
+  if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+    # Install PDBs
+    foreach(t ${ARGN})
+      get_target_property(t_pdb_name ${t} COMPILE_PDB_NAME)
+      get_target_property(t_pdb_name_debug ${t} COMPILE_PDB_NAME_DEBUG)
+      get_target_property(t_pdb_output_directory ${t} PDB_OUTPUT_DIRECTORY)
+      install(FILES
+        "${t_pdb_output_directory}/\${CMAKE_INSTALL_CONFIG_NAME}/$<IF:$<CONFIG:Debug>,${t_pdb_name_debug},${t_pdb_name}>.pdb"
+        DESTINATION ${CMAKE_INSTALL_LIBDIR}
+        OPTIONAL)
+    endforeach()
+  endif()
   # Configure and install pkgconfig files.
   foreach(t ${ARGN})
     set(configured_pc "${generated_dir}/${t}.pc")
diff --git a/googletest/include/gtest/gtest-test-part.h b/googletest/include/gtest/gtest-test-part.h
index 1c7b89e0..7b30aff6 100644
--- a/googletest/include/gtest/gtest-test-part.h
+++ b/googletest/include/gtest/gtest-test-part.h
@@ -53,7 +53,8 @@ class GTEST_API_ TestPartResult {
   enum Type {
     kSuccess,          // Succeeded.
     kNonFatalFailure,  // Failed but the test can continue.
-    kFatalFailure      // Failed and the test should be terminated.
+    kFatalFailure,     // Failed and the test should be terminated.
+    kSkip              // Skipped.
   };

   // C'tor.  TestPartResult does NOT have a default constructor.
@@ -89,18 +90,21 @@ class GTEST_API_ TestPartResult {
   // Gets the message associated with the test part.
   const char* message() const { return message_.c_str(); }

+  // Returns true iff the test part was skipped.
+  bool skipped() const { return type_ == kSkip; }
+
   // Returns true iff the test part passed.
   bool passed() const { return type_ == kSuccess; }

-  // Returns true iff the test part failed.
-  bool failed() const { return type_ != kSuccess; }
-
   // Returns true iff the test part non-fatally failed.
   bool nonfatally_failed() const { return type_ == kNonFatalFailure; }

   // Returns true iff the test part fatally failed.
   bool fatally_failed() const { return type_ == kFatalFailure; }

+  // Returns true iff the test part failed.
+  bool failed() const { return fatally_failed() || nonfatally_failed(); }
+
  private:
   Type type_;
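A note on the reordering above: failed() is now defined via the two failure kinds rather than type_ != kSuccess, so with the new kSkip value the predicates passed(), failed(), and skipped() partition the Type enum. A hypothetical invariant check, not from the patch:

#include <cassert>
#include "gtest/gtest-test-part.h"

// Exactly one of passed()/failed()/skipped() should hold for any part result;
// under the old failed() definition, a kSkip part would have counted as failed.
void CheckPartition(const testing::TestPartResult& r) {
  assert(static_cast<int>(r.passed()) + static_cast<int>(r.failed()) +
             static_cast<int>(r.skipped()) == 1);
}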
diff --git a/googletest/include/gtest/gtest.h b/googletest/include/gtest/gtest.h
index 5df4b0a3..ba99737c 100644
--- a/googletest/include/gtest/gtest.h
+++ b/googletest/include/gtest/gtest.h
@@ -440,6 +440,9 @@ class GTEST_API_ Test {
   // Returns true iff the current test has a non-fatal failure.
   static bool HasNonfatalFailure();

+  // Returns true iff the current test was skipped.
+  static bool IsSkipped();
+
   // Returns true iff the current test has a (either fatal or
   // non-fatal) failure.
   static bool HasFailure() { return HasFatalFailure() || HasNonfatalFailure(); }
@@ -574,7 +577,10 @@ class GTEST_API_ TestResult {
   int test_property_count() const;

   // Returns true iff the test passed (i.e. no test part failed).
-  bool Passed() const { return !Failed(); }
+  bool Passed() const { return !Skipped() && !Failed(); }
+
+  // Returns true iff the test was skipped.
+  bool Skipped() const;

   // Returns true iff the test failed.
   bool Failed() const;
@@ -854,6 +860,9 @@ class GTEST_API_ TestCase {
   // Gets the number of successful tests in this test case.
   int successful_test_count() const;

+  // Gets the number of skipped tests in this test case.
+  int skipped_test_count() const;
+
   // Gets the number of failed tests in this test case.
   int failed_test_count() const;

@@ -936,6 +945,11 @@ class GTEST_API_ TestCase {
     return test_info->should_run() && test_info->result()->Passed();
   }

+  // Returns true iff test skipped.
+  static bool TestSkipped(const TestInfo* test_info) {
+    return test_info->should_run() && test_info->result()->Skipped();
+  }
+
   // Returns true iff test failed.
   static bool TestFailed(const TestInfo* test_info) {
     return test_info->should_run() && test_info->result()->Failed();
@@ -1258,6 +1272,9 @@ class GTEST_API_ UnitTest {
   // Gets the number of successful tests.
   int successful_test_count() const;

+  // Gets the number of skipped tests.
+  int skipped_test_count() const;
+
   // Gets the number of failed tests.
   int failed_test_count() const;

@@ -1798,11 +1815,8 @@ class WithParamInterface {
   virtual ~WithParamInterface() {}

   // The current parameter value. Is also available in the test fixture's
-  // constructor. This member function is non-static, even though it only
-  // references static data, to reduce the opportunity for incorrect uses
-  // like writing 'WithParamInterface<bool>::GetParam()' for a test that
-  // uses a fixture whose parameter type is int.
-  const ParamType& GetParam() const {
+  // constructor.
+  static const ParamType& GetParam() {
     GTEST_CHECK_(parameter_ != NULL)
         << "GetParam() can only be called inside a value-parameterized test "
         << "-- did you intend to write TEST_P instead of TEST_F?";
@@ -1835,6 +1849,11 @@ class TestWithParam : public Test, public WithParamInterface<T> {

 // Macros for indicating success/failure in test code.

+// Skips the test at runtime.
+// Skipping aborts the current function, not the whole test program.
+// Skipped tests are counted as neither successful nor failed.
+#define GTEST_SKIP() GTEST_SKIP_("Skipped")
+
 // ADD_FAILURE unconditionally adds a failure to the current test.
 // SUCCEED generates a success - it doesn't automatically make the
 // current test successful, as a test is only successful when it has
diff --git a/googletest/include/gtest/internal/gtest-internal.h b/googletest/include/gtest/internal/gtest-internal.h
index b762f61f..380a11c4 100644
--- a/googletest/include/gtest/internal/gtest-internal.h
+++ b/googletest/include/gtest/internal/gtest-internal.h
@@ -1208,7 +1208,10 @@ class NativeArray {
 #define GTEST_SUCCESS_(message) \
   GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)

-// Suppress MSVC warning 4702 (unreachable code) for the code following
+#define GTEST_SKIP_(message) \
+  return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSkip)
+
+// Suppress MSVC warning 4702 (unreachable code) for the code following
 // statement if it returns or throws (or doesn't return or throw in some
 // situations).
 #define GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement) \
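Since GTEST_SKIP_ expands to a return statement, GTEST_SKIP() aborts only the function it appears in while recording a kSkip test part. A hypothetical usage sketch; HaveNetwork is an assumed helper for illustration, not a gtest API:

#include "gtest/gtest.h"

namespace {
bool HaveNetwork() { return false; }  // assumed environment probe
}  // namespace

TEST(ExternalServiceTest, RoundTrip) {
  if (!HaveNetwork()) GTEST_SKIP();  // returns from the test body right here
  FAIL() << "never reached when the precondition is missing";
}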
diff --git a/googletest/src/gtest-internal-inl.h b/googletest/src/gtest-internal-inl.h
index 47900414..f79b1ad0 100644
--- a/googletest/src/gtest-internal-inl.h
+++ b/googletest/src/gtest-internal-inl.h
@@ -544,6 +544,9 @@ class GTEST_API_ UnitTestImpl {
   // Gets the number of successful tests.
   int successful_test_count() const;

+  // Gets the number of skipped tests.
+  int skipped_test_count() const;
+
   // Gets the number of failed tests.
   int failed_test_count() const;

diff --git a/googletest/src/gtest-test-part.cc b/googletest/src/gtest-test-part.cc
index c88860d9..2855e3e9 100644
--- a/googletest/src/gtest-test-part.cc
+++ b/googletest/src/gtest-test-part.cc
@@ -47,12 +47,16 @@ std::string TestPartResult::ExtractSummary(const char* message) {

 // Prints a TestPartResult object.
 std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
-  return os
-      << result.file_name() << ":" << result.line_number() << ": "
-      << (result.type() == TestPartResult::kSuccess ? "Success" :
-          result.type() == TestPartResult::kFatalFailure ? "Fatal failure" :
-          "Non-fatal failure") << ":\n"
-      << result.message() << std::endl;
+  return os << result.file_name() << ":" << result.line_number() << ": "
+            << (result.type() == TestPartResult::kSuccess
+                    ? "Success"
+                    : result.type() == TestPartResult::kSkip
+                          ? "Skipped"
+                          : result.type() == TestPartResult::kFatalFailure
+                                ? "Fatal failure"
+                                : "Non-fatal failure")
+            << ":\n"
+            << result.message() << std::endl;
 }

 // Appends a TestPartResult to the array.
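A small sanity check of the patched printer, assuming the public four-argument TestPartResult constructor that the tests below also use; file name, line, and message are made-up values:

#include <sstream>
#include "gtest/gtest-test-part.h"

int main() {
  testing::TestPartResult r(testing::TestPartResult::kSkip,
                            "foo/bar.cc", 2, "Skipped!");
  std::ostringstream os;
  os << r;  // exercises the operator<< patched above
  return os.str() == "foo/bar.cc:2: Skipped:\nSkipped!\n" ? 0 : 1;  // exits 0
}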
diff --git a/googletest/src/gtest.cc b/googletest/src/gtest.cc
index 96b07c68..996be23d 100644
--- a/googletest/src/gtest.cc
+++ b/googletest/src/gtest.cc
@@ -796,6 +796,11 @@ int UnitTestImpl::successful_test_count() const {
   return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
 }

+// Gets the number of skipped tests.
+int UnitTestImpl::skipped_test_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::skipped_test_count);
+}
+
 // Gets the number of failed tests.
 int UnitTestImpl::failed_test_count() const {
   return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
@@ -2207,6 +2212,16 @@ void TestResult::Clear() {
   elapsed_time_ = 0;
 }

+// Returns true iff the test part was skipped.
+static bool TestPartSkipped(const TestPartResult& result) {
+  return result.skipped();
+}
+
+// Returns true iff the test was skipped.
+bool TestResult::Skipped() const {
+  return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
+}
+
 // Returns true iff the test failed.
 bool TestResult::Failed() const {
   for (int i = 0; i < total_part_count(); ++i) {
@@ -2537,6 +2552,11 @@ bool Test::HasNonfatalFailure() {
       HasNonfatalFailure();
 }

+// Returns true iff the current test was skipped.
+bool Test::IsSkipped() {
+  return internal::GetUnitTestImpl()->current_test_result()->Skipped();
+}
+
 // class TestInfo

 // Constructs a TestInfo object. It assumes ownership of the test factory
@@ -2715,6 +2735,11 @@ int TestCase::successful_test_count() const {
   return CountIf(test_info_list_, TestPassed);
 }

+// Gets the number of skipped tests in this test case.
+int TestCase::skipped_test_count() const {
+  return CountIf(test_info_list_, TestSkipped);
+}
+
 // Gets the number of failed tests in this test case.
 int TestCase::failed_test_count() const {
   return CountIf(test_info_list_, TestFailed);
@@ -2866,6 +2891,8 @@ static std::string FormatTestCaseCount(int test_case_count) {
 // between the two when viewing the test result.
 static const char * TestPartResultTypeToString(TestPartResult::Type type) {
   switch (type) {
+    case TestPartResult::kSkip:
+      return "Skipped";
     case TestPartResult::kSuccess:
       return "Success";

@@ -3119,6 +3146,7 @@ class PrettyUnitTestResultPrinter : public TestEventListener {

  private:
   static void PrintFailedTests(const UnitTest& unit_test);
+  static void PrintSkippedTests(const UnitTest& unit_test);
 };

 // Fired before each iteration of tests starts.
@@ -3187,18 +3215,25 @@ void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
 // Called after an assertion failure.
 void PrettyUnitTestResultPrinter::OnTestPartResult(
     const TestPartResult& result) {
-  // If the test part succeeded, we don't need to do anything.
-  if (result.type() == TestPartResult::kSuccess)
-    return;
-
-  // Print failure message from the assertion (e.g. expected this and got that).
-  PrintTestPartResult(result);
-  fflush(stdout);
+  switch (result.type()) {
+    // If the test part succeeded, or was skipped,
+    // we don't need to do anything.
+    case TestPartResult::kSkip:
+    case TestPartResult::kSuccess:
+      return;
+
+    default:
+      // Print failure message from the assertion
+      // (e.g. expected this and got that).
+      PrintTestPartResult(result);
+      fflush(stdout);
+  }
 }

 void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
   if (test_info.result()->Passed()) {
     ColoredPrintf(COLOR_GREEN, "[       OK ] ");
+  } else if (test_info.result()->Skipped()) {
+    ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
   } else {
     ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
   }
@@ -3248,7 +3283,7 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
     }
     for (int j = 0; j < test_case.total_test_count(); ++j) {
       const TestInfo& test_info = *test_case.GetTestInfo(j);
-      if (!test_info.should_run() || test_info.result()->Passed()) {
+      if (!test_info.should_run() || !test_info.result()->Failed()) {
         continue;
       }
       ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
@@ -3259,6 +3294,30 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
   }
 }

+// Internal helper for printing the list of skipped tests.
+void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
+  const int skipped_test_count = unit_test.skipped_test_count();
+  if (skipped_test_count == 0) {
+    return;
+  }
+
+  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+    const TestCase& test_case = *unit_test.GetTestCase(i);
+    if (!test_case.should_run() || (test_case.skipped_test_count() == 0)) {
+      continue;
+    }
+    for (int j = 0; j < test_case.total_test_count(); ++j) {
+      const TestInfo& test_info = *test_case.GetTestInfo(j);
+      if (!test_info.should_run() || !test_info.result()->Skipped()) {
+        continue;
+      }
+      ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
+      printf("%s.%s", test_case.name(), test_info.name());
+      printf("\n");
+    }
+  }
+}
+
 void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                      int /*iteration*/) {
   ColoredPrintf(COLOR_GREEN, "[==========] ");
@@ -3273,6 +3332,13 @@ void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
   ColoredPrintf(COLOR_GREEN, "[  PASSED  ] ");
   printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());

+  const int skipped_test_count = unit_test.skipped_test_count();
+  if (skipped_test_count > 0) {
+    ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
+    printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
+    PrintSkippedTests(unit_test);
+  }
+
   int num_failures = unit_test.failed_test_count();
   if (!unit_test.Passed()) {
     const int failed_test_count = unit_test.failed_test_count();
@@ -4540,6 +4606,11 @@ int UnitTest::successful_test_count() const {
   return impl()->successful_test_count();
 }

+// Gets the number of skipped tests.
+int UnitTest::skipped_test_count() const {
+  return impl()->skipped_test_count();
+}
+
 // Gets the number of failed tests.
 int UnitTest::failed_test_count() const { return impl()->failed_test_count(); }

@@ -4660,7 +4731,8 @@ void UnitTest::AddTestPartResult(
   impl_->GetTestPartResultReporterForCurrentThread()->
       ReportTestPartResult(result);

-  if (result_type != TestPartResult::kSuccess) {
+  if (result_type != TestPartResult::kSuccess &&
+      result_type != TestPartResult::kSkip) {
     // gtest_break_on_failure takes precedence over
     // gtest_throw_on_failure.  This allows a user to set the latter
     // in the code (perhaps in order to use Google Test assertions
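Note the precedence baked into TestResult::Skipped() above: because Skipped() requires !Failed(), any failure recorded in the same test wins over a skip. A sketch of the implied semantics, not a test from the patch:

#include "gtest/gtest.h"

TEST(SkipSemanticsTest, FailureTakesPrecedence) {
  ADD_FAILURE() << "recorded first";  // non-fatal failure part
  GTEST_SKIP();                       // also records a kSkip part
  // TestResult::Skipped() is !Failed() && <has a kSkip part>, so this test
  // should be reported as FAILED, not SKIPPED.
}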
diff --git a/googletest/test/googletest-test-part-test.cc b/googletest/test/googletest-test-part-test.cc
index cd2d6f9e..8c9459e1 100644
--- a/googletest/test/googletest-test-part-test.cc
+++ b/googletest/test/googletest-test-part-test.cc
@@ -46,9 +46,10 @@ class TestPartResultTest : public Test {
   TestPartResultTest()
       : r1_(TestPartResult::kSuccess, "foo/bar.cc", 10, "Success!"),
         r2_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure!"),
-        r3_(TestPartResult::kFatalFailure, NULL, -1, "Failure!") {}
+        r3_(TestPartResult::kFatalFailure, nullptr, -1, "Failure!"),
+        r4_(TestPartResult::kSkip, "foo/bar.cc", 2, "Skipped!") {}

-  TestPartResult r1_, r2_, r3_;
+  TestPartResult r1_, r2_, r3_, r4_;
 };

@@ -79,6 +80,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
   EXPECT_FALSE(success.failed());
   EXPECT_FALSE(success.nonfatally_failed());
   EXPECT_FALSE(success.fatally_failed());
+  EXPECT_FALSE(success.skipped());

   const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,
                                         "file.cc",
@@ -88,6 +90,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
   EXPECT_TRUE(nonfatal_failure.failed());
   EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
   EXPECT_FALSE(nonfatal_failure.fatally_failed());
+  EXPECT_FALSE(nonfatal_failure.skipped());

   const TestPartResult fatal_failure(TestPartResult::kFatalFailure,
                                      "file.cc",
@@ -97,6 +100,14 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
   EXPECT_TRUE(fatal_failure.failed());
   EXPECT_FALSE(fatal_failure.nonfatally_failed());
   EXPECT_TRUE(fatal_failure.fatally_failed());
+  EXPECT_FALSE(fatal_failure.skipped());
+
+  const TestPartResult skip(TestPartResult::kSkip, "file.cc", 42, "message");
+  EXPECT_FALSE(skip.passed());
+  EXPECT_FALSE(skip.failed());
+  EXPECT_FALSE(skip.nonfatally_failed());
+  EXPECT_FALSE(skip.fatally_failed());
+  EXPECT_TRUE(skip.skipped());
 }

 // Tests TestPartResult::type().
@@ -104,23 +115,27 @@ TEST_F(TestPartResultTest, type) {
   EXPECT_EQ(TestPartResult::kSuccess, r1_.type());
   EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type());
   EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type());
+  EXPECT_EQ(TestPartResult::kSkip, r4_.type());
 }

 // Tests TestPartResult::file_name().
 TEST_F(TestPartResultTest, file_name) {
   EXPECT_STREQ("foo/bar.cc", r1_.file_name());
   EXPECT_STREQ(NULL, r3_.file_name());
+  EXPECT_STREQ("foo/bar.cc", r4_.file_name());
 }

 // Tests TestPartResult::line_number().
 TEST_F(TestPartResultTest, line_number) {
   EXPECT_EQ(10, r1_.line_number());
   EXPECT_EQ(-1, r2_.line_number());
+  EXPECT_EQ(2, r4_.line_number());
 }

 // Tests TestPartResult::message().
 TEST_F(TestPartResultTest, message) {
   EXPECT_STREQ("Success!", r1_.message());
+  EXPECT_STREQ("Skipped!", r4_.message());
 }

 // Tests TestPartResult::passed().
@@ -128,6 +143,7 @@ TEST_F(TestPartResultTest, Passed) {
   EXPECT_TRUE(r1_.passed());
   EXPECT_FALSE(r2_.passed());
   EXPECT_FALSE(r3_.passed());
+  EXPECT_FALSE(r4_.passed());
 }

 // Tests TestPartResult::failed().
@@ -135,6 +151,15 @@ TEST_F(TestPartResultTest, Failed) {
   EXPECT_FALSE(r1_.failed());
   EXPECT_TRUE(r2_.failed());
   EXPECT_TRUE(r3_.failed());
+  EXPECT_FALSE(r4_.failed());
+}
+
+// Tests TestPartResult::skipped().
+TEST_F(TestPartResultTest, Skipped) {
+  EXPECT_FALSE(r1_.skipped());
+  EXPECT_FALSE(r2_.skipped());
+  EXPECT_FALSE(r3_.skipped());
+  EXPECT_TRUE(r4_.skipped());
 }

 // Tests TestPartResult::fatally_failed().
@@ -142,6 +167,7 @@ TEST_F(TestPartResultTest, FatallyFailed) {
   EXPECT_FALSE(r1_.fatally_failed());
   EXPECT_FALSE(r2_.fatally_failed());
   EXPECT_TRUE(r3_.fatally_failed());
+  EXPECT_FALSE(r4_.fatally_failed());
 }

 // Tests TestPartResult::nonfatally_failed().
@@ -149,6 +175,7 @@ TEST_F(TestPartResultTest, NonfatallyFailed) {
   EXPECT_FALSE(r1_.nonfatally_failed());
   EXPECT_TRUE(r2_.nonfatally_failed());
   EXPECT_FALSE(r3_.nonfatally_failed());
+  EXPECT_FALSE(r4_.nonfatally_failed());
 }

 // Tests the TestPartResultArray class.
diff --git a/googletest/test/gtest_all_test.cc b/googletest/test/gtest_all_test.cc
index e61e36b1..f948a104 100644
--- a/googletest/test/gtest_all_test.cc
+++ b/googletest/test/gtest_all_test.cc
@@ -37,10 +37,11 @@
 #include "test/googletest-message-test.cc"
 #include "test/googletest-options-test.cc"
 #include "test/googletest-port-test.cc"
+#include "test/googletest-test-part-test.cc"
+#include "test/gtest-typed-test2_test.cc"
+#include "test/gtest-typed-test_test.cc"
 #include "test/gtest_pred_impl_unittest.cc"
 #include "test/gtest_prod_test.cc"
-#include "test/googletest-test-part-test.cc"
-#include "test/gtest-typed-test_test.cc"
-#include "test/gtest-typed-test2_test.cc"
+#include "test/gtest_skip_test.cc"
 #include "test/gtest_unittest.cc"
 #include "test/production.cc"
diff --git a/googletest/test/gtest_skip_test.cc b/googletest/test/gtest_skip_test.cc
new file mode 100644
index 00000000..ee810933
--- /dev/null
+++ b/googletest/test/gtest_skip_test.cc
@@ -0,0 +1,38 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: arseny.aprelev@gmail.com (Arseny Aprelev)
+//
+
+#include "gtest/gtest.h"
+
+TEST(SkipTest, DoesSkip) {
+  GTEST_SKIP();
+  EXPECT_EQ(0, 1);
+}
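Usage note: in DoesSkip above, GTEST_SKIP() returns before EXPECT_EQ(0, 1) runs, so the test is reported as skipped rather than failed. The new Test::IsSkipped() accessor could plausibly also be queried from a fixture; a hypothetical sketch, not part of the patch:

#include "gtest/gtest.h"

class SkippableFixture : public ::testing::Test {
 protected:
  void TearDown() override {
    if (IsSkipped()) {
      // e.g. release resources differently when the body bailed out early
    }
  }
};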