Merge branch 'master' into python3-tests

commit 4b82df5bb3
Gennadiy Civil, 2018-10-04 15:08:14 -04:00 (committed by GitHub)
17 changed files with 351 additions and 79 deletions

View File

@ -144,9 +144,9 @@ cc_library(
cc_test(
name = "gtest_samples",
size = "small",
-#All Samples except:
-#sample9 ( main )
-#sample10 (main and takes a command line option and needs to be separate)
+# All Samples except:
+# sample9 (main)
+# sample10 (main and takes a command line option and needs to be separate)
srcs = [
"googletest/samples/sample1_unittest.cc",
"googletest/samples/sample2_unittest.cc",

View File

@ -23,6 +23,7 @@ environment:
- compiler: gcc-6.3.0-posix
generator: "MinGW Makefiles"
cxx_path: 'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin'
+enabled_on_pr: yes
configuration:
- Debug

View File

@ -145,6 +145,20 @@ if (gmock_build_tests)
# 'make test' or ctest.
enable_testing()
+if (WIN32)
+file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1"
+CONTENT
+"$project_bin = \"${CMAKE_BINARY_DIR}/bin/$<CONFIG>\"
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+elseif (MINGW)
+file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1"
+CONTENT
+"$project_bin = (cygpath --windows ${CMAKE_BINARY_DIR}/bin)
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+endif()
############################################################
# C++ tests built with standard compiler flags.

View File

@ -1175,9 +1175,7 @@ class MockFunction<R(A0)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0)> AsStdFunction() {
-return [this](A0 a0) -> R {
-return this->Call(::std::move(a0));
-};
+return [this](A0 a0) -> R { return this->Call(::std::forward<A0>(a0)); };
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1195,7 +1193,7 @@ class MockFunction<R(A0, A1)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1)> AsStdFunction() {
return [this](A0 a0, A1 a1) -> R {
-return this->Call(::std::move(a0), ::std::move(a1));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1214,7 +1212,8 @@ class MockFunction<R(A0, A1, A2)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1233,8 +1232,8 @@ class MockFunction<R(A0, A1, A2, A3)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2, A3 a3) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1254,8 +1253,9 @@ class MockFunction<R(A0, A1, A2, A3, A4)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3, A4)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3), ::std::move(a4));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3),
+::std::forward<A4>(a4));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1275,8 +1275,9 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3, A4, A5)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3), ::std::move(a4), ::std::move(a5));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3),
+::std::forward<A4>(a4), ::std::forward<A5>(a5));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1296,8 +1297,10 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3, A4, A5, A6)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3),
+::std::forward<A4>(a4), ::std::forward<A5>(a5),
+::std::forward<A6>(a6));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1317,9 +1320,10 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6, A7)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3, A4, A5, A6, A7)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6),
-::std::move(a7));
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3),
+::std::forward<A4>(a4), ::std::forward<A5>(a5),
+::std::forward<A6>(a6), ::std::forward<A7>(a7));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1339,10 +1343,12 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6, A7, A8)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3, A4, A5, A6, A7, A8)> AsStdFunction() {
return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7,
-A8 a8) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6),
-::std::move(a7), ::std::move(a8));
+A8 a8) -> R {
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3),
+::std::forward<A4>(a4), ::std::forward<A5>(a5),
+::std::forward<A6>(a6), ::std::forward<A7>(a7),
+::std::forward<A8>(a8));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
@ -1362,11 +1368,13 @@ class MockFunction<R(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9)> {
#if GTEST_HAS_STD_FUNCTION_
::std::function<R(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9)> AsStdFunction() {
-return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7,
-A8 a8, A9 a9) -> R {
-return this->Call(::std::move(a0), ::std::move(a1), ::std::move(a2),
-::std::move(a3), ::std::move(a4), ::std::move(a5), ::std::move(a6),
-::std::move(a7), ::std::move(a8), ::std::move(a9));
+return [this](A0 a0, A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, A8 a8,
+A9 a9) -> R {
+return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1),
+::std::forward<A2>(a2), ::std::forward<A3>(a3),
+::std::forward<A4>(a4), ::std::forward<A5>(a5),
+::std::forward<A6>(a6), ::std::forward<A7>(a7),
+::std::forward<A8>(a8), ::std::forward<A9>(a9));
};
}
#endif // GTEST_HAS_STD_FUNCTION_
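The point of this mechanical change: when the mocked signature has a reference parameter (say A0 = int&), ::std::move would cast the lambda's parameter to int&&, which cannot bind to the mocked Call(int&); ::std::forward<A0> collapses back to int& and passes the reference through, while still degenerating to a move for by-value parameters. A minimal standalone sketch of the pattern (ForwardingWrapper and Increment are illustrative names, not part of the patch):

#include <functional>
#include <iostream>
#include <utility>

// Stand-in for the generated AsStdFunction() body; A0 plays the role of
// the MockFunction signature's parameter type.
template <typename A0>
std::function<void(A0)> ForwardingWrapper(void (*call)(A0)) {
  return [call](A0 a0) {
    // std::forward<A0> collapses to an lvalue reference when A0 = T&, so
    // reference parameters reach the target unchanged. With std::move this
    // would not compile: an rvalue cannot bind to a non-const int&.
    call(std::forward<A0>(a0));
  };
}

void Increment(int& x) { ++x; }

int main() {
  auto f = ForwardingWrapper<int&>(&Increment);
  int i = 41;
  f(i);                     // i is passed by reference through the wrapper
  std::cout << i << "\n";   // prints 42
}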

View File

@ -320,7 +320,7 @@ class MockFunction;
$for i [[
$range j 0..i-1
$var ArgTypes = [[$for j, [[A$j]]]]
-$var ArgValues = [[$for j, [[::std::move(a$j)]]]]
+$var ArgValues = [[$for j, [[::std::forward<A$j>(a$j)]]]]
$var ArgDecls = [[$for j, [[A$j a$j]]]]
template <typename R$for j [[, typename A$j]]>
class MockFunction<R($ArgTypes)> {
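The generated header above comes from this .pump template: the $for j loop stamps out one ::std::forward<A$j>(a$j) per argument for each arity i. For i = 2 the template expands to roughly the following (a compilable sketch; the real generated class also carries the mock-method machinery, and MockFunctionSketch is my name for it):

#include <functional>
#include <utility>

template <typename R, typename A0, typename A1>
class MockFunctionSketch {
 public:
  ::std::function<R(A0, A1)> AsStdFunction() {
    return [this](A0 a0, A1 a1) -> R {
      // $ArgValues expansion for j = 0, 1:
      return this->Call(::std::forward<A0>(a0), ::std::forward<A1>(a1));
    };
  }
  R Call(A0 a0, A1 a1);  // declared only; gmock supplies the mock body
};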

View File

@ -617,6 +617,17 @@ TEST(MockFunctionTest, AsStdFunctionReturnsReference) {
value = 2;
EXPECT_EQ(2, ref);
}
+TEST(MockFunctionTest, AsStdFunctionWithReferenceParameter) {
+MockFunction<int(int &)> foo;
+auto call = [](const std::function<int(int &)> &f, int &i) {
+return f(i);
+};
+int i = 42;
+EXPECT_CALL(foo, Call(i)).WillOnce(Return(-1));
+EXPECT_EQ(-1, call(foo.AsStdFunction(), i));
+}
#endif // GTEST_HAS_STD_FUNCTION_
struct MockMethodSizes0 {
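The new regression test pins down the reference case. Forwarding should also keep move-only by-value arguments working, since std::forward on a by-value parameter type behaves like std::move; a hedged companion sketch (test name and setup are mine, not from the commit, and assume gmock's move-only-argument support):

#include <functional>
#include <memory>
#include "gmock/gmock.h"

using ::testing::_;
using ::testing::MockFunction;
using ::testing::Return;

// Illustrative only: a move-only argument still travels through
// AsStdFunction(), because forward<A0> on a by-value A0 is a move.
TEST(MockFunctionSketch, MoveOnlyArgument) {
  MockFunction<int(std::unique_ptr<int>)> foo;
  EXPECT_CALL(foo, Call(_)).WillOnce(Return(7));
  std::function<int(std::unique_ptr<int>)> f = foo.AsStdFunction();
  EXPECT_EQ(7, f(std::unique_ptr<int>(new int(1))));
}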

View File

@ -187,6 +187,20 @@ if (gtest_build_tests)
# 'make test' or ctest.
enable_testing()
+if (WIN32)
+file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1"
+CONTENT
+"$project_bin = \"${CMAKE_BINARY_DIR}/bin/$<CONFIG>\"
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+elseif (MINGW)
+file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1"
+CONTENT
+"$project_bin = (cygpath --windows ${CMAKE_BINARY_DIR}/bin)
+$env:Path = \"$project_bin;$env:Path\"
+& $args")
+endif()
############################################################
# C++ tests built with standard compiler flags.
@ -217,6 +231,7 @@ if (gtest_build_tests)
test/gtest-typed-test2_test.cc)
cxx_test(gtest_unittest gtest_main)
cxx_test(gtest-unittest-api_test gtest)
+cxx_test(gtest_skip_test gtest_main)
############################################################
# C++ tests built with non-standard compiler flags.

View File

@ -167,6 +167,22 @@ function(cxx_library_with_type name type cxx_flags)
set_target_properties(${name}
PROPERTIES
DEBUG_POSTFIX "d")
+# Set the output directory for build artifacts
+set_target_properties(${name}
+PROPERTIES
+RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin"
+LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
+ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
+PDB_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
+# make PDBs match library name
+get_target_property(pdb_debug_postfix ${name} DEBUG_POSTFIX)
+set_target_properties(${name}
+PROPERTIES
+PDB_NAME "${name}"
+PDB_NAME_DEBUG "${name}${pdb_debug_postfix}"
+COMPILE_PDB_NAME "${name}"
+COMPILE_PDB_NAME_DEBUG "${name}${pdb_debug_postfix}")
if (BUILD_SHARED_LIBS OR type STREQUAL "SHARED")
set_target_properties(${name}
PROPERTIES
@ -244,7 +260,13 @@ find_package(PythonInterp)
# from the given source files with the given compiler flags.
function(cxx_test_with_flags name cxx_flags libs)
cxx_executable_with_flags(${name} "${cxx_flags}" "${libs}" ${ARGN})
-add_test(NAME ${name} COMMAND ${name})
+if (WIN32 OR MINGW)
+add_test(NAME ${name}
+COMMAND "powershell" "-Command" "${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1" "$<TARGET_FILE:${name}>")
+else()
+add_test(NAME ${name}
+COMMAND "$<TARGET_FILE:${name}>")
+endif()
endfunction()
# cxx_test(name libs srcs...)
@ -263,33 +285,51 @@ endfunction()
# test/name.py. It does nothing if Python is not installed.
function(py_test name)
if (PYTHONINTERP_FOUND)
-if (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
+if ("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" VERSION_GREATER 3.1)
if (CMAKE_CONFIGURATION_TYPES)
-# Multi-configuration build generators as for Visual Studio save
-# output in a subdirectory of CMAKE_CURRENT_BINARY_DIR (Debug,
-# Release etc.), so we have to provide it here.
-add_test(
-NAME ${name}
-COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
---build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
+# Multi-configuration build generators as for Visual Studio save
+# output in a subdirectory of CMAKE_CURRENT_BINARY_DIR (Debug,
+# Release etc.), so we have to provide it here.
+if (WIN32 OR MINGW)
+add_test(NAME ${name}
+COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1
+${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+--build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
+else()
+add_test(NAME ${name}
+COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+--build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
+endif()
else (CMAKE_CONFIGURATION_TYPES)
-# Single-configuration build generators like Makefile generators
-# don't have subdirs below CMAKE_CURRENT_BINARY_DIR.
-add_test(
-NAME ${name}
-COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
---build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
+# Single-configuration build generators like Makefile generators
+# don't have subdirs below CMAKE_CURRENT_BINARY_DIR.
+if (WIN32 OR MINGW)
+add_test(NAME ${name}
+COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1
+${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+--build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
+else()
+add_test(NAME ${name}
+COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+--build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
+endif()
endif (CMAKE_CONFIGURATION_TYPES)
-else (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
+else()
# ${CMAKE_CURRENT_BINARY_DIR} is known at configuration time, so we can
# directly bind it from cmake. ${CTEST_CONFIGURATION_TYPE} is known
# only at ctest runtime (by calling ctest -c <Configuration>), so
# we have to escape $ to delay variable substitution here.
-add_test(
-${name}
-${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
---build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
-endif (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
+if (WIN32 OR MINGW)
+add_test(NAME ${name}
+COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1
+${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+--build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
+else()
+add_test(NAME ${name}
+COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
+--build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
+endif()
+endif()
endif(PYTHONINTERP_FOUND)
endfunction()
@ -306,6 +346,18 @@ function(install_project)
RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}"
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
+if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+# Install PDBs
+foreach(t ${ARGN})
+get_target_property(t_pdb_name ${t} COMPILE_PDB_NAME)
+get_target_property(t_pdb_name_debug ${t} COMPILE_PDB_NAME_DEBUG)
+get_target_property(t_pdb_output_directory ${t} PDB_OUTPUT_DIRECTORY)
+install(FILES
+"${t_pdb_output_directory}/\${CMAKE_INSTALL_CONFIG_NAME}/$<IF:$<CONFIG:Debug>,${t_pdb_name_debug},${t_pdb_name}>.pdb"
+DESTINATION ${CMAKE_INSTALL_LIBDIR}
+OPTIONAL)
+endforeach()
+endif()
# Configure and install pkgconfig files.
foreach(t ${ARGN})
set(configured_pc "${generated_dir}/${t}.pc")

View File

@ -53,7 +53,8 @@ class GTEST_API_ TestPartResult {
enum Type {
kSuccess, // Succeeded.
kNonFatalFailure, // Failed but the test can continue.
-kFatalFailure // Failed and the test should be terminated.
+kFatalFailure, // Failed and the test should be terminated.
+kSkip // Skipped.
};
// C'tor. TestPartResult does NOT have a default constructor.
@ -89,18 +90,21 @@ class GTEST_API_ TestPartResult {
// Gets the message associated with the test part.
const char* message() const { return message_.c_str(); }
+// Returns true iff the test part was skipped.
+bool skipped() const { return type_ == kSkip; }
// Returns true iff the test part passed.
bool passed() const { return type_ == kSuccess; }
-// Returns true iff the test part failed.
-bool failed() const { return type_ != kSuccess; }
// Returns true iff the test part non-fatally failed.
bool nonfatally_failed() const { return type_ == kNonFatalFailure; }
// Returns true iff the test part fatally failed.
bool fatally_failed() const { return type_ == kFatalFailure; }
+// Returns true iff the test part failed.
+bool failed() const { return fatally_failed() || nonfatally_failed(); }
private:
Type type_;
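With kSkip in the enum, failed() can no longer be defined as type_ != kSuccess, since a skip is neither a success nor a failure. A small standalone sketch of the resulting three-way semantics (mirrors the accessors above; not the real class):

#include <cassert>

enum Type { kSuccess, kNonFatalFailure, kFatalFailure, kSkip };

bool passed(Type t) { return t == kSuccess; }
bool skipped(Type t) { return t == kSkip; }
bool failed(Type t) {
  // The old definition, t != kSuccess, would have wrongly counted kSkip.
  return t == kFatalFailure || t == kNonFatalFailure;
}

int main() {
  assert(!passed(kSkip) && !failed(kSkip) && skipped(kSkip));
  assert(failed(kFatalFailure) && failed(kNonFatalFailure));
}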

View File

@ -440,6 +440,9 @@ class GTEST_API_ Test {
// Returns true iff the current test has a non-fatal failure.
static bool HasNonfatalFailure();
+// Returns true iff the current test was skipped.
+static bool IsSkipped();
// Returns true iff the current test has a (either fatal or
// non-fatal) failure.
static bool HasFailure() { return HasFatalFailure() || HasNonfatalFailure(); }
@ -574,7 +577,10 @@ class GTEST_API_ TestResult {
int test_property_count() const;
// Returns true iff the test passed (i.e. no test part failed).
-bool Passed() const { return !Failed(); }
+bool Passed() const { return !Skipped() && !Failed(); }
+// Returns true iff the test was skipped.
+bool Skipped() const;
// Returns true iff the test failed.
bool Failed() const;
@ -854,6 +860,9 @@ class GTEST_API_ TestCase {
// Gets the number of successful tests in this test case.
int successful_test_count() const;
+// Gets the number of skipped tests in this test case.
+int skipped_test_count() const;
// Gets the number of failed tests in this test case.
int failed_test_count() const;
@ -936,6 +945,11 @@ class GTEST_API_ TestCase {
return test_info->should_run() && test_info->result()->Passed();
}
+// Returns true iff test skipped.
+static bool TestSkipped(const TestInfo* test_info) {
+return test_info->should_run() && test_info->result()->Skipped();
+}
// Returns true iff test failed.
static bool TestFailed(const TestInfo* test_info) {
return test_info->should_run() && test_info->result()->Failed();
@ -1258,6 +1272,9 @@ class GTEST_API_ UnitTest {
// Gets the number of successful tests.
int successful_test_count() const;
+// Gets the number of skipped tests.
+int skipped_test_count() const;
// Gets the number of failed tests.
int failed_test_count() const;
@ -1798,11 +1815,8 @@ class WithParamInterface {
virtual ~WithParamInterface() {}
// The current parameter value. Is also available in the test fixture's
-// constructor. This member function is non-static, even though it only
-// references static data, to reduce the opportunity for incorrect uses
-// like writing 'WithParamInterface<bool>::GetParam()' for a test that
-// uses a fixture whose parameter type is int.
-const ParamType& GetParam() const {
+// constructor.
+static const ParamType& GetParam() {
GTEST_CHECK_(parameter_ != NULL)
<< "GetParam() can only be called inside a value-parameterized test "
<< "-- did you intend to write TEST_P instead of TEST_F?";
@ -1835,6 +1849,11 @@ class TestWithParam : public Test, public WithParamInterface<T> {
// Macros for indicating success/failure in test code.
+// Skips test in runtime.
+// Skipping test aborts current function.
+// Skipped tests are neither successful nor failed.
+#define GTEST_SKIP() GTEST_SKIP_("Skipped")
// ADD_FAILURE unconditionally adds a failure to the current test.
// SUCCEED generates a success - it doesn't automatically make the
// current test successful, as a test is only successful when it has
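Usage is straightforward: the test body calls GTEST_SKIP(), the macro's return aborts the current function, and the run is recorded as skipped rather than passed or failed. A hedged sketch (the precondition function is invented for illustration; note GTEST_SKIP() takes no message argument at this point):

#include "gtest/gtest.h"

bool DatabaseAvailable() { return false; }  // illustrative precondition

TEST(MyFeatureTest, TalksToDatabase) {
  if (!DatabaseAvailable()) {
    GTEST_SKIP();  // expands to a return, so nothing below runs
  }
  // Never reached in this sketch; the test is reported as [ SKIPPED ].
  EXPECT_TRUE(false);
}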

View File

@ -1208,7 +1208,10 @@ class NativeArray {
#define GTEST_SUCCESS_(message) \
GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)
-// Suppress MSVC warning 4702 (unreachable code) for the code following
+#define GTEST_SKIP_(message) \
+return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSkip)
+// Suppress MSVC warning 4702 (unreachable code) for the code following
// statement if it returns or throws (or doesn't return or throw in some
// situations).
#define GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement) \
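The leading return is what gives GTEST_SKIP() its abort-the-current-function semantics: GTEST_MESSAGE_ records a test part result with kSkip, and the return leaves the test body immediately. Approximately, inside a void TestBody() the macro boils down to the following (simplified expansion, based on GTEST_MESSAGE_'s shape in this same header):

#include "gtest/gtest.h"

void TestBody() {
  // Records a kSkip TestPartResult, then returns from the (void) body.
  return ::testing::internal::AssertHelper(
             ::testing::TestPartResult::kSkip, __FILE__, __LINE__, "Skipped")
         = ::testing::Message();
}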

View File

@ -544,6 +544,9 @@ class GTEST_API_ UnitTestImpl {
// Gets the number of successful tests.
int successful_test_count() const;
+// Gets the number of skipped tests.
+int skipped_test_count() const;
// Gets the number of failed tests.
int failed_test_count() const;

View File

@ -47,12 +47,16 @@ std::string TestPartResult::ExtractSummary(const char* message) {
// Prints a TestPartResult object.
std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
-return os
-<< result.file_name() << ":" << result.line_number() << ": "
-<< (result.type() == TestPartResult::kSuccess ? "Success" :
-result.type() == TestPartResult::kFatalFailure ? "Fatal failure" :
-"Non-fatal failure") << ":\n"
-<< result.message() << std::endl;
+return os << result.file_name() << ":" << result.line_number() << ": "
+<< (result.type() == TestPartResult::kSuccess
+? "Success"
+: result.type() == TestPartResult::kSkip
+? "Skipped"
+: result.type() == TestPartResult::kFatalFailure
+? "Fatal failure"
+: "Non-fatal failure")
+<< ":\n"
+<< result.message() << std::endl;
}
// Appends a TestPartResult to the array.
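With the extra branch, a skipped part prints "Skipped" in the type slot. A small usage sketch (links against gtest; the constructor arguments mirror the tests later in this commit):

#include <iostream>
#include "gtest/gtest.h"

int main() {
  const ::testing::TestPartResult skip(
      ::testing::TestPartResult::kSkip, "foo.cc", 42, "msg");
  std::cout << skip;
  // Prints:
  // foo.cc:42: Skipped:
  // msg
}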

View File

@ -796,6 +796,11 @@ int UnitTestImpl::successful_test_count() const {
return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
}
+// Gets the number of skipped tests.
+int UnitTestImpl::skipped_test_count() const {
+return SumOverTestCaseList(test_cases_, &TestCase::skipped_test_count);
+}
// Gets the number of failed tests.
int UnitTestImpl::failed_test_count() const {
return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
@ -2207,6 +2212,16 @@ void TestResult::Clear() {
elapsed_time_ = 0;
}
+// Returns true iff the test part was skipped.
+static bool TestPartSkipped(const TestPartResult& result) {
+return result.skipped();
+}
+// Returns true iff the test was skipped.
+bool TestResult::Skipped() const {
+return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
+}
// Returns true iff the test failed.
bool TestResult::Failed() const {
for (int i = 0; i < total_part_count(); ++i) {
@ -2537,6 +2552,11 @@ bool Test::HasNonfatalFailure() {
HasNonfatalFailure();
}
+// Returns true iff the current test was skipped.
+bool Test::IsSkipped() {
+return internal::GetUnitTestImpl()->current_test_result()->Skipped();
+}
// class TestInfo
// Constructs a TestInfo object. It assumes ownership of the test factory
@ -2715,6 +2735,11 @@ int TestCase::successful_test_count() const {
return CountIf(test_info_list_, TestPassed);
}
+// Gets the number of skipped tests in this test case.
+int TestCase::skipped_test_count() const {
+return CountIf(test_info_list_, TestSkipped);
+}
// Gets the number of failed tests in this test case.
int TestCase::failed_test_count() const {
return CountIf(test_info_list_, TestFailed);
@ -2866,6 +2891,8 @@ static std::string FormatTestCaseCount(int test_case_count) {
// between the two when viewing the test result.
static const char * TestPartResultTypeToString(TestPartResult::Type type) {
switch (type) {
+case TestPartResult::kSkip:
+return "Skipped";
case TestPartResult::kSuccess:
return "Success";
@ -3119,6 +3146,7 @@ class PrettyUnitTestResultPrinter : public TestEventListener {
private:
static void PrintFailedTests(const UnitTest& unit_test);
+static void PrintSkippedTests(const UnitTest& unit_test);
};
// Fired before each iteration of tests starts.
@ -3187,18 +3215,25 @@ void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
// Called after an assertion failure.
void PrettyUnitTestResultPrinter::OnTestPartResult(
const TestPartResult& result) {
-// If the test part succeeded, we don't need to do anything.
-if (result.type() == TestPartResult::kSuccess)
-return;
-// Print failure message from the assertion (e.g. expected this and got that).
-PrintTestPartResult(result);
-fflush(stdout);
+switch (result.type()) {
+// If the test part succeeded, or was skipped,
+// we don't need to do anything.
+case TestPartResult::kSkip:
+case TestPartResult::kSuccess:
+return;
+default:
+// Print failure message from the assertion
+// (e.g. expected this and got that).
+PrintTestPartResult(result);
+fflush(stdout);
+}
}
void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
if (test_info.result()->Passed()) {
ColoredPrintf(COLOR_GREEN, "[ OK ] ");
+} else if (test_info.result()->Skipped()) {
+ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
} else {
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
}
@ -3248,7 +3283,7 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
}
for (int j = 0; j < test_case.total_test_count(); ++j) {
const TestInfo& test_info = *test_case.GetTestInfo(j);
-if (!test_info.should_run() || test_info.result()->Passed()) {
+if (!test_info.should_run() || !test_info.result()->Failed()) {
continue;
}
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
@ -3259,6 +3294,30 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
}
}
+// Internal helper for printing the list of skipped tests.
+void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
+const int skipped_test_count = unit_test.skipped_test_count();
+if (skipped_test_count == 0) {
+return;
+}
+for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+const TestCase& test_case = *unit_test.GetTestCase(i);
+if (!test_case.should_run() || (test_case.skipped_test_count() == 0)) {
+continue;
+}
+for (int j = 0; j < test_case.total_test_count(); ++j) {
+const TestInfo& test_info = *test_case.GetTestInfo(j);
+if (!test_info.should_run() || !test_info.result()->Skipped()) {
+continue;
+}
+ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
+printf("%s.%s", test_case.name(), test_info.name());
+printf("\n");
+}
+}
+}
void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
int /*iteration*/) {
ColoredPrintf(COLOR_GREEN, "[==========] ");
@ -3273,6 +3332,13 @@ void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
ColoredPrintf(COLOR_GREEN, "[ PASSED ] ");
printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
+const int skipped_test_count = unit_test.skipped_test_count();
+if (skipped_test_count > 0) {
+ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
+printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
+PrintSkippedTests(unit_test);
+}
int num_failures = unit_test.failed_test_count();
if (!unit_test.Passed()) {
const int failed_test_count = unit_test.failed_test_count();
@ -4540,6 +4606,11 @@ int UnitTest::successful_test_count() const {
return impl()->successful_test_count();
}
+// Gets the number of skipped tests.
+int UnitTest::skipped_test_count() const {
+return impl()->skipped_test_count();
+}
// Gets the number of failed tests.
int UnitTest::failed_test_count() const { return impl()->failed_test_count(); }
@ -4660,7 +4731,8 @@ void UnitTest::AddTestPartResult(
impl_->GetTestPartResultReporterForCurrentThread()->
ReportTestPartResult(result);
-if (result_type != TestPartResult::kSuccess) {
+if (result_type != TestPartResult::kSuccess &&
+result_type != TestPartResult::kSkip) {
// gtest_break_on_failure takes precedence over
// gtest_throw_on_failure. This allows a user to set the latter
// in the code (perhaps in order to use Google Test assertions
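Listeners observe the same three-way outcome: TestResult::Skipped() is true only when the test recorded at least one kSkip part and no failures. A minimal sketch of a listener reacting to skips (SkipLogger is an illustrative name, not part of the commit):

#include <cstdio>
#include "gtest/gtest.h"

// Logs skipped tests, mirroring what PrettyUnitTestResultPrinter
// now does with its [ SKIPPED ] lines.
class SkipLogger : public ::testing::EmptyTestEventListener {
  void OnTestEnd(const ::testing::TestInfo& test_info) override {
    if (test_info.result()->Skipped()) {
      std::printf("skipped: %s.%s\n", test_info.test_case_name(),
                  test_info.name());
    }
  }
};

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  // The listener list takes ownership of the appended pointer.
  ::testing::UnitTest::GetInstance()->listeners().Append(new SkipLogger);
  return RUN_ALL_TESTS();
}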

View File

@ -46,9 +46,10 @@ class TestPartResultTest : public Test {
TestPartResultTest()
: r1_(TestPartResult::kSuccess, "foo/bar.cc", 10, "Success!"),
r2_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure!"),
-r3_(TestPartResult::kFatalFailure, NULL, -1, "Failure!") {}
+r3_(TestPartResult::kFatalFailure, nullptr, -1, "Failure!"),
+r4_(TestPartResult::kSkip, "foo/bar.cc", 2, "Skipped!") {}
-TestPartResult r1_, r2_, r3_;
+TestPartResult r1_, r2_, r3_, r4_;
};
@ -79,6 +80,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_FALSE(success.failed());
EXPECT_FALSE(success.nonfatally_failed());
EXPECT_FALSE(success.fatally_failed());
+EXPECT_FALSE(success.skipped());
const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,
"file.cc",
@ -88,6 +90,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_TRUE(nonfatal_failure.failed());
EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
EXPECT_FALSE(nonfatal_failure.fatally_failed());
+EXPECT_FALSE(nonfatal_failure.skipped());
const TestPartResult fatal_failure(TestPartResult::kFatalFailure,
"file.cc",
@ -97,6 +100,14 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_TRUE(fatal_failure.failed());
EXPECT_FALSE(fatal_failure.nonfatally_failed());
EXPECT_TRUE(fatal_failure.fatally_failed());
+EXPECT_FALSE(fatal_failure.skipped());
+const TestPartResult skip(TestPartResult::kSkip, "file.cc", 42, "message");
+EXPECT_FALSE(skip.passed());
+EXPECT_FALSE(skip.failed());
+EXPECT_FALSE(skip.nonfatally_failed());
+EXPECT_FALSE(skip.fatally_failed());
+EXPECT_TRUE(skip.skipped());
}
// Tests TestPartResult::type().
@ -104,23 +115,27 @@ TEST_F(TestPartResultTest, type) {
EXPECT_EQ(TestPartResult::kSuccess, r1_.type());
EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type());
EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type());
+EXPECT_EQ(TestPartResult::kSkip, r4_.type());
}
// Tests TestPartResult::file_name().
TEST_F(TestPartResultTest, file_name) {
EXPECT_STREQ("foo/bar.cc", r1_.file_name());
EXPECT_STREQ(NULL, r3_.file_name());
EXPECT_STREQ("foo/bar.cc", r4_.file_name());
}
// Tests TestPartResult::line_number().
TEST_F(TestPartResultTest, line_number) {
EXPECT_EQ(10, r1_.line_number());
EXPECT_EQ(-1, r2_.line_number());
+EXPECT_EQ(2, r4_.line_number());
}
// Tests TestPartResult::message().
TEST_F(TestPartResultTest, message) {
EXPECT_STREQ("Success!", r1_.message());
EXPECT_STREQ("Skipped!", r4_.message());
}
// Tests TestPartResult::passed().
@ -128,6 +143,7 @@ TEST_F(TestPartResultTest, Passed) {
EXPECT_TRUE(r1_.passed());
EXPECT_FALSE(r2_.passed());
EXPECT_FALSE(r3_.passed());
+EXPECT_FALSE(r4_.passed());
}
// Tests TestPartResult::failed().
@ -135,6 +151,15 @@ TEST_F(TestPartResultTest, Failed) {
EXPECT_FALSE(r1_.failed());
EXPECT_TRUE(r2_.failed());
EXPECT_TRUE(r3_.failed());
+EXPECT_FALSE(r4_.failed());
}
+// Tests TestPartResult::skipped().
+TEST_F(TestPartResultTest, Skipped) {
+EXPECT_FALSE(r1_.skipped());
+EXPECT_FALSE(r2_.skipped());
+EXPECT_FALSE(r3_.skipped());
+EXPECT_TRUE(r4_.skipped());
+}
// Tests TestPartResult::fatally_failed().
@ -142,6 +167,7 @@ TEST_F(TestPartResultTest, FatallyFailed) {
EXPECT_FALSE(r1_.fatally_failed());
EXPECT_FALSE(r2_.fatally_failed());
EXPECT_TRUE(r3_.fatally_failed());
+EXPECT_FALSE(r4_.fatally_failed());
}
// Tests TestPartResult::nonfatally_failed().
@ -149,6 +175,7 @@ TEST_F(TestPartResultTest, NonfatallyFailed) {
EXPECT_FALSE(r1_.nonfatally_failed());
EXPECT_TRUE(r2_.nonfatally_failed());
EXPECT_FALSE(r3_.nonfatally_failed());
+EXPECT_FALSE(r4_.nonfatally_failed());
}
// Tests the TestPartResultArray class.

View File

@ -37,10 +37,11 @@
#include "test/googletest-message-test.cc"
#include "test/googletest-options-test.cc"
#include "test/googletest-port-test.cc"
#include "test/googletest-test-part-test.cc"
#include "test/gtest-typed-test2_test.cc"
#include "test/gtest-typed-test_test.cc"
#include "test/gtest_pred_impl_unittest.cc"
#include "test/gtest_prod_test.cc"
#include "test/googletest-test-part-test.cc"
#include "test/gtest-typed-test_test.cc"
#include "test/gtest-typed-test2_test.cc"
#include "test/gtest_skip_test.cc"
#include "test/gtest_unittest.cc"
#include "test/production.cc"

View File

@ -0,0 +1,38 @@
// Copyright 2008 Google Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: arseny.aprelev@gmail.com (Arseny Aprelev)
//
#include "gtest/gtest.h"
TEST(SkipTest, DoesSkip) {
GTEST_SKIP();
EXPECT_EQ(0, 1);
}
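The EXPECT_EQ(0, 1) would fail if it ever ran; the test is reported [ SKIPPED ] precisely because GTEST_SKIP()'s return aborts the body first. A companion sketch using the new Test::IsSkipped() accessor (fixture name and cleanup logic are illustrative, not from the commit):

#include "gtest/gtest.h"

class SkipAwareTest : public ::testing::Test {
 protected:
  void TearDown() override {
    // IsSkipped() reports whether the current test recorded a skip,
    // so teardown can e.g. release only what was actually acquired.
    if (IsSkipped()) {
      // skip-specific cleanup would go here
    }
  }
};

TEST_F(SkipAwareTest, ChecksSkipInTearDown) {
  GTEST_SKIP();
}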