Removes the Windows golden file (by Vlad Losev); implements test result streaming (by Nikhil Jindal, cleaned up by Zhanyong Wan).

parent b83585c4de
commit a9f380f5c7
@@ -102,7 +102,6 @@ EXTRA_DIST += \
 test/gtest_list_tests_unittest.py \
 test/gtest_output_test.py \
 test/gtest_output_test_golden_lin.txt \
-test/gtest_output_test_golden_win.txt \
 test/gtest_shuffle_test.py \
 test/gtest_test_utils.py \
 test/gtest_throw_on_failure_test.py \
@@ -137,6 +137,11 @@ GTEST_DECLARE_int32_(stack_trace_depth);
 // non-zero code otherwise.
 GTEST_DECLARE_bool_(throw_on_failure);
 
+// When this flag is set with a "host:port" string, on supported
+// platforms test results are streamed to the specified port on
+// the specified host machine.
+GTEST_DECLARE_string_(stream_result_to);
+
 // The upper limit for valid stack trace depths.
 const int kMaxStackTraceDepth = 100;
 
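For context (an editor's illustration, not part of the commit): once this flag exists, streaming is enabled entirely from the command line. A minimal Python sketch, assuming a hypothetical test binary ./my_test and a collector already listening on localhost:9090:

import subprocess

# On Linux builds (the only platform where this commit sets
# GTEST_CAN_STREAM_RESULTS_), the flag makes the binary open a TCP
# connection to the collector and stream test events while the tests run.
subprocess.call(['./my_test', '--gtest_stream_result_to=localhost:9090'])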
@@ -155,8 +160,6 @@ class WindowsDeathTest;
 class UnitTestImpl* GetUnitTestImpl();
 void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
                                     const String& message);
-class PrettyUnitTestResultPrinter;
-class XmlUnitTestResultPrinter;
 
 // Converts a streamable value to a String. A NULL pointer is
 // converted to "(null)". When the input value is a ::string,
@@ -537,6 +537,11 @@
 #define GTEST_WIDE_STRING_USES_UTF16_ \
     (GTEST_OS_WINDOWS || GTEST_OS_CYGWIN || GTEST_OS_SYMBIAN || GTEST_OS_AIX)
 
+// Determines whether test results can be streamed to a socket.
+#if GTEST_OS_LINUX
+#define GTEST_CAN_STREAM_RESULTS_ 1
+#endif
+
 // Defines some utility macros.
 
 // The GNU compiler emits a warning if nested "if" statements are followed by
@@ -93,6 +93,7 @@ const char kRandomSeedFlag[] = "random_seed";
 const char kRepeatFlag[] = "repeat";
 const char kShuffleFlag[] = "shuffle";
 const char kStackTraceDepthFlag[] = "stack_trace_depth";
+const char kStreamResultToFlag[] = "stream_result_to";
 const char kThrowOnFailureFlag[] = "throw_on_failure";
 
 // A valid random seed must be in [1, kMaxRandomSeed].
@@ -165,6 +166,7 @@ class GTestFlagSaver {
     repeat_ = GTEST_FLAG(repeat);
     shuffle_ = GTEST_FLAG(shuffle);
     stack_trace_depth_ = GTEST_FLAG(stack_trace_depth);
+    stream_result_to_ = GTEST_FLAG(stream_result_to);
     throw_on_failure_ = GTEST_FLAG(throw_on_failure);
   }
 
@@ -185,6 +187,7 @@ class GTestFlagSaver {
     GTEST_FLAG(repeat) = repeat_;
     GTEST_FLAG(shuffle) = shuffle_;
     GTEST_FLAG(stack_trace_depth) = stack_trace_depth_;
+    GTEST_FLAG(stream_result_to) = stream_result_to_;
     GTEST_FLAG(throw_on_failure) = throw_on_failure_;
   }
  private:
@@ -205,6 +208,7 @@ class GTestFlagSaver {
   internal::Int32 repeat_;
   bool shuffle_;
   internal::Int32 stack_trace_depth_;
+  String stream_result_to_;
   bool throw_on_failure_;
 } GTEST_ATTRIBUTE_UNUSED_;
 
@@ -741,6 +745,12 @@ class GTEST_API_ UnitTestImpl {
   // UnitTestOptions. Must not be called before InitGoogleTest.
   void ConfigureXmlOutput();
 
+#if GTEST_CAN_STREAM_RESULTS_
+  // Initializes the event listener for streaming test results to a socket.
+  // Must not be called before InitGoogleTest.
+  void ConfigureStreamingOutput();
+#endif
+
   // Performs initialization dependent upon flag values obtained in
   // ParseGoogleTestFlagsOnly. Is called from InitGoogleTest after the call to
   // ParseGoogleTestFlagsOnly. In case a user neglects to call InitGoogleTest
src/gtest.cc (248 changed lines)
@@ -43,7 +43,7 @@
 #include <wctype.h>
 
 #include <algorithm>
-#include <ostream>
+#include <ostream>  // NOLINT
 #include <sstream>
 #include <vector>
 
@@ -53,16 +53,15 @@
 // gettimeofday().
 #define GTEST_HAS_GETTIMEOFDAY_ 1
 
-#include <fcntl.h>
-#include <limits.h>
-#include <sched.h>
+#include <fcntl.h>  // NOLINT
+#include <limits.h>  // NOLINT
+#include <sched.h>  // NOLINT
 // Declares vsnprintf(). This header is not available on Windows.
-#include <strings.h>
-#include <sys/mman.h>
-#include <sys/time.h>
-#include <unistd.h>
+#include <strings.h>  // NOLINT
+#include <sys/mman.h>  // NOLINT
+#include <sys/time.h>  // NOLINT
+#include <unistd.h>  // NOLINT
 #include <string>
-#include <vector>
 
 #elif GTEST_OS_SYMBIAN
 #define GTEST_HAS_GETTIMEOFDAY_ 1
@@ -119,6 +118,11 @@
 #include <stdexcept>
 #endif
 
+#if GTEST_CAN_STREAM_RESULTS_
+#include <arpa/inet.h>  // NOLINT
+#include <netdb.h>  // NOLINT
+#endif
+
 // Indicates that this translation unit is part of Google Test's
 // implementation. It must come before gtest-internal-inl.h is
 // included, or there will be a compiler error. This trick is to
|
@ -258,6 +262,13 @@ GTEST_DEFINE_int32_(
|
||||||
"The maximum number of stack frames to print when an "
|
"The maximum number of stack frames to print when an "
|
||||||
"assertion fails. The valid range is 0 through 100, inclusive.");
|
"assertion fails. The valid range is 0 through 100, inclusive.");
|
||||||
|
|
||||||
|
GTEST_DEFINE_string_(
|
||||||
|
stream_result_to,
|
||||||
|
internal::StringFromGTestEnv("stream_result_to", ""),
|
||||||
|
"This flag specifies the host name and the port number on which to stream "
|
||||||
|
"test results. Example: \"localhost:555\". The flag is effective only on "
|
||||||
|
"Linux.");
|
||||||
|
|
||||||
GTEST_DEFINE_bool_(
|
GTEST_DEFINE_bool_(
|
||||||
throw_on_failure,
|
throw_on_failure,
|
||||||
internal::BoolFromGTestEnv("throw_on_failure", false),
|
internal::BoolFromGTestEnv("throw_on_failure", false),
|
||||||
|
@ -2286,8 +2297,8 @@ void TestInfo::Run() {
|
||||||
factory_, &internal::TestFactoryBase::CreateTest,
|
factory_, &internal::TestFactoryBase::CreateTest,
|
||||||
"the test fixture's constructor");
|
"the test fixture's constructor");
|
||||||
|
|
||||||
// Runs the test only if the test object was created and its constructor didn't
|
// Runs the test only if the test object was created and its
|
||||||
// generate a fatal failure.
|
// constructor didn't generate a fatal failure.
|
||||||
if ((test != NULL) && !Test::HasFatalFailure()) {
|
if ((test != NULL) && !Test::HasFatalFailure()) {
|
||||||
// This doesn't throw as all user code that can throw are wrapped into
|
// This doesn't throw as all user code that can throw are wrapped into
|
||||||
// exception handling code.
|
// exception handling code.
|
||||||
|
@@ -2800,8 +2811,8 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
   }
 }
 
 void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                      int /*iteration*/) {
   ColoredPrintf(COLOR_GREEN,  "[==========] ");
   printf("%s from %s ran.",
          FormatTestCount(unit_test.test_to_run_count()).c_str(),
@@ -3266,6 +3277,182 @@ String XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes(
 
 // End XmlUnitTestResultPrinter
 
+#if GTEST_CAN_STREAM_RESULTS_
+
+// Streams test results to the given port on the given host machine.
+class StreamingListener : public EmptyTestEventListener {
+ public:
+  // Escapes '=', '&', '%', and '\n' characters in str as "%xx".
+  static string UrlEncode(const char* str);
+
+  StreamingListener(const string& host, const string& port)
+      : sockfd_(-1), host_name_(host), port_num_(port) {
+    MakeConnection();
+    Send("gtest_streaming_protocol_version=1.0\n");
+  }
+
+  virtual ~StreamingListener() {
+    if (sockfd_ != -1)
+      CloseConnection();
+  }
+
+  void OnTestProgramStart(const UnitTest& /* unit_test */) {
+    Send("event=TestProgramStart\n");
+  }
+
+  void OnTestProgramEnd(const UnitTest& unit_test) {
+    // Note that Google Test currently only reports elapsed time for each
+    // test iteration, not for the entire test program.
+    Send(String::Format("event=TestProgramEnd&passed=%d\n",
+                        unit_test.Passed()));
+
+    // Notify the streaming server to stop.
+    CloseConnection();
+  }
+
+  void OnTestIterationStart(const UnitTest& /* unit_test */, int iteration) {
+    Send(String::Format("event=TestIterationStart&iteration=%d\n",
+                        iteration));
+  }
+
+  void OnTestIterationEnd(const UnitTest& unit_test, int /* iteration */) {
+    Send(String::Format("event=TestIterationEnd&passed=%d&elapsed_time=%sms\n",
+                        unit_test.Passed(),
+                        StreamableToString(unit_test.elapsed_time()).c_str()));
+  }
+
+  void OnTestCaseStart(const TestCase& test_case) {
+    Send(String::Format("event=TestCaseStart&name=%s\n", test_case.name()));
+  }
+
+  void OnTestCaseEnd(const TestCase& test_case) {
+    Send(String::Format("event=TestCaseEnd&passed=%d&elapsed_time=%sms\n",
+                        test_case.Passed(),
+                        StreamableToString(test_case.elapsed_time()).c_str()));
+  }
+
+  void OnTestStart(const TestInfo& test_info) {
+    Send(String::Format("event=TestStart&name=%s\n", test_info.name()));
+  }
+
+  void OnTestEnd(const TestInfo& test_info) {
+    Send(String::Format(
+        "event=TestEnd&passed=%d&elapsed_time=%sms\n",
+        (test_info.result())->Passed(),
+        StreamableToString((test_info.result())->elapsed_time()).c_str()));
+  }
+
+  void OnTestPartResult(const TestPartResult& test_part_result) {
+    const char* file_name = test_part_result.file_name();
+    if (file_name == NULL)
+      file_name = "";
+    Send(String::Format("event=TestPartResult&file=%s&line=%d&message=",
+                        UrlEncode(file_name).c_str(),
+                        test_part_result.line_number()));
+    Send(UrlEncode(test_part_result.message()) + "\n");
+  }
+
+ private:
+  // Creates a client socket and connects to the server.
+  void MakeConnection();
+
+  // Closes the socket.
+  void CloseConnection() {
+    GTEST_CHECK_(sockfd_ != -1)
+        << "CloseConnection() can be called only when there is a connection.";
+
+    close(sockfd_);
+    sockfd_ = -1;
+  }
+
+  // Sends a string to the socket.
+  void Send(const string& message) {
+    GTEST_CHECK_(sockfd_ != -1)
+        << "Send() can be called only when there is a connection.";
+
+    const int len = static_cast<int>(message.length());
+    if (write(sockfd_, message.c_str(), len) != len) {
+      GTEST_LOG_(WARNING)
+          << "stream_result_to: failed to stream to "
+          << host_name_ << ":" << port_num_;
+    }
+  }
+
+  int sockfd_;  // socket file descriptor
+  const string host_name_;
+  const string port_num_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(StreamingListener);
+};  // class StreamingListener
+
+// Checks if str contains '=', '&', '%' or '\n' characters. If yes,
+// replaces them by "%xx" where xx is their hexadecimal value. For
+// example, replaces "=" with "%3D". This algorithm is O(strlen(str))
+// in both time and space -- important as the input str may contain an
+// arbitrarily long test failure message and stack trace.
+string StreamingListener::UrlEncode(const char* str) {
+  string result;
+  result.reserve(strlen(str) + 1);
+  for (char ch = *str; ch != '\0'; ch = *++str) {
+    switch (ch) {
+      case '%':
+      case '=':
+      case '&':
+      case '\n':
+        result.append(String::Format("%%%02x", static_cast<unsigned char>(ch)));
+        break;
+      default:
+        result.push_back(ch);
+        break;
+    }
+  }
+  return result;
+}
+
+void StreamingListener::MakeConnection() {
+  GTEST_CHECK_(sockfd_ == -1)
+      << "MakeConnection() can't be called when there is already a connection.";
+
+  addrinfo hints;
+  memset(&hints, 0, sizeof(hints));
+  hints.ai_family = AF_UNSPEC;    // To allow both IPv4 and IPv6 addresses.
+  hints.ai_socktype = SOCK_STREAM;
+  addrinfo* servinfo = NULL;
+
+  // Use getaddrinfo() to get a linked list of IP addresses for
+  // the given host name.
+  const int error_num = getaddrinfo(
+      host_name_.c_str(), port_num_.c_str(), &hints, &servinfo);
+  if (error_num != 0) {
+    GTEST_LOG_(WARNING) << "stream_result_to: getaddrinfo() failed: "
+                        << gai_strerror(error_num);
+  }
+
+  // Loop through all the results and connect to the first we can.
+  for (addrinfo* cur_addr = servinfo; sockfd_ == -1 && cur_addr != NULL;
+       cur_addr = cur_addr->ai_next) {
+    sockfd_ = socket(
+        cur_addr->ai_family, cur_addr->ai_socktype, cur_addr->ai_protocol);
+    if (sockfd_ != -1) {
+      // Connect the client socket to the server socket.
+      if (connect(sockfd_, cur_addr->ai_addr, cur_addr->ai_addrlen) == -1) {
+        close(sockfd_);
+        sockfd_ = -1;
+      }
+    }
+  }
+
+  freeaddrinfo(servinfo);  // all done with this structure
+
+  if (sockfd_ == -1) {
+    GTEST_LOG_(WARNING) << "stream_result_to: failed to connect to "
+                        << host_name_ << ":" << port_num_;
+  }
+}
+
+// End of class StreamingListener
+#endif  // GTEST_CAN_STREAM_RESULTS_
+
 // Class ScopedTrace
 
 // Pushes the given source file location and message onto a per-thread
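The class above fully defines the wire format: the client connects, sends gtest_streaming_protocol_version=1.0, then emits one newline-terminated event=...&key=value line per lifecycle callback (with '=', '&', '%', and '\n' inside values escaped as %xx), and closes the socket at TestProgramEnd. A minimal collector sketch in Python (an editor's illustration, not part of the commit; the port number is arbitrary):

import socket
from urllib.parse import unquote


def serve_one_run(port=9090):
  """Accepts one streaming connection and prints each decoded event."""
  server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
  server.bind(('', port))
  server.listen(1)
  conn, _ = server.accept()
  buf = b''
  while True:
    data = conn.recv(4096)
    if not data:
      break  # StreamingListener closes the connection at TestProgramEnd.
    buf += data
    while b'\n' in buf:
      line, buf = buf.split(b'\n', 1)
      # Each line is '&'-joined key=value pairs; UrlEncode() guarantees the
      # separators never occur inside a value, so a naive split is safe.
      fields = dict(pair.split('=', 1) for pair in line.decode().split('&'))
      print({key: unquote(value) for key, value in fields.items()})
  conn.close()
  server.close()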
@@ -3770,6 +3957,25 @@ void UnitTestImpl::ConfigureXmlOutput() {
   }
 }
 
+#if GTEST_CAN_STREAM_RESULTS_
+// Initializes event listeners for streaming test results in String form.
+// Must not be called before InitGoogleTest.
+void UnitTestImpl::ConfigureStreamingOutput() {
+  const string& target = GTEST_FLAG(stream_result_to);
+  if (!target.empty()) {
+    const size_t pos = target.find(':');
+    if (pos != string::npos) {
+      listeners()->Append(new StreamingListener(target.substr(0, pos),
+                                                target.substr(pos+1)));
+    } else {
+      printf("WARNING: unrecognized streaming target \"%s\" ignored.\n",
+             target.c_str());
+      fflush(stdout);
+    }
+  }
+}
+#endif  // GTEST_CAN_STREAM_RESULTS_
+
 // Performs initialization dependent upon flag values obtained in
 // ParseGoogleTestFlagsOnly. Is called from InitGoogleTest after the call to
 // ParseGoogleTestFlagsOnly. In case a user neglects to call InitGoogleTest
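Note the parse rule above: the target string splits at the first ':', and everything after it becomes the port string handed to getaddrinfo(). An equivalent restatement in Python (illustrative):

host, port = 'localhost:9090'.split(':', 1)  # target.substr(0, pos) / target.substr(pos + 1)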
@@ -3793,6 +3999,11 @@ void UnitTestImpl::PostFlagParsingInit() {
   // Configures listeners for XML output. This makes it possible for users
   // to shut down the default XML output before invoking RUN_ALL_TESTS.
   ConfigureXmlOutput();
+
+#if GTEST_CAN_STREAM_RESULTS_
+  // Configures listeners for streaming test results to the specified server.
+  ConfigureStreamingOutput();
+#endif  // GTEST_CAN_STREAM_RESULTS_
 }
 }
 
@@ -4471,6 +4682,10 @@ static const char kColorEncodedHelpMessage[] =
     GTEST_PATH_SEP_ "@Y|@G:@YFILE_PATH]@D\n"
 "      Generate an XML report in the given directory or with the given file\n"
 "      name. @YFILE_PATH@D defaults to @Gtest_details.xml@D.\n"
+#if GTEST_CAN_STREAM_RESULTS_
+"  @G--" GTEST_FLAG_PREFIX_ "stream_result_to=@YHOST@G:@YPORT@D\n"
+"      Stream test results to the given server.\n"
+#endif  // GTEST_CAN_STREAM_RESULTS_
 "\n"
 "Assertion Behavior:\n"
 #if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
@@ -4481,10 +4696,8 @@ static const char kColorEncodedHelpMessage[] =
 "      Turn assertion failures into debugger break-points.\n"
 "  @G--" GTEST_FLAG_PREFIX_ "throw_on_failure@D\n"
 "      Turn assertion failures into C++ exceptions.\n"
-#if GTEST_OS_WINDOWS
 "  @G--" GTEST_FLAG_PREFIX_ "catch_exceptions@D\n"
 "      Suppress pop-ups caused by exceptions.\n"
-#endif  // GTEST_OS_WINDOWS
 "\n"
 "Except for @G--" GTEST_FLAG_PREFIX_ "list_tests@D, you can alternatively set "
 "the corresponding\n"
@@ -4534,7 +4747,10 @@ void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) {
         ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
         ParseInt32Flag(arg, kStackTraceDepthFlag,
                        &GTEST_FLAG(stack_trace_depth)) ||
-        ParseBoolFlag(arg, kThrowOnFailureFlag, &GTEST_FLAG(throw_on_failure))
+        ParseStringFlag(arg, kStreamResultToFlag,
+                        &GTEST_FLAG(stream_result_to)) ||
+        ParseBoolFlag(arg, kThrowOnFailureFlag,
+                      &GTEST_FLAG(throw_on_failure))
         ) {
       // Yes. Shift the remainder of the argv list left by one. Note
       // that argv has (*argc + 1) elements, the last one always being
@@ -44,12 +44,13 @@ import re
 import gtest_test_utils
 
 
+IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
 IS_WINDOWS = os.name == 'nt'
 
 PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
 FLAG_PREFIX = '--gtest_'
-CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions'
 DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
+STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'
 UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
 LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
 INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
@@ -72,7 +73,8 @@ HELP_REGEX = re.compile(
     FLAG_PREFIX + r'print_time.*' +
     FLAG_PREFIX + r'output=.*' +
     FLAG_PREFIX + r'break_on_failure.*' +
-    FLAG_PREFIX + r'throw_on_failure.*',
+    FLAG_PREFIX + r'throw_on_failure.*' +
+    FLAG_PREFIX + r'catch_exceptions.*',
     re.DOTALL)
 
 
@@ -109,10 +111,11 @@ class GTestHelpTest(gtest_test_utils.TestCase):
     exit_code, output = RunWithFlag(flag)
     self.assertEquals(0, exit_code)
     self.assert_(HELP_REGEX.search(output), output)
-    if IS_WINDOWS:
-      self.assert_(CATCH_EXCEPTIONS_FLAG in output, output)
+    if IS_LINUX:
+      self.assert_(STREAM_RESULT_TO_FLAG in output, output)
     else:
-      self.assert_(CATCH_EXCEPTIONS_FLAG not in output, output)
+      self.assert_(STREAM_RESULT_TO_FLAG not in output, output)
 
     if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
       self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
@@ -52,10 +52,8 @@ CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
 
 IS_WINDOWS = os.name == 'nt'
 
-if IS_WINDOWS:
-  GOLDEN_NAME = 'gtest_output_test_golden_win.txt'
-else:
-  GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
+# TODO(vladl@google.com): remove the _lin suffix.
+GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
 
 PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
 
@@ -138,6 +136,20 @@ def RemoveTypeInfoDetails(test_output):
   return re.sub(r'unsigned int', 'unsigned', test_output)
 
 
+def NormalizeToCurrentPlatform(test_output):
+  """Normalizes platform specific output details for easier comparison."""
+
+  if IS_WINDOWS:
+    # Removes the color information that is not present on Windows.
+    test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
+    # Changes failure message headers into the Windows format.
+    test_output = re.sub(r': Failure\n', r': error: ', test_output)
+    # Changes file(line_number) to file:line_number.
+    test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
+
+  return test_output
+
+
 def RemoveTestCounts(output):
   """Removes test counts from a Google Test program's output."""
 
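To make the substitutions above concrete (the sample line is an editor's illustration, not taken from the test), their net effect is the canonical file:line: error: form used by the golden file:

import re

sample = 'gtest_output_test_.cc(123): Failure\nValue of: x'
sample = re.sub('\x1b\\[(0;3\\d)?m', '', sample)             # strip ANSI colors
sample = re.sub(r': Failure\n', r': error: ', sample)        # unify header style
sample = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', sample)  # file(42): -> file:42:
print(sample)  # -> gtest_output_test_.cc:123: error: Value of: x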
@@ -240,7 +252,7 @@ SUPPORTS_STACK_TRACES = False
 
 CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
                             SUPPORTS_TYPED_TESTS and
-                            (SUPPORTS_THREADS or IS_WINDOWS))
+                            SUPPORTS_THREADS)
 
 
 class GTestOutputTest(gtest_test_utils.TestCase):
@@ -284,9 +296,10 @@ class GTestOutputTest(gtest_test_utils.TestCase):
     if CAN_GENERATE_GOLDEN_FILE:
       self.assertEqual(normalized_golden, normalized_actual)
     else:
-      normalized_actual = RemoveTestCounts(normalized_actual)
-      normalized_golden = RemoveTestCounts(self.RemoveUnsupportedTests(
-          normalized_golden))
+      normalized_actual = NormalizeToCurrentPlatform(
+          RemoveTestCounts(normalized_actual))
+      normalized_golden = NormalizeToCurrentPlatform(
+          RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
 
       # This code is very handy when debugging golden file differences:
       if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
@@ -312,14 +325,9 @@ if __name__ == '__main__':
   else:
     message = (
         """Unable to write a golden file when compiled in an environment
-that does not support all the required features (death tests""")
-    if IS_WINDOWS:
-      message += (
-          """\nand typed tests). Please check that you are using VC++ 8.0 SP1
-or higher as your compiler.""")
-    else:
-      message += """\ntyped tests, and threads). Please generate the
-golden file using a binary built with those features enabled."""
+that does not support all the required features (death tests, typed tests,
+and multiple threads). Please generate the golden file using a binary built
+with those features enabled.""")
 
     sys.stderr.write(message)
     sys.exit(1)
@@ -1,577 +0,0 @@
-The non-test part of the code is expected to have 2 failures.
-
-gtest_output_test_.cc:#: error: Value of: false
-  Actual: false
-Expected: true
-gtest_output_test_.cc:#: error: Value of: 3
-Expected: 2
-[==========] Running 58 tests from 25 test cases.
-[----------] Global test environment set-up.
-FooEnvironment::SetUp() called.
-BarEnvironment::SetUp() called.
-[----------] 1 test from ADeathTest
-[ RUN      ] ADeathTest.ShouldRunFirst
-[       OK ] ADeathTest.ShouldRunFirst
-[----------] 1 test from ATypedDeathTest/0, where TypeParam = int
-[ RUN      ] ATypedDeathTest/0.ShouldRunFirst
-[       OK ] ATypedDeathTest/0.ShouldRunFirst
-[----------] 1 test from ATypedDeathTest/1, where TypeParam = double
-[ RUN      ] ATypedDeathTest/1.ShouldRunFirst
-[       OK ] ATypedDeathTest/1.ShouldRunFirst
-[----------] 1 test from My/ATypeParamDeathTest/0, where TypeParam = int
-[ RUN      ] My/ATypeParamDeathTest/0.ShouldRunFirst
-[       OK ] My/ATypeParamDeathTest/0.ShouldRunFirst
-[----------] 1 test from My/ATypeParamDeathTest/1, where TypeParam = double
-[ RUN      ] My/ATypeParamDeathTest/1.ShouldRunFirst
-[       OK ] My/ATypeParamDeathTest/1.ShouldRunFirst
-[----------] 2 tests from PassingTest
-[ RUN      ] PassingTest.PassingTest1
-[       OK ] PassingTest.PassingTest1
-[ RUN      ] PassingTest.PassingTest2
-[       OK ] PassingTest.PassingTest2
-[----------] 3 tests from FatalFailureTest
-[ RUN      ] FatalFailureTest.FatalFailureInSubroutine
-(expecting a failure that x should be 1)
-gtest_output_test_.cc:#: error: Value of: x
-  Actual: 2
-Expected: 1
-[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
-[ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
-(expecting a failure that x should be 1)
-gtest_output_test_.cc:#: error: Value of: x
-  Actual: 2
-Expected: 1
-[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
-[ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
-(expecting a failure on false)
-gtest_output_test_.cc:#: error: Value of: false
-  Actual: false
-Expected: true
-[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
-[----------] 1 test from LoggingTest
-[ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
-(expecting 2 failures on (3) >= (a[i]))
-i == 0
-i == 1
-gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 9
-i == 2
-i == 3
-gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 6
-[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
-[----------] 5 tests from SCOPED_TRACETest
-[ RUN      ] SCOPED_TRACETest.ObeysScopes
-(expected to fail)
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and shouldn't have a trace.
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and should have a trace.
-Google Test trace:
-gtest_output_test_.cc:#: Expected trace
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and shouldn't have a trace.
-[  FAILED  ] SCOPED_TRACETest.ObeysScopes
-[ RUN      ] SCOPED_TRACETest.WorksInLoop
-(expected to fail)
-gtest_output_test_.cc:#: error: Value of: n
-  Actual: 1
-Expected: 2
-Google Test trace:
-gtest_output_test_.cc:#: i = 1
-gtest_output_test_.cc:#: error: Value of: n
-  Actual: 2
-Expected: 1
-Google Test trace:
-gtest_output_test_.cc:#: i = 2
-[  FAILED  ] SCOPED_TRACETest.WorksInLoop
-[ RUN      ] SCOPED_TRACETest.WorksInSubroutine
-(expected to fail)
-gtest_output_test_.cc:#: error: Value of: n
-  Actual: 1
-Expected: 2
-Google Test trace:
-gtest_output_test_.cc:#: n = 1
-gtest_output_test_.cc:#: error: Value of: n
-  Actual: 2
-Expected: 1
-Google Test trace:
-gtest_output_test_.cc:#: n = 2
-[  FAILED  ] SCOPED_TRACETest.WorksInSubroutine
-[ RUN      ] SCOPED_TRACETest.CanBeNested
-(expected to fail)
-gtest_output_test_.cc:#: error: Value of: n
-  Actual: 2
-Expected: 1
-Google Test trace:
-gtest_output_test_.cc:#: n = 2
-gtest_output_test_.cc:#:
-[  FAILED  ] SCOPED_TRACETest.CanBeNested
-[ RUN      ] SCOPED_TRACETest.CanBeRepeated
-(expected to fail)
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and should contain trace point A.
-Google Test trace:
-gtest_output_test_.cc:#: A
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and should contain trace point A and B.
-Google Test trace:
-gtest_output_test_.cc:#: B
-gtest_output_test_.cc:#: A
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and should contain trace point A, B, and C.
-Google Test trace:
-gtest_output_test_.cc:#: C
-gtest_output_test_.cc:#: B
-gtest_output_test_.cc:#: A
-gtest_output_test_.cc:#: error: Failed
-This failure is expected, and should contain trace point A, B, and D.
-Google Test trace:
-gtest_output_test_.cc:#: D
-gtest_output_test_.cc:#: B
-gtest_output_test_.cc:#: A
-[  FAILED  ] SCOPED_TRACETest.CanBeRepeated
-[----------] 1 test from NonFatalFailureInFixtureConstructorTest
-[ RUN      ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
-(expecting 5 failures)
-gtest_output_test_.cc:#: error: Failed
-Expected failure #1, in the test fixture c'tor.
-gtest_output_test_.cc:#: error: Failed
-Expected failure #2, in SetUp().
-gtest_output_test_.cc:#: error: Failed
-Expected failure #3, in the test body.
-gtest_output_test_.cc:#: error: Failed
-Expected failure #4, in TearDown.
-gtest_output_test_.cc:#: error: Failed
-Expected failure #5, in the test fixture d'tor.
-[  FAILED  ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
-[----------] 1 test from FatalFailureInFixtureConstructorTest
-[ RUN      ] FatalFailureInFixtureConstructorTest.FailureInConstructor
-(expecting 2 failures)
-gtest_output_test_.cc:#: error: Failed
-Expected failure #1, in the test fixture c'tor.
-gtest_output_test_.cc:#: error: Failed
-Expected failure #2, in the test fixture d'tor.
-[  FAILED  ] FatalFailureInFixtureConstructorTest.FailureInConstructor
-[----------] 1 test from NonFatalFailureInSetUpTest
-[ RUN      ] NonFatalFailureInSetUpTest.FailureInSetUp
-(expecting 4 failures)
-gtest_output_test_.cc:#: error: Failed
-Expected failure #1, in SetUp().
-gtest_output_test_.cc:#: error: Failed
-Expected failure #2, in the test function.
-gtest_output_test_.cc:#: error: Failed
-Expected failure #3, in TearDown().
-gtest_output_test_.cc:#: error: Failed
-Expected failure #4, in the test fixture d'tor.
-[  FAILED  ] NonFatalFailureInSetUpTest.FailureInSetUp
-[----------] 1 test from FatalFailureInSetUpTest
-[ RUN      ] FatalFailureInSetUpTest.FailureInSetUp
-(expecting 3 failures)
-gtest_output_test_.cc:#: error: Failed
-Expected failure #1, in SetUp().
-gtest_output_test_.cc:#: error: Failed
-Expected failure #2, in TearDown().
-gtest_output_test_.cc:#: error: Failed
-Expected failure #3, in the test fixture d'tor.
-[  FAILED  ] FatalFailureInSetUpTest.FailureInSetUp
-[----------] 1 test from AddFailureAtTest
-[ RUN      ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
-foo.cc(42): error: Failed
-Expected failure in foo.cc
-[  FAILED  ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
-[----------] 4 tests from MixedUpTestCaseTest
-[ RUN      ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
-[       OK ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
-[ RUN      ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
-[       OK ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
-[ RUN      ] MixedUpTestCaseTest.ThisShouldFail
-gtest.cc:#: error: Failed
-All tests in the same test case must use the same test fixture
-class. However, in test case MixedUpTestCaseTest,
-you defined test FirstTestFromNamespaceFoo and test ThisShouldFail
-using two different test fixture classes. This can happen if
-the two classes are from different namespaces or translation
-units and have the same name. You should probably rename one
-of the classes to put the tests into different test cases.
-[  FAILED  ] MixedUpTestCaseTest.ThisShouldFail
-[ RUN      ] MixedUpTestCaseTest.ThisShouldFailToo
-gtest.cc:#: error: Failed
-All tests in the same test case must use the same test fixture
-class. However, in test case MixedUpTestCaseTest,
-you defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo
-using two different test fixture classes. This can happen if
-the two classes are from different namespaces or translation
-units and have the same name. You should probably rename one
-of the classes to put the tests into different test cases.
-[  FAILED  ] MixedUpTestCaseTest.ThisShouldFailToo
-[----------] 2 tests from MixedUpTestCaseWithSameTestNameTest
-[ RUN      ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-[       OK ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-[ RUN      ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-gtest.cc:#: error: Failed
-All tests in the same test case must use the same test fixture
-class. However, in test case MixedUpTestCaseWithSameTestNameTest,
-you defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail
-using two different test fixture classes. This can happen if
-the two classes are from different namespaces or translation
-units and have the same name. You should probably rename one
-of the classes to put the tests into different test cases.
-[  FAILED  ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-[----------] 2 tests from TEST_F_before_TEST_in_same_test_case
-[ RUN      ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
-[       OK ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
-[ RUN      ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
-gtest.cc:#: error: Failed
-All tests in the same test case must use the same test fixture
-class, so mixing TEST_F and TEST in the same test case is
-illegal. In test case TEST_F_before_TEST_in_same_test_case,
-test DefinedUsingTEST_F is defined using TEST_F but
-test DefinedUsingTESTAndShouldFail is defined using TEST. You probably
-want to change the TEST to TEST_F or move it to another test
-case.
-[  FAILED  ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
-[----------] 2 tests from TEST_before_TEST_F_in_same_test_case
-[ RUN      ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
-[       OK ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
-[ RUN      ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
-gtest.cc:#: error: Failed
-All tests in the same test case must use the same test fixture
-class, so mixing TEST_F and TEST in the same test case is
-illegal. In test case TEST_before_TEST_F_in_same_test_case,
-test DefinedUsingTEST_FAndShouldFail is defined using TEST_F but
-test DefinedUsingTEST is defined using TEST. You probably
-want to change the TEST to TEST_F or move it to another test
-case.
-[  FAILED  ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
-[----------] 8 tests from ExpectNonfatalFailureTest
-[ RUN      ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
-[       OK ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
-[ RUN      ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
-[       OK ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
-[ RUN      ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
-[       OK ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
-[ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual: 0 failures
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
-[ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual: 2 failures
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure 1.
-
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure 2.
-
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
-[ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual:
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
-[ RUN      ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual: 0 failures
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
-[ RUN      ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual: 0 failures
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
-[----------] 8 tests from ExpectFatalFailureTest
-[ RUN      ] ExpectFatalFailureTest.CanReferenceGlobalVariables
-[       OK ] ExpectFatalFailureTest.CanReferenceGlobalVariables
-[ RUN      ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
-[       OK ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
-[ RUN      ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
-[       OK ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
-[ RUN      ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual: 0 failures
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
-[ RUN      ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual: 2 failures
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
-[ RUN      ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure.
-
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
-[ RUN      ] ExpectFatalFailureTest.FailsWhenStatementReturns
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual: 0 failures
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementReturns
-[ RUN      ] ExpectFatalFailureTest.FailsWhenStatementThrows
-(expecting a failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual: 0 failures
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementThrows
-[----------] 2 tests from TypedTest/0, where TypeParam = int
-[ RUN      ] TypedTest/0.Success
-[       OK ] TypedTest/0.Success
-[ RUN      ] TypedTest/0.Failure
-gtest_output_test_.cc:#: error: Value of: TypeParam()
-  Actual: 0
-Expected: 1
-Expected failure
-[  FAILED  ] TypedTest/0.Failure, where TypeParam = int
-[----------] 2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char
-[ RUN      ] Unsigned/TypedTestP/0.Success
-[       OK ] Unsigned/TypedTestP/0.Success
-[ RUN      ] Unsigned/TypedTestP/0.Failure
-gtest_output_test_.cc:#: error: Value of: TypeParam()
-  Actual: '\0'
-Expected: 1U
-Which is: 1
-Expected failure
-[  FAILED  ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
-[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
-[ RUN      ] Unsigned/TypedTestP/1.Success
-[       OK ] Unsigned/TypedTestP/1.Success
-[ RUN      ] Unsigned/TypedTestP/1.Failure
-gtest_output_test_.cc:#: error: Value of: TypeParam()
-  Actual: 0
-Expected: 1U
-Which is: 1
-Expected failure
-[  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
-[----------] 4 tests from ExpectFailureTest
-[ RUN      ] ExpectFailureTest.ExpectFatalFailure
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual:
-gtest_output_test_.cc:#: Success:
-Succeeded
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure.
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 fatal failure containing "Some other fatal failure expected."
-  Actual:
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-[  FAILED  ] ExpectFailureTest.ExpectFatalFailure
-[ RUN      ] ExpectFailureTest.ExpectNonFatalFailure
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual:
-gtest_output_test_.cc:#: Success:
-Succeeded
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual:
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure containing "Some other non-fatal failure."
-  Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure.
-
-[  FAILED  ] ExpectFailureTest.ExpectNonFatalFailure
-[ RUN      ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual:
-gtest_output_test_.cc:#: Success:
-Succeeded
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 fatal failure
-  Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure.
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 fatal failure containing "Some other fatal failure expected."
-  Actual:
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-[  FAILED  ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
-[ RUN      ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual:
-gtest_output_test_.cc:#: Success:
-Succeeded
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure
-  Actual:
-gtest_output_test_.cc:#: Fatal failure:
-Failed
-Expected fatal failure.
-
-(expecting 1 failure)
-gtest.cc:#: error: Expected: 1 non-fatal failure containing "Some other non-fatal failure."
-  Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
-Failed
-Expected non-fatal failure.
-
-[  FAILED  ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
-[----------] 1 test from PrintingFailingParams/FailingParamTest
-[ RUN      ] PrintingFailingParams/FailingParamTest.Fails/0
-gtest_output_test_.cc:#: error: Value of: GetParam()
-  Actual: 2
-Expected: 1
-[  FAILED  ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
-[----------] Global test environment tear-down
-BarEnvironment::TearDown() called.
-gtest_output_test_.cc:#: error: Failed
-Expected non-fatal failure.
-FooEnvironment::TearDown() called.
-gtest_output_test_.cc:#: error: Failed
-Expected fatal failure.
-[==========] 58 tests from 25 test cases ran.
-[  PASSED  ] 21 tests.
-[  FAILED  ] 37 tests, listed below:
-[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
-[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
-[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
-[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
-[  FAILED  ] SCOPED_TRACETest.ObeysScopes
-[  FAILED  ] SCOPED_TRACETest.WorksInLoop
-[  FAILED  ] SCOPED_TRACETest.WorksInSubroutine
-[  FAILED  ] SCOPED_TRACETest.CanBeNested
-[  FAILED  ] SCOPED_TRACETest.CanBeRepeated
-[  FAILED  ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
-[  FAILED  ] FatalFailureInFixtureConstructorTest.FailureInConstructor
-[  FAILED  ] NonFatalFailureInSetUpTest.FailureInSetUp
-[  FAILED  ] FatalFailureInSetUpTest.FailureInSetUp
-[  FAILED  ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
-[  FAILED  ] MixedUpTestCaseTest.ThisShouldFail
-[  FAILED  ] MixedUpTestCaseTest.ThisShouldFailToo
-[  FAILED  ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-[  FAILED  ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
-[  FAILED  ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
-[  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementReturns
-[  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementThrows
-[  FAILED  ] TypedTest/0.Failure, where TypeParam = int
-[  FAILED  ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
-[  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
-[  FAILED  ] ExpectFailureTest.ExpectFatalFailure
-[  FAILED  ] ExpectFailureTest.ExpectNonFatalFailure
-[  FAILED  ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
-[  FAILED  ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
-[  FAILED  ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
-
-37 FAILED TESTS
-YOU HAVE 1 DISABLED TEST
-
-Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
-[==========] Running 4 tests from 2 test cases.
-[----------] Global test environment set-up.
-[----------] 3 tests from FatalFailureTest
-[ RUN      ] FatalFailureTest.FatalFailureInSubroutine
-(expecting a failure that x should be 1)
-gtest_output_test_.cc:#: error: Value of: x
-  Actual: 2
-Expected: 1
-[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine (? ms)
-[ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
-(expecting a failure that x should be 1)
-gtest_output_test_.cc:#: error: Value of: x
-  Actual: 2
-Expected: 1
-[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
-[ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
-(expecting a failure on false)
-gtest_output_test_.cc:#: error: Value of: false
-  Actual: false
-Expected: true
-[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
-[----------] 3 tests from FatalFailureTest (? ms total)
-
-[----------] 1 test from LoggingTest
-[ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
-(expecting 2 failures on (3) >= (a[i]))
-i == 0
-i == 1
-gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 9
-i == 2
-i == 3
-gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 6
-[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
-[----------] 1 test from LoggingTest (? ms total)
-
-[----------] Global test environment tear-down
-[==========] 4 tests from 2 test cases ran. (? ms total)
-[  PASSED  ] 0 tests.
-[  FAILED  ] 4 tests, listed below:
-[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
-[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
-[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
-[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
-
-4 FAILED TESTS
-YOU HAVE 1 DISABLED TEST
-
-Note: Google Test filter = *DISABLED_*
-[==========] Running 1 test from 1 test case.
-[----------] Global test environment set-up.
-[----------] 1 test from DisabledTestsWarningTest
-[ RUN      ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
-[       OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
-[----------] Global test environment tear-down
-[==========] 1 test from 1 test case ran.
-[  PASSED  ] 1 test.
-Note: Google Test filter = PassingTest.*
-Note: This is test shard 1 of 2.
-[==========] Running 1 test from 1 test case.
-[----------] Global test environment set-up.
-[----------] 1 test from PassingTest
-[ RUN      ] PassingTest.PassingTest2
-[       OK ] PassingTest.PassingTest2
-[----------] Global test environment tear-down
-[==========] 1 test from 1 test case ran.
-[  PASSED  ] 1 test.
-
-YOU HAVE 1 DISABLED TEST
-
@ -52,6 +52,7 @@ TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
      || testing::GTEST_FLAG(show_internal_stack_frames)
      || testing::GTEST_FLAG(shuffle)
      || testing::GTEST_FLAG(stack_trace_depth) > 0
      || testing::GTEST_FLAG(stream_result_to) != "unknown"
      || testing::GTEST_FLAG(throw_on_failure);
  EXPECT_TRUE(dummy || !dummy);  // Suppresses warning that dummy is unused.
}
@ -125,6 +126,7 @@ using testing::GTEST_FLAG(repeat);
using testing::GTEST_FLAG(show_internal_stack_frames);
using testing::GTEST_FLAG(shuffle);
using testing::GTEST_FLAG(stack_trace_depth);
using testing::GTEST_FLAG(stream_result_to);
using testing::GTEST_FLAG(throw_on_failure);
using testing::IsNotSubstring;
using testing::IsSubstring;
@ -1718,6 +1720,7 @@ class GTestFlagSaverTest : public Test {
    GTEST_FLAG(repeat) = 1;
    GTEST_FLAG(shuffle) = false;
    GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
    GTEST_FLAG(stream_result_to) = "";
    GTEST_FLAG(throw_on_failure) = false;
  }

@ -1744,6 +1747,7 @@ class GTestFlagSaverTest : public Test {
    EXPECT_EQ(1, GTEST_FLAG(repeat));
    EXPECT_FALSE(GTEST_FLAG(shuffle));
    EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG(stack_trace_depth));
    EXPECT_STREQ("", GTEST_FLAG(stream_result_to).c_str());
    EXPECT_FALSE(GTEST_FLAG(throw_on_failure));

    GTEST_FLAG(also_run_disabled_tests) = true;
@ -1759,6 +1763,7 @@ class GTestFlagSaverTest : public Test {
    GTEST_FLAG(repeat) = 100;
    GTEST_FLAG(shuffle) = true;
    GTEST_FLAG(stack_trace_depth) = 1;
    GTEST_FLAG(stream_result_to) = "localhost:1234";
    GTEST_FLAG(throw_on_failure) = true;
  }

 private:
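The three GTestFlagSaverTest hunks above exercise a save/override/restore cycle for the new flag: the saver records the default "", the test overwrites it with "localhost:1234", and the fixture expects the original value back afterwards. The same idea, scoped to just this flag, can be written as a small RAII helper. This is a minimal sketch, not part of the patch; ScopedStreamResultTo is a hypothetical name, and it assumes only the public GTEST_FLAG accessor:

#include <string>
#include <gtest/gtest.h>

// Saves the current --gtest_stream_result_to value on construction and
// restores it on destruction, so an override cannot leak between tests.
class ScopedStreamResultTo {
 public:
  explicit ScopedStreamResultTo(const char* value)
      : saved_(testing::GTEST_FLAG(stream_result_to).c_str()) {
    testing::GTEST_FLAG(stream_result_to) = value;
  }
  ~ScopedStreamResultTo() {
    testing::GTEST_FLAG(stream_result_to) = saved_.c_str();
  }

 private:
  std::string saved_;  // Copy of the original flag value.
};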
@ -5142,6 +5147,7 @@ struct Flags {
        repeat(1),
        shuffle(false),
        stack_trace_depth(kMaxStackTraceDepth),
        stream_result_to(""),
        throw_on_failure(false) {}

  // Factory methods.
@ -5242,6 +5248,14 @@ struct Flags {
    return flags;
  }

  // Creates a Flags struct where the GTEST_FLAG(stream_result_to) flag has
  // the given value.
  static Flags StreamResultTo(const char* stream_result_to) {
    Flags flags;
    flags.stream_result_to = stream_result_to;
    return flags;
  }

  // Creates a Flags struct where the gtest_throw_on_failure flag has
  // the given value.
  static Flags ThrowOnFailure(bool throw_on_failure) {
@ -5263,6 +5277,7 @@ struct Flags {
  Int32 repeat;
  bool shuffle;
  Int32 stack_trace_depth;
  const char* stream_result_to;
  bool throw_on_failure;
};

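With the default value, the factory method, and the struct field in place, the parsing tests can state an expected flag configuration in a single expression. A short usage sketch of Flags::StreamResultTo as declared above (the host:port value is illustrative):

Flags expected = Flags::StreamResultTo("localhost:1234");
EXPECT_STREQ("localhost:1234", expected.stream_result_to);
EXPECT_FALSE(expected.throw_on_failure);  // Other fields keep their defaults.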
@ -5283,6 +5298,7 @@ class InitGoogleTestTest : public Test {
    GTEST_FLAG(repeat) = 1;
    GTEST_FLAG(shuffle) = false;
    GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
    GTEST_FLAG(stream_result_to) = "";
    GTEST_FLAG(throw_on_failure) = false;
  }

@ -5311,8 +5327,10 @@ class InitGoogleTestTest : public Test {
    EXPECT_EQ(expected.random_seed, GTEST_FLAG(random_seed));
    EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat));
    EXPECT_EQ(expected.shuffle, GTEST_FLAG(shuffle));
    EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure));
    EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG(stack_trace_depth));
    EXPECT_STREQ(expected.stream_result_to,
                 GTEST_FLAG(stream_result_to).c_str());
    EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure));
  }

  // Parses a command line (specified by argc1 and argv1), then
@ -5973,6 +5991,22 @@ TEST_F(InitGoogleTestTest, StackTraceDepth) {
  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::StackTraceDepth(5), false);
}

TEST_F(InitGoogleTestTest, StreamResultTo) {
  const char* argv[] = {
    "foo.exe",
    "--gtest_stream_result_to=localhost:1234",
    NULL
  };

  const char* argv2[] = {
    "foo.exe",
    NULL
  };

  GTEST_TEST_PARSING_FLAGS_(
      argv, argv2, Flags::StreamResultTo("localhost:1234"), false);
}

// Tests parsing --gtest_throw_on_failure.
TEST_F(InitGoogleTestTest, ThrowOnFailureWithoutValue) {
  const char* argv[] = {
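To try the feature end to end once the flag is parsed as above, something must be listening on the given host:port: the patch streams results over a plain TCP connection on supported platforms, so any TCP sink will do. Below is a minimal, illustrative POSIX collector, not part of the patch; the port number 1234 merely matches the examples above, and it simply prints whatever the test binary sends:

#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>
#include <cstdio>
#include <cstring>

int main() {
  const int server_fd = socket(AF_INET, SOCK_STREAM, 0);
  if (server_fd < 0) { perror("socket"); return 1; }

  sockaddr_in addr;
  std::memset(&addr, 0, sizeof(addr));
  addr.sin_family = AF_INET;
  addr.sin_addr.s_addr = htonl(INADDR_ANY);
  addr.sin_port = htons(1234);  // Example port; match --gtest_stream_result_to.

  if (bind(server_fd, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) < 0 ||
      listen(server_fd, 1) < 0) {
    perror("bind/listen");
    return 1;
  }

  // Accept one connection from the test binary and echo its stream to stdout
  // until the binary finishes and closes the socket.
  const int client_fd = accept(server_fd, NULL, NULL);
  if (client_fd < 0) { perror("accept"); return 1; }

  char buffer[4096];
  ssize_t n;
  while ((n = read(client_fd, buffer, sizeof(buffer))) > 0)
    fwrite(buffer, 1, static_cast<size_t>(n), stdout);

  close(client_fd);
  close(server_fd);
  return 0;
}

Run the collector in one terminal, then start the tests in another with ./foo_test --gtest_stream_result_to=localhost:1234 (the binary name is a placeholder).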