Makes gtest compile clean with gcc -Wall -Werror (by Zhanyong Wan); refactors scons script (by Vlad Losev).
commit 16b9431ae0
parent c214ebc830

run_tests.py | 19
@@ -98,16 +98,16 @@ KNOWN BUILD DIRECTORIES
 defines them as follows (the default build directory is the first one
 listed in each group):
 On Windows:
-  <gtest root>/scons/build/win-dbg8/gtest/scons/
-  <gtest root>/scons/build/win-opt8/gtest/scons/
-  <gtest root>/scons/build/win-dbg/gtest/scons/
-  <gtest root>/scons/build/win-opt/gtest/scons/
+  <gtest root>/scons/build/win-dbg8/scons/
+  <gtest root>/scons/build/win-opt8/scons/
+  <gtest root>/scons/build/win-dbg/scons/
+  <gtest root>/scons/build/win-opt/scons/
 On Mac:
-  <gtest root>/scons/build/mac-dbg/gtest/scons/
-  <gtest root>/scons/build/mac-opt/gtest/scons/
+  <gtest root>/scons/build/mac-dbg/scons/
+  <gtest root>/scons/build/mac-opt/scons/
 On other platforms:
-  <gtest root>/scons/build/dbg/gtest/scons/
-  <gtest root>/scons/build/opt/gtest/scons/
+  <gtest root>/scons/build/dbg/scons/
+  <gtest root>/scons/build/opt/scons/

 AUTHOR
 Written by Zhanyong Wan (wan@google.com)
@@ -177,8 +177,7 @@ class TestRunner(object):
     """Returns the build directory for a given configuration."""

     return self.os.path.normpath(
-        self.os.path.join(self.script_dir,
-                          'scons/build/%s/gtest/scons' % config))
+        self.os.path.join(self.script_dir, 'scons/build', config, 'scons'))

   def Run(self, args):
     """Runs the executable with given args (args[0] is the executable name).
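A minimal sketch of the path the rewritten join produces; the script_dir and config values below are made-up inputs rather than anything taken from run_tests.py:

import os.path

script_dir = '/home/user/gtest'   # hypothetical checkout location
config = 'dbg'                    # hypothetical configuration name
build_dir = os.path.normpath(
    os.path.join(script_dir, 'scons/build', config, 'scons'))
print(build_dir)  # /home/user/gtest/scons/build/dbg/scons on POSIX systems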
@@ -109,13 +109,25 @@ def NewEnvironment(env, type):
   return new_env;


+def Remove(env, attribute, value):
+  """Removes the given attribute value from the environment."""
+
+  attribute_values = env[attribute]
+  if value in attribute_values:
+    attribute_values.remove(value)
+
+
 Import('env')
 env = NewEnvironment(env, '')

-# Note: The relative paths in SConscript files are relative to the location of
-# the SConscript file itself. To make a path relative to the location of the
-# main SConstruct file, prepend the path with the # sign.
+# Note: The relative paths in SConscript files are relative to the location
+# of the SConscript file itself. To make a path relative to the location of
+# the main SConstruct file, prepend the path with the # sign.
+#
+# But if a project uses variant builds without source duplication, the above
+# rule gets muddied a bit. In that case the paths must be counted from the
+# location of the copy of the SConscript file in scons/build/<config>/scons.
+#
 # Include paths to gtest headers are relative to either the gtest
 # directory or the 'include' subdirectory of it, and this SConscript
 # file is one directory deeper than the gtest directory.
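The Remove() helper added above is used throughout the rest of the SConscript changes. As a rough, self-contained sketch of its behavior, the same function run against a plain dict standing in for a SCons Environment (only env[attribute] indexing is assumed):

def Remove(env, attribute, value):
  """Removes the given attribute value from the environment."""
  attribute_values = env[attribute]
  if value in attribute_values:
    attribute_values.remove(value)

fake_env = {'CCFLAGS': ['-Wall', '-Werror', '-g']}  # made-up flag list
Remove(fake_env, 'CCFLAGS', '-Werror')  # drops the flag in place
Remove(fake_env, 'CCFLAGS', '-WX')      # missing values are silently ignored
print(fake_env['CCFLAGS'])              # ['-Wall', '-g']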
@@ -124,32 +136,33 @@ env.Prepend(CPPPATH = ['..', '../include'])
 env_use_own_tuple = NewEnvironment(env, 'use_own_tuple')
 env_use_own_tuple.Append(CPPDEFINES = 'GTEST_USE_OWN_TR1_TUPLE=1')

-env_with_exceptions = NewEnvironment(env, 'ex')
+# Needed to allow gtest_unittest.cc, which triggers a gcc warning when
+# testing EXPECT_EQ(NULL, ptr), to compile.
+env_warning_ok = NewEnvironment(env, 'warning_ok')
+if env_warning_ok['PLATFORM'] == 'win32':
+  Remove(env_warning_ok, 'CCFLAGS', '-WX')
+else:
+  Remove(env_warning_ok, 'CCFLAGS', '-Werror')
+
+env_with_exceptions = NewEnvironment(env_warning_ok, 'ex')
 if env_with_exceptions['PLATFORM'] == 'win32':
   env_with_exceptions.Append(CCFLAGS=['/EHsc'])
   env_with_exceptions.Append(CPPDEFINES='_HAS_EXCEPTIONS=1')
-  cppdefines = env_with_exceptions['CPPDEFINES']
   # Undoes the _TYPEINFO_ hack, which is unnecessary and only creates
   # trouble when exceptions are enabled.
-  if '_TYPEINFO_' in cppdefines:
-    cppdefines.remove('_TYPEINFO_')
-  if '_HAS_EXCEPTIONS=0' in cppdefines:
-    cppdefines.remove('_HAS_EXCEPTIONS=0')
+  Remove(env_with_exceptions, 'CPPDEFINES', '_TYPEINFO_')
+  Remove(env_with_exceptions, 'CPPDEFINES', '_HAS_EXCEPTIONS=0')
 else:
   env_with_exceptions.Append(CCFLAGS='-fexceptions')
-  ccflags = env_with_exceptions['CCFLAGS']
-  if '-fno-exceptions' in ccflags:
-    ccflags.remove('-fno-exceptions')
+  Remove(env_with_exceptions, 'CCFLAGS', '-fno-exceptions')

 # We need to disable some optimization flags for some tests on
 # Windows; otherwise the redirection of stdout does not work
 # (apparently because of a compiler bug).
 env_less_optimized = NewEnvironment(env, 'less_optimized')
 if env_less_optimized['PLATFORM'] == 'win32':
-  linker_flags = env_less_optimized['LINKFLAGS']
   for flag in ['/O1', '/Os', '/Og', '/Oy']:
-    if flag in linker_flags:
-      linker_flags.remove(flag)
+    Remove(env_less_optimized, 'LINKFLAGS', flag)

 # Assuming POSIX-like environment with GCC.
 # TODO(vladl@google.com): sniff presence of pthread_atfork instead of
@@ -159,7 +172,7 @@ if env_with_threads['PLATFORM'] != 'win32':
   env_with_threads.Append(CCFLAGS=['-pthread'])
   env_with_threads.Append(LINKFLAGS=['-pthread'])

-env_without_rtti = NewEnvironment(env, 'no_rtti')
+env_without_rtti = NewEnvironment(env_warning_ok, 'no_rtti')
 if env_without_rtti['PLATFORM'] == 'win32':
   env_without_rtti.Append(CCFLAGS=['/GR-'])
 else:
@@ -169,12 +182,19 @@ else:
 ############################################################
 # Helpers for creating build targets.

+# Caches object file targets built by GtestObject to allow passing the
+# same source file with the same environment twice into the function as a
+# convenience.
+_all_objects = {}
+
 def GtestObject(build_env, source):
   """Returns a target to build an object file from the given .cc source file."""

-  return build_env.Object(
-      target=os.path.basename(source).rstrip('.cc') + build_env['OBJ_SUFFIX'],
-      source=source)
+  object_name = os.path.basename(source).rstrip('.cc') + build_env['OBJ_SUFFIX']
+  if object_name not in _all_objects:
+    _all_objects[object_name] = build_env.Object(target=object_name,
+                                                 source=source)
+  return _all_objects[object_name]


 def GtestStaticLibraries(build_env):
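A self-contained sketch of the caching idea behind _all_objects. FakeEnv is a made-up stand-in rather than the real SCons Environment API; it only illustrates why asking for the same source twice now returns the previously built target instead of defining a duplicate:

import os

_all_objects = {}

class FakeEnv(dict):
  """Stand-in for a SCons Environment; Object() just returns a marker string."""
  def Object(self, target, source):
    return 'obj:' + target

def GtestObject(build_env, source):
  object_name = os.path.basename(source).rstrip('.cc') + build_env['OBJ_SUFFIX']
  if object_name not in _all_objects:
    _all_objects[object_name] = build_env.Object(target=object_name,
                                                 source=source)
  return _all_objects[object_name]

env = FakeEnv(OBJ_SUFFIX='_ex')
first = GtestObject(env, '../samples/sample1.cc')
second = GtestObject(env, '../samples/sample1.cc')
print(first is second)  # True: the second call is served from the cache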
@@ -206,17 +226,16 @@ def GtestBinary(build_env, target, gtest_libs, sources):
     gtest_libs: The gtest library or the list of libraries to link.
     sources: A list of source files in the target.
   """
-  if build_env['OBJ_SUFFIX']:
-    srcs = []  # The object targets corresponding to sources.
-    for src in sources:
-      if type(src) is str:
-        srcs.append(GtestObject(build_env, src))
-      else:
-        srcs.append(src)
-  else:
-    srcs = sources
+  srcs = []  # The object targets corresponding to sources.
+  for src in sources:
+    if type(src) is str:
+      srcs.append(GtestObject(build_env, src))
+    else:
+      srcs.append(src)

-  if type(gtest_libs) != type(list()):
+  if not gtest_libs:
+    gtest_libs = []
+  elif type(gtest_libs) != type(list()):
     gtest_libs = [gtest_libs]
   binary = build_env.Program(target=target, source=srcs, LIBS=gtest_libs)
   if 'EXE_OUTPUT' in build_env.Dictionary():
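The reworked gtest_libs handling above can be read on its own. NormalizeLibs below is a hypothetical name for just that fragment; it shows that an empty or None argument now becomes an empty LIBS list instead of a one-element list wrapping None:

def NormalizeLibs(gtest_libs):
  """Hypothetical extraction of the gtest_libs handling shown above."""
  if not gtest_libs:
    gtest_libs = []
  elif type(gtest_libs) != type(list()):
    gtest_libs = [gtest_libs]
  return gtest_libs

print(NormalizeLibs(None))                     # []
print(NormalizeLibs('gtest_main'))             # ['gtest_main']
print(NormalizeLibs(['gtest', 'gtest_main']))  # ['gtest', 'gtest_main']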
@@ -301,11 +320,11 @@ GtestTest(env, 'gtest_xml_outfile1_test_', gtest_main)
 GtestTest(env, 'gtest_xml_outfile2_test_', gtest_main)
 GtestTest(env, 'gtest_xml_output_unittest_', gtest_main)
 GtestTest(env, 'gtest-unittest-api_test', gtest)
-GtestTest(env, 'gtest_unittest', gtest_main)

 ############################################################
 # Tests targets using custom environments.

+GtestTest(env_warning_ok, 'gtest_unittest', gtest_main)
 GtestTest(env_with_exceptions, 'gtest_output_test_', gtest_ex)
 GtestTest(env_with_exceptions, 'gtest_throw_on_failure_ex_test', gtest_ex)
 GtestTest(env_with_threads, 'gtest-death-test_test', gtest_main)
@@ -332,14 +351,15 @@ GtestBinary(env_without_rtti, 'gtest_no_rtti_test', gtest_main_no_rtti,
 # my_environment = Environment(variables = vars, ...)
 # Then, in the command line use GTEST_BUILD_SAMPLES=true to enable them.
 if env.get('GTEST_BUILD_SAMPLES', False):
-  sample1_obj = env.Object('../samples/sample1.cc')
-  GtestSample(env, 'sample1_unittest', additional_sources=[sample1_obj])
+  GtestSample(env, 'sample1_unittest',
+              additional_sources=['../samples/sample1.cc'])
   GtestSample(env, 'sample2_unittest',
               additional_sources=['../samples/sample2.cc'])
   GtestSample(env, 'sample3_unittest')
   GtestSample(env, 'sample4_unittest',
               additional_sources=['../samples/sample4.cc'])
-  GtestSample(env, 'sample5_unittest', additional_sources=[sample1_obj])
+  GtestSample(env, 'sample5_unittest',
+              additional_sources=['../samples/sample1.cc'])
   GtestSample(env, 'sample6_unittest')
   GtestSample(env, 'sample7_unittest')
   GtestSample(env, 'sample8_unittest')
@@ -93,7 +93,7 @@ const char kShuffleFlag[] = "shuffle";
 const char kThrowOnFailureFlag[] = "throw_on_failure";

 // A valid random seed must be in [1, kMaxRandomSeed].
-const unsigned int kMaxRandomSeed = 99999;
+const int kMaxRandomSeed = 99999;

 // Returns the current time in milliseconds.
 TimeInMillis GetTimeInMillis();
@@ -108,7 +108,8 @@ inline int GetRandomSeedFromFlag(Int32 random_seed_flag) {
   // Normalizes the actual seed to range [1, kMaxRandomSeed] such that
   // it's easy to type.
   const int normalized_seed =
-      static_cast<int>((raw_seed - 1U) % kMaxRandomSeed) + 1;
+      static_cast<int>((raw_seed - 1U) %
+                       static_cast<unsigned int>(kMaxRandomSeed)) + 1;
   return normalized_seed;
 }

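Read in Python terms, the normalization above folds any seed value into [1, kMaxRandomSeed]. The sketch below assumes raw_seed is the flag value viewed as a 32-bit unsigned integer (that assignment is outside this hunk), and the added static_cast<unsigned int> presumably keeps both operands of % unsigned now that kMaxRandomSeed is a plain int:

kMaxRandomSeed = 99999

def NormalizeSeed(random_seed_flag):
  """Hypothetical Python rendering of the arithmetic shown above."""
  raw_seed = random_seed_flag & 0xFFFFFFFF       # 32-bit unsigned view
  return (raw_seed - 1) % (1 << 32) % kMaxRandomSeed + 1

print(NormalizeSeed(1))       # 1
print(NormalizeSeed(100000))  # 1: wraps around past kMaxRandomSeed
print(NormalizeSeed(0))       # 10246: the unsigned wrap of 0 - 1, reduced mod 99999, plus 1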
@@ -1057,16 +1057,16 @@ TEST(ParseNaturalNumberTest, AcceptsValidNumbers) {

   result = 0;
   ASSERT_TRUE(ParseNaturalNumber(String("123"), &result));
-  EXPECT_EQ(123, result);
+  EXPECT_EQ(123U, result);

   // Check 0 as an edge case.
   result = 1;
   ASSERT_TRUE(ParseNaturalNumber(String("0"), &result));
-  EXPECT_EQ(0, result);
+  EXPECT_EQ(0U, result);

   result = 1;
   ASSERT_TRUE(ParseNaturalNumber(String("00000"), &result));
-  EXPECT_EQ(0, result);
+  EXPECT_EQ(0U, result);
 }

 TEST(ParseNaturalNumberTest, AcceptsTypeLimits) {
@@ -91,7 +91,7 @@ void* ThreadFunc(void* data) {
 }

 TEST(GetThreadCountTest, ReturnsCorrectValue) {
-  EXPECT_EQ(1, GetThreadCount());
+  EXPECT_EQ(1U, GetThreadCount());
   pthread_mutex_t mutex;
   pthread_attr_t attr;
   pthread_t thread_id;
@@ -106,7 +106,7 @@ TEST(GetThreadCountTest, ReturnsCorrectValue) {
   const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);
   ASSERT_EQ(0, pthread_attr_destroy(&attr));
   ASSERT_EQ(0, status);
-  EXPECT_EQ(2, GetThreadCount());
+  EXPECT_EQ(2U, GetThreadCount());
   pthread_mutex_unlock(&mutex);

   void* dummy;
@@ -124,12 +124,12 @@ TEST(GetThreadCountTest, ReturnsCorrectValue) {
     time.tv_nsec = 100L * 1000 * 1000;  // .1 seconds.
     nanosleep(&time, NULL);
   }
-  EXPECT_EQ(1, GetThreadCount());
+  EXPECT_EQ(1U, GetThreadCount());
   pthread_mutex_destroy(&mutex);
 }
 #else
 TEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) {
-  EXPECT_EQ(0, GetThreadCount());
+  EXPECT_EQ(0U, GetThreadCount());
 }
 #endif  // GTEST_OS_MAC

@@ -100,10 +100,10 @@ TYPED_TEST(CommonTest, ValuesAreCorrect) {
   // Typedefs in the fixture class template can be visited via the
   // "typename TestFixture::" prefix.
   typename TestFixture::List empty;
-  EXPECT_EQ(0, empty.size());
+  EXPECT_EQ(0U, empty.size());

   typename TestFixture::IntSet empty2;
-  EXPECT_EQ(0, empty2.size());
+  EXPECT_EQ(0U, empty2.size());

   // Non-static members of the fixture class must be visited via
   // 'this', as required by C++ for class templates.
@@ -55,7 +55,7 @@ TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) {

 TYPED_TEST_P(ContainerTest, InitialSizeIsZero) {
   TypeParam container;
-  EXPECT_EQ(0, container.size());
+  EXPECT_EQ(0U, container.size());
 }

 REGISTER_TYPED_TEST_CASE_P(ContainerTest,
@@ -743,11 +743,11 @@ class TypedTestP : public testing::Test {
 TYPED_TEST_CASE_P(TypedTestP);

 TYPED_TEST_P(TypedTestP, Success) {
-  EXPECT_EQ(0, TypeParam());
+  EXPECT_EQ(0U, TypeParam());
 }

 TYPED_TEST_P(TypedTestP, Failure) {
-  EXPECT_EQ(1, TypeParam()) << "Expected failure";
+  EXPECT_EQ(1U, TypeParam()) << "Expected failure";
 }

 REGISTER_TYPED_TEST_CASE_P(TypedTestP, Success, Failure);
@@ -390,7 +390,8 @@ Expected failure
 gtest_output_test_.cc:#: Failure
 Value of: TypeParam()
 Actual: \0
-Expected: 1
+Expected: 1U
+Which is: 1
 Expected failure
 [0;31m[ FAILED ] [mUnsigned/TypedTestP/0.Failure
 [0;32m[----------] [m2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
@@ -400,7 +401,8 @@ Expected failure
 gtest_output_test_.cc:#: Failure
 Value of: TypeParam()
 Actual: 0
-Expected: 1
+Expected: 1U
+Which is: 1
 Expected failure
 [0;31m[ FAILED ] [mUnsigned/TypedTestP/1.Failure
 [0;32m[----------] [m4 tests from ExpectFailureTest
@@ -376,7 +376,8 @@ Expected failure
 [ RUN ] Unsigned/TypedTestP/0.Failure
 gtest_output_test_.cc:#: error: Value of: TypeParam()
 Actual: \0
-Expected: 1
+Expected: 1U
+Which is: 1
 Expected failure
 [ FAILED ] Unsigned/TypedTestP/0.Failure
 [----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
@@ -385,7 +386,8 @@ Expected failure
 [ RUN ] Unsigned/TypedTestP/1.Failure
 gtest_output_test_.cc:#: error: Value of: TypeParam()
 Actual: 0
-Expected: 1
+Expected: 1U
+Which is: 1
 Expected failure
 [ FAILED ] Unsigned/TypedTestP/1.Failure
 [----------] 4 tests from ExpectFailureTest
@@ -41,6 +41,12 @@ import unittest
 sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), os.pardir))
 import run_tests

+
+GTEST_DBG_DIR = 'scons/build/dbg/scons'
+GTEST_OPT_DIR = 'scons/build/opt/scons'
+GTEST_OTHER_DIR = 'scons/build/other/scons'
+
+
 def AddExeExtension(path):
   """Appends .exe to the path on Windows or Cygwin."""

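The three module-level constants added above get substituted through the rest of run_tests_test.py. A small illustration of the (build directory, test path) tuples they are combined into in the expectations that follow:

GTEST_DBG_DIR = 'scons/build/dbg/scons'
GTEST_OPT_DIR = 'scons/build/opt/scons'

# (build directory, test binary path) pairs, as used in the expectations below.
expected_binaries = [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
                     (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]
print(expected_binaries[0])
# ('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')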
@@ -182,9 +188,8 @@ class GetTestsToRunTest(unittest.TestCase):
   def setUp(self):
     self.fake_os = FakeOs(FakePath(
         current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
-        known_paths=[
-            AddExeExtension('scons/build/dbg/gtest/scons/gtest_unittest'),
-            AddExeExtension('scons/build/opt/gtest/scons/gtest_unittest'),
+        known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
+                     AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
                      'test/gtest_color_test.py']))
     self.fake_configurations = ['dbg', 'opt']
     self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
@@ -202,19 +207,17 @@ class GetTestsToRunTest(unittest.TestCase):
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))

     # An explicitly specified directory.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+           [GTEST_DBG_DIR, 'gtest_unittest'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))

     # A particular configuration.
     self.AssertResultsEqual(
@@ -224,8 +227,7 @@ class GetTestsToRunTest(unittest.TestCase):
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/other/gtest/scons',
-          'scons/build/other/gtest/scons/gtest_unittest')]))
+        [(GTEST_OTHER_DIR, GTEST_OTHER_DIR + '/gtest_unittest')]))

     # All available configurations
     self.AssertResultsEqual(
@@ -235,10 +237,8 @@ class GetTestsToRunTest(unittest.TestCase):
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest'),
-         ('scons/build/opt/gtest/scons',
-          'scons/build/opt/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+         (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))

     # All built configurations (unbuilt don't cause failure).
     self.AssertResultsEqual(
@@ -248,47 +248,40 @@ class GetTestsToRunTest(unittest.TestCase):
            True,
            available_configurations=self.fake_configurations + ['unbuilt']),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest'),
-         ('scons/build/opt/gtest/scons',
-          'scons/build/opt/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+         (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))

     # A combination of an explicit directory and a configuration.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+           [GTEST_DBG_DIR, 'gtest_unittest'],
            'opt',
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest'),
-         ('scons/build/opt/gtest/scons',
-          'scons/build/opt/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+         (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))

     # Same test specified in an explicit directory and via a configuration.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+           [GTEST_DBG_DIR, 'gtest_unittest'],
            'dbg',
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))

     # All built configurations + explicit directory + explicit configuration.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+           [GTEST_DBG_DIR, 'gtest_unittest'],
            'opt',
            True,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest'),
-         ('scons/build/opt/gtest/scons',
-          'scons/build/opt/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+         (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))

   def testPythonTestsOnly(self):
     """Exercises GetTestsToRun with parameters designating Python tests only."""
@@ -300,17 +293,17 @@ class GetTestsToRunTest(unittest.TestCase):
            '',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
        []))

     # An explicitly specified directory.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'],
+           [GTEST_DBG_DIR, 'test/gtest_color_test.py'],
            '',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
        []))

     # A particular configuration.
@@ -320,7 +313,7 @@ class GetTestsToRunTest(unittest.TestCase):
            'other',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/other/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_OTHER_DIR, 'test/gtest_color_test.py')],
        []))

     # All available configurations
@@ -330,8 +323,8 @@ class GetTestsToRunTest(unittest.TestCase):
            'all',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
-         ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+         (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
        []))

     # All built configurations (unbuilt don't cause failure).
@@ -341,40 +334,40 @@ class GetTestsToRunTest(unittest.TestCase):
            '',
            True,
            available_configurations=self.fake_configurations + ['unbuilt']),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
-         ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+         (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
        []))

     # A combination of an explicit directory and a configuration.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
+           [GTEST_DBG_DIR, 'gtest_color_test.py'],
            'opt',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
-         ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+         (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
        []))

     # Same test specified in an explicit directory and via a configuration.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
+           [GTEST_DBG_DIR, 'gtest_color_test.py'],
            'dbg',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
        []))

     # All built configurations + explicit directory + explicit configuration.
     self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
-           ['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
+           [GTEST_DBG_DIR, 'gtest_color_test.py'],
            'opt',
            True,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
-         ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+         (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
        []))

   def testCombinationOfBinaryAndPythonTests(self):
@@ -389,9 +382,8 @@ class GetTestsToRunTest(unittest.TestCase):
            '',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest')]))
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))

     # Specifying both binary and Python tests.
     self.AssertResultsEqual(
@@ -400,9 +392,8 @@ class GetTestsToRunTest(unittest.TestCase):
            '',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest')]))
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))

     # Specifying binary tests suppresses Python tests.
     self.AssertResultsEqual(
@@ -412,8 +403,7 @@ class GetTestsToRunTest(unittest.TestCase):
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('scons/build/dbg/gtest/scons',
-          'scons/build/dbg/gtest/scons/gtest_unittest')]))
+        [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))

     # Specifying Python tests suppresses binary tests.
     self.AssertResultsEqual(
@@ -422,7 +412,7 @@ class GetTestsToRunTest(unittest.TestCase):
            '',
            False,
            available_configurations=self.fake_configurations),
-       ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+       ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
        []))

   def testIgnoresNonTestFiles(self):
@@ -430,8 +420,7 @@ class GetTestsToRunTest(unittest.TestCase):

     self.fake_os = FakeOs(FakePath(
         current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
-        known_paths=[
-            AddExeExtension('scons/build/dbg/gtest/scons/gtest_nontest'),
+        known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_nontest'),
                      'test/']))
     self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
                                             injected_subprocess=None,
@@ -453,8 +442,8 @@ class GetTestsToRunTest(unittest.TestCase):
         current_dir=os.path.abspath('/a/b/c'),
         known_paths=[
             '/a/b/c/',
-            AddExeExtension('/d/scons/build/dbg/gtest/scons/gtest_unittest'),
-            AddExeExtension('/d/scons/build/opt/gtest/scons/gtest_unittest'),
+            AddExeExtension('/d/' + GTEST_DBG_DIR + '/gtest_unittest'),
+            AddExeExtension('/d/' + GTEST_OPT_DIR + '/gtest_unittest'),
             '/d/test/gtest_color_test.py']))
     self.fake_configurations = ['dbg', 'opt']
     self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
@@ -468,8 +457,7 @@ class GetTestsToRunTest(unittest.TestCase):
            False,
            available_configurations=self.fake_configurations),
        ([],
-        [('/d/scons/build/dbg/gtest/scons',
-          '/d/scons/build/dbg/gtest/scons/gtest_unittest')]))
+        [('/d/' + GTEST_DBG_DIR, '/d/' + GTEST_DBG_DIR + '/gtest_unittest')]))

     # A Python test.
     self.AssertResultsEqual(
@@ -478,8 +466,7 @@ class GetTestsToRunTest(unittest.TestCase):
            '',
            False,
            available_configurations=self.fake_configurations),
-       ([('/d/scons/build/dbg/gtest/scons', '/d/test/gtest_color_test.py')],
-        []))
+       ([('/d/' + GTEST_DBG_DIR, '/d/test/gtest_color_test.py')], []))


   def testNonTestBinary(self):
|
||||||
|
|
||||||
self.fake_os = FakeOs(FakePath(
|
self.fake_os = FakeOs(FakePath(
|
||||||
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
|
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
|
||||||
known_paths=['scons/build/dbg/gtest/scons/gtest_test', 'test/']))
|
known_paths=['/d/' + GTEST_DBG_DIR + '/gtest_test', 'test/']))
|
||||||
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
|
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
|
||||||
injected_subprocess=None,
|
injected_subprocess=None,
|
||||||
injected_script_dir='.')
|
injected_script_dir='.')
|
||||||
|
@ -540,8 +527,8 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.fake_os = FakeOs(FakePath(
|
self.fake_os = FakeOs(FakePath(
|
||||||
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
|
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
|
||||||
known_paths=[
|
known_paths=[
|
||||||
AddExeExtension('scons/build/dbg/gtest/scons/gtest_unittest'),
|
AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
|
||||||
AddExeExtension('scons/build/opt/gtest/scons/gtest_unittest'),
|
AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
|
||||||
'test/gtest_color_test.py']))
|
'test/gtest_color_test.py']))
|
||||||
self.fake_configurations = ['dbg', 'opt']
|
self.fake_configurations = ['dbg', 'opt']
|
||||||
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
|
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
|
||||||
|
@ -554,7 +541,7 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.fake_os.spawn_impl = self.SpawnSuccess
|
self.fake_os.spawn_impl = self.SpawnSuccess
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.test_runner.RunTests(
|
self.test_runner.RunTests(
|
||||||
[('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
|
[(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
|
||||||
[]),
|
[]),
|
||||||
0)
|
0)
|
||||||
self.assertEqual(self.num_spawn_calls, 1)
|
self.assertEqual(self.num_spawn_calls, 1)
|
||||||
|
@ -566,8 +553,7 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.test_runner.RunTests(
|
self.test_runner.RunTests(
|
||||||
[],
|
[],
|
||||||
[('scons/build/dbg/gtest/scons',
|
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
|
||||||
'scons/build/dbg/gtest/scons/gtest_unittest')]),
|
|
||||||
0)
|
0)
|
||||||
self.assertEqual(self.num_spawn_calls, 1)
|
self.assertEqual(self.num_spawn_calls, 1)
|
||||||
|
|
||||||
|
@ -577,7 +563,7 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.fake_os.spawn_impl = self.SpawnFailure
|
self.fake_os.spawn_impl = self.SpawnFailure
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.test_runner.RunTests(
|
self.test_runner.RunTests(
|
||||||
[('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
|
[(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
|
||||||
[]),
|
[]),
|
||||||
1)
|
1)
|
||||||
self.assertEqual(self.num_spawn_calls, 1)
|
self.assertEqual(self.num_spawn_calls, 1)
|
||||||
|
@ -589,8 +575,7 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.test_runner.RunTests(
|
self.test_runner.RunTests(
|
||||||
[],
|
[],
|
||||||
[('scons/build/dbg/gtest/scons',
|
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
|
||||||
'scons/build/dbg/gtest/scons/gtest_unittest')]),
|
|
||||||
1)
|
1)
|
||||||
self.assertEqual(self.num_spawn_calls, 1)
|
self.assertEqual(self.num_spawn_calls, 1)
|
||||||
|
|
||||||
|
@ -600,10 +585,8 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.fake_os.spawn_impl = self.SpawnSuccess
|
self.fake_os.spawn_impl = self.SpawnSuccess
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.test_runner.RunTests(
|
self.test_runner.RunTests(
|
||||||
[('scons/build/dbg/gtest/scons',
|
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
|
||||||
'scons/build/dbg/gtest/scons/gtest_unittest')],
|
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
|
||||||
[('scons/build/dbg/gtest/scons',
|
|
||||||
'scons/build/dbg/gtest/scons/gtest_unittest')]),
|
|
||||||
0)
|
0)
|
||||||
self.assertEqual(self.num_spawn_calls, 2)
|
self.assertEqual(self.num_spawn_calls, 2)
|
||||||
|
|
||||||
|
@ -621,10 +604,8 @@ class RunTestsTest(unittest.TestCase):
|
||||||
self.fake_os.spawn_impl = SpawnImpl
|
self.fake_os.spawn_impl = SpawnImpl
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.test_runner.RunTests(
|
self.test_runner.RunTests(
|
||||||
[('scons/build/dbg/gtest/scons',
|
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
|
||||||
'scons/build/dbg/gtest/scons/gtest_unittest')],
|
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
|
||||||
[('scons/build/dbg/gtest/scons',
|
|
||||||
'scons/build/dbg/gtest/scons/gtest_unittest')]),
|
|
||||||
0)
|
0)
|
||||||
self.assertEqual(self.num_spawn_calls, 2)
|
self.assertEqual(self.num_spawn_calls, 2)
|
||||||
|
|
||||||