More refactoring for the event listener API, by Vlad Losev.

zhanyong.wan
2009-07-16 00:36:55 +00:00
parent 3a47ddf8ea
commit c214ebc830
11 changed files with 729 additions and 324 deletions
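
For context, the "event listener API" named in the commit message is the hook mechanism that eventually shipped as Google Test's public TestEventListener interface. The sketch below is a minimal, hypothetical usage example based on that later-published API (EmptyTestEventListener, UnitTest::listeners()); it is not code from this commit.

#include <cstdio>
#include <gtest/gtest.h>

// Hypothetical listener that prints a line when each test starts and ends.
// EmptyTestEventListener provides empty implementations of every callback,
// so only the events of interest need to be overridden.
class MinimalPrinter : public testing::EmptyTestEventListener {
 public:
  virtual void OnTestStart(const testing::TestInfo& test_info) {
    printf("Starting %s.%s\n", test_info.test_case_name(), test_info.name());
  }
  virtual void OnTestEnd(const testing::TestInfo& test_info) {
    printf("Finished %s.%s\n", test_info.test_case_name(), test_info.name());
  }
};

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  // Register the listener with the global UnitTest singleton; the listener
  // list takes ownership of the pointer.
  testing::UnitTest::GetInstance()->listeners().Append(new MinimalPrinter);
  return RUN_ALL_TESTS();
}

A test binary built with this main() prints the extra lines alongside the default printer's output.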

View File

@@ -957,16 +957,11 @@ TEST_F(MacroLogicDeathTest, ChildDoesNotDie) {
EXPECT_TRUE(factory_->TestDeleted());
}
// Returns the number of successful parts in the current test.
static size_t GetSuccessfulTestPartCount() {
return GetUnitTestImpl()->current_test_result()->successful_part_count();
}
// Tests that a successful death test does not register a successful
// test part.
TEST(SuccessRegistrationDeathTest, NoSuccessPart) {
EXPECT_DEATH(_exit(1), "");
EXPECT_EQ(0u, GetSuccessfulTestPartCount());
EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
}
TEST(StreamingAssertionsDeathTest, DeathTest) {

View File

@@ -0,0 +1,363 @@
// Copyright 2009 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: vladl@google.com (Vlad Losev)
//
// The Google C++ Testing Framework (Google Test)
//
// This file contains tests verifying correctness of data provided via
// UnitTest's public methods.
#include <gtest/gtest.h>
#include <string.h> // For strcmp.
#include <algorithm>
using ::testing::AddGlobalTestEnvironment;
using ::testing::Environment;
using ::testing::InitGoogleTest;
using ::testing::Test;
using ::testing::TestInfo;
using ::testing::TestPartResult;
using ::testing::UnitTest;
using ::testing::internal::TestCase;
using ::testing::internal::TestProperty;
#if GTEST_HAS_TYPED_TEST
using ::testing::Types;
using ::testing::internal::GetTypeName;
using ::testing::internal::String;
#endif // GTEST_HAS_TYPED_TEST
namespace testing {
namespace internal {
template <typename T>
struct LessByName {
bool operator()(const T* a, const T* b) {
return strcmp(a->name(), b->name()) < 0;
}
};
class UnitTestAccessor {
public:
// Returns the array of pointers to all test cases sorted by the test case
// name. The caller is responsible for deleting the array.
static TestCase const** const GetSortedTestCases() {
UnitTest* unit_test = UnitTest::GetInstance();
TestCase const** const test_cases =
new const TestCase*[unit_test->total_test_case_count()];
for (int i = 0; i < unit_test->total_test_case_count(); ++i)
test_cases[i] = unit_test->GetTestCase(i);
std::sort(test_cases,
test_cases + unit_test->total_test_case_count(),
LessByName<TestCase>());
return test_cases;
}
// Returns the test case by its name. The caller doesn't own the returned
// pointer.
static const TestCase* FindTestCase(const char* name) {
UnitTest* unit_test = UnitTest::GetInstance();
for (int i = 0; i < unit_test->total_test_case_count(); ++i) {
const TestCase* test_case = unit_test->GetTestCase(i);
if (0 == strcmp(test_case->name(), name))
return test_case;
}
return NULL;
}
// Returns the array of pointers to all tests in a particular test case
// sorted by the test name. The caller is responsible for deleting the
// array.
static TestInfo const** const GetSortedTests(const TestCase* test_case) {
TestInfo const** const tests =
new const TestInfo*[test_case->total_test_count()];
for (int i = 0; i < test_case->total_test_count(); ++i)
tests[i] = test_case->GetTestInfo(i);
std::sort(tests,
tests + test_case->total_test_count(),
LessByName<TestInfo>());
return tests;
}
};
// TODO(vladl@google.com): Put tests into the internal namespace after
// UnitTest methods are published.
} // namespace internal
using internal::UnitTestAccessor;
#if GTEST_HAS_TYPED_TEST
template <typename T> class TestCaseWithCommentTest : public Test {};
TYPED_TEST_CASE(TestCaseWithCommentTest, Types<int>);
TYPED_TEST(TestCaseWithCommentTest, Dummy) {}
const int kTypedTestCases = 1;
const int kTypedTests = 1;
String GetExpectedTestCaseComment() {
Message comment;
comment << "TypeParam = " << GetTypeName<int>().c_str();
return comment.GetString();
}
#else
const int kTypedTestCases = 0;
const int kTypedTests = 0;
#endif // GTEST_HAS_TYPED_TEST
// We can only test the accessors that do not change value while tests run.
// Since tests can be run in any order, the values of the accessors that track
// test execution (such as failed_test_count) cannot be predicted.
TEST(ApiTest, UnitTestImmutableAccessorsWork) {
UnitTest* unit_test = UnitTest::GetInstance();
ASSERT_EQ(2 + kTypedTestCases, unit_test->total_test_case_count());
EXPECT_EQ(1 + kTypedTestCases, unit_test->test_case_to_run_count());
EXPECT_EQ(2, unit_test->disabled_test_count());
EXPECT_EQ(5 + kTypedTests, unit_test->total_test_count());
EXPECT_EQ(3 + kTypedTests, unit_test->test_to_run_count());
const TestCase** const test_cases = UnitTestAccessor::GetSortedTestCases();
EXPECT_STREQ("ApiTest", test_cases[0]->name());
EXPECT_STREQ("DISABLED_Test", test_cases[1]->name());
#if GTEST_HAS_TYPED_TEST
EXPECT_STREQ("TestCaseWithCommentTest/0", test_cases[2]->name());
#endif // GTEST_HAS_TYPED_TEST
delete[] test_cases;
// The following lines initiate actions to verify certain methods in
// FinalSuccessChecker::TearDown.
// Records a test property to verify TestResult::GetTestProperty().
RecordProperty("key", "value");
}
TEST(ApiTest, TestCaseImmutableAccessorsWork) {
const TestCase* test_case = UnitTestAccessor::FindTestCase("ApiTest");
ASSERT_TRUE(test_case != NULL);
EXPECT_STREQ("ApiTest", test_case->name());
EXPECT_STREQ("", test_case->comment());
EXPECT_TRUE(test_case->should_run());
EXPECT_EQ(1, test_case->disabled_test_count());
EXPECT_EQ(3, test_case->test_to_run_count());
ASSERT_EQ(4, test_case->total_test_count());
const TestInfo** tests = UnitTestAccessor::GetSortedTests(test_case);
EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name());
EXPECT_STREQ("ApiTest", tests[0]->test_case_name());
EXPECT_STREQ("", tests[0]->comment());
EXPECT_STREQ("", tests[0]->test_case_comment());
EXPECT_FALSE(tests[0]->should_run());
EXPECT_STREQ("TestCaseDisabledAccessorsWork", tests[1]->name());
EXPECT_STREQ("ApiTest", tests[1]->test_case_name());
EXPECT_STREQ("", tests[1]->comment());
EXPECT_STREQ("", tests[1]->test_case_comment());
EXPECT_TRUE(tests[1]->should_run());
EXPECT_STREQ("TestCaseImmutableAccessorsWork", tests[2]->name());
EXPECT_STREQ("ApiTest", tests[2]->test_case_name());
EXPECT_STREQ("", tests[2]->comment());
EXPECT_STREQ("", tests[2]->test_case_comment());
EXPECT_TRUE(tests[2]->should_run());
EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name());
EXPECT_STREQ("ApiTest", tests[3]->test_case_name());
EXPECT_STREQ("", tests[3]->comment());
EXPECT_STREQ("", tests[3]->test_case_comment());
EXPECT_TRUE(tests[3]->should_run());
delete[] tests;
tests = NULL;
#if GTEST_HAS_TYPED_TEST
test_case = UnitTestAccessor::FindTestCase("TestCaseWithCommentTest/0");
ASSERT_TRUE(test_case != NULL);
EXPECT_STREQ("TestCaseWithCommentTest/0", test_case->name());
EXPECT_STREQ(GetExpectedTestCaseComment().c_str(), test_case->comment());
EXPECT_TRUE(test_case->should_run());
EXPECT_EQ(0, test_case->disabled_test_count());
EXPECT_EQ(1, test_case->test_to_run_count());
ASSERT_EQ(1, test_case->total_test_count());
tests = UnitTestAccessor::GetSortedTests(test_case);
EXPECT_STREQ("Dummy", tests[0]->name());
EXPECT_STREQ("TestCaseWithCommentTest/0", tests[0]->test_case_name());
EXPECT_STREQ("", tests[0]->comment());
EXPECT_STREQ(GetExpectedTestCaseComment().c_str(),
tests[0]->test_case_comment());
EXPECT_TRUE(tests[0]->should_run());
delete[] tests;
#endif // GTEST_HAS_TYPED_TEST
}
TEST(ApiTest, TestCaseDisabledAccessorsWork) {
const TestCase* test_case = UnitTestAccessor::FindTestCase("DISABLED_Test");
ASSERT_TRUE(test_case != NULL);
EXPECT_STREQ("DISABLED_Test", test_case->name());
EXPECT_STREQ("", test_case->comment());
EXPECT_FALSE(test_case->should_run());
EXPECT_EQ(1, test_case->disabled_test_count());
EXPECT_EQ(0, test_case->test_to_run_count());
ASSERT_EQ(1, test_case->total_test_count());
const TestInfo* const test_info = test_case->GetTestInfo(0);
EXPECT_STREQ("Dummy2", test_info->name());
EXPECT_STREQ("DISABLED_Test", test_info->test_case_name());
EXPECT_STREQ("", test_info->comment());
EXPECT_STREQ("", test_info->test_case_comment());
EXPECT_FALSE(test_info->should_run());
}
// These two tests are here to provide support for testing
// test_case_to_run_count, disabled_test_count, and test_to_run_count.
TEST(ApiTest, DISABLED_Dummy1) {}
TEST(DISABLED_Test, Dummy2) {}
class FinalSuccessChecker : public Environment {
protected:
virtual void TearDown() {
UnitTest* unit_test = UnitTest::GetInstance();
EXPECT_EQ(1 + kTypedTestCases, unit_test->successful_test_case_count());
EXPECT_EQ(3 + kTypedTests, unit_test->successful_test_count());
EXPECT_EQ(0, unit_test->failed_test_case_count());
EXPECT_EQ(0, unit_test->failed_test_count());
EXPECT_TRUE(unit_test->Passed());
EXPECT_FALSE(unit_test->Failed());
ASSERT_EQ(2 + kTypedTestCases, unit_test->total_test_case_count());
const TestCase** const test_cases = UnitTestAccessor::GetSortedTestCases();
EXPECT_STREQ("ApiTest", test_cases[0]->name());
EXPECT_STREQ("", test_cases[0]->comment());
EXPECT_TRUE(test_cases[0]->should_run());
EXPECT_EQ(1, test_cases[0]->disabled_test_count());
ASSERT_EQ(4, test_cases[0]->total_test_count());
EXPECT_EQ(3, test_cases[0]->successful_test_count());
EXPECT_EQ(0, test_cases[0]->failed_test_count());
EXPECT_TRUE(test_cases[0]->Passed());
EXPECT_FALSE(test_cases[0]->Failed());
EXPECT_STREQ("DISABLED_Test", test_cases[1]->name());
EXPECT_STREQ("", test_cases[1]->comment());
EXPECT_FALSE(test_cases[1]->should_run());
EXPECT_EQ(1, test_cases[1]->disabled_test_count());
ASSERT_EQ(1, test_cases[1]->total_test_count());
EXPECT_EQ(0, test_cases[1]->successful_test_count());
EXPECT_EQ(0, test_cases[1]->failed_test_count());
#if GTEST_HAS_TYPED_TEST
EXPECT_STREQ("TestCaseWithCommentTest/0", test_cases[2]->name());
EXPECT_STREQ(GetExpectedTestCaseComment().c_str(),
test_cases[2]->comment());
EXPECT_TRUE(test_cases[2]->should_run());
EXPECT_EQ(0, test_cases[2]->disabled_test_count());
ASSERT_EQ(1, test_cases[2]->total_test_count());
EXPECT_EQ(1, test_cases[2]->successful_test_count());
EXPECT_EQ(0, test_cases[2]->failed_test_count());
EXPECT_TRUE(test_cases[2]->Passed());
EXPECT_FALSE(test_cases[2]->Failed());
#endif // GTEST_HAS_TYPED_TEST
const TestCase* test_case = UnitTestAccessor::FindTestCase("ApiTest");
const TestInfo** tests = UnitTestAccessor::GetSortedTests(test_case);
EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name());
EXPECT_STREQ("ApiTest", tests[0]->test_case_name());
EXPECT_FALSE(tests[0]->should_run());
EXPECT_STREQ("TestCaseDisabledAccessorsWork", tests[1]->name());
EXPECT_STREQ("ApiTest", tests[1]->test_case_name());
EXPECT_STREQ("", tests[1]->comment());
EXPECT_STREQ("", tests[1]->test_case_comment());
EXPECT_TRUE(tests[1]->should_run());
EXPECT_TRUE(tests[1]->result()->Passed());
EXPECT_EQ(0, tests[1]->result()->test_property_count());
EXPECT_STREQ("TestCaseImmutableAccessorsWork", tests[2]->name());
EXPECT_STREQ("ApiTest", tests[2]->test_case_name());
EXPECT_STREQ("", tests[2]->comment());
EXPECT_STREQ("", tests[2]->test_case_comment());
EXPECT_TRUE(tests[2]->should_run());
EXPECT_TRUE(tests[2]->result()->Passed());
EXPECT_EQ(0, tests[2]->result()->test_property_count());
EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name());
EXPECT_STREQ("ApiTest", tests[3]->test_case_name());
EXPECT_STREQ("", tests[3]->comment());
EXPECT_STREQ("", tests[3]->test_case_comment());
EXPECT_TRUE(tests[3]->should_run());
EXPECT_TRUE(tests[3]->result()->Passed());
EXPECT_EQ(1, tests[3]->result()->test_property_count());
const TestProperty& property = tests[3]->result()->GetTestProperty(0);
EXPECT_STREQ("key", property.key());
EXPECT_STREQ("value", property.value());
delete[] tests;
#if GTEST_HAS_TYPED_TEST
test_case = UnitTestAccessor::FindTestCase("TestCaseWithCommentTest/0");
tests = UnitTestAccessor::GetSortedTests(test_case);
EXPECT_STREQ("Dummy", tests[0]->name());
EXPECT_STREQ("TestCaseWithCommentTest/0", tests[0]->test_case_name());
EXPECT_STREQ("", tests[0]->comment());
EXPECT_STREQ(GetExpectedTestCaseComment().c_str(),
tests[0]->test_case_comment());
EXPECT_TRUE(tests[0]->should_run());
EXPECT_TRUE(tests[0]->result()->Passed());
EXPECT_EQ(0, tests[0]->result()->test_property_count());
delete[] tests;
#endif // GTEST_HAS_TYPED_TEST
delete[] test_cases;
}
};
} // namespace testing
int main(int argc, char **argv) {
InitGoogleTest(&argc, argv);
AddGlobalTestEnvironment(new testing::FinalSuccessChecker());
return RUN_ALL_TESTS();
}

View File

@@ -94,26 +94,6 @@ const char* FormatTimeInMillisAsSeconds(TimeInMillis ms);
bool ParseInt32Flag(const char* str, const char* flag, Int32* value);
// TestResult contains some private methods that should be hidden from
// Google Test users but are required for testing. This class allows our tests
// to access them.
class TestResultAccessor {
public:
static void RecordProperty(TestResult* test_result,
const TestProperty& property) {
test_result->RecordProperty(property);
}
static void ClearTestPartResults(TestResult* test_result) {
test_result->ClearTestPartResults();
}
static const Vector<testing::TestPartResult>& test_part_results(
const TestResult& test_result) {
return test_result.test_part_results();
}
};
} // namespace internal
} // namespace testing
@@ -138,8 +118,8 @@ using testing::FloatLE;
using testing::GTEST_FLAG(also_run_disabled_tests);
using testing::GTEST_FLAG(break_on_failure);
using testing::GTEST_FLAG(catch_exceptions);
using testing::GTEST_FLAG(death_test_use_fork);
using testing::GTEST_FLAG(color);
using testing::GTEST_FLAG(death_test_use_fork);
using testing::GTEST_FLAG(filter);
using testing::GTEST_FLAG(list_tests);
using testing::GTEST_FLAG(output);
@@ -155,12 +135,12 @@ using testing::IsSubstring;
using testing::Message;
using testing::ScopedFakeTestPartResultReporter;
using testing::StaticAssertTypeEq;
using testing::Test;
using testing::TestPartResult;
using testing::TestPartResultArray;
using testing::TPRT_FATAL_FAILURE;
using testing::TPRT_NONFATAL_FAILURE;
using testing::TPRT_SUCCESS;
using testing::Test;
using testing::TestPartResult;
using testing::TestPartResultArray;
using testing::UnitTest;
using testing::internal::kMaxRandomSeed;
using testing::internal::kTestTypeIdInGoogleTest;
@@ -168,14 +148,13 @@ using testing::internal::AppendUserMessage;
using testing::internal::CodePointToUtf8;
using testing::internal::EqFailure;
using testing::internal::FloatingPoint;
using testing::internal::GTestFlagSaver;
using testing::internal::GetCurrentOsStackTraceExceptTop;
using testing::internal::GetFailedPartCount;
using testing::internal::GetNextRandomSeed;
using testing::internal::GetRandomSeedFromFlag;
using testing::internal::GetTestTypeId;
using testing::internal::GetTypeId;
using testing::internal::GetUnitTestImpl;
using testing::internal::GTestFlagSaver;
using testing::internal::Int32;
using testing::internal::Int32FromEnvOrDie;
using testing::internal::ShouldRunTestOnShard;
@@ -190,6 +169,7 @@ using testing::internal::TestResultAccessor;
using testing::internal::ThreadLocal;
using testing::internal::Vector;
using testing::internal::WideStringToUtf8;
using testing::internal::kTestTypeIdInGoogleTest;
// This line tests that we can define tests in an unnamed namespace.
namespace {
@@ -1227,6 +1207,68 @@ TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailureOnAllThreads) {
#endif // GTEST_IS_THREADSAFE && GTEST_HAS_PTHREAD
// Tests the TestProperty class.
TEST(TestPropertyTest, ConstructorWorks) {
const TestProperty property("key", "value");
EXPECT_STREQ("key", property.key());
EXPECT_STREQ("value", property.value());
}
TEST(TestPropertyTest, SetValue) {
TestProperty property("key", "value_1");
EXPECT_STREQ("key", property.key());
property.SetValue("value_2");
EXPECT_STREQ("key", property.key());
EXPECT_STREQ("value_2", property.value());
}
// Tests the TestPartResult class.
TEST(TestPartResultTest, ConstructorWorks) {
Message message;
message << "something is terribly wrong";
message << static_cast<const char*>(testing::internal::kStackTraceMarker);
message << "some unimportant stack trace";
const TestPartResult result(TPRT_NONFATAL_FAILURE,
"some_file.cc",
42,
message.GetString().c_str());
EXPECT_EQ(TPRT_NONFATAL_FAILURE, result.type());
EXPECT_STREQ("some_file.cc", result.file_name());
EXPECT_EQ(42, result.line_number());
EXPECT_STREQ(message.GetString().c_str(), result.message());
EXPECT_STREQ("something is terribly wrong", result.summary());
}
TEST(TestPartResultTest, ResultAccessorsWork) {
const TestPartResult success(TPRT_SUCCESS, "file.cc", 42, "message");
EXPECT_TRUE(success.passed());
EXPECT_FALSE(success.failed());
EXPECT_FALSE(success.nonfatally_failed());
EXPECT_FALSE(success.fatally_failed());
const TestPartResult nonfatal_failure(TPRT_NONFATAL_FAILURE,
"file.cc",
42,
"message");
EXPECT_FALSE(nonfatal_failure.passed());
EXPECT_TRUE(nonfatal_failure.failed());
EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
EXPECT_FALSE(nonfatal_failure.fatally_failed());
const TestPartResult fatal_failure(TPRT_FATAL_FAILURE,
"file.cc",
42,
"message");
EXPECT_FALSE(fatal_failure.passed());
EXPECT_TRUE(fatal_failure.failed());
EXPECT_FALSE(fatal_failure.nonfatally_failed());
EXPECT_TRUE(fatal_failure.fatally_failed());
}
// Tests the TestResult class.
// The test fixture for testing TestResult.
@@ -1298,34 +1340,6 @@ class TestResultTest : public Test {
}
};
// Tests TestResult::total_part_count().
TEST_F(TestResultTest, test_part_results) {
ASSERT_EQ(0, r0->total_part_count());
ASSERT_EQ(1, r1->total_part_count());
ASSERT_EQ(2, r2->total_part_count());
}
// Tests TestResult::successful_part_count().
TEST_F(TestResultTest, successful_part_count) {
ASSERT_EQ(0, r0->successful_part_count());
ASSERT_EQ(1, r1->successful_part_count());
ASSERT_EQ(1, r2->successful_part_count());
}
// Tests TestResult::failed_part_count().
TEST_F(TestResultTest, failed_part_count) {
ASSERT_EQ(0, r0->failed_part_count());
ASSERT_EQ(0, r1->failed_part_count());
ASSERT_EQ(1, r2->failed_part_count());
}
// Tests testing::internal::GetFailedPartCount().
TEST_F(TestResultTest, GetFailedPartCount) {
ASSERT_EQ(0, GetFailedPartCount(r0));
ASSERT_EQ(0, GetFailedPartCount(r1));
ASSERT_EQ(1, GetFailedPartCount(r2));
}
// Tests TestResult::total_part_count().
TEST_F(TestResultTest, total_part_count) {
ASSERT_EQ(0, r0->total_part_count());
@@ -3778,42 +3792,37 @@ TEST(AssertionSyntaxTest, WorksWithConst) {
} // namespace
// Returns the number of successful parts in the current test.
static size_t GetSuccessfulPartCount() {
return GetUnitTestImpl()->current_test_result()->successful_part_count();
}
namespace testing {
// Tests that Google Test tracks SUCCEED*.
TEST(SuccessfulAssertionTest, SUCCEED) {
SUCCEED();
SUCCEED() << "OK";
EXPECT_EQ(2, GetSuccessfulPartCount());
EXPECT_EQ(2, GetUnitTestImpl()->current_test_result()->total_part_count());
}
// Tests that Google Test doesn't track successful EXPECT_*.
TEST(SuccessfulAssertionTest, EXPECT) {
EXPECT_TRUE(true);
EXPECT_EQ(0, GetSuccessfulPartCount());
EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
}
// Tests that Google Test doesn't track successful EXPECT_STR*.
TEST(SuccessfulAssertionTest, EXPECT_STR) {
EXPECT_STREQ("", "");
EXPECT_EQ(0, GetSuccessfulPartCount());
EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
}
// Tests that Google Test doesn't track successful ASSERT_*.
TEST(SuccessfulAssertionTest, ASSERT) {
ASSERT_TRUE(true);
EXPECT_EQ(0, GetSuccessfulPartCount());
EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
}
// Tests that Google Test doesn't track successful ASSERT_STR*.
TEST(SuccessfulAssertionTest, ASSERT_STR) {
ASSERT_STREQ("", "");
EXPECT_EQ(0, GetSuccessfulPartCount());
EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
}
} // namespace testing

View File

@@ -182,9 +182,10 @@ class GetTestsToRunTest(unittest.TestCase):
def setUp(self):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
known_paths=[AddExeExtension('scons/build/dbg/scons/gtest_unittest'),
AddExeExtension('scons/build/opt/scons/gtest_unittest'),
'test/gtest_color_test.py']))
known_paths=[
AddExeExtension('scons/build/dbg/gtest/scons/gtest_unittest'),
AddExeExtension('scons/build/opt/gtest/scons/gtest_unittest'),
'test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
@@ -201,17 +202,19 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]))
# An explicitly specified directory.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_unittest'],
['scons/build/dbg/gtest/scons', 'gtest_unittest'],
'',
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]))
# A particular configuration.
self.AssertResultsEqual(
@@ -221,8 +224,8 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/other/scons',
'scons/build/other/scons/gtest_unittest')]))
[('scons/build/other/gtest/scons',
'scons/build/other/gtest/scons/gtest_unittest')]))
# All available configurations
self.AssertResultsEqual(
@@ -232,8 +235,10 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest'),
('scons/build/opt/gtest/scons',
'scons/build/opt/gtest/scons/gtest_unittest')]))
# All built configurations (unbuilt don't cause failure).
self.AssertResultsEqual(
@@ -243,40 +248,47 @@ class GetTestsToRunTest(unittest.TestCase):
True,
available_configurations=self.fake_configurations + ['unbuilt']),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest'),
('scons/build/opt/gtest/scons',
'scons/build/opt/gtest/scons/gtest_unittest')]))
# A combination of an explicit directory and a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_unittest'],
['scons/build/dbg/gtest/scons', 'gtest_unittest'],
'opt',
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest'),
('scons/build/opt/gtest/scons',
'scons/build/opt/gtest/scons/gtest_unittest')]))
# Same test specified in an explicit directory and via a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_unittest'],
['scons/build/dbg/gtest/scons', 'gtest_unittest'],
'dbg',
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]))
# All built configurations + explicit directory + explicit configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_unittest'],
['scons/build/dbg/gtest/scons', 'gtest_unittest'],
'opt',
True,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest'),
('scons/build/opt/gtest/scons',
'scons/build/opt/gtest/scons/gtest_unittest')]))
def testPythonTestsOnly(self):
"""Exercises GetTestsToRun with parameters designating Python tests only."""
@@ -288,17 +300,17 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[]))
# An explicitly specified directory.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'test/gtest_color_test.py'],
['scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[]))
# A particular configuration.
@@ -308,7 +320,7 @@ class GetTestsToRunTest(unittest.TestCase):
'other',
False,
available_configurations=self.fake_configurations),
([('scons/build/other/scons', 'test/gtest_color_test.py')],
([('scons/build/other/gtest/scons', 'test/gtest_color_test.py')],
[]))
# All available configurations
@@ -318,8 +330,8 @@ class GetTestsToRunTest(unittest.TestCase):
'all',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
('scons/build/opt/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
[]))
# All built configurations (unbuilt don't cause failure).
@@ -329,40 +341,40 @@ class GetTestsToRunTest(unittest.TestCase):
'',
True,
available_configurations=self.fake_configurations + ['unbuilt']),
([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
('scons/build/opt/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
[]))
# A combination of an explicit directory and a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_color_test.py'],
['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
'opt',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
('scons/build/opt/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
[]))
# Same test specified in an explicit directory and via a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_color_test.py'],
['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
'dbg',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[]))
# All built configurations + explicit directory + explicit configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['scons/build/dbg/scons', 'gtest_color_test.py'],
['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
'opt',
True,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
('scons/build/opt/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
[]))
def testCombinationOfBinaryAndPythonTests(self):
@@ -377,8 +389,9 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]))
# Specifying both binary and Python tests.
self.AssertResultsEqual(
@@ -387,8 +400,9 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]))
# Specifying binary tests suppresses Python tests.
self.AssertResultsEqual(
@@ -398,7 +412,8 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]))
# Specifying Python tests suppresses binary tests.
self.AssertResultsEqual(
@@ -407,7 +422,7 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[]))
def testIgnoresNonTestFiles(self):
@@ -415,8 +430,9 @@ class GetTestsToRunTest(unittest.TestCase):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
known_paths=[AddExeExtension('scons/build/dbg/scons/gtest_nontest'),
'test/']))
known_paths=[
AddExeExtension('scons/build/dbg/gtest/scons/gtest_nontest'),
'test/']))
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
injected_script_dir='.')
@@ -435,10 +451,11 @@ class GetTestsToRunTest(unittest.TestCase):
# directory /a/b/c/.
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath('/a/b/c'),
known_paths=['/a/b/c/',
AddExeExtension('/d/scons/build/dbg/scons/gtest_unittest'),
AddExeExtension('/d/scons/build/opt/scons/gtest_unittest'),
'/d/test/gtest_color_test.py']))
known_paths=[
'/a/b/c/',
AddExeExtension('/d/scons/build/dbg/gtest/scons/gtest_unittest'),
AddExeExtension('/d/scons/build/opt/gtest/scons/gtest_unittest'),
'/d/test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
@@ -451,8 +468,8 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
[('/d/scons/build/dbg/scons',
'/d/scons/build/dbg/scons/gtest_unittest')]))
[('/d/scons/build/dbg/gtest/scons',
'/d/scons/build/dbg/gtest/scons/gtest_unittest')]))
# A Python test.
self.AssertResultsEqual(
@@ -461,7 +478,7 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
([('/d/scons/build/dbg/scons', '/d/test/gtest_color_test.py')],
([('/d/scons/build/dbg/gtest/scons', '/d/test/gtest_color_test.py')],
[]))
@@ -491,7 +508,7 @@ class GetTestsToRunTest(unittest.TestCase):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
known_paths=['scons/build/dbg/scons/gtest_test', 'test/']))
known_paths=['scons/build/dbg/gtest/scons/gtest_test', 'test/']))
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
injected_script_dir='.')
@@ -522,9 +539,10 @@ class RunTestsTest(unittest.TestCase):
def setUp(self):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
known_paths=[AddExeExtension('scons/build/dbg/scons/gtest_unittest'),
AddExeExtension('scons/build/opt/scons/gtest_unittest'),
'test/gtest_color_test.py']))
known_paths=[
AddExeExtension('scons/build/dbg/gtest/scons/gtest_unittest'),
AddExeExtension('scons/build/opt/gtest/scons/gtest_unittest'),
'test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None)
@@ -536,7 +554,7 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
[('scons/build/dbg/scons', 'test/gtest_color_test.py')],
[('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[]),
0)
self.assertEqual(self.num_spawn_calls, 1)
@@ -548,8 +566,8 @@ class RunTestsTest(unittest.TestCase):
self.assertEqual(
self.test_runner.RunTests(
[],
[('scons/build/dbg/scons',
'scons/build/dbg/scons/gtest_unittest')]),
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 1)
@@ -559,7 +577,7 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = self.SpawnFailure
self.assertEqual(
self.test_runner.RunTests(
[('scons/build/dbg/scons', 'test/gtest_color_test.py')],
[('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
[]),
1)
self.assertEqual(self.num_spawn_calls, 1)
@@ -571,8 +589,8 @@ class RunTestsTest(unittest.TestCase):
self.assertEqual(
self.test_runner.RunTests(
[],
[('scons/build/dbg/scons',
'scons/build/dbg/scons/gtest_unittest')]),
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]),
1)
self.assertEqual(self.num_spawn_calls, 1)
@@ -582,9 +600,10 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')],
[('scons/build/dbg/scons',
'scons/build/dbg/scons/gtest_unittest')]),
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')],
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 2)
@@ -602,9 +621,10 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = SpawnImpl
self.assertEqual(
self.test_runner.RunTests(
[('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')],
[('scons/build/dbg/scons',
'scons/build/dbg/scons/gtest_unittest')]),
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')],
[('scons/build/dbg/gtest/scons',
'scons/build/dbg/gtest/scons/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 2)