Reduces the flakiness of gtest-port_test on Mac; improves the Python tests; hides methods that we don't want to publish; makes win-dbg8 the default scons configuration (all by Vlad Losev).
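The Python test changes in this commit replace raw os.popen/os.popen2 calls with the gtest_test_utils.Subprocess helper. A minimal sketch of that pattern, assuming an illustrative test binary name (the Subprocess helper, its output attribute, its exit_code attribute, and GetTestExecutablePath all appear in the diffs below):

import gtest_test_utils

# Illustrative binary name; each real test resolves its own executable path.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')

def RunWithFlag(flag=None):
  # Build an argv-style list instead of a shell string, then run it.
  args = [COMMAND]
  if flag is not None:
    args.append(flag)
  p = gtest_test_utils.Subprocess(args)
  return p.output, p.exit_code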
@@ -142,8 +142,11 @@ const int kMaxStackTraceDepth = 100;
namespace internal {

class AssertHelper;
class DefaultGlobalTestPartResultReporter;
class ExecDeathTest;
class GTestFlagSaver;
class TestCase;                        // A collection of related tests.
class TestInfoImpl;
class UnitTestImpl* GetUnitTestImpl();
void ReportFailureInUnknownLocation(TestPartResultType result_type,
                                    const String& message);
@@ -402,16 +405,6 @@ class TestResult {
  // D'tor.  Do not inherit from TestResult.
  ~TestResult();

  // Gets the list of TestPartResults.
  const internal::List<TestPartResult>& test_part_results() const {
    return *test_part_results_;
  }

  // Gets the list of TestProperties.
  const internal::List<internal::TestProperty>& test_properties() const {
    return *test_properties_;
  }

  // Gets the number of successful test parts.
  int successful_part_count() const;

@@ -440,9 +433,6 @@ class TestResult {
  // Returns the elapsed time, in milliseconds.
  TimeInMillis elapsed_time() const { return elapsed_time_; }

  // Sets the elapsed time.
  void set_elapsed_time(TimeInMillis elapsed) { elapsed_time_ = elapsed; }

  // Returns the i-th test part result among all the results. i can range
  // from 0 to test_property_count() - 1. If i is not in that range, returns
  // NULL.
@@ -452,8 +442,28 @@ class TestResult {
  // test_property_count() - 1. If i is not in that range, returns NULL.
  const TestProperty* GetTestProperty(int i) const;

  // Adds a test part result to the list.
  void AddTestPartResult(const TestPartResult& test_part_result);
 private:
  friend class DefaultGlobalTestPartResultReporter;
  friend class ExecDeathTest;
  friend class TestInfoImpl;
  friend class TestResultAccessor;
  friend class UnitTestImpl;
  friend class WindowsDeathTest;
  friend class testing::TestInfo;
  friend class testing::UnitTest;

  // Gets the list of TestPartResults.
  const internal::List<TestPartResult>& test_part_results() const {
    return *test_part_results_;
  }

  // Gets the list of TestProperties.
  const internal::List<internal::TestProperty>& test_properties() const {
    return *test_properties_;
  }

  // Sets the elapsed time.
  void set_elapsed_time(TimeInMillis elapsed) { elapsed_time_ = elapsed; }

  // Adds a test property to the list. The property is validated and may add
  // a non-fatal failure if invalid (e.g., if it conflicts with reserved
@@ -467,6 +477,9 @@ class TestResult {
  // TODO(russr): Validate attribute names are legal and human readable.
  static bool ValidateTestProperty(const internal::TestProperty& test_property);

  // Adds a test part result to the list.
  void AddTestPartResult(const TestPartResult& test_part_result);

  // Returns the death test count.
  int death_test_count() const { return death_test_count_; }

@@ -478,7 +491,7 @@ class TestResult {

  // Clears the object.
  void Clear();
 private:

  // Protects mutable state of the property list and of owned properties, whose
  // values may be updated.
  internal::Mutex test_properites_mutex_;
@@ -527,9 +540,6 @@ class TestInfo {
  // Returns the test comment.
  const char* comment() const;

  // Returns true if this test matches the user-specified filter.
  bool matches_filter() const;

  // Returns true if this test should run, that is if the test is not disabled
  // (or it is disabled but the also_run_disabled_tests flag has been specified)
  // and its full name matches the user-specified filter.
@@ -550,6 +560,7 @@ class TestInfo {

  // Returns the result of the test.
  const internal::TestResult* result() const;

 private:
#if GTEST_HAS_DEATH_TEST
  friend class internal::DefaultDeathTestFactory;
@@ -566,6 +577,9 @@ class TestInfo {
      Test::TearDownTestCaseFunc tear_down_tc,
      internal::TestFactoryBase* factory);

  // Returns true if this test matches the user-specified filter.
  bool matches_filter() const;

  // Increments the number of death tests encountered in this test so
  // far.
  int increment_death_test_count();
@@ -620,17 +634,6 @@ class TestCase {
  // Returns true if any test in this test case should run.
  bool should_run() const { return should_run_; }

  // Sets the should_run member.
  void set_should_run(bool should) { should_run_ = should; }

  // Gets the (mutable) list of TestInfos in this TestCase.
  internal::List<TestInfo*>& test_info_list() { return *test_info_list_; }

  // Gets the (immutable) list of TestInfos in this TestCase.
  const internal::List<TestInfo *> & test_info_list() const {
    return *test_info_list_;
  }

  // Gets the number of successful tests in this test case.
  int successful_test_count() const;

@@ -659,14 +662,25 @@ class TestCase {
  // total_test_count() - 1. If i is not in that range, returns NULL.
  const TestInfo* GetTestInfo(int i) const;

 private:
  friend class testing::Test;
  friend class UnitTestImpl;

  // Gets the (mutable) list of TestInfos in this TestCase.
  internal::List<TestInfo*>& test_info_list() { return *test_info_list_; }

  // Gets the (immutable) list of TestInfos in this TestCase.
  const internal::List<TestInfo *> & test_info_list() const {
    return *test_info_list_;
  }

  // Sets the should_run member.
  void set_should_run(bool should) { should_run_ = should; }

  // Adds a TestInfo to this test case.  Will delete the TestInfo upon
  // destruction of the TestCase object.
  void AddTestInfo(TestInfo * test_info);

  // Finds and returns a TestInfo with the given name.  If one doesn't
  // exist, returns NULL.
  TestInfo* GetTestInfo(const char* test_name);

  // Clears the results of all tests in this test case.
  void ClearResult();

@@ -693,7 +707,6 @@ class TestCase {
  // Returns true if the given test should run.
  static bool ShouldRunTest(const TestInfo *test_info);

 private:
  // Name of the test case.
  internal::String name_;
  // Comment on the test case.

@@ -98,10 +98,10 @@ KNOWN BUILD DIRECTORIES
      defines them as follows (the default build directory is the first one
      listed in each group):
      On Windows:
              <gtest root>/scons/build/win-dbg/scons/
              <gtest root>/scons/build/win-opt/scons/
              <gtest root>/scons/build/win-dbg8/scons/
              <gtest root>/scons/build/win-opt8/scons/
              <gtest root>/scons/build/win-dbg/scons/
              <gtest root>/scons/build/win-opt/scons/
      On Mac:
              <gtest root>/scons/build/mac-dbg/scons/
              <gtest root>/scons/build/mac-opt/scons/
@@ -137,7 +137,7 @@ IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]
# Definition of CONFIGS must match that of the build directory names in the
# SConstruct script. The first list item is the default build configuration.
if IS_WINDOWS:
  CONFIGS = ('win-dbg', 'win-dbg8', 'win-opt', 'win-opt8')
  CONFIGS = ('win-dbg8', 'win-opt8', 'win-dbg', 'win-opt')
elif IS_MAC:
  CONFIGS = ('mac-dbg', 'mac-opt')
else:

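Because the comment above says the first CONFIGS entry is the default build configuration, reordering the tuple is what makes win-dbg8 the default. A hypothetical sketch of how a driver script might resolve it (names other than CONFIGS are illustrative, not from this commit):

import os

CONFIGS = ('win-dbg8', 'win-opt8', 'win-dbg', 'win-opt')  # Windows ordering after this change
DEFAULT_CONFIG = CONFIGS[0]                               # 'win-dbg8'
DEFAULT_BUILD_DIR = os.path.join('scons', 'build', DEFAULT_CONFIG, 'scons')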
@@ -1116,11 +1116,6 @@ inline UnitTestImpl* GetUnitTestImpl() {
  return UnitTest::GetInstance()->impl();
}

// Clears all test part results of the current test.
inline void ClearCurrentTestPartResults() {
  GetUnitTestImpl()->current_test_result()->ClearTestPartResults();
}

// Internal helper functions for implementing the simple regular
// expression matcher.
bool IsInSet(char ch, const char* str);

src/gtest.cc
@@ -2290,17 +2290,6 @@ class TestNameIs {

}  // namespace

// Finds and returns a TestInfo with the given name.  If one doesn't
// exist, returns NULL.
TestInfo * TestCase::GetTestInfo(const char* test_name) {
  // Can we find a TestInfo with the given name?
  internal::ListNode<TestInfo *> * const node = test_info_list_->FindIf(
      TestNameIs(test_name));

  // Returns the TestInfo found.
  return node ? node->element() : NULL;
}

namespace internal {

// This method expands all parameterized tests registered with macros TEST_P

@@ -35,6 +35,7 @@

#if GTEST_OS_MAC
#include <pthread.h>
#include <time.h>
#endif  // GTEST_OS_MAC

#include <gtest/gtest.h>
@@ -110,6 +111,19 @@ TEST(GetThreadCountTest, ReturnsCorrectValue) {

  void* dummy;
  ASSERT_EQ(0, pthread_join(thread_id, &dummy));

  // MacOS X may not immediately report the updated thread count after
  // joining a thread, causing flakiness in this test. To counter that, we
  // wait for up to .5 seconds for the OS to report the correct value.
  for (int i = 0; i < 5; ++i) {
    if (GetThreadCount() == 1)
      break;

    timespec time;
    time.tv_sec = 0;
    time.tv_nsec = 100L * 1000 * 1000;  // .1 seconds.
    nanosleep(&time, NULL);
  }
  EXPECT_EQ(1, GetThreadCount());
  pthread_mutex_destroy(&mutex);
}

@@ -58,10 +58,13 @@ def UsesColor(term, color_env_var, color_flag):

  SetEnvVar('TERM', term)
  SetEnvVar(COLOR_ENV_VAR, color_env_var)
  cmd = COMMAND
  if color_flag is not None:
    cmd += ' --%s=%s' % (COLOR_FLAG, color_flag)
  return gtest_test_utils.GetExitStatus(os.system(cmd))

  if color_flag is None:
    args = []
  else:
    args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
  p = gtest_test_utils.Subprocess([COMMAND] + args)
  return not p.exited or p.exit_code


class GTestColorTest(gtest_test_utils.TestCase):

@@ -59,52 +59,44 @@ def SetEnvVar(env_var, value):
    del os.environ[env_var]


def GetFlag(command, flag):
def GetFlag(flag):
  """Runs gtest_env_var_test_ and returns its output."""

  cmd = command
  args = [COMMAND]
  if flag is not None:
    cmd += ' %s' % (flag,)
  stdin, stdout = os.popen2(cmd, 'b')
  stdin.close()
  line = stdout.readline()
  stdout.close()
  return line
    args += [flag]
  return gtest_test_utils.Subprocess(args).output


def TestFlag(command, flag, test_val, default_val):
def TestFlag(flag, test_val, default_val):
  """Verifies that the given flag is affected by the corresponding env var."""

  env_var = 'GTEST_' + flag.upper()
  SetEnvVar(env_var, test_val)
  AssertEq(test_val, GetFlag(command, flag))
  AssertEq(test_val, GetFlag(flag))
  SetEnvVar(env_var, None)
  AssertEq(default_val, GetFlag(command, flag))


def TestEnvVarAffectsFlag(command):
  """An environment variable should affect the corresponding flag."""

  TestFlag(command, 'break_on_failure', '1', '0')
  TestFlag(command, 'color', 'yes', 'auto')
  TestFlag(command, 'filter', 'FooTest.Bar', '*')
  TestFlag(command, 'output', 'tmp/foo.xml', '')
  TestFlag(command, 'print_time', '0', '1')
  TestFlag(command, 'repeat', '999', '1')
  TestFlag(command, 'throw_on_failure', '1', '0')
  TestFlag(command, 'death_test_style', 'threadsafe', 'fast')

  if IS_WINDOWS:
    TestFlag(command, 'catch_exceptions', '1', '0')

  if IS_LINUX:
    TestFlag(command, 'death_test_use_fork', '1', '0')
    TestFlag(command, 'stack_trace_depth', '0', '100')
  AssertEq(default_val, GetFlag(flag))


class GTestEnvVarTest(gtest_test_utils.TestCase):
  def testEnvVarAffectsFlag(self):
    TestEnvVarAffectsFlag(COMMAND)
    """Tests that environment variable should affect the corresponding flag."""

    TestFlag('break_on_failure', '1', '0')
    TestFlag('color', 'yes', 'auto')
    TestFlag('filter', 'FooTest.Bar', '*')
    TestFlag('output', 'tmp/foo.xml', '')
    TestFlag('print_time', '0', '1')
    TestFlag('repeat', '999', '1')
    TestFlag('throw_on_failure', '1', '0')
    TestFlag('death_test_style', 'threadsafe', 'fast')

    if IS_WINDOWS:
      TestFlag('catch_exceptions', '1', '0')

    if IS_LINUX:
      TestFlag('death_test_use_fork', '1', '0')
      TestFlag('stack_trace_depth', '0', '100')


if __name__ == '__main__':

@@ -129,14 +129,20 @@ def SetEnvVar(env_var, value):
    del os.environ[env_var]


def Run(command):
  """Runs a test program and returns its exit code and a list of tests run."""
def RunAndReturnOutput(args = None):
  """Runs the test program and returns its output."""

  stdout_file = os.popen(command, 'r')
  return gtest_test_utils.Subprocess([COMMAND] + (args or [])).output


def RunAndExtractTestList(args = None):
  """Runs the test program and returns its exit code and a list of tests run."""

  p = gtest_test_utils.Subprocess([COMMAND] + (args or []))
  tests_run = []
  test_case = ''
  test = ''
  for line in stdout_file:
  for line in p.output.split('\n'):
    match = TEST_CASE_REGEX.match(line)
    if match is not None:
      test_case = match.group(1)
@@ -144,9 +150,8 @@ def Run(command):
      match = TEST_REGEX.match(line)
      if match is not None:
        test = match.group(1)
        tests_run += [test_case + '.' + test]
  exit_code = stdout_file.close()
  return (tests_run, exit_code)
        tests_run.append(test_case + '.' + test)
  return (tests_run, p.exit_code)


def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
@@ -168,7 +173,7 @@ def RunWithSharding(total_shards, shard_index, command):

  extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
               TOTAL_SHARDS_ENV_VAR: str(total_shards)}
  return InvokeWithModifiedEnv(extra_env, Run, command)
  return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)

# The unit test.

@@ -220,7 +225,7 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    # pylint: disable-msg=C6403
    if not IS_WINDOWS or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      tests_run = Run(COMMAND)[0]
      tests_run = RunAndExtractTestList()[0]
      SetEnvVar(FILTER_ENV_VAR, None)
      self.AssertSetEqual(tests_run, tests_to_run)
    # pylint: enable-msg=C6403
@@ -228,15 +233,15 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    # Next, tests using --gtest_filter.

    if gtest_filter is None:
      command = COMMAND
      args = []
    else:
      command = '%s --%s=%s' % (COMMAND, FILTER_FLAG, gtest_filter)
      args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]

    tests_run = Run(command)[0]
    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
                               command=COMMAND, check_exit_0=False):
                               args=None, check_exit_0=False):
    """Checks that binary runs correct tests for the given filter and shard.

    Runs all shards of gtest_filter_unittest_ with the given filter, and
@@ -247,7 +252,7 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
      gtest_filter: A filter to apply to the tests.
      total_shards: A total number of shards to split test run into.
      tests_to_run: A set of tests expected to run.
      command:      A command to invoke the test binary.
      args   :      Arguments to pass to the to the test binary.
      check_exit_0: When set to a true value, make sure that all shards
                    return 0.
    """
@@ -264,9 +269,9 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      partition = []
      for i in range(0, total_shards):
        (tests_run, exit_code) = RunWithSharding(total_shards, i, command)
        (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
        if check_exit_0:
          self.assert_(exit_code is None)
          self.assertEqual(0, exit_code)
        partition.append(tests_run)

      self.AssertPartitionIsValid(tests_to_run, partition)
@@ -287,11 +292,11 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Construct the command line.
    command = '%s --%s' % (COMMAND, ALSO_RUN_DISABED_TESTS_FLAG)
    args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
    if gtest_filter is not None:
      command = '%s --%s=%s' % (command, FILTER_FLAG, gtest_filter)
      args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))

    tests_run = Run(command)[0]
    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def setUp(self):
@@ -304,7 +309,7 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    global param_tests_present
    if param_tests_present is None:
      param_tests_present = PARAM_TEST_REGEX.search(
          '\n'.join(os.popen(COMMAND, 'r').readlines())) is not None
          RunAndReturnOutput()) is not None

  def testDefaultBehavior(self):
    """Tests the behavior of not specifying the filter."""
@@ -529,8 +534,8 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    """Tests that the filter flag overrides the filtering env. variable."""

    SetEnvVar(FILTER_ENV_VAR, 'Foo*')
    command = '%s --%s=%s' % (COMMAND, FILTER_FLAG, '*One')
    tests_run = Run(command)[0]
    args = ['--%s=%s' % (FILTER_FLAG, '*One')]
    tests_run = RunAndExtractTestList(args)[0]
    SetEnvVar(FILTER_ENV_VAR, None)

    self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
@@ -543,11 +548,9 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    stdout_file = InvokeWithModifiedEnv(extra_env, os.popen, COMMAND, 'r')
    try:
      stdout_file.readlines()
      InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
    finally:
      stdout_file.close()
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

@@ -559,12 +562,11 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    stdout_file = InvokeWithModifiedEnv(extra_env, os.popen,
                                        '%s --gtest_list_tests' % COMMAND, 'r')
    try:
      stdout_file.readlines()
      InvokeWithModifiedEnv(extra_env,
                            RunAndReturnOutput,
                            ['--gtest_list_tests'])
    finally:
      stdout_file.close()
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

@@ -581,12 +583,12 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
        'SeqP/ParamTest.TestY/1',
        ]

    for command in (COMMAND + ' --gtest_death_test_style=threadsafe',
                    COMMAND + ' --gtest_death_test_style=fast'):
    for flag in ['--gtest_death_test_style=threadsafe',
                 '--gtest_death_test_style=fast']:
      self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
                                    check_exit_0=True, command=command)
                                    check_exit_0=True, args=[flag])
      self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
                                    check_exit_0=True, command=command)
                                    check_exit_0=True, args=[flag])

if __name__ == '__main__':
  gtest_test_utils.Main()

@@ -39,7 +39,6 @@ Google Test) the command line flags.

__author__ = 'phanna@google.com (Patrick Hanna)'

import os
import gtest_test_utils


@@ -89,15 +88,11 @@ FooTest.
# Utilities.


def Run(command):
  """Runs a command and returns the list of tests printed."""
def Run(args):
  """Runs gtest_list_tests_unittest_ and returns the list of tests printed."""

  stdout_file = os.popen(command, 'r')

  output = stdout_file.read()

  stdout_file.close()
  return output
  return gtest_test_utils.Subprocess([EXE_PATH] + args,
                                     capture_stderr=False).output


# The unit test.
@@ -122,23 +117,23 @@ class GTestListTestsUnitTest(gtest_test_utils.TestCase):

    if flag_value is None:
      flag = ''
      flag_expression = "not set"
      flag_expression = 'not set'
    elif flag_value == '0':
      flag = ' --%s=0' % LIST_TESTS_FLAG
      flag_expression = "0"
      flag = '--%s=0' % LIST_TESTS_FLAG
      flag_expression = '0'
    else:
      flag = ' --%s' % LIST_TESTS_FLAG
      flag_expression = "1"
      flag = '--%s' % LIST_TESTS_FLAG
      flag_expression = '1'

    command = EXE_PATH + flag
    args = [flag]

    if other_flag is not None:
      command += " " + other_flag
      args += [other_flag]

    output = Run(command)
    output = Run(args)

    msg = ('when %s is %s, the output of "%s" is "%s".' %
           (LIST_TESTS_FLAG, flag_expression, command, output))
           (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output))

    if expected_output is not None:
      self.assert_(output == expected_output, msg)
@@ -165,17 +160,17 @@ class GTestListTestsUnitTest(gtest_test_utils.TestCase):
  def testOverrideNonFilterFlags(self):
    """Tests that --gtest_list_tests overrides the non-filter flags."""

    self.RunAndVerify(flag_value="1",
    self.RunAndVerify(flag_value='1',
                      expected_output=EXPECTED_OUTPUT_NO_FILTER,
                      other_flag="--gtest_break_on_failure")
                      other_flag='--gtest_break_on_failure')

  def testWithFilterFlags(self):
    """Tests that --gtest_list_tests takes into account the
    --gtest_filter flag."""

    self.RunAndVerify(flag_value="1",
    self.RunAndVerify(flag_value='1',
                      expected_output=EXPECTED_OUTPUT_FILTER_FOO,
                      other_flag="--gtest_filter=Foo*")
                      other_flag='--gtest_filter=Foo*')


if __name__ == '__main__':

@@ -42,7 +42,6 @@ __author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
import string
import sys
import gtest_test_utils

@@ -61,18 +60,22 @@ PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')

# At least one command we exercise must not have the
# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
COMMAND_LIST_TESTS = ({}, PROGRAM_PATH + ' --gtest_list_tests')
COMMAND_WITH_COLOR = ({}, PROGRAM_PATH + ' --gtest_color=yes')
COMMAND_WITH_TIME = ({}, PROGRAM_PATH + ' --gtest_print_time '
                     '--gtest_internal_skip_environment_and_ad_hoc_tests '
                     '--gtest_filter="FatalFailureTest.*:LoggingTest.*"')
COMMAND_WITH_DISABLED = ({}, PROGRAM_PATH + ' --gtest_also_run_disabled_tests '
                         '--gtest_internal_skip_environment_and_ad_hoc_tests '
                         '--gtest_filter="*DISABLED_*"')
COMMAND_WITH_SHARDING = ({'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
                         PROGRAM_PATH +
                         ' --gtest_internal_skip_environment_and_ad_hoc_tests '
                         ' --gtest_filter="PassingTest.*"')
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
                          '--gtest_print_time',
                          '--gtest_internal_skip_environment_and_ad_hoc_tests',
                          '--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
    {}, [PROGRAM_PATH,
         '--gtest_also_run_disabled_tests',
         '--gtest_internal_skip_environment_and_ad_hoc_tests',
         '--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
    {'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
    [PROGRAM_PATH,
     '--gtest_internal_skip_environment_and_ad_hoc_tests',
     '--gtest_filter=PassingTest.*'])

GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)

@@ -167,24 +170,24 @@ def NormalizeOutput(output):
  return output


def IterShellCommandOutput(env_cmd, stdin_string=None):
  """Runs a command in a sub-process, and iterates the lines in its STDOUT.
def GetShellCommandOutput(env_cmd):
  """Runs a command in a sub-process, and returns its output in a string.

  Args:
    env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
             environment variables to set, and element 1 is a string with
             the command and any flags.

    env_cmd:           The shell command. A 2-tuple where element 0 is a dict
                       of extra environment variables to set, and element 1
                       is a string with the command and any flags.
    stdin_string:      The string to be fed to the STDIN of the sub-process;
                       If None, the sub-process will inherit the STDIN
                       from the parent process.
  Returns:
    A string with the command's combined standard and diagnostic output.
  """

  # Spawns cmd in a sub-process, and gets its standard I/O file objects.
  # Set and save the environment properly.
  old_env_vars = dict(os.environ)
  os.environ.update(env_cmd[0])
  stdin_file, stdout_file = os.popen2(env_cmd[1], 'b')
  p = gtest_test_utils.Subprocess(env_cmd[1])

  # Changes made by os.environ.clear are not inheritable by child processes
  # until Python 2.6. To produce inheritable changes we have to delete
  # environment items with the del statement.
@@ -192,39 +195,7 @@ def IterShellCommandOutput(env_cmd, stdin_string=None):
    del os.environ[key]
  os.environ.update(old_env_vars)

  # If the caller didn't specify a string for STDIN, gets it from the
  # parent process.
  if stdin_string is None:
    stdin_string = sys.stdin.read()

  # Feeds the STDIN string to the sub-process.
  stdin_file.write(stdin_string)
  stdin_file.close()

  while True:
    line = stdout_file.readline()
    if not line:  # EOF
      stdout_file.close()
      break

    yield line


def GetShellCommandOutput(env_cmd, stdin_string=None):
  """Runs a command in a sub-process, and returns its STDOUT in a string.

  Args:

    env_cmd:           The shell command. A 2-tuple where element 0 is a dict
                       of extra environment variables to set, and element 1
                       is a string with the command and any flags.
    stdin_string:      The string to be fed to the STDIN of the sub-process;
                       If None, the sub-process will inherit the STDIN
                       from the parent process.
  """

  lines = list(IterShellCommandOutput(env_cmd, stdin_string))
  return string.join(lines, '')
  return p.output


def GetCommandOutput(env_cmd):
@@ -239,7 +210,7 @@ def GetCommandOutput(env_cmd):

  # Disables exception pop-ups on Windows.
  os.environ['GTEST_CATCH_EXCEPTIONS'] = '1'
  return NormalizeOutput(GetShellCommandOutput(env_cmd, ''))
  return NormalizeOutput(GetShellCommandOutput(env_cmd))


def GetOutputOfAllCommands():
@@ -251,7 +222,7 @@ def GetOutputOfAllCommands():
          GetCommandOutput(COMMAND_WITH_SHARDING))


test_list = GetShellCommandOutput(COMMAND_LIST_TESTS, '')
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list

@@ -190,7 +190,7 @@ def GetExitStatus(exit_code):


class Subprocess:
  def __init__(self, command, working_dir=None):
  def __init__(self, command, working_dir=None, capture_stderr=True):
    """Changes into a specified directory, if provided, and executes a command.
    Restores the old directory afterwards. Execution results are returned
    via the following attributes:
@@ -203,8 +203,10 @@ class Subprocess:
                             combined in a string.

    Args:
      command: A command to run, in the form of sys.argv.
      working_dir: A directory to change into.
      command:        The command to run, in the form of sys.argv.
      working_dir:    The directory to change into.
      capture_stderr: Determines whether to capture stderr in the output member
                      or to discard it.
    """

    # The subprocess module is the preferrable way of running programs
@@ -215,8 +217,13 @@ class Subprocess:
    # functionality (Popen4) under Windows. This allows us to support Mac
    # OS X 10.4 Tiger, which has python 2.3 installed.
    if _SUBPROCESS_MODULE_AVAILABLE:
      if capture_stderr:
        stderr = subprocess.STDOUT
      else:
        stderr = subprocess.PIPE

      p = subprocess.Popen(command,
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                           stdout=subprocess.PIPE, stderr=stderr,
                           cwd=working_dir, universal_newlines=True)
      # communicate returns a tuple with the file obect for the child's
      # output.
@@ -227,7 +234,10 @@ class Subprocess:
      try:
        if working_dir is not None:
          os.chdir(working_dir)
        p = popen2.Popen4(command)
        if capture_stderr:
          p = popen2.Popen4(command)
        else:
          p = popen2.Popen3(command)
        p.tochild.close()
        self.output = p.fromchild.read()
        ret_code = p.wait()

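A hedged usage sketch of the new capture_stderr parameter (binary name illustrative; the attributes mirror those used by the tests in this commit):

import gtest_test_utils

exe = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
# Keep stderr out of the captured output so expected-output comparisons
# only see stdout; by default stderr is folded into output.
p = gtest_test_utils.Subprocess([exe, '--gtest_list_tests'],
                                capture_stderr=False)
print p.output     # captured stdout of the child
print p.exit_code  # exit status of the child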
@@ -79,7 +79,29 @@ TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
namespace testing {
namespace internal {
const char* FormatTimeInMillisAsSeconds(TimeInMillis ms);

bool ParseInt32Flag(const char* str, const char* flag, Int32* value);

// TestResult contains some private methods that should be hidden from
// Google Test user but are required for testing. This class allow our tests
// to access them.
class TestResultAccessor {
 public:
  static void RecordProperty(TestResult* test_result,
                             const TestProperty& property) {
    test_result->RecordProperty(property);
  }

  static void ClearTestPartResults(TestResult* test_result) {
    test_result->ClearTestPartResults();
  }

  static const List<testing::TestPartResult>& test_part_results(
      const TestResult& test_result) {
    return test_result.test_part_results();
  }
};

}  // namespace internal
}  // namespace testing

@@ -128,7 +150,6 @@ using testing::TPRT_SUCCESS;
using testing::UnitTest;
using testing::internal::kTestTypeIdInGoogleTest;
using testing::internal::AppendUserMessage;
using testing::internal::ClearCurrentTestPartResults;
using testing::internal::CodePointToUtf8;
using testing::internal::EqFailure;
using testing::internal::FloatingPoint;
@@ -146,14 +167,21 @@ using testing::internal::ShouldShard;
using testing::internal::ShouldUseColor;
using testing::internal::StreamableToString;
using testing::internal::String;
using testing::internal::TestCase;
using testing::internal::TestProperty;
using testing::internal::TestResult;
using testing::internal::TestResultAccessor;
using testing::internal::ThreadLocal;
using testing::internal::WideStringToUtf8;

// This line tests that we can define tests in an unnamed namespace.
namespace {

static void ClearCurrentTestPartResults() {
  TestResultAccessor::ClearTestPartResults(
      GetUnitTestImpl()->current_test_result());
}

// Tests GetTypeId.

TEST(GetTypeIdTest, ReturnsSameValueForSameType) {
@@ -1076,9 +1104,9 @@ class TestResultTest : public Test {
    // this is a hack).
    TPRList * list1, * list2;
    list1 = const_cast<List<TestPartResult> *>(
        & r1->test_part_results());
        &TestResultAccessor::test_part_results(*r1));
    list2 = const_cast<List<TestPartResult> *>(
        & r2->test_part_results());
        &TestResultAccessor::test_part_results(*r2));

    // r0 is an empty TestResult.

@@ -1115,39 +1143,39 @@ class TestResultTest : public Test {
  }
};

// Tests TestResult::test_part_results().
// Tests TestResult::total_part_count().
TEST_F(TestResultTest, test_part_results) {
  ASSERT_EQ(0u, r0->test_part_results().size());
  ASSERT_EQ(1u, r1->test_part_results().size());
  ASSERT_EQ(2u, r2->test_part_results().size());
  ASSERT_EQ(0, r0->total_part_count());
  ASSERT_EQ(1, r1->total_part_count());
  ASSERT_EQ(2, r2->total_part_count());
}

// Tests TestResult::successful_part_count().
TEST_F(TestResultTest, successful_part_count) {
  ASSERT_EQ(0u, r0->successful_part_count());
  ASSERT_EQ(1u, r1->successful_part_count());
  ASSERT_EQ(1u, r2->successful_part_count());
  ASSERT_EQ(0, r0->successful_part_count());
  ASSERT_EQ(1, r1->successful_part_count());
  ASSERT_EQ(1, r2->successful_part_count());
}

// Tests TestResult::failed_part_count().
TEST_F(TestResultTest, failed_part_count) {
  ASSERT_EQ(0u, r0->failed_part_count());
  ASSERT_EQ(0u, r1->failed_part_count());
  ASSERT_EQ(1u, r2->failed_part_count());
  ASSERT_EQ(0, r0->failed_part_count());
  ASSERT_EQ(0, r1->failed_part_count());
  ASSERT_EQ(1, r2->failed_part_count());
}

// Tests testing::internal::GetFailedPartCount().
TEST_F(TestResultTest, GetFailedPartCount) {
  ASSERT_EQ(0u, GetFailedPartCount(r0));
  ASSERT_EQ(0u, GetFailedPartCount(r1));
  ASSERT_EQ(1u, GetFailedPartCount(r2));
  ASSERT_EQ(0, GetFailedPartCount(r0));
  ASSERT_EQ(0, GetFailedPartCount(r1));
  ASSERT_EQ(1, GetFailedPartCount(r2));
}

// Tests TestResult::total_part_count().
TEST_F(TestResultTest, total_part_count) {
  ASSERT_EQ(0u, r0->total_part_count());
  ASSERT_EQ(1u, r1->total_part_count());
  ASSERT_EQ(2u, r2->total_part_count());
  ASSERT_EQ(0, r0->total_part_count());
  ASSERT_EQ(1, r1->total_part_count());
  ASSERT_EQ(2, r2->total_part_count());
}

// Tests TestResult::Passed().
@@ -1172,76 +1200,60 @@ TEST_F(TestResultTest, GetTestPartResult) {
  EXPECT_TRUE(r2->GetTestPartResult(-1) == NULL);
}

// Tests TestResult::test_properties() has no properties when none are added.
// Tests TestResult has no properties when none are added.
TEST(TestResultPropertyTest, NoPropertiesFoundWhenNoneAreAdded) {
  TestResult test_result;
  ASSERT_EQ(0u, test_result.test_properties().size());
  ASSERT_EQ(0, test_result.test_property_count());
}

// Tests TestResult::test_properties() has the expected property when added.
// Tests TestResult has the expected property when added.
TEST(TestResultPropertyTest, OnePropertyFoundWhenAdded) {
  TestResult test_result;
  TestProperty property("key_1", "1");
  test_result.RecordProperty(property);
  const List<TestProperty>& properties = test_result.test_properties();
  ASSERT_EQ(1u, properties.size());
  TestProperty actual_property = properties.Head()->element();
  EXPECT_STREQ("key_1", actual_property.key());
  EXPECT_STREQ("1", actual_property.value());
  TestResultAccessor::RecordProperty(&test_result, property);
  ASSERT_EQ(1, test_result.test_property_count());
  const TestProperty* actual_property = test_result.GetTestProperty(0);
  EXPECT_STREQ("key_1", actual_property->key());
  EXPECT_STREQ("1", actual_property->value());
}

// Tests TestResult::test_properties() has multiple properties when added.
// Tests TestResult has multiple properties when added.
TEST(TestResultPropertyTest, MultiplePropertiesFoundWhenAdded) {
  TestResult test_result;
  TestProperty property_1("key_1", "1");
  TestProperty property_2("key_2", "2");
  test_result.RecordProperty(property_1);
  test_result.RecordProperty(property_2);
  const List<TestProperty>& properties = test_result.test_properties();
  ASSERT_EQ(2u, properties.size());
  TestProperty actual_property_1 = properties.Head()->element();
  EXPECT_STREQ("key_1", actual_property_1.key());
  EXPECT_STREQ("1", actual_property_1.value());
  TestResultAccessor::RecordProperty(&test_result, property_1);
  TestResultAccessor::RecordProperty(&test_result, property_2);
  ASSERT_EQ(2, test_result.test_property_count());
  const TestProperty* actual_property_1 = test_result.GetTestProperty(0);
  EXPECT_STREQ("key_1", actual_property_1->key());
  EXPECT_STREQ("1", actual_property_1->value());

  TestProperty actual_property_2 = properties.Last()->element();
  EXPECT_STREQ("key_2", actual_property_2.key());
  EXPECT_STREQ("2", actual_property_2.value());
  const TestProperty* actual_property_2 = test_result.GetTestProperty(1);
  EXPECT_STREQ("key_2", actual_property_2->key());
  EXPECT_STREQ("2", actual_property_2->value());
}

// Tests TestResult::test_properties() overrides values for duplicate keys.
// Tests TestResult::RecordProperty() overrides values for duplicate keys.
TEST(TestResultPropertyTest, OverridesValuesForDuplicateKeys) {
  TestResult test_result;
  TestProperty property_1_1("key_1", "1");
  TestProperty property_2_1("key_2", "2");
  TestProperty property_1_2("key_1", "12");
  TestProperty property_2_2("key_2", "22");
  test_result.RecordProperty(property_1_1);
  test_result.RecordProperty(property_2_1);
  test_result.RecordProperty(property_1_2);
  test_result.RecordProperty(property_2_2);
  TestResultAccessor::RecordProperty(&test_result, property_1_1);
  TestResultAccessor::RecordProperty(&test_result, property_2_1);
  TestResultAccessor::RecordProperty(&test_result, property_1_2);
  TestResultAccessor::RecordProperty(&test_result, property_2_2);

  const List<TestProperty>& properties = test_result.test_properties();
  ASSERT_EQ(2u, properties.size());
  TestProperty actual_property_1 = properties.Head()->element();
  EXPECT_STREQ("key_1", actual_property_1.key());
  EXPECT_STREQ("12", actual_property_1.value());

  TestProperty actual_property_2 = properties.Last()->element();
  EXPECT_STREQ("key_2", actual_property_2.key());
  EXPECT_STREQ("22", actual_property_2.value());
}

// Tests TestResult::test_property_count().
TEST(TestResultPropertyTest, TestPropertyCount) {
  TestResult test_result;
  TestProperty property_1("key_1", "1");
  TestProperty property_2("key_2", "2");

  ASSERT_EQ(0, test_result.test_property_count());
  test_result.RecordProperty(property_1);
  ASSERT_EQ(1, test_result.test_property_count());
  test_result.RecordProperty(property_2);
  ASSERT_EQ(2, test_result.test_property_count());
  const TestProperty* actual_property_1 = test_result.GetTestProperty(0);
  EXPECT_STREQ("key_1", actual_property_1->key());
  EXPECT_STREQ("12", actual_property_1->value());

  const TestProperty* actual_property_2 = test_result.GetTestProperty(1);
  EXPECT_STREQ("key_2", actual_property_2->key());
  EXPECT_STREQ("22", actual_property_2->value());
}

// Tests TestResult::GetTestProperty().
@@ -1250,9 +1262,9 @@ TEST(TestResultPropertyTest, GetTestProperty) {
  TestProperty property_1("key_1", "1");
  TestProperty property_2("key_2", "2");
  TestProperty property_3("key_3", "3");
  test_result.RecordProperty(property_1);
  test_result.RecordProperty(property_2);
  test_result.RecordProperty(property_3);
  TestResultAccessor::RecordProperty(&test_result, property_1);
  TestResultAccessor::RecordProperty(&test_result, property_2);
  TestResultAccessor::RecordProperty(&test_result, property_3);

  const TestProperty* fetched_property_1 = test_result.GetTestProperty(0);
  const TestProperty* fetched_property_2 = test_result.GetTestProperty(1);
@@ -1280,8 +1292,10 @@ TEST(TestResultPropertyTest, GetTestProperty) {
void ExpectNonFatalFailureRecordingPropertyWithReservedKey(const char* key) {
  TestResult test_result;
  TestProperty property(key, "1");
  EXPECT_NONFATAL_FAILURE(test_result.RecordProperty(property), "Reserved key");
  ASSERT_TRUE(test_result.test_properties().IsEmpty()) << "Not recorded";
  EXPECT_NONFATAL_FAILURE(
      TestResultAccessor::RecordProperty(&test_result, property),
      "Reserved key");
  ASSERT_EQ(0, test_result.test_property_count()) << "Not recorded";
}

// Attempting to recording a property with the Reserved literal "name"
@@ -4415,9 +4429,16 @@ namespace testing {

class TestInfoTest : public Test {
 protected:
  static TestInfo * GetTestInfo(const char* test_name) {
    return GetUnitTestImpl()->GetTestCase("TestInfoTest", "", NULL, NULL)->
        GetTestInfo(test_name);
  static const TestInfo* GetTestInfo(const char* test_name) {
    const TestCase* const test_case = GetUnitTestImpl()->
        GetTestCase("TestInfoTest", "", NULL, NULL);

    for (int i = 0; i < test_case->total_test_count(); ++i) {
      const TestInfo* const test_info = test_case->GetTestInfo(i);
      if (strcmp(test_name, test_info->name()) == 0)
        return test_info;
    }
    return NULL;
  }

  static const TestResult* GetTestResult(
@@ -4428,7 +4449,7 @@ class TestInfoTest : public Test {

// Tests TestInfo::test_case_name() and TestInfo::name().
TEST_F(TestInfoTest, Names) {
  TestInfo * const test_info = GetTestInfo("Names");
  const TestInfo* const test_info = GetTestInfo("Names");

  ASSERT_STREQ("TestInfoTest", test_info->test_case_name());
  ASSERT_STREQ("Names", test_info->name());
@@ -4436,13 +4457,13 @@ TEST_F(TestInfoTest, Names) {

// Tests TestInfo::result().
TEST_F(TestInfoTest, result) {
  TestInfo * const test_info = GetTestInfo("result");
  const TestInfo* const test_info = GetTestInfo("result");

  // Initially, there is no TestPartResult for this test.
  ASSERT_EQ(0u, GetTestResult(test_info)->total_part_count());
  ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());

  // After the previous assertion, there is still none.
  ASSERT_EQ(0u, GetTestResult(test_info)->total_part_count());
  ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());
}

// Tests setting up and tearing down a test case.