Changeset 115466 in webkit


Ignore:
Timestamp:
Apr 27, 2012 12:21:16 PM (12 years ago)
Author:
rniwa@webkit.org
Message:

Chromium Windows Perf bots timeout due to not output
https://bugs.webkit.org/show_bug.cgi?id=84940

Reviewed by Dirk Pranke.

Dirk and I investigated the issue on the bot but we couldn't figure out what is going wrong.
Since we run-perf-tests don't need any of fancy feature printer provides, just use python's
built-in logging module instead. Printing out to stdout and stderr seem to work so hopefully
this will fix the issue on the bot.

  • Scripts/run-perf-tests:
  • Scripts/webkitpy/performance_tests/perftest.py:

(PerfTest.run):
(PerfTest.run_failed):
(PerfTest.parse_output):
(PerfTest.output_statistics):
(ChromiumStylePerfTest.parse_output):
(PageLoadingPerfTest.run):

  • Scripts/webkitpy/performance_tests/perftest_unittest.py:

(MainTest.test_parse_output):
(MainTest.test_parse_output_with_failing_line):
(TestPageLoadingPerfTest):
(TestPageLoadingPerfTest.test_run):
(TestPageLoadingPerfTest.test_run_with_bad_output):

  • Scripts/webkitpy/performance_tests/perftestsrunner.py:

(PerfTestsRunner.__init__):
(PerfTestsRunner._parse_args):
(PerfTestsRunner.run):
(PerfTestsRunner._upload_json):
(PerfTestsRunner._print_status):
(PerfTestsRunner._run_tests_set):
(PerfTestsRunner._run_single_test):

  • Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:

(create_runner):
(test_run_test_set):
(test_run_test_set_kills_drt_per_run):
(test_run_test_pause_before_testing):
(test_run_test_set_for_parser_tests):
(test_run_test_set_with_json_output):
(test_run_test_set_with_json_source):
(test_run_test_set_with_multiple_repositories):
(test_upload_json):
(test_parse_args):

Location:
trunk/Tools
Files:
6 edited

Legend:

Unmodified
Added
Removed
  • trunk/Tools/ChangeLog

    r115453 r115466  
     12012-04-27  Ryosuke Niwa  <rniwa@webkit.org>
     2
     3        Chromium Windows Perf bots timeout due to not output
     4        https://bugs.webkit.org/show_bug.cgi?id=84940
     5
     6        Reviewed by Dirk Pranke.
     7
     8        Dirk and I investigated the issue on the bot but we couldn't figure out what is going wrong.
     9        Since we run-perf-tests don't need any of fancy feature printer provides, just use python's
     10        built-in logging module instead. Printing out to stdout and stderr seem to work so hopefully
     11        this will fix the issue on the bot.
     12
     13        * Scripts/run-perf-tests:
     14        * Scripts/webkitpy/performance_tests/perftest.py:
     15        (PerfTest.run):
     16        (PerfTest.run_failed):
     17        (PerfTest.parse_output):
     18        (PerfTest.output_statistics):
     19        (ChromiumStylePerfTest.parse_output):
     20        (PageLoadingPerfTest.run):
     21        * Scripts/webkitpy/performance_tests/perftest_unittest.py:
     22        (MainTest.test_parse_output):
     23        (MainTest.test_parse_output_with_failing_line):
     24        (TestPageLoadingPerfTest):
     25        (TestPageLoadingPerfTest.test_run):
     26        (TestPageLoadingPerfTest.test_run_with_bad_output):
     27        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
     28        (PerfTestsRunner.__init__):
     29        (PerfTestsRunner._parse_args):
     30        (PerfTestsRunner.run):
     31        (PerfTestsRunner._upload_json):
     32        (PerfTestsRunner._print_status):
     33        (PerfTestsRunner._run_tests_set):
     34        (PerfTestsRunner._run_single_test):
     35        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
     36        (create_runner):
     37        (test_run_test_set):
     38        (test_run_test_set_kills_drt_per_run):
     39        (test_run_test_pause_before_testing):
     40        (test_run_test_set_for_parser_tests):
     41        (test_run_test_set_with_json_output):
     42        (test_run_test_set_with_json_source):
     43        (test_run_test_set_with_multiple_repositories):
     44        (test_upload_json):
     45        (test_parse_args):
     46
    1472012-04-27  Dirk Pranke  <dpranke@chromium.org>
    248
  • trunk/Tools/Scripts/run-perf-tests

    r104856 r115466  
    11#!/usr/bin/env python
    2 # Copyright (C) 2011 Google Inc. All rights reserved.
     2# Copyright (C) 2012 Google Inc. All rights reserved.
    33#
    44# Redistribution and use in source and binary forms, with or without
     
    3535from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
    3636
    37 _log = logging.getLogger(__name__)
    38 
    3937if '__main__' == __name__:
    4038    logging.basicConfig(level=logging.INFO, format="%(message)s")
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftest.py

    r115410 r115466  
    2929
    3030
     31import logging
    3132import math
    3233import re
    3334
    3435from webkitpy.layout_tests.port.driver import DriverInput
     36
     37
     38_log = logging.getLogger(__name__)
    3539
    3640
     
    4650        return self._path_or_url
    4751
    48     def run(self, driver, timeout_ms, printer, buildbot_output):
     52    def run(self, driver, timeout_ms):
    4953        output = driver.run_test(DriverInput(self.path_or_url(), timeout_ms, None, False))
    50         if self.run_failed(output, printer):
     54        if self.run_failed(output):
    5155            return None
    52         return self.parse_output(output, printer, buildbot_output)
    53 
    54     def run_failed(self, output, printer):
     56        return self.parse_output(output)
     57
     58    def run_failed(self, output):
    5559        if output.text == None or output.error:
    5660            pass
    5761        elif output.timeout:
    58             printer.write('timeout: %s' % self.test_name())
     62            _log.error('timeout: %s' % self.test_name())
    5963        elif output.crash:
    60             printer.write('crash: %s' % self.test_name())
     64            _log.error('crash: %s' % self.test_name())
    6165        else:
    6266            return False
    6367
    6468        if output.error:
    65             printer.write('error: %s\n%s' % (self.test_name(), output.error))
     69            _log.error('error: %s\n%s' % (self.test_name(), output.error))
    6670
    6771        return True
     
    8791        return False
    8892
    89     def parse_output(self, output, printer, buildbot_output):
     93    def parse_output(self, output):
    9094        got_a_result = False
    9195        test_failed = False
     
    104108            if not self._should_ignore_line_in_parser_test_result(line):
    105109                test_failed = True
    106                 printer.write("%s" % line)
     110                _log.error(line)
    107111
    108112        if test_failed or set(self._statistics_keys) != set(results.keys()):
     
    112116
    113117        test_name = re.sub(r'\.\w+$', '', self._test_name)
    114         self.output_statistics(test_name, results, buildbot_output)
     118        self.output_statistics(test_name, results)
    115119
    116120        return {test_name: results}
    117121
    118     def output_statistics(self, test_name, results, buildbot_output):
     122    def output_statistics(self, test_name, results):
    119123        unit = results['unit']
    120         buildbot_output.write('RESULT %s= %s %s\n' % (test_name.replace('/', ': '), results['avg'], unit))
    121         buildbot_output.write(', '.join(['%s= %s %s' % (key, results[key], unit) for key in self._statistics_keys[1:]]) + '\n')
     124        _log.info('RESULT %s= %s %s' % (test_name.replace('/', ': '), results['avg'], unit))
     125        _log.info(', '.join(['%s= %s %s' % (key, results[key], unit) for key in self._statistics_keys[1:]]))
    122126
    123127
     
    128132        super(ChromiumStylePerfTest, self).__init__(test_name, path_or_url)
    129133
    130     def parse_output(self, output, printer, buildbot_output):
     134    def parse_output(self, output):
    131135        test_failed = False
    132136        got_a_result = False
     
    137141                # FIXME: Store the unit
    138142                results[self.test_name() + ':' + resultLine.group('name').replace(' ', '')] = float(resultLine.group('value'))
    139                 buildbot_output.write("%s\n" % line)
     143                _log.info(line)
    140144            elif not len(line) == 0:
    141145                test_failed = True
    142                 printer.write("%s" % line)
     146                _log.error(line)
    143147        return results if results and not test_failed else None
    144148
     
    148152        super(PageLoadingPerfTest, self).__init__(test_name, path_or_url)
    149153
    150     def run(self, driver, timeout_ms, printer, buildbot_output):
     154    def run(self, driver, timeout_ms):
    151155        test_times = []
    152156
    153157        for i in range(0, 20):
    154158            output = driver.run_test(DriverInput(self.path_or_url(), timeout_ms, None, False))
    155             if self.run_failed(output, printer):
     159            if self.run_failed(output):
    156160                return None
    157161            if i == 0:
     
    178182            'stdev': math.sqrt(squareSum),
    179183            'unit': 'ms'}
    180         self.output_statistics(self.test_name(), results, buildbot_output)
     184        self.output_statistics(self.test_name(), results)
    181185        return {self.test_name(): results}
    182186
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py

    r115410 r115466  
    3232import unittest
    3333
     34from webkitpy.common.system.outputcapture import OutputCapture
    3435from webkitpy.layout_tests.port.driver import DriverOutput
    3536from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
     
    3940
    4041
    41 class MockPrinter(object):
    42     def __init__(self):
    43         self.written_lines = []
    44 
    45     def write(self, line):
    46         self.written_lines.append(line)
    47 
    48 
    4942class MainTest(unittest.TestCase):
    5043    def test_parse_output(self):
    51         printer = MockPrinter()
    52         buildbot_output = StringIO.StringIO()
    5344        output = DriverOutput('\n'.join([
    5445            'Running 20 times',
     
    6051            'min 1080',
    6152            'max 1120']), image=None, image_hash=None, audio=None)
    62         test = PerfTest('some-test', '/path/some-dir/some-test')
    63         self.assertEqual(test.parse_output(output, printer, buildbot_output),
    64             {'some-test': {'avg': 1100.0, 'median': 1101.0, 'min': 1080.0, 'max': 1120.0, 'stdev': 11.0, 'unit': 'ms'}})
    65         self.assertEqual(printer.written_lines, [])
     53        output_capture = OutputCapture()
     54        output_capture.capture_output()
     55        try:
     56            test = PerfTest('some-test', '/path/some-dir/some-test')
     57            self.assertEqual(test.parse_output(output),
     58                {'some-test': {'avg': 1100.0, 'median': 1101.0, 'min': 1080.0, 'max': 1120.0, 'stdev': 11.0, 'unit': 'ms'}})
     59        finally:
     60            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
     61        self.assertEqual(actual_stdout, '')
     62        self.assertEqual(actual_stderr, '')
     63        self.assertEqual(actual_logs, 'RESULT some-test= 1100.0 ms\nmedian= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms\n')
    6664
    6765    def test_parse_output_with_failing_line(self):
    68         printer = MockPrinter()
    69         buildbot_output = StringIO.StringIO()
    7066        output = DriverOutput('\n'.join([
    7167            'Running 20 times',
     
    7975            'min 1080',
    8076            'max 1120']), image=None, image_hash=None, audio=None)
    81         test = PerfTest('some-test', '/path/some-dir/some-test')
    82         self.assertEqual(test.parse_output(output, printer, buildbot_output), None)
    83         self.assertEqual(printer.written_lines, ['some-unrecognizable-line'])
     77        output_capture = OutputCapture()
     78        output_capture.capture_output()
     79        try:
     80            test = PerfTest('some-test', '/path/some-dir/some-test')
     81            self.assertEqual(test.parse_output(output), None)
     82        finally:
     83            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
     84        self.assertEqual(actual_stdout, '')
     85        self.assertEqual(actual_stderr, '')
     86        self.assertEqual(actual_logs, 'some-unrecognizable-line\n')
    8487
    8588
    8689class TestPageLoadingPerfTest(unittest.TestCase):
    87     def assertWritten(self, stream, contents):
    88         self.assertEquals(stream.buflist, contents)
    89 
    9090    class MockDriver(object):
    9191        def __init__(self, values):
     
    102102
    103103    def test_run(self):
    104         printer = MockPrinter()
    105         buildbot_output = StringIO.StringIO()
    106104        test = PageLoadingPerfTest('some-test', '/path/some-dir/some-test')
    107105        driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
    108         self.assertEqual(test.run(driver, None, printer, buildbot_output),
    109             {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': math.sqrt(570 * 1000 * 1000), 'min': 2000, 'unit': 'ms'}})
    110         self.assertEqual(printer.written_lines, [])
    111         self.assertWritten(buildbot_output, ['RESULT some-test= 11000.0 ms\n', 'median= 11000 ms, stdev= 23874.6727726 ms, min= 2000 ms, max= 20000 ms\n'])
     106        output_capture = OutputCapture()
     107        output_capture.capture_output()
     108        try:
     109            self.assertEqual(test.run(driver, None),
     110                {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': math.sqrt(570 * 1000 * 1000), 'min': 2000, 'unit': 'ms'}})
     111        finally:
     112            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
     113        self.assertEqual(actual_stdout, '')
     114        self.assertEqual(actual_stderr, '')
     115        self.assertEqual(actual_logs, 'RESULT some-test= 11000.0 ms\nmedian= 11000 ms, stdev= 23874.6727726 ms, min= 2000 ms, max= 20000 ms\n')
    112116
    113117    def test_run_with_bad_output(self):
    114         printer = MockPrinter()
    115         buildbot_output = StringIO.StringIO()
    116         test = PageLoadingPerfTest('some-test', '/path/some-dir/some-test')
    117         driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
    118         self.assertEqual(test.run(driver, None, printer, buildbot_output), None)
    119         self.assertEqual(printer.written_lines, ['error: some-test\nsome error'])
     118        output_capture = OutputCapture()
     119        output_capture.capture_output()
     120        try:
     121            test = PageLoadingPerfTest('some-test', '/path/some-dir/some-test')
     122            driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
     123            self.assertEqual(test.run(driver, None), None)
     124        finally:
     125            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
     126        self.assertEqual(actual_stdout, '')
     127        self.assertEqual(actual_stderr, '')
     128        self.assertEqual(actual_logs, 'error: some-test\nsome error\n')
    120129
    121130
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py

    r115410 r115466  
    5353    _EXIT_CODE_FAILED_UPLOADING = -3
    5454
    55     def __init__(self, regular_output=sys.stderr, buildbot_output=sys.stdout, args=None, port=None):
    56         self._buildbot_output = buildbot_output
     55    def __init__(self, args=None, port=None):
    5756        self._options, self._args = PerfTestsRunner._parse_args(args)
    5857        if port:
     
    6362            self._port = self._host.port_factory.get(self._options.platform, self._options)
    6463        self._host._initialize_scm()
    65         self._printer = printing.Printer(self._port, self._options, regular_output, buildbot_output)
    6664        self._webkit_base_dir_len = len(self._port.webkit_base())
    6765        self._base_path = self._port.perf_tests_dir()
     
    7169    @staticmethod
    7270    def _parse_args(args=None):
    73         print_options = printing.print_options()
    74 
    7571        perf_option_list = [
    7672            optparse.make_option('--debug', action='store_const', const='Debug', dest="configuration",
     
    10399                help="Use WebKitTestRunner rather than DumpRenderTree."),
    104100            ]
    105 
    106         option_list = (perf_option_list + print_options)
    107         return optparse.OptionParser(option_list=option_list).parse_args(args)
     101        return optparse.OptionParser(option_list=(perf_option_list)).parse_args(args)
    108102
    109103    def _collect_tests(self):
     
    134128
    135129    def run(self):
    136         if self._options.help_printing:
    137             self._printer.help_printing()
    138             self._printer.cleanup()
    139             return 0
    140 
    141130        if not self._port.check_build(needs_http=False):
    142131            _log.error("Build not up to date for %s" % self._port._path_to_driver())
     
    146135        # in a try/finally to ensure that we clean up the logging configuration.
    147136        unexpected = -1
    148         try:
    149             tests = self._collect_tests()
    150             unexpected = self._run_tests_set(sorted(list(tests), key=lambda test: test.test_name()), self._port)
    151         finally:
    152             self._printer.cleanup()
     137        tests = self._collect_tests()
     138        unexpected = self._run_tests_set(sorted(list(tests), key=lambda test: test.test_name()), self._port)
    153139
    154140        options = self._options
     
    210196            return False
    211197
    212         self._printer.write("JSON file uploaded.")
     198        _log.info("JSON file uploaded.")
    213199        return True
    214200
     
    220206        if unexpected:
    221207            status += " (%d didn't run)" % unexpected
    222         self._printer.write(status)
     208        _log.info(status)
    223209
    224210    def _run_tests_set(self, tests, port):
     
    237223                    return unexpected
    238224
    239             self._printer.write('Running %s (%d of %d)' % (test.test_name(), expected + unexpected + 1, len(tests)))
     225            _log.info('Running %s (%d of %d)' % (test.test_name(), expected + unexpected + 1, len(tests)))
    240226            if self._run_single_test(test, driver):
    241227                expected = expected + 1
     
    243229                unexpected = unexpected + 1
    244230
    245             self._printer.write('')
     231            _log.info('')
    246232
    247233            driver.stop()
     
    252238        start_time = time.time()
    253239
    254         new_results = test.run(driver, self._options.time_out_ms, self._printer, self._buildbot_output)
     240        new_results = test.run(driver, self._options.time_out_ms)
    255241        if new_results:
    256242            self._results.update(new_results)
    257243        else:
    258             self._printer.write('FAILED')
    259 
    260         self._printer.write("Finished: %f s" % (time.time() - start_time))
     244            _log.error('FAILED')
     245
     246        _log.debug("Finished: %f s" % (time.time() - start_time))
    261247
    262248        return new_results != None
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py

    r115410 r115466  
    112112            """do nothing"""
    113113
    114     def create_runner(self, buildbot_output=None, args=[], regular_output=None, driver_class=TestDriver):
    115         buildbot_output = buildbot_output or StringIO.StringIO()
    116         regular_output = regular_output or StringIO.StringIO()
    117 
     114    def create_runner(self, args=[], driver_class=TestDriver):
    118115        options, parsed_args = PerfTestsRunner._parse_args(args)
    119116        test_port = TestPort(host=MockHost(), options=options)
    120117        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    121118
    122         runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
     119        runner = PerfTestsRunner(args=args, port=test_port)
    123120        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    124121        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
     
    162159
    163160    def test_run_test_set(self):
    164         buildbot_output = StringIO.StringIO()
    165         runner = self.create_runner(buildbot_output)
     161        runner = self.create_runner()
     162        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
     163            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
     164        output = OutputCapture()
     165        output.capture_output()
     166        try:
     167            unexpected_result_count = runner._run_tests_set(tests, runner._port)
     168        finally:
     169            stdout, stderr, log = output.restore_output()
     170        self.assertEqual(unexpected_result_count, len(tests) - 1)
     171        self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)
     172
     173    def test_run_test_set_kills_drt_per_run(self):
     174
     175        class TestDriverWithStopCount(MainTest.TestDriver):
     176            stop_count = 0
     177
     178            def stop(self):
     179                TestDriverWithStopCount.stop_count += 1
     180
     181        runner = self.create_runner(driver_class=TestDriverWithStopCount)
     182
    166183        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
    167184            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
    168185        unexpected_result_count = runner._run_tests_set(tests, runner._port)
    169         self.assertEqual(unexpected_result_count, len(tests) - 1)
    170         self.assertWritten(buildbot_output, ['RESULT group_name: test_name= 42 ms\n'])
    171 
    172     def test_run_test_set_kills_drt_per_run(self):
    173 
    174         class TestDriverWithStopCount(MainTest.TestDriver):
    175             stop_count = 0
    176 
    177             def stop(self):
    178                 TestDriverWithStopCount.stop_count += 1
    179 
    180         buildbot_output = StringIO.StringIO()
    181         runner = self.create_runner(buildbot_output, driver_class=TestDriverWithStopCount)
    182         tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
    183             'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
    184 
    185         unexpected_result_count = runner._run_tests_set(tests, runner._port)
     186
    186187        self.assertEqual(TestDriverWithStopCount.stop_count, 6)
    187188
     
    193194                TestDriverWithStartCount.start_count += 1
    194195
    195         buildbot_output = StringIO.StringIO()
    196         regular_output = StringIO.StringIO()
    197         runner = self.create_runner(buildbot_output, args=["--pause-before-testing"], regular_output=regular_output, driver_class=TestDriverWithStartCount)
     196        runner = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
    198197        tests = self._tests_for_runner(runner, ['inspector/pass.html'])
    199198
     199        output = OutputCapture()
     200        output.capture_output()
    200201        try:
    201             output = OutputCapture()
    202             output.capture_output()
    203202            unexpected_result_count = runner._run_tests_set(tests, runner._port)
    204203            self.assertEqual(TestDriverWithStartCount.start_count, 1)
    205204        finally:
    206             _, stderr, _ = output.restore_output()
    207             self.assertEqual(stderr, "Ready to run test?\n")
    208             self.assertTrue("Running inspector/pass.html (1 of 1)" in regular_output.getvalue())
     205            stdout, stderr, log = output.restore_output()
     206        self.assertEqual(stderr, "Ready to run test?\n")
     207        self.assertEqual(log, "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\n\n")
    209208
    210209    def test_run_test_set_for_parser_tests(self):
    211         buildbot_output = StringIO.StringIO()
    212         runner = self.create_runner(buildbot_output)
     210        runner = self.create_runner()
    213211        tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
    214         unexpected_result_count = runner._run_tests_set(tests, runner._port)
     212        output = OutputCapture()
     213        output.capture_output()
     214        try:
     215            unexpected_result_count = runner._run_tests_set(tests, runner._port)
     216        finally:
     217            stdout, stderr, log = output.restore_output()
    215218        self.assertEqual(unexpected_result_count, 0)
    216         self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
    217                                              'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
    218                                              'RESULT Parser: some-parser= 1100.0 ms\n',
    219                                              'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms\n'])
     219        self.assertEqual(log, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
     220        'RESULT Bindings: event-target-wrapper= 1489.05 ms',
     221        'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
     222        '',
     223        'Running Parser/some-parser.html (2 of 2)',
     224        'RESULT Parser: some-parser= 1100.0 ms',
     225        'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
     226        '', '']))
    220227
    221228    def test_run_test_set_with_json_output(self):
    222         buildbot_output = StringIO.StringIO()
    223         runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
     229        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
    224230        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
    225231        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
    226232        runner._timestamp = 123456789
    227         self.assertEqual(runner.run(), 0)
    228         self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
    229                                              'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
    230                                              'RESULT group_name: test_name= 42 ms\n'])
     233        output_capture = OutputCapture()
     234        output_capture.capture_output()
     235        try:
     236            self.assertEqual(runner.run(), 0)
     237        finally:
     238            stdout, stderr, logs = output_capture.restore_output()
     239
     240        self.assertEqual(logs,
     241            '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
     242                       'RESULT Bindings: event-target-wrapper= 1489.05 ms',
     243                       'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
     244                       '',
     245                       'Running inspector/pass.html (2 of 2)',
     246                       'RESULT group_name: test_name= 42 ms',
     247                       '', '']))
    231248
    232249        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
     
    237254
    238255    def test_run_test_set_with_json_source(self):
    239         buildbot_output = StringIO.StringIO()
    240         runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json',
    241             '--source-json-path=/mock-checkout/source.json'])
     256        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json', '--source-json-path=/mock-checkout/source.json'])
    242257        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
    243258        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
    244259        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
    245260        runner._timestamp = 123456789
    246         self.assertEqual(runner.run(), 0)
    247         self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
    248                                              'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
    249                                              'RESULT group_name: test_name= 42 ms\n'])
     261        output_capture = OutputCapture()
     262        output_capture.capture_output()
     263        try:
     264            self.assertEqual(runner.run(), 0)
     265        finally:
     266            stdout, stderr, logs = output_capture.restore_output()
     267
     268        self.assertEqual(logs, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
     269            'RESULT Bindings: event-target-wrapper= 1489.05 ms',
     270            'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
     271            '',
     272            'Running inspector/pass.html (2 of 2)',
     273            'RESULT group_name: test_name= 42 ms',
     274            '', '']))
    250275
    251276        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
     
    257282
    258283    def test_run_test_set_with_multiple_repositories(self):
    259         buildbot_output = StringIO.StringIO()
    260         runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
     284        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
    261285        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
    262286        runner._timestamp = 123456789
    263287        runner._port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
    264288        self.assertEqual(runner.run(), 0)
    265 
    266289        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
    267290            "timestamp": 123456789, "results": {"inspector/pass.html:group_name:test_name": 42.0}, "webkit-revision": 5678, "some-revision": 5678})
     
    298321
    299322    def test_upload_json(self):
    300         regular_output = StringIO.StringIO()
    301         runner = self.create_runner(regular_output=regular_output)
     323        runner = self.create_runner()
    302324        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'
    303325
     
    391413        runner = self.create_runner()
    392414        options, args = PerfTestsRunner._parse_args([
    393                 '--verbose',
    394415                '--build-directory=folder42',
    395416                '--platform=platform42',
     
    400421                '--source-json-path=a/source.json',
    401422                '--test-results-server=somehost',
    402                 '--debug', 'an_arg'])
     423                '--debug'])
    403424        self.assertEqual(options.build, True)
    404         self.assertEqual(options.verbose, True)
    405         self.assertEqual(options.help_printing, None)
    406425        self.assertEqual(options.build_directory, 'folder42')
    407426        self.assertEqual(options.platform, 'platform42')
     
    410429        self.assertEqual(options.time_out_ms, '42')
    411430        self.assertEqual(options.configuration, 'Debug')
    412         self.assertEqual(options.print_options, None)
    413431        self.assertEqual(options.output_json_path, 'a/output.json')
    414432        self.assertEqual(options.source_json_path, 'a/source.json')
Note: See TracChangeset for help on using the changeset viewer.