Changeset 138042 in webkit


Timestamp:
Dec 18, 2012 10:55:53 AM
Author:
rniwa@webkit.org
Message:

Consider removing --pause-before-testing option
https://bugs.webkit.org/show_bug.cgi?id=105248

Reviewed by Eric Seidel.

Remove --pause-before-testing option.

  • Tools/Scripts/webkitpy/layout_tests/port/driver.py:

(DriverProxy.start): Removed.

  • Scripts/webkitpy/performance_tests/perftest.py:

(PerfTest.__init__):
(PerfTest.test_path): Renamed from path_or_url.
(PerfTest.run):
(PerfTest): Removed "driver" argument.
(PerfTest._run_with_driver):
(PerfTest.run_single):
(ChromiumStylePerfTest.__init__):
(PageLoadingPerfTest.__init__):
(PageLoadingPerfTest.run_single):
(PageLoadingPerfTest._run_with_driver): Renamed from run so that PerfTest.run can create a driver for us.
(ReplayPerfTest.__init__):
(ReplayPerfTest.prepare):

  • Scripts/webkitpy/performance_tests/perftest_unittest.py:

(TestPageLoadingPerfTest.test_run):
(TestPageLoadingPerfTest.test_run_with_memory_output):
(TestPageLoadingPerfTest.test_run_with_bad_output):

  • Scripts/webkitpy/performance_tests/perftestsrunner.py:

(PerfTestsRunner._parse_args):
(PerfTestsRunner._run_tests_set): Removed the code to create a driver and pause before running the test.
(PerfTestsRunner._run_single_test): Removed "driver" argument.

  • Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:

(run_test):
(test_run_test_pause_before_testing): Removed.
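
For readers skimming the change, here is a minimal before/after sketch of the caller-visible difference; "port" and "test" stand in for the real webkitpy Port and PerfTest objects, and the snippet is an illustration rather than code from the patch:

    # Illustration only: "port" and "test" stand in for webkitpy objects.

    # Before r138042: the runner created and owned the DumpRenderTree driver,
    # optionally starting it early so --pause-before-testing could wait for
    # a performance monitor to attach.
    driver = port.create_driver(worker_number=0, no_timeout=True)
    results = test.run(driver, time_out_ms)
    driver.stop()

    # After r138042: PerfTest.run() creates the driver itself and always stops
    # it in a finally block, so callers pass only the timeout.
    results = test.run(time_out_ms)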

Location:
trunk/Tools
Files:
6 edited

  • trunk/Tools/ChangeLog

    r138038 r138042
+2012-12-18  Ryosuke Niwa  <rniwa@webkit.org>
+
+        Consider removing --pause-before-testing option
+        https://bugs.webkit.org/show_bug.cgi?id=105248
+
+        Reviewed by Eric Seidel.
+
+        Remove --pause-before-testing option.
+
+        * Tools/Scripts/webkitpy/layout_tests/port/driver.py:
+        (DriverProxy.start): Removed.
+        * Scripts/webkitpy/performance_tests/perftest.py:
+        (PerfTest.__init__):
+        (PerfTest.test_path): Renamed from path_or_url.
+        (PerfTest.run):
+        (PerfTest): Removed "driver" argument.
+        (PerfTest._run_with_driver):
+        (PerfTest.run_single):
+        (ChromiumStylePerfTest.__init__):
+        (PageLoadingPerfTest.__init__):
+        (PageLoadingPerfTest.run_single):
+        (PageLoadingPerfTest._run_with_driver): Renamed from run so that PerfTest.run can create a driver for us.
+        (ReplayPerfTest.__init__):
+        (ReplayPerfTest.prepare):
+        * Scripts/webkitpy/performance_tests/perftest_unittest.py:
+        (TestPageLoadingPerfTest.test_run):
+        (TestPageLoadingPerfTest.test_run_with_memory_output):
+        (TestPageLoadingPerfTest.test_run_with_bad_output):
+        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
+        (PerfTestsRunner._parse_args):
+        (PerfTestsRunner._run_tests_set): Removed the code to create a driver and pause before running the test.
+        (PerfTestsRunner._run_single_test): Removed "driver" argument.
+        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
+        (run_test):
+        (test_run_test_pause_before_testing): Removed.
+
 2012-12-18  Alberto Garcia  <agarcia@igalia.com>

  • trunk/Tools/Scripts/webkitpy/layout_tests/port/driver.py

    r137714 r138042
         return self._running_drivers[cmd_line_key].run_test(driver_input, stop_when_done)

-    def start(self):
-        # FIXME: Callers shouldn't normally call this, since this routine
-        # may not be specifying the correct combination of pixel test and
-        # per_test args.
-        #
-        # The only reason we have this routine at all is so the perftestrunner
-        # can pause before running a test; it might be better to push that
-        # into run_test() directly.
-        self._driver.start(self._port.get_option('pixel_tests'), [])
-
     def has_crashed(self):
         return any(driver.has_crashed() for driver in self._running_drivers.values())
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftest.py

    r137180 r138042

 class PerfTest(object):
-    def __init__(self, port, test_name, path_or_url):
+    def __init__(self, port, test_name, test_path):
         self._port = port
         self._test_name = test_name
-        self._path_or_url = path_or_url
+        self._test_path = test_path

     def test_name(self):
         return self._test_name

-    def path_or_url(self):
-        return self._path_or_url
+    def test_path(self):
+        return self._test_path

     def prepare(self, time_out_ms):
         return True

-    def run(self, driver, time_out_ms):
-        output = self.run_single(driver, self.path_or_url(), time_out_ms)
+    def run(self, time_out_ms):
+        driver = self._port.create_driver(worker_number=0, no_timeout=True)
+        try:
+            return self._run_with_driver(driver, time_out_ms)
+        finally:
+            driver.stop()
+
+    def _run_with_driver(self, driver, time_out_ms):
+        output = self.run_single(driver, self.test_path(), time_out_ms)
         self._filter_stderr(output)
         if self.run_failed(output):
…
         return self.parse_output(output)

-    def run_single(self, driver, path_or_url, time_out_ms, should_run_pixel_test=False):
-        return driver.run_test(DriverInput(path_or_url, time_out_ms, image_hash=None, should_run_pixel_test=should_run_pixel_test), stop_when_done=False)
+    def run_single(self, driver, test_path, time_out_ms, should_run_pixel_test=False):
+        return driver.run_test(DriverInput(test_path, time_out_ms, image_hash=None, should_run_pixel_test=should_run_pixel_test), stop_when_done=False)

     def run_failed(self, output):
…
     _chromium_style_result_regex = re.compile(r'^RESULT\s+(?P<name>[^=]+)\s*=\s+(?P<value>\d+(\.\d+)?)\s*(?P<unit>\w+)$')

-    def __init__(self, port, test_name, path_or_url):
-        super(ChromiumStylePerfTest, self).__init__(port, test_name, path_or_url)
+    def __init__(self, port, test_name, test_path):
+        super(ChromiumStylePerfTest, self).__init__(port, test_name, test_path)

     def parse_output(self, output):
…
     _FORCE_GC_FILE = 'resources/force-gc.html'

-    def __init__(self, port, test_name, path_or_url):
-        super(PageLoadingPerfTest, self).__init__(port, test_name, path_or_url)
+    def __init__(self, port, test_name, test_path):
+        super(PageLoadingPerfTest, self).__init__(port, test_name, test_path)
         self.force_gc_test = self._port.host.filesystem.join(self._port.perf_tests_dir(), self._FORCE_GC_FILE)

-    def run_single(self, driver, path_or_url, time_out_ms, should_run_pixel_test=False):
+    def run_single(self, driver, test_path, time_out_ms, should_run_pixel_test=False):
         # Force GC to prevent pageload noise. See https://bugs.webkit.org/show_bug.cgi?id=98203
         super(PageLoadingPerfTest, self).run_single(driver, self.force_gc_test, time_out_ms, False)
-        return super(PageLoadingPerfTest, self).run_single(driver, path_or_url, time_out_ms, should_run_pixel_test)
+        return super(PageLoadingPerfTest, self).run_single(driver, test_path, time_out_ms, should_run_pixel_test)

     def calculate_statistics(self, values):
…
         return result

-    def run(self, driver, time_out_ms):
+    def _run_with_driver(self, driver, time_out_ms):
         results = {}
         results.setdefault(self.test_name(), {'unit': 'ms', 'values': []})

         for i in range(0, 20):
-            output = self.run_single(driver, self.path_or_url(), time_out_ms)
+            output = self.run_single(driver, self.test_path(), time_out_ms)
             if not output or self.run_failed(output):
                 return None
…

 class ReplayPerfTest(PageLoadingPerfTest):
-    def __init__(self, port, test_name, path_or_url):
-        super(ReplayPerfTest, self).__init__(port, test_name, path_or_url)
+    def __init__(self, port, test_name, test_path):
+        super(ReplayPerfTest, self).__init__(port, test_name, test_path)

     def _start_replay_server(self, archive, record):
…
     def prepare(self, time_out_ms):
         filesystem = self._port.host.filesystem
-        path_without_ext = filesystem.splitext(self.path_or_url())[0]
+        path_without_ext = filesystem.splitext(self.test_path())[0]

         self._archive_path = filesystem.join(path_without_ext + '.wpr')
         self._expected_image_path = filesystem.join(path_without_ext + '-expected.png')
-        self._url = filesystem.read_text_file(self.path_or_url()).split('\n')[0]
+        self._url = filesystem.read_text_file(self.test_path()).split('\n')[0]

         if filesystem.isfile(self._archive_path) and filesystem.isfile(self._expected_image_path):
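
As a rough illustration of the new hook (a hypothetical subclass, not part of this patch): subclasses that need custom per-driver behaviour now override _run_with_driver() and let the base run() handle driver creation and teardown, which is exactly the rename PageLoadingPerfTest receives above.

    # Hypothetical subclass, shown only to illustrate the _run_with_driver hook.
    from webkitpy.performance_tests.perftest import PerfTest

    class RepeatingPerfTest(PerfTest):
        # Runs the same page three times through the driver that PerfTest.run()
        # created for us, giving up on the first failed run.
        def _run_with_driver(self, driver, time_out_ms):
            results = {}
            for _ in range(3):
                output = self.run_single(driver, self.test_path(), time_out_ms)
                if not output or self.run_failed(output):
                    return None
                results.update(self.parse_output(output))
            return results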
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py

    r136548 r138042
         output_capture.capture_output()
         try:
-            self.assertEqual(test.run(driver, None),
+            self.assertEqual(test._run_with_driver(driver, None),
                 {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': 5627.314338711378, 'min': 2000, 'unit': 'ms',
                     'values': [i * 1000 for i in range(2, 21)]}})
…
         output_capture.capture_output()
         try:
-            self.assertEqual(test.run(driver, None),
+            self.assertEqual(test._run_with_driver(driver, None),
                 {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': 5627.314338711378, 'min': 2000, 'unit': 'ms',
                     'values': [i * 1000 for i in range(2, 21)]},
…
             test = PageLoadingPerfTest(port, 'some-test', '/path/some-dir/some-test')
             driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20], test)
-            self.assertEqual(test.run(driver, None), None)
+            self.assertEqual(test._run_with_driver(driver, None), None)
         finally:
             actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py

    r137714 r138042
             optparse.make_option("--time-out-ms", default=600 * 1000,
                 help="Set the timeout for each test"),
-            optparse.make_option("--pause-before-testing", dest="pause_before_testing", action="store_true", default=False,
-                help="Pause before running the tests to let user attach a performance monitor."),
             optparse.make_option("--no-results", action="store_false", dest="generate_results", default=True,
                 help="Do no generate results JSON and results page."),
…

         for test in tests:
-            driver = port.create_driver(worker_number=0, no_timeout=True)
-
-            if self._options.pause_before_testing:
-                driver.start()
-                if not self._host.user.confirm("Ready to run test?"):
-                    driver.stop()
-                    return unexpected
-
             _log.info('Running %s (%d of %d)' % (test.test_name(), expected + unexpected + 1, len(tests)))
-            if self._run_single_test(test, driver):
+            if self._run_single_test(test):
                 expected = expected + 1
             else:
…
             _log.info('')

-            driver.stop()
-
         return unexpected

-    def _run_single_test(self, test, driver):
+    def _run_single_test(self, test):
         start_time = time.time()
-
-        new_results = test.run(driver, self._options.time_out_ms)
+        new_results = test.run(self._options.time_out_ms)
         if new_results:
             self._results.update(new_results)
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py

    r136548 r138042
     def run_test(self, test_name):
         runner, port = self.create_runner()
-        driver = MainTest.TestDriver()
-        return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)), driver)
+        return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)))

     def test_run_passing_test(self):
…

         self.assertEqual(TestDriverWithStopCount.stop_count, 6)
-
-    def test_run_test_pause_before_testing(self):
-        class TestDriverWithStartCount(MainTest.TestDriver):
-            start_count = 0
-
-            def start(self):
-                TestDriverWithStartCount.start_count += 1
-
-        runner, port = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
-        tests = self._tests_for_runner(runner, ['inspector/pass.html'])
-
-        output = OutputCapture()
-        output.capture_output()
-        try:
-            unexpected_result_count = runner._run_tests_set(tests, port)
-            self.assertEqual(TestDriverWithStartCount.start_count, 1)
-        finally:
-            stdout, stderr, log = output.restore_output()
-        self.assertEqual(self.normalizeFinishedTime(log),
-            "Ready to run test?\nRunning inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\nFinished: 0.1 s\n\n")

     def test_run_test_set_for_parser_tests(self):