Changeset 107284 in webkit


Timestamp:
Feb 9, 2012 1:26:34 PM
Author:
abarth@webkit.org
Message:

run-perf-tests should have an option to pause before running tests so we can attach Instruments
https://bugs.webkit.org/show_bug.cgi?id=78271

Reviewed by Ryosuke Niwa.

This lets me attach Instruments to profile the performance of the test.

  • Scripts/webkitpy/layout_tests/port/chromium.py:

(ChromiumDriver.start):

  • Scripts/webkitpy/layout_tests/port/driver.py:

(Driver.start):
(DriverProxy.start):

  • Scripts/webkitpy/layout_tests/port/server_process.py:

(ServerProcess.start):

  • Scripts/webkitpy/layout_tests/port/test.py:

(TestDriver.start):

  • Scripts/webkitpy/layout_tests/port/webkit.py:

(WebKitDriver.start):

  • Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py:

(get_tests_run.RecordingTestDriver.start):

  • Scripts/webkitpy/performance_tests/perftestsrunner.py:

(PerfTestsRunner._parse_args):
(PerfTestsRunner._run_tests_set):

  • Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:

(start):
(test_run_test_set_kills_drt_per_run.TestDriverWithStopCount):
(test_run_test_set_kills_drt_per_run):
(test_run_test_set_kills_drt_per_run.TestDriverWithStartCount):
(test_run_test_set_kills_drt_per_run.TestDriverWithStartCount.start):
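
For context, the new option added here would typically be invoked along these lines when profiling a single test (the test path is a placeholder, not part of the patch):

    Tools/Scripts/run-perf-tests --pause-before-testing <path to performance test>

With --pause-before-testing, the runner starts the test driver (DumpRenderTree) and then waits at a "Ready to run test?" prompt, which gives you a window to attach Instruments or another profiler to the process before any measurements begin.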

Location:
trunk/Tools
Files:
9 edited

Legend:

+ added line
- removed line
  unchanged context
  • trunk/Tools/ChangeLog

    r107275 r107284

    +2012-02-09  Adam Barth  <abarth@webkit.org>
    +
    +        run-perf-tests should have an option to pause before running tests so we can attach Instruments
    +        https://bugs.webkit.org/show_bug.cgi?id=78271
    +
    +        Reviewed by Ryosuke Niwa.
    +
    +        This lets me attach instruments to profile the performance of the test.
    +
    +        * Scripts/webkitpy/layout_tests/port/chromium.py:
    +        (ChromiumDriver.start):
    +        * Scripts/webkitpy/layout_tests/port/driver.py:
    +        (Driver.start):
    +        (DriverProxy.start):
    +        * Scripts/webkitpy/layout_tests/port/server_process.py:
    +        (ServerProcess.start):
    +        * Scripts/webkitpy/layout_tests/port/test.py:
    +        (TestDriver.start):
    +        * Scripts/webkitpy/layout_tests/port/webkit.py:
    +        (WebKitDriver.start):
    +        * Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py:
    +        (get_tests_run.RecordingTestDriver.start):
    +        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
    +        (PerfTestsRunner._parse_args):
    +        (PerfTestsRunner._run_tests_set):
    +        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
    +        (start):
    +        (test_run_test_set_kills_drt_per_run.TestDriverWithStopCount):
    +        (test_run_test_set_kills_drt_per_run):
    +        (test_run_test_set_kills_drt_per_run.TestDriverWithStartCount):
    +        (test_run_test_set_kills_drt_per_run.TestDriverWithStartCount.start):
    +
     2012-02-09  Adam Barth  <abarth@webkit.org>
     
  • trunk/Tools/Scripts/webkitpy/layout_tests/port/chromium.py

    r106687 r107284

                 crash=crash, crashed_process_name=crashed_process_name, test_time=run_time, timeout=timeout, error=error)
     
    +    def start(self):
    +        if not self._proc:
    +            self._start()
    +
         def stop(self):
             if not self._proc:
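
    The ChromiumDriver hunk above, and the ServerProcess and WebKitDriver hunks further down, all add the same idempotent start(): launch the child process only if it is not already running. A minimal standalone sketch of that idiom (generic names here, not the actual webkitpy classes; the demo assumes a POSIX 'sleep' binary):

        import subprocess


        class LazyProcessDriver(object):
            """Owns a child process that is started on first use."""

            def __init__(self, cmd):
                self._cmd = cmd
                self._proc = None

            def _start(self):
                # The pre-existing private helper that actually launches the process.
                self._proc = subprocess.Popen(self._cmd)

            def start(self):
                # New public hook: safe to call early (e.g. to pause before a test
                # run) and a no-op if the process is already running.
                if not self._proc:
                    self._start()

            def stop(self):
                if not self._proc:
                    return
                self._proc.terminate()
                self._proc.wait()
                self._proc = None


        if __name__ == '__main__':
            driver = LazyProcessDriver(['sleep', '5'])
            driver.start()   # launches the process
            driver.start()   # already running, does nothing
            driver.stop()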
  • trunk/Tools/Scripts/webkitpy/layout_tests/port/driver.py

    r105981 r107284

             return False
     
    +    def start(self):
    +        raise NotImplementedError('Driver.start')
    +
         def stop(self):
             raise NotImplementedError('Driver.stop')
    …
             return self._driver.has_crashed() or self._reftest_driver.has_crashed()
     
    +    def start(self):
    +        self._driver.start()
    +        self._reftest_driver.start()
    +
         def stop(self):
             self._driver.stop()
  • trunk/Tools/Scripts/webkitpy/layout_tests/port/server_process.py

    r106079 r107284

                 self._wait_for_data_and_update_buffers(deadline)
     
    +    def start(self):
    +        if not self._proc:
    +            self._start()
    +
         def stop(self):
             if not self._proc:
  • trunk/Tools/Scripts/webkitpy/layout_tests/port/test.py

    r107136 r107284

                 test_time=time.time() - start_time, timeout=test.timeout, error=test.error)
     
    +    def start(self):
    +        pass
    +
         def stop(self):
             pass
  • trunk/Tools/Scripts/webkitpy/layout_tests/port/webkit.py

    r107113 r107284

             return block
     
    +    def start(self):
    +        if not self._server_process:
    +            self._start()
    +
         def stop(self):
             if self._server_process:
  • trunk/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py

    r107151 r107284

                 self._current_test_batch = None
     
    +        def start(self):
    +            pass
    +
             def stop(self):
                 self._current_test_batch = None
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py

    r107275 r107284

                 optparse.make_option("--time-out-ms", default=600 * 1000,
                     help="Set the timeout for each test"),
    +            optparse.make_option("--pause-before-testing", dest="pause_before_testing", action="store_true", default=False,
    +                help="Pause before running the tests to let user attach a performance monitor."),
                 optparse.make_option("--output-json-path",
                     help="Filename of the JSON file that summaries the results"),
    …
                 driver = port.create_driver(worker_number=1, no_timeout=True)
     
    +            if self._options.pause_before_testing:
    +                driver.start()
    +                if not self._host.user.confirm("Ready to run test?"):
    +                    driver.stop()
    +                    return unexpected
    +
                 relative_test_path = self._host.filesystem.relpath(test, self._base_path)
                 self._printer.write('Running %s (%d of %d)' % (relative_test_path, expected + unexpected + 1, len(tests)))
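
    Putting the new option and the pause block together, the control flow amounts to the following. This is a simplified, self-contained sketch: the stub driver and the confirm callback stand in for webkitpy's port/driver/user objects, and the real logic lives in PerfTestsRunner._parse_args and PerfTestsRunner._run_tests_set.

        import optparse


        class StubDriver(object):
            def start(self):
                print('DumpRenderTree started -- attach Instruments now')

            def stop(self):
                print('DumpRenderTree stopped')


        def parse_args(argv):
            # Mirrors the make_option() entry added to _parse_args above.
            parser = optparse.OptionParser()
            parser.add_option('--pause-before-testing', dest='pause_before_testing',
                              action='store_true', default=False,
                              help='Pause before running the tests to let user attach a performance monitor.')
            return parser.parse_args(argv)


        def run_tests_set(tests, options, confirm):
            """Heavily simplified per-test loop showing only the new pause logic."""
            unexpected = 0
            for test in tests:
                driver = StubDriver()
                if options.pause_before_testing:
                    driver.start()
                    if not confirm('Ready to run test?'):
                        driver.stop()
                        return unexpected
                # ... run the test against the driver and tally results here ...
                driver.stop()
            return unexpected


        if __name__ == '__main__':
            options, _ = parse_args(['--pause-before-testing'])
            # Auto-confirm for the demo; webkitpy prompts the user on stderr.
            run_tests_set(['inspector/pass.html'], options, lambda message: True)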
  • trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py

    r107144 r107284

                 return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)
     
    +        def start(self):
    +            """do nothing"""
    +
             def stop(self):
                 """do nothing"""
    …
                 stop_count = 0
     
    -            def __init__(self):
    -                TestDriverWithStopCount.sotp_count = 0
    -
                 def stop(self):
                     TestDriverWithStopCount.stop_count += 1
    …
             unexpected_result_count = runner._run_tests_set(tests, runner._port)
             self.assertEqual(TestDriverWithStopCount.stop_count, 6)
    +
    +    def test_run_test_set_kills_drt_per_run(self):
    +        class TestDriverWithStartCount(MainTest.TestDriver):
    +            start_count = 0
    +
    +            def start(self):
    +                TestDriverWithStartCount.start_count += 1
    +
    +        buildbot_output = array_stream.ArrayStream()
    +        runner = self.create_runner(buildbot_output, args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
    +
    +        dirname = runner._base_path + '/inspector/'
    +        tests = [dirname + 'pass.html']
    +
    +        try:
    +            output = OutputCapture()
    +            output.capture_output()
    +            unexpected_result_count = runner._run_tests_set(tests, runner._port)
    +            self.assertEqual(TestDriverWithStartCount.start_count, 1)
    +        finally:
    +            _, stderr, logs = output.restore_output()
    +            self.assertEqual(stderr, "Ready to run test?\n")
    +            self.assertEqual(logs, "Running inspector/pass.html (1 of 1)\n\n")
     
         def test_run_test_set_for_parser_tests(self):
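
    The new test above relies on webkitpy's OutputCapture to grab both the confirmation prompt (written to stderr) and the logged "Running ..." line. For readers unfamiliar with that helper, here is the same capture/restore idea using only the standard library; this is an illustration of the pattern, not the class webkitpy actually uses.

        import logging
        import sys

        try:
            from StringIO import StringIO   # Python 2, the vintage of this changeset
        except ImportError:
            from io import StringIO         # Python 3


        class SimpleOutputCapture(object):
            """capture_output()/restore_output() in the spirit of webkitpy's OutputCapture."""

            def capture_output(self):
                # Redirect stdout/stderr and attach a log handler so output can be asserted on.
                self._saved = (sys.stdout, sys.stderr)
                sys.stdout, sys.stderr = StringIO(), StringIO()
                self._log_stream = StringIO()
                self._log_handler = logging.StreamHandler(self._log_stream)
                logging.getLogger().addHandler(self._log_handler)

            def restore_output(self):
                # Put the real streams back and return what was captured.
                stdout, stderr = sys.stdout.getvalue(), sys.stderr.getvalue()
                sys.stdout, sys.stderr = self._saved
                logging.getLogger().removeHandler(self._log_handler)
                return stdout, stderr, self._log_stream.getvalue()


        if __name__ == '__main__':
            capture = SimpleOutputCapture()
            capture.capture_output()
            sys.stderr.write('Ready to run test?\n')
            logging.getLogger().warning('Running inspector/pass.html (1 of 1)')
            _, stderr, logs = capture.restore_output()
            assert stderr == 'Ready to run test?\n'
            assert logs == 'Running inspector/pass.html (1 of 1)\n'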