Changeset 138042 in webkit
- Timestamp:
- Dec 18, 2012 10:55:53 AM
- Location:
- trunk/Tools
- Files:
- 6 edited
Legend:
- Unmodified (leading space)
- Added (leading "+")
- Removed (leading "-")
trunk/Tools/ChangeLog
(r138038 → r138042)

+2012-12-18  Ryosuke Niwa  <rniwa@webkit.org>
+
+        Consider removing --pause-before-testing option
+        https://bugs.webkit.org/show_bug.cgi?id=105248
+
+        Reviewed by Eric Seidel.
+
+        Remove --pause-before-testing option.
+
+        * Tools/Scripts/webkitpy/layout_tests/port/driver.py:
+        (DriverProxy.start): Removed.
+        * Scripts/webkitpy/performance_tests/perftest.py:
+        (PerfTest.__init__):
+        (PerfTest.test_path): Renamed from path_or_url.
+        (PerfTest.run):
+        (PerfTest): Removed "driver" argument.
+        (PerfTest._run_with_driver):
+        (PerfTest.run_single):
+        (ChromiumStylePerfTest.__init__):
+        (PageLoadingPerfTest.__init__):
+        (PageLoadingPerfTest.run_single):
+        (PageLoadingPerfTest._run_with_driver): Renamed from run so that PerfTest.run can create a driver for us.
+        (ReplayPerfTest.__init__):
+        (ReplayPerfTest.prepare):
+        * Scripts/webkitpy/performance_tests/perftest_unittest.py:
+        (TestPageLoadingPerfTest.test_run):
+        (TestPageLoadingPerfTest.test_run_with_memory_output):
+        (TestPageLoadingPerfTest.test_run_with_bad_output):
+        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
+        (PerfTestsRunner._parse_args):
+        (PerfTestsRunner._run_tests_set): Removed the code to create a driver and pause before running the test.
+        (PerfTestsRunner._run_single_test): Removed "driver" argument.
+        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
+        (run_test):
+        (test_run_test_pause_before_testing): Removed.
+
 2012-12-18  Alberto Garcia  <agarcia@igalia.com>
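The heart of the patch is an ownership move: PerfTest.run() now creates, uses, and stops its own driver instead of receiving one from PerfTestsRunner. A minimal, self-contained sketch of the new contract; the Stub* classes below are illustrative stand-ins, not the real webkitpy classes.

# Stand-ins for the real Port and Driver; these names are hypothetical.
class StubDriver(object):
    def run_test(self, test_path, time_out_ms):
        return 'output for %s' % test_path

    def stop(self):
        print('driver stopped')


class StubPort(object):
    def create_driver(self, worker_number=0, no_timeout=True):
        return StubDriver()


class StubPerfTest(object):
    def __init__(self, port, test_path):
        self._port = port
        self._test_path = test_path

    # Mirrors PerfTest.run() after this patch: the test, not the runner,
    # creates the driver and guarantees it is stopped afterwards.
    def run(self, time_out_ms):
        driver = self._port.create_driver(worker_number=0, no_timeout=True)
        try:
            return driver.run_test(self._test_path, time_out_ms)
        finally:
            driver.stop()


# Callers now pass only a timeout; no driver argument anywhere.
print(StubPerfTest(StubPort(), 'some-test.html').run(600 * 1000))

The try/finally mirrors the real implementation: the driver is stopped even when the test raises, which previously depended on the runner remembering to call driver.stop().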
trunk/Tools/Scripts/webkitpy/layout_tests/port/driver.py
(r137714 → r138042)

         return self._running_drivers[cmd_line_key].run_test(driver_input, stop_when_done)
 
-    def start(self):
-        # FIXME: Callers shouldn't normally call this, since this routine
-        # may not be specifying the correct combination of pixel test and
-        # per_test args.
-        #
-        # The only reason we have this routine at all is so the perftestrunner
-        # can pause before running a test; it might be better to push that
-        # into run_test() directly.
-        self._driver.start(self._port.get_option('pixel_tests'), [])
-
     def has_crashed(self):
         return any(driver.has_crashed() for driver in self._running_drivers.values())
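The FIXME deleted here records why DriverProxy.start() was always suspect: a standalone start() cannot know the pixel-test and per-test arguments the eventual run needs, so only run_test() has enough information to start the driver correctly. A hedged sketch of that lazy-start idea; LazyDriver and its methods are hypothetical, not webkitpy API.

class LazyDriver(object):
    # Hypothetical, simplified driver: it (re)starts itself inside
    # run_test() with a configuration derived from the actual test,
    # which is exactly the information a standalone start() never had.
    def __init__(self):
        self._started_with = None

    def _start_if_needed(self, pixel_tests):
        # (Re)start only when the requested configuration changes.
        if self._started_with != pixel_tests:
            self._started_with = pixel_tests
            print('starting driver (pixel_tests=%r)' % pixel_tests)

    def run_test(self, test_path, should_run_pixel_test=False):
        self._start_if_needed(should_run_pixel_test)
        return 'output for %s' % test_path


driver = LazyDriver()
driver.run_test('fast/a.html')                               # starts once
driver.run_test('fast/b.html')                               # reuses it
driver.run_test('pixel/c.html', should_run_pixel_test=True)  # restarts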
trunk/Tools/Scripts/webkitpy/performance_tests/perftest.py
(r137180 → r138042)

 class PerfTest(object):
-    def __init__(self, port, test_name, path_or_url):
+    def __init__(self, port, test_name, test_path):
         self._port = port
         self._test_name = test_name
-        self._path_or_url = path_or_url
+        self._test_path = test_path
 
     def test_name(self):
         return self._test_name
 
-    def path_or_url(self):
-        return self._path_or_url
+    def test_path(self):
+        return self._test_path
 
     def prepare(self, time_out_ms):
         return True
 
-    def run(self, driver, time_out_ms):
-        output = self.run_single(driver, self.path_or_url(), time_out_ms)
+    def run(self, time_out_ms):
+        driver = self._port.create_driver(worker_number=0, no_timeout=True)
+        try:
+            return self._run_with_driver(driver, time_out_ms)
+        finally:
+            driver.stop()
+
+    def _run_with_driver(self, driver, time_out_ms):
+        output = self.run_single(driver, self.test_path(), time_out_ms)
         self._filter_stderr(output)
         if self.run_failed(output):
…
         return self.parse_output(output)
 
-    def run_single(self, driver, path_or_url, time_out_ms, should_run_pixel_test=False):
-        return driver.run_test(DriverInput(path_or_url, time_out_ms, image_hash=None, should_run_pixel_test=should_run_pixel_test), stop_when_done=False)
+    def run_single(self, driver, test_path, time_out_ms, should_run_pixel_test=False):
+        return driver.run_test(DriverInput(test_path, time_out_ms, image_hash=None, should_run_pixel_test=should_run_pixel_test), stop_when_done=False)
 
     def run_failed(self, output):
…
     _chromium_style_result_regex = re.compile(r'^RESULT\s+(?P<name>[^=]+)\s*=\s+(?P<value>\d+(\.\d+)?)\s*(?P<unit>\w+)$')
 
-    def __init__(self, port, test_name, path_or_url):
-        super(ChromiumStylePerfTest, self).__init__(port, test_name, path_or_url)
+    def __init__(self, port, test_name, test_path):
+        super(ChromiumStylePerfTest, self).__init__(port, test_name, test_path)
 
     def parse_output(self, output):
…
     _FORCE_GC_FILE = 'resources/force-gc.html'
 
-    def __init__(self, port, test_name, path_or_url):
-        super(PageLoadingPerfTest, self).__init__(port, test_name, path_or_url)
+    def __init__(self, port, test_name, test_path):
+        super(PageLoadingPerfTest, self).__init__(port, test_name, test_path)
         self.force_gc_test = self._port.host.filesystem.join(self._port.perf_tests_dir(), self._FORCE_GC_FILE)
 
-    def run_single(self, driver, path_or_url, time_out_ms, should_run_pixel_test=False):
+    def run_single(self, driver, test_path, time_out_ms, should_run_pixel_test=False):
         # Force GC to prevent pageload noise. See https://bugs.webkit.org/show_bug.cgi?id=98203
         super(PageLoadingPerfTest, self).run_single(driver, self.force_gc_test, time_out_ms, False)
-        return super(PageLoadingPerfTest, self).run_single(driver, path_or_url, time_out_ms, should_run_pixel_test)
+        return super(PageLoadingPerfTest, self).run_single(driver, test_path, time_out_ms, should_run_pixel_test)
 
     def calculate_statistics(self, values):
…
         return result
 
-    def run(self, driver, time_out_ms):
+    def _run_with_driver(self, driver, time_out_ms):
         results = {}
         results.setdefault(self.test_name(), {'unit': 'ms', 'values': []})
 
         for i in range(0, 20):
-            output = self.run_single(driver, self.path_or_url(), time_out_ms)
+            output = self.run_single(driver, self.test_path(), time_out_ms)
             if not output or self.run_failed(output):
                 return None
…
 class ReplayPerfTest(PageLoadingPerfTest):
-    def __init__(self, port, test_name, path_or_url):
-        super(ReplayPerfTest, self).__init__(port, test_name, path_or_url)
+    def __init__(self, port, test_name, test_path):
+        super(ReplayPerfTest, self).__init__(port, test_name, test_path)
 
     def _start_replay_server(self, archive, record):
…
     def prepare(self, time_out_ms):
         filesystem = self._port.host.filesystem
-        path_without_ext = filesystem.splitext(self.path_or_url())[0]
+        path_without_ext = filesystem.splitext(self.test_path())[0]
 
         self._archive_path = filesystem.join(path_without_ext + '.wpr')
         self._expected_image_path = filesystem.join(path_without_ext + '-expected.png')
-        self._url = filesystem.read_text_file(self.path_or_url()).split('\n')[0]
+        self._url = filesystem.read_text_file(self.test_path()).split('\n')[0]
 
         if filesystem.isfile(self._archive_path) and filesystem.isfile(self._expected_image_path):
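Renaming PageLoadingPerfTest.run to _run_with_driver turns the hierarchy into a template method: the base class run() owns driver acquisition and release, and subclasses override only the part that uses the driver. A self-contained sketch of the shape, with hypothetical class names.

class FakeDriver(object):
    # Hypothetical driver stub so the sketch runs standalone.
    def run_test(self, test_path, time_out_ms):
        return 1

    def stop(self):
        pass


class BaseTest(object):
    # run() is the template method: it owns driver setup and teardown,
    # and the try/finally stops the driver even if the test raises.
    def __init__(self, create_driver):
        self._create_driver = create_driver

    def run(self, time_out_ms):
        driver = self._create_driver()
        try:
            return self._run_with_driver(driver, time_out_ms)
        finally:
            driver.stop()

    def _run_with_driver(self, driver, time_out_ms):
        return [driver.run_test('test.html', time_out_ms)]


class RepeatingTest(BaseTest):
    # Like PageLoadingPerfTest, it overrides _run_with_driver rather than
    # run, so one driver instance is reused across all 20 iterations.
    def _run_with_driver(self, driver, time_out_ms):
        return [driver.run_test('test.html', time_out_ms) for _ in range(20)]


print(len(RepeatingTest(FakeDriver).run(600 * 1000)))  # prints 20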
trunk/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py
(r136548 → r138042)

         output_capture.capture_output()
         try:
-            self.assertEqual(test.run(driver, None),
+            self.assertEqual(test._run_with_driver(driver, None),
                 {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': 5627.314338711378, 'min': 2000, 'unit': 'ms',
                 'values': [i * 1000 for i in range(2, 21)]}})
…
         output_capture.capture_output()
         try:
-            self.assertEqual(test.run(driver, None),
+            self.assertEqual(test._run_with_driver(driver, None),
                 {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': 5627.314338711378, 'min': 2000, 'unit': 'ms',
                 'values': [i * 1000 for i in range(2, 21)]},
…
             test = PageLoadingPerfTest(port, 'some-test', '/path/some-dir/some-test')
             driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20], test)
-            self.assertEqual(test.run(driver, None), None)
+            self.assertEqual(test._run_with_driver(driver, None), None)
         finally:
             actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
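Since run() now always creates a real driver, the unit tests call _run_with_driver() directly, which keeps a seam for injecting a canned driver. A condensed, runnable sketch of the pattern; MockDriver and AveragingTest here are simplified stand-ins for the classes in this file, not the real ones.

import unittest


class MockDriver(object):
    # Returns one canned value per call, in order.
    def __init__(self, values):
        self._values = list(values)

    def run_test(self, driver_input, stop_when_done=False):
        return self._values.pop(0)

    def stop(self):
        pass


class AveragingTest(object):
    # Stand-in for a PageLoadingPerfTest-style _run_with_driver that
    # aggregates 20 repeated runs into a single statistic.
    def _run_with_driver(self, driver, time_out_ms):
        values = [driver.run_test(None) for _ in range(20)]
        return {'avg': sum(values) / float(len(values))}


class AveragingTestCase(unittest.TestCase):
    def test_run_with_driver(self):
        driver = MockDriver(i * 1000 for i in range(1, 21))
        self.assertEqual(AveragingTest()._run_with_driver(driver, None),
                         {'avg': 10500.0})


if __name__ == '__main__':
    unittest.main()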
trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
(r137714 → r138042)

             optparse.make_option("--time-out-ms", default=600 * 1000,
                 help="Set the timeout for each test"),
-            optparse.make_option("--pause-before-testing", dest="pause_before_testing", action="store_true", default=False,
-                help="Pause before running the tests to let user attach a performance monitor."),
             optparse.make_option("--no-results", action="store_false", dest="generate_results", default=True,
                 help="Do no generate results JSON and results page."),
…
         for test in tests:
-            driver = port.create_driver(worker_number=0, no_timeout=True)
-
-            if self._options.pause_before_testing:
-                driver.start()
-                if not self._host.user.confirm("Ready to run test?"):
-                    driver.stop()
-                    return unexpected
-
             _log.info('Running %s (%d of %d)' % (test.test_name(), expected + unexpected + 1, len(tests)))
-            if self._run_single_test(test, driver):
+            if self._run_single_test(test):
                 expected = expected + 1
             else:
…
             _log.info('')
 
-            driver.stop()
-
         return unexpected
 
-    def _run_single_test(self, test, driver):
+    def _run_single_test(self, test):
         start_time = time.time()
-
-        new_results = test.run(driver, self._options.time_out_ms)
+        new_results = test.run(self._options.time_out_ms)
         if new_results:
             self._results.update(new_results)
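With the driver bookkeeping and pause prompt gone, _run_tests_set reduces to a counting loop. A condensed sketch of the remaining control flow; the free-standing function name and logger wiring are illustrative, and the test objects are assumed to expose the test_name()/run() API shown in the diffs above.

import logging
import time

_log = logging.getLogger(__name__)


def run_tests_set(tests, time_out_ms):
    # Condensed from PerfTestsRunner._run_tests_set after this patch:
    # no driver creation, no pause prompt, just run each test and count
    # the unexpected failures.
    expected, unexpected = 0, 0
    for test in tests:
        _log.info('Running %s (%d of %d)' % (test.test_name(), expected + unexpected + 1, len(tests)))
        start_time = time.time()
        if test.run(time_out_ms):  # test.run() now owns its driver
            expected += 1
        else:
            unexpected += 1
        _log.info('Finished: %0.1f s' % (time.time() - start_time))
        _log.info('')
    return unexpected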
trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
(r136548 → r138042)

     def run_test(self, test_name):
         runner, port = self.create_runner()
-        driver = MainTest.TestDriver()
-        return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)), driver)
+        return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)))
 
     def test_run_passing_test(self):
…
         self.assertEqual(TestDriverWithStopCount.stop_count, 6)
 
-    def test_run_test_pause_before_testing(self):
-        class TestDriverWithStartCount(MainTest.TestDriver):
-            start_count = 0
-
-            def start(self):
-                TestDriverWithStartCount.start_count += 1
-
-        runner, port = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
-        tests = self._tests_for_runner(runner, ['inspector/pass.html'])
-
-        output = OutputCapture()
-        output.capture_output()
-        try:
-            unexpected_result_count = runner._run_tests_set(tests, port)
-            self.assertEqual(TestDriverWithStartCount.start_count, 1)
-        finally:
-            stdout, stderr, log = output.restore_output()
-        self.assertEqual(self.normalizeFinishedTime(log),
-            "Ready to run test?\nRunning inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\nFinished: 0.1 s\n\n")
-
     def test_run_test_set_for_parser_tests(self):