Changeset 124349 in webkit
- Timestamp:
- Aug 1, 2012 11:23:25 AM (12 years ago)
- Location:
- trunk/Tools
- Files:
-
- 3 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Tools/ChangeLog
r124344 r124349 1 2012-08-01 Ryosuke Niwa <rniwa@webkit.org> 2 3 run-perf-tests throws an exception when the output json is malformed 4 https://bugs.webkit.org/show_bug.cgi?id=92887 5 6 Reviewed by Dirk Pranke. 7 8 Catch exceptions and gracefully fail. Also split _generate_json into smaller methods. 9 10 * Scripts/webkitpy/performance_tests/perftestsrunner.py: 11 (PerfTestsRunner): 12 (PerfTestsRunner.run): 13 (PerfTestsRunner._generate_output): Extracted from _generate_json. 14 (PerfTestsRunner._merge_source_json): Ditto; catch all exceptions since there are too many 15 exceptions to consider here. 16 (PerfTestsRunner._merge_outputs): Ditto. 17 (PerfTestsRunner._generate_output_files): Extracted from _generate_json. 18 * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py: 19 (_test_run_with_json_output): Don't assert logs when we expect a non-zero exit code. 20 (create_runner_and_setup_results_template): Extracted from test_run_generates_results_page. 21 (test_run_generates_results_page): 22 (test_run_with_bad_output_json): Added. 23 (test_run_with_bad_json_source): Added. 24 (test_run_with_upload_json): 25 1 26 2012-08-01 Thiago Marcos P. Santos <thiago.santos@intel.com> 2 27 -
trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
r123985 r124349 50 50 class PerfTestsRunner(object): 51 51 _default_branch = 'webkit-trunk' 52 _EXIT_CODE_BAD_BUILD = -1 53 _EXIT_CODE_BAD_JSON = -2 54 _EXIT_CODE_FAILED_UPLOADING = -3 55 _EXIT_CODE_BAD_PREPARATION = -4 52 EXIT_CODE_BAD_BUILD = -1 53 EXIT_CODE_BAD_SOURCE_JSON = -2 54 EXIT_CODE_BAD_MERGE = -3 55 EXIT_CODE_FAILED_UPLOADING = -4 56 EXIT_CODE_BAD_PREPARATION = -5 56 57 57 58 def __init__(self, args=None, port=None): … … 143 144 if not self._port.check_build(needs_http=False): 144 145 _log.error("Build not up to date for %s" % self._port._path_to_driver()) 145 return self. _EXIT_CODE_BAD_BUILD146 return self.EXIT_CODE_BAD_BUILD 146 147 147 148 tests = self._collect_tests() … … 150 151 for test in tests: 151 152 if not test.prepare(self._options.time_out_ms): 152 return self. _EXIT_CODE_BAD_PREPARATION153 return self.EXIT_CODE_BAD_PREPARATION 153 154 154 155 unexpected = self._run_tests_set(sorted(list(tests), key=lambda test: test.test_name()), self._port) … … 158 159 # FIXME: Add --branch or auto-detect the branch we're in 159 160 test_results_server = options.test_results_server 160 branch = self._default_branch if test_results_server else None 161 build_number = int(options.build_number) if options.build_number else None 162 163 if not self._generate_json(self._timestamp, options.output_json_path, options.source_json_path, 164 not test_results_server, 165 branch, options.platform, options.builder_name, build_number) and not unexpected: 166 return self._EXIT_CODE_BAD_JSON 161 162 output = self._generate_output(self._timestamp, options.platform, options.builder_name, options.build_number) 163 164 if options.source_json_path: 165 output = self._merge_source_json(options.source_json_path, output) 166 if not output: 167 return self.EXIT_CODE_BAD_SOURCE_JSON 168 169 if not test_results_server: 170 output = self._merge_outputs(self._options.output_json_path, output) 171 if not output: 172 return self.EXIT_CODE_BAD_MERGE 173 174 
self._generate_output_files(self._options.output_json_path, output, not test_results_server) 167 175 168 176 if test_results_server and not self._upload_json(test_results_server, options.output_json_path): 169 return self. _EXIT_CODE_FAILED_UPLOADING177 return self.EXIT_CODE_FAILED_UPLOADING 170 178 171 179 return unexpected 172 180 173 def _generate_json(self, timestamp, output_json_path, source_json_path, should_generate_results_page, 174 branch, platform, builder_name, build_number): 175 176 contents = {'timestamp': int(timestamp), 'results': self._results} 181 def _generate_output(self, timestamp, platform, builder_name, build_number): 182 contents = {'results': self._results} 177 183 for (name, path) in self._port.repository_paths(): 178 184 contents[name + '-revision'] = self._host.scm().svn_revision(path) 179 185 180 for key, value in {'branch': branch, 'platform': platform, 'builder-name': builder_name, 'build-number': build_number}.items(): 186 for key, value in {'timestamp': int(timestamp), 'branch': self._default_branch, 'platform': platform, 187 'builder-name': builder_name, 'build-number': int(build_number) if build_number else None}.items(): 181 188 if value: 182 189 contents[key] = value 183 190 191 return contents 192 193 def _merge_source_json(self, source_json_path, output): 194 try: 195 source_json_file = self._host.filesystem.open_text_file_for_reading(source_json_path) 196 source_json = json.load(source_json_file) 197 return dict(source_json.items() + output.items()) 198 except Exception, error: 199 _log.error("Failed to merge source JSON file %s: %s" % (source_json_path, error)) 200 return None 201 202 def _merge_outputs(self, output_json_path, output): 203 if not self._host.filesystem.isfile(output_json_path): 204 return [output] 205 try: 206 existing_outputs = json.loads(self._host.filesystem.read_text_file(output_json_path)) 207 return existing_outputs + [output] 208 except Exception, error: 209 _log.error("Failed to merge output JSON file 
%s: %s" % (output_json_path, error)) 210 return None 211 212 def _generate_output_files(self, output_json_path, output, should_generate_results_page): 184 213 filesystem = self._host.filesystem 185 succeeded = False 186 if source_json_path: 187 try: 188 source_json_file = filesystem.open_text_file_for_reading(source_json_path) 189 source_json = json.load(source_json_file) 190 contents = dict(source_json.items() + contents.items()) 191 succeeded = True 192 except IOError, error: 193 _log.error("Failed to read %s: %s" % (source_json_path, error)) 194 except ValueError, error: 195 _log.error("Failed to parse %s: %s" % (source_json_path, error)) 196 except TypeError, error: 197 _log.error("Failed to merge JSON files: %s" % error) 198 if not succeeded: 199 return False 200 201 if should_generate_results_page: 202 if filesystem.isfile(output_json_path): 203 existing_contents = json.loads(filesystem.read_text_file(output_json_path)) 204 existing_contents.append(contents) 205 contents = existing_contents 206 else: 207 contents = [contents] 208 209 serialized_contents = json.dumps(contents) 210 filesystem.write_text_file(output_json_path, serialized_contents) 214 215 json_output = json.dumps(output) 216 filesystem.write_text_file(output_json_path, json_output) 211 217 212 218 if should_generate_results_page: … … 218 224 219 225 results_page = template.replace('<?WebKitPerfTestRunnerInsertionPoint?>', 220 '<script>%s</script><script id="json">%s</script>' % (jquery, serialized_contents))226 '<script>%s</script><script id="json">%s</script>' % (jquery, json_output)) 221 227 222 228 filesystem.write_text_file(filesystem.splitext(output_json_path)[0] + '.html', results_page) 223 224 return True225 229 226 230 def _upload_json(self, test_results_server, json_path, file_uploader=FileUploader): -
trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
r124148 r124349 247 247 stdout, stderr, logs = output_capture.restore_output() 248 248 249 self.assertEqual(logs, '\n'.join([ 250 'Running 2 tests', 251 'Running Bindings/event-target-wrapper.html (1 of 2)', 252 'RESULT Bindings: event-target-wrapper= 1489.05 ms', 253 'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms', 254 '', 255 'Running inspector/pass.html (2 of 2)', 256 'RESULT group_name: test_name= 42 ms', 257 '', 258 ''])) 249 if not expected_exit_code: 250 self.assertEqual(logs, '\n'.join([ 251 'Running 2 tests', 252 'Running Bindings/event-target-wrapper.html (1 of 2)', 253 'RESULT Bindings: event-target-wrapper= 1489.05 ms', 254 'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms', 255 '', 256 'Running inspector/pass.html (2 of 2)', 257 'RESULT group_name: test_name= 42 ms', 258 '', 259 ''])) 259 260 260 261 return uploaded[0] … … 270 271 "webkit-revision": 5678, "branch": "webkit-trunk"}) 271 272 273 def create_runner_and_setup_results_template(self, args): 274 runner, port = self.create_runner(args) 275 filesystem = port.host.filesystem 276 filesystem.write_text_file(runner._base_path + '/resources/results-template.html', 'BEGIN<?WebKitPerfTestRunnerInsertionPoint?>END') 277 filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/dromaeo/web/lib/jquery-1.6.4.js', 'jquery content') 278 return runner, port 279 272 280 def test_run_generates_results_page(self): 273 runner, port = self.create_runner (args=['--output-json-path=/mock-checkout/output.json'])281 runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json']) 274 282 filesystem = port.host.filesystem 275 filesystem.write_text_file(runner._base_path + '/resources/results-template.html',276 'BEGIN<?WebKitPerfTestRunnerInsertionPoint?>END')277 filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/dromaeo/web/lib/jquery-1.6.4.js',278 'jquery content')279 280 283 
self._test_run_with_json_output(runner, filesystem) 281 284 282 285 expected_entry = {"timestamp": 123456789, "results": {"Bindings/event-target-wrapper": 283 286 {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"}, 284 "inspector/pass.html:group_name:test_name": 42}, "webkit-revision": 5678 }287 "inspector/pass.html:group_name:test_name": 42}, "webkit-revision": 5678, "branch": "webkit-trunk"} 285 288 286 289 self.maxDiff = None … … 295 298 self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'), 296 299 'BEGIN<script>jquery content</script><script id="json">' + json_output + '</script>END') 300 301 def test_run_with_bad_output_json(self): 302 runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json']) 303 port.host.filesystem.write_text_file('/mock-checkout/output.json', 'bad json') 304 self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE) 305 port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}') 306 self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE) 297 307 298 308 def test_run_with_json_source(self): … … 308 318 "key": "value"}) 309 319 320 def test_run_with_bad_json_source(self): 321 runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json', 322 '--source-json-path=/mock-checkout/source.json', '--test-results-server=some.host']) 323 self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON) 324 port.host.filesystem.write_text_file('/mock-checkout/source.json', 'bad json') 325 self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON) 326 port.host.filesystem.write_text_file('/mock-checkout/source.json', '["another bad 
json"]') 327 self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON) 328 310 329 def test_run_with_multiple_repositories(self): 311 330 runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json', … … 329 348 self.assertEqual(generated_json['build-number'], 123) 330 349 331 self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=False, expected_exit_code= -3)350 self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=False, expected_exit_code=PerfTestsRunner.EXIT_CODE_FAILED_UPLOADING) 332 351 333 352 def test_upload_json(self):
Note: See TracChangeset
for help on using the changeset viewer.