Changeset 159052 in webkit
- Timestamp:
- Nov 11, 2013 9:37:06 AM (10 years ago)
- Location:
- trunk/Tools
- Files:
-
- 5 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Tools/ChangeLog
r159051 r159052 1 2013-11-11 Tamas Gergely <gertom@inf.u-szeged.hu> 2 3 The feeder queue will not feed patches older than one week. 4 https://bugs.webkit.org/show_bug.cgi?id=107152 5 6 Reviewed by Csaba Osztrogonác. 7 8 The feeder queue will not feed patches older than one week. 9 Unit test added to this scenario. 10 11 * Scripts/webkitpy/common/net/bugzilla/bugzilla.py: 12 (BugzillaQueries._parse_attachment_ids_request_query): 13 Handle date parameter. 14 (BugzillaQueries): 15 (BugzillaQueries._fetch_attachment_ids_request_query): 16 Propagate date. 17 (BugzillaQueries.fetch_attachment_ids_from_review_queue): 18 Propagate date. 19 * Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py: 20 (MockBugzillaQueries.fetch_attachment_ids_from_review_queue): 21 Handle date. 22 * Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py: 23 (test_request_page_parsing): 24 Unit test added. 25 * Scripts/webkitpy/tool/bot/feeders.py: 26 (EWSFeeder.feed): 27 Query patch ids with date parameter (now - 1 week). 28 1 29 2013-11-11 Tamas Gergely <gertom@inf.u-szeged.hu> 2 30 -
trunk/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py
r153535 r159052 2 2 # Copyright (c) 2009 Apple Inc. All rights reserved. 3 3 # Copyright (c) 2010 Research In Motion Limited. All rights reserved. 4 # Copyright (c) 2013 University of Szeged. All rights reserved. 4 5 # 5 6 # Redistribution and use in source and binary forms, with or without … … 181 182 for bug_link_cell in soup('td', "first-child")] 182 183 183 def _parse_attachment_ids_request_query(self, page): 184 def _parse_attachment_ids_request_query(self, page, since=None): 185 # Formats 184 186 digits = re.compile("\d+") 185 187 attachment_href = re.compile("attachment.cgi\?id=\d+&action=review") 186 attachment_links = SoupStrainer("a", href=attachment_href) 187 return [int(digits.search(tag["href"]).group(0)) 188 # if no date is given, return all ids 189 if not since: 190 attachment_links = SoupStrainer("a", href=attachment_href) 191 return [int(digits.search(tag["href"]).group(0)) 188 192 for tag in BeautifulSoup(page, parseOnlyThese=attachment_links)] 189 193 190 def _fetch_attachment_ids_request_query(self, query): 191 return self._parse_attachment_ids_request_query(self._load_query(query)) 194 # Parse the main table only 195 date_format = re.compile("\d{4}-\d{2}-\d{2} \d{2}:\d{2}") 196 mtab = SoupStrainer("table", {"class": "requests"}) 197 soup = BeautifulSoup(page, parseOnlyThese=mtab) 198 patch_ids = [] 199 200 for row in soup.findAll("tr"): 201 patch_tag = row.find("a", {"href": attachment_href}) 202 if not patch_tag: 203 continue 204 patch_id = int(digits.search(patch_tag["href"]).group(0)) 205 date_tag = row.find("td", text=date_format) 206 if date_tag and datetime.strptime(date_format.search(date_tag).group(0), "%Y-%m-%d %H:%M") < since: 207 _log.info("Patch is old: %d (%s)" % (patch_id, date_tag)) 208 continue 209 patch_ids.append(patch_id) 210 return patch_ids 211 212 def _fetch_attachment_ids_request_query(self, query, since=None): 213 return self._parse_attachment_ids_request_query(self._load_query(query), since) 192 214 193 215 def 
_parse_quips(self, page): … … 253 275 # NOTE: This is the only client of _fetch_attachment_ids_request_query 254 276 # This method only makes one request to bugzilla. 255 def fetch_attachment_ids_from_review_queue(self ):277 def fetch_attachment_ids_from_review_queue(self, since=None): 256 278 review_queue_url = "request.cgi?action=queue&type=review&group=type" 257 return self._fetch_attachment_ids_request_query(review_queue_url )279 return self._fetch_attachment_ids_request_query(review_queue_url, since) 258 280 259 281 # This only works if your account has edituser privileges. -
trunk/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py
r139224 r159052 252 252 return map(lambda bug: bug.id(), bugs_with_commit_queued_patches) 253 253 254 def fetch_attachment_ids_from_review_queue(self ):254 def fetch_attachment_ids_from_review_queue(self, since=None): 255 255 unreviewed_patches = sum([bug.unreviewed_patches() 256 256 for bug in self._all_bugs()], []) 257 if since: 258 unreviewed_patches = [patch for patch in unreviewed_patches 259 if patch.attach_date() >= since] 257 260 return map(lambda patch: patch.id(), unreviewed_patches) 258 261 -
trunk/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py
r149630 r159052 430 430 queries = BugzillaQueries(None) 431 431 self.assertEqual([40511, 40722, 40723], queries._parse_attachment_ids_request_query(self._sample_request_page)) 432 self.assertEqual([40722, 40723], queries._parse_attachment_ids_request_query(self._sample_request_page, datetime.datetime(2009, 10, 4, 11, 38, 44))) 432 433 433 434 def test_quip_page_parsing(self): -
trunk/Tools/Scripts/webkitpy/tool/bot/feeders.py
r135744 r159052 27 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 28 29 from datetime import datetime, timedelta 29 30 import logging 30 31 … … 91 92 92 93 def feed(self): 93 ids_needing_review = set(self._tool.bugs.queries.fetch_attachment_ids_from_review_queue( ))94 ids_needing_review = set(self._tool.bugs.queries.fetch_attachment_ids_from_review_queue(datetime.today() - timedelta(7))) 94 95 new_ids = ids_needing_review.difference(self._ids_sent_to_server) 95 96 _log.info("Feeding EWS (%s, %s new)" % (pluralize("r? patch", len(ids_needing_review)), len(new_ids)))
Note: See TracChangeset
for help on using the changeset viewer.