--- a/pylib/mozreview/mozreview/resources/batch_review_request.py
+++ b/pylib/mozreview/mozreview/resources/batch_review_request.py
@@ -243,55 +243,66 @@ class BatchReviewRequestResource(WebAPIR
'repo_id': ['Not an integer']}}
except Repository.DoesNotExist:
logger.warn('repo %d does not exist' % repo_id)
return INVALID_FORM_DATA, {
'fields': {
'repo_id': ['Invalid repo_id']}}
if not repo.is_accessible_by(user):
+ logger.warn('repo %s not accessible by %s' % (repo.id, user))
return self.get_no_access_error(request)
try:
with transaction.atomic():
squashed_rr, node_to_rid, review_data, warnings = self._process_submission(
request, local_site, repo, identifier, commits)
+ logger.info('%s: finished processing %d' % (
+ identifier, squashed_rr.id))
return 200, {
self.item_result_key: {
'nodes': node_to_rid,
'squashed_rr': squashed_rr.id,
'review_requests': review_data,
'warnings': warnings,
}}
# Need to catch outside the transaction so db changes are rolled back.
except DiffProcessingException:
+            logger.exception('diff processing exception')
return INVALID_FORM_DATA, {
'fields': {
'commits': ['error processing squashed diff']}}
except SubmissionException as e:
+            logger.exception('submission exception')
return e.value
def _process_submission(self, request, local_site, repo, identifier, commits):
user = request.user
+
+ logger.info('processing batch submission %s to %s with %d commits' % (
+ identifier, repo.name, len(commits)))
+
try:
squashed_rr = ReviewRequest.objects.get(commit_id=identifier,
repository=repo)
if not squashed_rr.is_mutable_by(user):
logger.warn('%s not mutable by %s' % (squashed_rr.id, user))
raise SubmissionException(self.get_no_access_error(request))
if squashed_rr.status != ReviewRequest.PENDING_REVIEW:
logger.warn('%s is not a pending review request; cannot edit' %
squashed_rr.id)
raise SubmissionException((INVALID_FORM_DATA, {
'fields': {
'identifier': ['Parent review request is '
'submitted or discarded']}}))
+ logger.info('using squashed review request %d' % squashed_rr.id)
+
except ReviewRequest.DoesNotExist:
squashed_rr = ReviewRequest.objects.create(
user=user, repository=repo, commit_id=identifier,
local_site=local_site)
squashed_rr.extra_data.update({
'p2rb': True,
'p2rb.is_squashed': True,
@@ -306,16 +317,18 @@ class BatchReviewRequestResource(WebAPIR
# The diffs on diffsets can't be updated, only replaced. So always
# construct the diffset.
try:
# TODO consider moving diffset creation outside of the transaction
# since it can be quite time consuming.
# Calling create_from_data() instead of create_from_upload() skips
# diff size validation. We allow unlimited diff sizes, so no biggie.
+ logger.info('%s: generating squashed diffset for %d' % (
+ identifier, squashed_rr.id))
diffset = DiffSet.objects.create_from_data(
repository=repo,
diff_file_name='diff',
diff_file_contents=commits['squashed']['diff'],
parent_diff_file_name=None,
parent_diff_file_contents=None,
diffset_history=None,
basedir='',
@@ -327,31 +340,38 @@ class BatchReviewRequestResource(WebAPIR
update_diffset_history(squashed_rr, diffset)
diffset.save()
except Exception:
logger.exception('error processing squashed diff')
raise DiffProcessingException()
update_review_request_draft_diffset(squashed_rr, diffset)
+ logger.info('%s: updated squashed diffset for %d' % (
+ identifier, squashed_rr.id))
# TODO: We need to take into account the commits data from the squashed
# review request's draft. This data represents the mapping from commit
# to rid in the event that we would have published. We're overwritting
# this data. This will only come into play if we start trusting the server
# instead of the client when matching review request ids. Bug 1047516
previous_commits = get_previous_commits(squashed_rr)
remaining_nodes = get_remaining_nodes(previous_commits)
discard_on_publish_rids = get_discard_on_publish_rids(squashed_rr)
unpublished_rids = get_unpublished_rids(squashed_rr)
unclaimed_rids = get_unclaimed_rids(previous_commits,
discard_on_publish_rids,
unpublished_rids)
+ logger.info('%s: %d previous commits; %d discard on publish; '
+ '%d unpublished' % (identifier, len(previous_commits),
+ len(discard_on_publish_rids),
+ len(unpublished_rids)))
+
# Previously pushed nodes which have been processed and had their review
# request updated or did not require updating.
processed_nodes = set()
node_to_rid = {}
# A mapping from review request id to the corresponding ReviewRequest.
review_requests = {}
@@ -370,44 +390,55 @@ class BatchReviewRequestResource(WebAPIR
if node not in remaining_nodes:
continue
# If the commit appears in an old review request, by definition of
# commits deriving from content, the commit has not changed and there
# is nothing to update. Update our accounting and move on.
rid = remaining_nodes[node]
+ logger.info('%s: commit %s unchanged; using existing request %d' % (
+ identifier, node, rid))
+
del remaining_nodes[node]
unclaimed_rids.remove(rid)
processed_nodes.add(node)
node_to_rid[node] = rid
rr = ReviewRequest.objects.get(pk=rid)
review_requests[rid] = rr
review_data[rid] = get_review_request_data(rr)
try:
discard_on_publish_rids.remove(rid)
except ValueError:
pass
+ logger.info('%s: %d/%d commits mapped exactly' % (
+ identifier, len(processed_nodes),
+ len(commits['individual'])))
+
# Find commits that map to a previous version.
for commit in commits['individual']:
node = commit['id']
if node in processed_nodes:
continue
# The client may have sent obsolescence data saying which commit this
# commit has derived from. Use that data (if available) to try to find
# a mapping to an old review request.
for precursor in commit['precursors']:
rid = remaining_nodes.get(precursor)
if not rid:
continue
+ logger.info('%s: found precursor to commit %s; '
+ 'using existing review request %d' % (
+ identifier, node, rid))
+
del remaining_nodes[precursor]
unclaimed_rids.remove(rid)
rr = ReviewRequest.objects.get(pk=rid)
draft, warns = update_review_request(local_site, request,
reviewer_cache, rr, commit)
squashed_reviewers.update(u for u in draft.target_people.all())
warnings.extend(warns)
@@ -418,16 +449,20 @@ class BatchReviewRequestResource(WebAPIR
try:
discard_on_publish_rids.remove(rid)
except ValueError:
pass
break
+ logger.info('%s: %d/%d mapped exactly or to precursors' % (
+ identifier, len(processed_nodes),
+ len(commits['individual'])))
+
# Now do a pass over the commits that didn't map cleanly.
for commit in commits['individual']:
node = commit['id']
if node in processed_nodes:
continue
# We haven't seen this commit before *and* our mapping above didn't
# do anything useful with it.
@@ -438,16 +473,20 @@ class BatchReviewRequestResource(WebAPIR
# For now, match the commit up against the next one in the index.
# The unclaimed rids list contains review requests which were created
# when previously updating this review identifier, but not published.
# If we have more commits than were previously published we'll start
# reusing these private review requests before creating new ones.
if unclaimed_rids:
assumed_old_rid = unclaimed_rids.pop(0)
+
+ logger.info('%s: mapping %s to unclaimed request %d' % (
+ identifier, node, assumed_old_rid))
+
rr = ReviewRequest.objects.get(pk=assumed_old_rid)
draft, warns = update_review_request(local_site, request,
reviewer_cache, rr, commit)
squashed_reviewers.update(u for u in draft.target_people.all())
warnings.extend(warns)
processed_nodes.add(commit['id'])
node_to_rid[node] = assumed_old_rid
review_requests[assumed_old_rid] = rr
@@ -466,31 +505,34 @@ class BatchReviewRequestResource(WebAPIR
repository=repo,
commit_id=None,
local_site=local_site)
rr.extra_data['p2rb'] = True
rr.extra_data['p2rb.is_squashed'] = False
rr.extra_data['p2rb.identifier'] = identifier
rr.save(update_fields=['extra_data'])
- logger.info('created commit review request #%d' % rr.id)
+ logger.info('%s: created review request %d for commit %s' % (
+ identifier, rr.id, node))
draft, warns = update_review_request(local_site, request,
reviewer_cache, rr, commit)
squashed_reviewers.update(u for u in draft.target_people.all())
warnings.extend(warns)
processed_nodes.add(commit['id'])
node_to_rid[node] = rr.id
review_requests[rr.id] = rr
review_data[rr.id] = get_review_request_data(rr)
unpublished_rids.append(rr.id)
# At this point every incoming commit has been accounted for.
# If there are any remaining review requests, they must belong to
# deleted commits. (Or, we made a mistake and updated the wrong review
# request)
+ logger.info('%s: %d unclaimed review requests left over' % (
+ identifier, len(unclaimed_rids)))
for rid in unclaimed_rids:
rr = ReviewRequest.objects.get(pk=rid)
if rr.public and rid not in discard_on_publish_rids:
# This review request has already been published so we'll need to
# discard it when we publish the squashed review request.
discard_on_publish_rids.append(rid)
elif not rr.public and rid not in unpublished_rids: