|
1 # phabricator.py - simple Phabricator integration |
|
2 # |
|
3 # Copyright 2017 Facebook, Inc. |
|
4 # |
|
5 # This software may be used and distributed according to the terms of the |
|
6 # GNU General Public License version 2 or any later version. |
|
7 """simple Phabricator integration |
|
8 |
|
9 This extension provides a ``phabsend`` command which sends a stack of |
|
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of |
|
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command |
|
12 to update statuses in batch. |
|
13 |
|
14 By default, Phabricator requires ``Test Plan`` which might prevent some |
|
15 changeset from being sent. The requirement could be disabled by changing |
|
16 ``differential.require-test-plan-field`` config server side. |
|
17 |
|
18 Config:: |
|
19 |
|
20 [phabricator] |
|
21 # Phabricator URL |
|
22 url = https://phab.example.com/ |
|
23 |
|
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its |
|
25 # callsign is "FOO". |
|
26 callsign = FOO |
|
27 |
|
28 # curl command to use. If not set (default), use builtin HTTP library to |
|
29 # communicate. If set, use the specified curl command. This could be useful |
|
# if you need to specify advanced options that are not easily supported by
|
31 # the internal library. |
|
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent |
|
33 |
|
34 [auth] |
|
35 example.schemes = https |
|
36 example.prefix = phab.example.com |
|
37 |
|
38 # API token. Get it from https://$HOST/conduit/login/ |
|
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx |
|
40 """ |
|
41 |
|
42 from __future__ import absolute_import |
|
43 |
|
44 import itertools |
|
45 import json |
|
46 import operator |
|
47 import re |
|
48 |
|
49 from mercurial.node import bin, nullid |
|
50 from mercurial.i18n import _ |
|
51 from mercurial import ( |
|
52 cmdutil, |
|
53 context, |
|
54 encoding, |
|
55 error, |
|
56 httpconnection as httpconnectionmod, |
|
57 mdiff, |
|
58 obsutil, |
|
59 parser, |
|
60 patch, |
|
61 registrar, |
|
62 scmutil, |
|
63 smartset, |
|
64 tags, |
|
65 url as urlmod, |
|
66 util, |
|
67 ) |
|
68 from mercurial.utils import ( |
|
69 procutil, |
|
70 stringutil, |
|
71 ) |
|
72 |
|
# Command and config registration tables. The registrar decorators below
# populate them; the extension loader consumes them.
cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)
|
78 |
|
# developer config: phabricator.batchsize
# Number of Differential Revisions prefetched per conduit call when
# resolving ancestors (see querydrev).
configitem(b'phabricator', b'batchsize',
    default=12,
)
# Diffusion repository callsign, e.g. "FOO" for .../diffusion/FOO.
configitem(b'phabricator', b'callsign',
    default=None,
)
# External curl command to use instead of the builtin HTTP library.
configitem(b'phabricator', b'curlcmd',
    default=None,
)
# developer config: phabricator.repophid
# Repository PHID; normally looked up via the callsign and cached here.
configitem(b'phabricator', b'repophid',
    default=None,
)
# Base URL of the Phabricator server.
configitem(b'phabricator', b'url',
    default=None,
)
# When true, phabsend always asks for confirmation (like --confirm).
configitem(b'phabsend', b'confirm',
    default=False,
)
|
99 |
|
# Labels used when writing colorized output (picked up by the color
# extension).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}
|
108 |
|
# Extra flag added to every vcr-capable command (see vcrcommand). Used by
# the test suite to record/replay Conduit HTTP traffic.
_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
       )),
]
|
116 |
|
def vcrcommand(name, flags, spec):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Works like ``command(name, flags, spec)`` but appends _VCR_FLAGS to the
    flag list. When --test-vcr is passed at run time, the third-party ``vcr``
    package is imported (outside of demandimport, which would interfere with
    its patching) and urlmod's HTTP connection classes are replaced with
    vcr stubs for the duration of the call.
    """
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            # kwargs keys are native strings, hence r'test_vcr'.
            cassette = kwargs.pop(r'test_vcr', None)
            if cassette:
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
                            (urlmod, 'httpsconnection', stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        # Preserve the wrapped function's name for help/registration.
        inner.__name__ = fn.__name__
        return command(name, fullflags, spec)(inner)
    return decorate
|
139 |
|
def urlencodenested(params):
    """urlencode *params*, supporting nested lists and dicts.

    For example, {'a': ['b', 'c'], 'd': {'e': 'f'}} is flattened to
    {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} before being urlencoded. The
    encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # Exact-type checks on purpose: subclasses (e.g. sortdict) are
        # treated as opaque leaves, matching the original behavior.
        cls = type(value)
        if cls is list:
            pairs = enumerate(value)
        elif cls is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, subvalue in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), subvalue)
            else:
                flatten(key, subvalue)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
|
160 |
|
# Set to True once the phabricator.auth.token deprecation warning has been
# printed, so readlegacytoken only warns once per process.
printed_token_warning = False
|
162 |
|
def readlegacytoken(repo, url):
    """Transitional support for old phabricator tokens.

    Look up a conduit token for ``url`` in the deprecated
    [phabricator.auth] config section. Returns the token (bytes), or None
    when no group matches ``url``. Prints a deprecation warning (once per
    process) when a legacy token is found.

    Remove before the 4.7 release.
    """
    groups = {}
    for key, val in repo.ui.configitems(b'phabricator.auth'):
        if b'.' not in key:
            repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n")
                         % key)
            continue
        group, setting = key.rsplit(b'.', 1)
        groups.setdefault(group, {})[setting] = val

    token = None
    # items() instead of iteritems(): the latter does not exist on
    # Python 3, and items() behaves identically here on Python 2.
    for group, auth in groups.items():
        if url != auth.get(b'url'):
            continue
        token = auth.get(b'token')
        if token:
            break

    global printed_token_warning

    if token and not printed_token_warning:
        printed_token_warning = True
        repo.ui.warn(_(b'phabricator.auth.token is deprecated - please '
                       b'migrate to auth.phabtoken.\n'))
    return token
|
192 |
|
def readurltoken(repo):
    """Return (conduit url, token), aborting when either is missing.

    The token is currently read from the [auth] config section, falling
    back to the deprecated [phabricator.auth] section. In the future, it
    might make sense to read from .arcconfig and .arcrc as well.
    """
    ui = repo.ui
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if token:
        return url, token

    # Fall back to the deprecated [phabricator.auth] section.
    token = readlegacytoken(repo, url)
    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))
    return url, token
|
221 |
|
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    Raises error.Abort when the response carries a Conduit error_code.
    """
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
    # Copy so the caller's dict is not mutated when injecting the token.
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl command: feed the form data via stdin
        # ("-d @-") and read the response from its stdout.
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP library, honoring [auth] credentials for the URL.
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(url, data=data)
        body = urlopener.open(request).read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = json.loads(body)
    # json.loads produces native-str keys, hence the r'' prefixes.
    if parsed.get(r'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[r'error_code'], parsed[r'error_info']))
        raise error.Abort(msg)
    return parsed[r'result']
|
248 |
|
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    # NOTE(review): bytes separators are only accepted by json.dumps() on
    # Python 2 (where bytes is str); on Python 3 this raises TypeError --
    # confirm which Python versions this file still targets.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
    ui.write(b'%s\n' % s)
|
260 |
|
def getrepophid(repo):
    """Return the repository PHID for the configured callsign, or None."""
    ui = repo.ui
    # developer config: phabricator.repophid -- an explicit (or previously
    # cached) PHID short-circuits the conduit lookup.
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    constraints = {b'constraints': {b'callsigns': [callsign]}}
    query = callconduit(repo, b'diffusion.repository.search', constraints)
    data = query[r'data']
    if not data:
        return None
    repophid = encoding.strtolocal(data[0][r'phid'])
    # Cache the result in the config so later calls skip the lookup.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
|
277 |
|
278 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z') |
|
279 _differentialrevisiondescre = re.compile( |
|
280 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) |
|
281 |
|
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    url, token = readurltoken(repo)
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}    # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message. A match here takes precedence over tags
        # found above (force=1).
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # The group name must be a native str: Python 3 rejects bytes
            # group names in match.group().
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        getnode = lambda d: bin(encoding.unitolocal(
            getdiffmeta(d).get(r'node', b''))) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[r'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[r'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
|
360 |
|
def getdiff(ctx, diffopts):
    """Return the plain-text diff of ``ctx`` against its first parent.

    The output carries no header (user, commit message, etc).
    """
    repo = ctx.repo()
    chunks = patch.diffui(repo, ctx.p1().node(), ctx.node(), None,
                          opts=diffopts)
    # diffui yields (chunk, label) pairs; only the raw text is wanted here.
    return b''.join(chunk for chunk, label in chunks)
|
368 |
|
def creatediff(ctx):
    """Create a "Differential Diff" for ``ctx`` and return the API result.

    Uses the "differential.createrawdiff" conduit API, attaching the
    repository PHID when one is configured/resolvable. Aborts when the
    call returns nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # A git-style diff with a huge context is effectively a full-context
    # diff, so the patch can later be applied losslessly.
    rawdiff = getdiff(ctx, mdiff.diffopts(git=True, context=32767))
    params = {b'diff': rawdiff}
    if repophid:
        params[b'repositoryPHID'] = repophid
    created = callconduit(repo, b'differential.createrawdiff', params)
    if not created:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return created
|
381 |
|
def writediffproperties(ctx, diff):
    """Attach "hg:meta" and "local:commits" properties to ``diff``.

    These record enough commit metadata (user, date, node, parent) that
    patches can later be applied losslessly.
    """
    repo = ctx.repo()
    diffid = diff[r'id']

    # Metadata in the shape phabread reconstructs "hg import" headers from.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': json.dumps(hgmeta),
    })

    # Metadata in the same shape "arc" records.
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': ctx.date()[0],
        },
    }
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': json.dumps(localcommits),
    })
|
408 |
|
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns (revision, diff): the API result of differential.revision.edit
    and the diff (new or ``olddiff``) the revision now points at.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs to decide whether a new diff upload
        # is actually needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[r'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%s' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[r'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
|
471 |
|
def userphids(repo, names):
    """Map a list of user names to their PHIDs; abort on unknown names."""
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo, b'user.search', query)
    data = result[r'data']
    # The API silently drops unknown usernames instead of erroring, so
    # detect missing ones by comparing against what actually resolved.
    found = set(entry[r'fields'][r'username'] for entry in data)
    missing = set(names) - found
    if missing:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(missing)))
    return [entry[r'phid'] for entry in data]
|
485 |
|
@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'))
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[r'object'][r'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already. The regex group name must be a
            # native str: Python 3 rejects bytes group names.
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[r'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                newdesc = encoding.unitolocal(newdesc)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
|
628 |
|
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output. Keys are native strings because they
# come from json.loads output.
_metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
                              (r'node', b'Node ID'), (r'parent', b'Parent ')])
|
633 |
|
def _confirmbeforesend(repo, revs, oldmap):
    """Show a one-line summary per revision and prompt before sending.

    Returns True when the user confirms, False otherwise.
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Known revisions show their D number; new ones are marked NEW.
        drevlabel = b'D%s' % drevid if drevid else _(b'NEW')
        drevdesc = ui.label(drevlabel, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?'
               b'$$ &Yes $$ &No') % url
    # promptchoice returns the 0-based choice index: 0 is "Yes".
    return not ui.promptchoice(prompt)
|
656 |
|
# Status names (as normalized by _getstatusname) accepted as symbols by the
# differential revision query language below.
_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}
|
659 |
|
660 def _getstatusname(drev): |
|
661 """get normalized status name from a Differential Revision""" |
|
662 return drev[r'statusName'].replace(b' ', b'').lower() |
|
663 |
|
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table consumed by parser.parser in _parse below.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
|
678 |
|
679 def _tokenize(text): |
|
680 view = memoryview(text) # zero-copy slice |
|
681 special = b'():+-& ' |
|
682 pos = 0 |
|
683 length = len(text) |
|
684 while pos < length: |
|
685 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special, |
|
686 view[pos:])) |
|
687 if symbol: |
|
688 yield (b'symbol', symbol, pos) |
|
689 pos += len(symbol) |
|
690 else: # special char, ignore space |
|
691 if text[pos] != b' ': |
|
692 yield (text[pos], None, pos) |
|
693 pos += 1 |
|
694 yield (b'end', None, pos) |
|
695 |
|
def _parse(text):
    """Parse *text* into a spec tree using the _elements grammar.

    Raises ParseError when the parser stopped before consuming the whole
    input.
    """
    p = parser.parser(_elements)
    tree, pos = p.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
|
701 |
|
702 def _parsedrev(symbol): |
|
703 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None""" |
|
704 if symbol.startswith(b'D') and symbol[1:].isdigit(): |
|
705 return int(symbol[1:]) |
|
706 if symbol.isdigit(): |
|
707 return int(symbol) |
|
708 |
|
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # ":X": everything X collects is also an ancestor query.
        subdrevs, subancestors = _prefetchdrevs(tree[1])
        drevs |= subdrevs
        ancestordrevs |= subdrevs
        ancestordrevs |= subancestors
    else:
        # Operators and groups: union the results of all operands.
        for subtree in tree[1:]:
            subdrevs, subancestors = _prefetchdrevs(subtree)
            drevs |= subdrevs
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
|
729 |
|
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, b'differential.query', params)
        # Fill prefetched with the result, indexed both by phid and by
        # numeric id so later lookups by either key hit the cache.
        for drev in drevs:
            prefetched[drev[r'phid']] = drev
            prefetched[int(drev[r'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" edges, collecting
        # ids top-down, then reversed to bottom-up order.
        visited = set()
        result = []
        queue = [{r'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[r'id'] in visited:
                continue
            visited.add(drev[r'id'])
            result.append(int(drev[r'id']))
            auxiliary = drev.get(r'auxiliary', {})
            depends = auxiliary.get(r'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestor query,
    # speculatively fetch a window of batchsize ids below it as well.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({r'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status symbols select among the prefetched valid ids.
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # NOTE(review): getattr with a bytes attribute name only works
            # on Python 2 -- confirm which Python versions are targeted.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
|
847 |
|
def getdescfromdrev(drev):
    """Reconstruct a commit message from a "Differential Revision" dict.

    Similar to the differential.getcommitmessage API, but only the title,
    summary, test plan, and URL fields are used. Empty sections are
    dropped; the remaining ones are joined with blank lines.
    """
    uri = b'Differential Revision: %s' % drev[r'uri']
    testplan = drev[r'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    sections = [drev[r'title'], drev[r'summary'].rstrip(), testplan, uri]
    return b'\n\n'.join(s for s in sections if s)
|
861 |
|
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.

    Returns an empty dict if the diff carries neither property.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    # "hg:meta" is authoritative; fall back to "local:commits" only when
    # it is absent.
    if not meta and props.get(r'local:commits'):
        commits = props[r'local:commits']
        # Pick one commit deterministically by the smallest key (commit
        # hash).  The previous code sorted the *values* — plain dicts —
        # which raises TypeError on Python 3 when there is more than one
        # commit, since dicts are not orderable there.
        commit = commits[min(commits)]
        meta = {
            # local:commits carries a bare epoch; zone info is lost
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        if len(commit.get(r'parents', ())) >= 1:
            meta[r'parent'] = commit[r'parents'][0]
    return meta or {}
|
911 |
|
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    def latestdiffid(drev):
        # a drev may carry several diffs; only the newest one is imported
        return max(int(v) for v in drev[r'diffs'])

    # Prefetch hg:meta property for all diffs in a single conduit call
    diffids = sorted({latestdiffid(drev) for drev in drevs})
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[r'id'])

        diffid = latestdiffid(drev)
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        headerlines = [b'# HG changeset patch\n']
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[k], meta[k]))

        content = b'%s%s\n%s' % (b''.join(headerlines), desc, body)
        write(encoding.unitolocal(content))
|
942 |
|
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'))
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    # --stack is sugar for the ":" (ancestors) prefix of the spec language
    drevspec = b':(%s)' % spec if opts.get(b'stack') else spec
    readpatch(repo, querydrev(repo, drevspec), ui.write)
|
969 |
|
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
            ], _(b'DREVSPEC [OPTIONS]'))
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    # the status flags are mutually exclusive
    flags = [n for n in (b'accept', b'reject', b'abandon', b'reclaim')
             if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': b'true'} for f in flags]

    drevs = querydrev(repo, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # an optional comment is attached only to the last revision
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(repo, b'differential.revision.edit',
                        {b'objectIdentifier': drev[r'phid'],
                         b'transactions': actions})
|
998 |
|
999 templatekeyword = registrar.templatekeyword() |
|
1000 |
|
@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # assumes _differentialrevisiondescre has named groups "url" and "id"
    # matching the "Differential Revision:" trailer — TODO confirm pattern
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return {
            # named-group lookup must use a str name: m.group(b'url')
            # raises on Python 3 (bytes names are only accepted where
            # bytes is str, i.e. Python 2)
            b'url': m.group(r'url'),
            # bytes has no .format() on Python 3; use %-formatting
            # (PEP 461), which behaves identically on Python 2
            b'id': b'D%s' % m.group(r'id'),
        }
    # implicit None when the changeset has no Differential Revision trailer