phabricator: add --blocker argument to phabsend to specify blocking reviewers...
Ian Moody
r42637:f33d3ee1 default
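Summary of the change, with an illustrative sketch (not part of the patch itself): phabsend already resolves --reviewer usernames to PHIDs via userphids() and attaches them with a single ``reviewers.add`` transaction sent to differential.revision.edit. The new --blocker option reuses the same lookup, but wraps each resolved PHID in ``blocking(...)`` so Differential treats those users as blocking reviewers. The following minimal, self-contained sketch (with made-up PHIDs) shows how the two lists end up combined into one transaction; the real logic lives in the phabsend() hunk further down in the diff.

def build_reviewer_action(reviewer_phids, blocker_phids):
    """Return the 'reviewers.add' transaction phabsend would append, or None.

    This mirrors the new phabsend logic: plain reviewers are passed as bare
    PHIDs, blocking reviewers are wrapped in b'blocking(...)'.
    """
    phids = list(reviewer_phids)
    # Blocking reviewers are expressed by wrapping each PHID in b'blocking(...)',
    # which differential.revision.edit understands.
    phids.extend(b'blocking(%s)' % phid for phid in blocker_phids)
    if not phids:
        return None
    return {b'type': b'reviewers.add', b'value': phids}

# Hypothetical PHIDs, for illustration only:
print(build_reviewer_action([b'PHID-USER-aaaa'], [b'PHID-USER-bbbb']))
# -> {b'type': b'reviewers.add',
#     b'value': [b'PHID-USER-aaaa', b'blocking(PHID-USER-bbbb)']}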
@@ -1,1082 +1,1090 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 # be specifying the version(s) of Mercurial they are tested with, or
80 # be specifying the version(s) of Mercurial they are tested with, or
81 # leave the attribute unspecified.
81 # leave the attribute unspecified.
82 testedwith = 'ships-with-hg-core'
82 testedwith = 'ships-with-hg-core'
83
83
84 cmdtable = {}
84 cmdtable = {}
85 command = registrar.command(cmdtable)
85 command = registrar.command(cmdtable)
86
86
87 configtable = {}
87 configtable = {}
88 configitem = registrar.configitem(configtable)
88 configitem = registrar.configitem(configtable)
89
89
90 # developer config: phabricator.batchsize
90 # developer config: phabricator.batchsize
91 configitem(b'phabricator', b'batchsize',
91 configitem(b'phabricator', b'batchsize',
92 default=12,
92 default=12,
93 )
93 )
94 configitem(b'phabricator', b'callsign',
94 configitem(b'phabricator', b'callsign',
95 default=None,
95 default=None,
96 )
96 )
97 configitem(b'phabricator', b'curlcmd',
97 configitem(b'phabricator', b'curlcmd',
98 default=None,
98 default=None,
99 )
99 )
100 # developer config: phabricator.repophid
100 # developer config: phabricator.repophid
101 configitem(b'phabricator', b'repophid',
101 configitem(b'phabricator', b'repophid',
102 default=None,
102 default=None,
103 )
103 )
104 configitem(b'phabricator', b'url',
104 configitem(b'phabricator', b'url',
105 default=None,
105 default=None,
106 )
106 )
107 configitem(b'phabsend', b'confirm',
107 configitem(b'phabsend', b'confirm',
108 default=False,
108 default=False,
109 )
109 )
110
110
111 colortable = {
111 colortable = {
112 b'phabricator.action.created': b'green',
112 b'phabricator.action.created': b'green',
113 b'phabricator.action.skipped': b'magenta',
113 b'phabricator.action.skipped': b'magenta',
114 b'phabricator.action.updated': b'magenta',
114 b'phabricator.action.updated': b'magenta',
115 b'phabricator.desc': b'',
115 b'phabricator.desc': b'',
116 b'phabricator.drev': b'bold',
116 b'phabricator.drev': b'bold',
117 b'phabricator.node': b'',
117 b'phabricator.node': b'',
118 }
118 }
119
119
120 _VCR_FLAGS = [
120 _VCR_FLAGS = [
121 (b'', b'test-vcr', b'',
121 (b'', b'test-vcr', b'',
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 b', otherwise will mock all http requests using the specified vcr file.'
123 b', otherwise will mock all http requests using the specified vcr file.'
124 b' (ADVANCED)'
124 b' (ADVANCED)'
125 )),
125 )),
126 ]
126 ]
127
127
128 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
128 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
129 fullflags = flags + _VCR_FLAGS
129 fullflags = flags + _VCR_FLAGS
130 def hgmatcher(r1, r2):
130 def hgmatcher(r1, r2):
131 if r1.uri != r2.uri or r1.method != r2.method:
131 if r1.uri != r2.uri or r1.method != r2.method:
132 return False
132 return False
133 r1params = r1.body.split(b'&')
133 r1params = r1.body.split(b'&')
134 r2params = r2.body.split(b'&')
134 r2params = r2.body.split(b'&')
135 return set(r1params) == set(r2params)
135 return set(r1params) == set(r2params)
136
136
137 def sanitiserequest(request):
137 def sanitiserequest(request):
138 request.body = re.sub(
138 request.body = re.sub(
139 r'cli-[a-z0-9]+',
139 r'cli-[a-z0-9]+',
140 r'cli-hahayouwish',
140 r'cli-hahayouwish',
141 request.body
141 request.body
142 )
142 )
143 return request
143 return request
144
144
145 def sanitiseresponse(response):
145 def sanitiseresponse(response):
146 if r'set-cookie' in response[r'headers']:
146 if r'set-cookie' in response[r'headers']:
147 del response[r'headers'][r'set-cookie']
147 del response[r'headers'][r'set-cookie']
148 return response
148 return response
149
149
150 def decorate(fn):
150 def decorate(fn):
151 def inner(*args, **kwargs):
151 def inner(*args, **kwargs):
152 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
152 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
153 if cassette:
153 if cassette:
154 import hgdemandimport
154 import hgdemandimport
155 with hgdemandimport.deactivated():
155 with hgdemandimport.deactivated():
156 import vcr as vcrmod
156 import vcr as vcrmod
157 import vcr.stubs as stubs
157 import vcr.stubs as stubs
158 vcr = vcrmod.VCR(
158 vcr = vcrmod.VCR(
159 serializer=r'json',
159 serializer=r'json',
160 before_record_request=sanitiserequest,
160 before_record_request=sanitiserequest,
161 before_record_response=sanitiseresponse,
161 before_record_response=sanitiseresponse,
162 custom_patches=[
162 custom_patches=[
163 (urlmod, r'httpconnection',
163 (urlmod, r'httpconnection',
164 stubs.VCRHTTPConnection),
164 stubs.VCRHTTPConnection),
165 (urlmod, r'httpsconnection',
165 (urlmod, r'httpsconnection',
166 stubs.VCRHTTPSConnection),
166 stubs.VCRHTTPSConnection),
167 ])
167 ])
168 vcr.register_matcher(r'hgmatcher', hgmatcher)
168 vcr.register_matcher(r'hgmatcher', hgmatcher)
169 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
169 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
170 return fn(*args, **kwargs)
170 return fn(*args, **kwargs)
171 return fn(*args, **kwargs)
171 return fn(*args, **kwargs)
172 inner.__name__ = fn.__name__
172 inner.__name__ = fn.__name__
173 inner.__doc__ = fn.__doc__
173 inner.__doc__ = fn.__doc__
174 return command(name, fullflags, spec, helpcategory=helpcategory,
174 return command(name, fullflags, spec, helpcategory=helpcategory,
175 optionalrepo=optionalrepo)(inner)
175 optionalrepo=optionalrepo)(inner)
176 return decorate
176 return decorate
177
177
178 def urlencodenested(params):
178 def urlencodenested(params):
179 """like urlencode, but works with nested parameters.
179 """like urlencode, but works with nested parameters.
180
180
181 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
181 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
182 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
182 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
183 urlencode. Note: the encoding is consistent with PHP's http_build_query.
183 urlencode. Note: the encoding is consistent with PHP's http_build_query.
184 """
184 """
185 flatparams = util.sortdict()
185 flatparams = util.sortdict()
186 def process(prefix, obj):
186 def process(prefix, obj):
187 if isinstance(obj, bool):
187 if isinstance(obj, bool):
188 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
188 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
189 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
189 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
190 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
190 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
191 if items is None:
191 if items is None:
192 flatparams[prefix] = obj
192 flatparams[prefix] = obj
193 else:
193 else:
194 for k, v in items(obj):
194 for k, v in items(obj):
195 if prefix:
195 if prefix:
196 process(b'%s[%s]' % (prefix, k), v)
196 process(b'%s[%s]' % (prefix, k), v)
197 else:
197 else:
198 process(k, v)
198 process(k, v)
199 process(b'', params)
199 process(b'', params)
200 return util.urlreq.urlencode(flatparams)
200 return util.urlreq.urlencode(flatparams)
201
201
202 def readurltoken(ui):
202 def readurltoken(ui):
203 """return conduit url, token and make sure they exist
203 """return conduit url, token and make sure they exist
204
204
205 Currently read from [auth] config section. In the future, it might
205 Currently read from [auth] config section. In the future, it might
206 make sense to read from .arcconfig and .arcrc as well.
206 make sense to read from .arcconfig and .arcrc as well.
207 """
207 """
208 url = ui.config(b'phabricator', b'url')
208 url = ui.config(b'phabricator', b'url')
209 if not url:
209 if not url:
210 raise error.Abort(_(b'config %s.%s is required')
210 raise error.Abort(_(b'config %s.%s is required')
211 % (b'phabricator', b'url'))
211 % (b'phabricator', b'url'))
212
212
213 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
213 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
214 token = None
214 token = None
215
215
216 if res:
216 if res:
217 group, auth = res
217 group, auth = res
218
218
219 ui.debug(b"using auth.%s.* for authentication\n" % group)
219 ui.debug(b"using auth.%s.* for authentication\n" % group)
220
220
221 token = auth.get(b'phabtoken')
221 token = auth.get(b'phabtoken')
222
222
223 if not token:
223 if not token:
224 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
224 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
225 % (url,))
225 % (url,))
226
226
227 return url, token
227 return url, token
228
228
229 def callconduit(ui, name, params):
229 def callconduit(ui, name, params):
230 """call Conduit API, params is a dict. return json.loads result, or None"""
230 """call Conduit API, params is a dict. return json.loads result, or None"""
231 host, token = readurltoken(ui)
231 host, token = readurltoken(ui)
232 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
232 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
233 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
233 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
234 params = params.copy()
234 params = params.copy()
235 params[b'api.token'] = token
235 params[b'api.token'] = token
236 data = urlencodenested(params)
236 data = urlencodenested(params)
237 curlcmd = ui.config(b'phabricator', b'curlcmd')
237 curlcmd = ui.config(b'phabricator', b'curlcmd')
238 if curlcmd:
238 if curlcmd:
239 sin, sout = procutil.popen2(b'%s -d @- %s'
239 sin, sout = procutil.popen2(b'%s -d @- %s'
240 % (curlcmd, procutil.shellquote(url)))
240 % (curlcmd, procutil.shellquote(url)))
241 sin.write(data)
241 sin.write(data)
242 sin.close()
242 sin.close()
243 body = sout.read()
243 body = sout.read()
244 else:
244 else:
245 urlopener = urlmod.opener(ui, authinfo)
245 urlopener = urlmod.opener(ui, authinfo)
246 request = util.urlreq.request(pycompat.strurl(url), data=data)
246 request = util.urlreq.request(pycompat.strurl(url), data=data)
247 with contextlib.closing(urlopener.open(request)) as rsp:
247 with contextlib.closing(urlopener.open(request)) as rsp:
248 body = rsp.read()
248 body = rsp.read()
249 ui.debug(b'Conduit Response: %s\n' % body)
249 ui.debug(b'Conduit Response: %s\n' % body)
250 parsed = pycompat.rapply(
250 parsed = pycompat.rapply(
251 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
251 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
252 else x,
252 else x,
253 json.loads(body)
253 json.loads(body)
254 )
254 )
255 if parsed.get(b'error_code'):
255 if parsed.get(b'error_code'):
256 msg = (_(b'Conduit Error (%s): %s')
256 msg = (_(b'Conduit Error (%s): %s')
257 % (parsed[b'error_code'], parsed[b'error_info']))
257 % (parsed[b'error_code'], parsed[b'error_info']))
258 raise error.Abort(msg)
258 raise error.Abort(msg)
259 return parsed[b'result']
259 return parsed[b'result']
260
260
261 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
261 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
262 def debugcallconduit(ui, repo, name):
262 def debugcallconduit(ui, repo, name):
263 """call Conduit API
263 """call Conduit API
264
264
265 Call parameters are read from stdin as a JSON blob. Result will be written
265 Call parameters are read from stdin as a JSON blob. Result will be written
266 to stdout as a JSON blob.
266 to stdout as a JSON blob.
267 """
267 """
268 # json.loads only accepts bytes from 3.6+
268 # json.loads only accepts bytes from 3.6+
269 rawparams = encoding.unifromlocal(ui.fin.read())
269 rawparams = encoding.unifromlocal(ui.fin.read())
270 # json.loads only returns unicode strings
270 # json.loads only returns unicode strings
271 params = pycompat.rapply(lambda x:
271 params = pycompat.rapply(lambda x:
272 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
272 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
273 json.loads(rawparams)
273 json.loads(rawparams)
274 )
274 )
275 # json.dumps only accepts unicode strings
275 # json.dumps only accepts unicode strings
276 result = pycompat.rapply(lambda x:
276 result = pycompat.rapply(lambda x:
277 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
277 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
278 callconduit(ui, name, params)
278 callconduit(ui, name, params)
279 )
279 )
280 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
280 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
281 ui.write(b'%s\n' % encoding.unitolocal(s))
281 ui.write(b'%s\n' % encoding.unitolocal(s))
282
282
283 def getrepophid(repo):
283 def getrepophid(repo):
284 """given callsign, return repository PHID or None"""
284 """given callsign, return repository PHID or None"""
285 # developer config: phabricator.repophid
285 # developer config: phabricator.repophid
286 repophid = repo.ui.config(b'phabricator', b'repophid')
286 repophid = repo.ui.config(b'phabricator', b'repophid')
287 if repophid:
287 if repophid:
288 return repophid
288 return repophid
289 callsign = repo.ui.config(b'phabricator', b'callsign')
289 callsign = repo.ui.config(b'phabricator', b'callsign')
290 if not callsign:
290 if not callsign:
291 return None
291 return None
292 query = callconduit(repo.ui, b'diffusion.repository.search',
292 query = callconduit(repo.ui, b'diffusion.repository.search',
293 {b'constraints': {b'callsigns': [callsign]}})
293 {b'constraints': {b'callsigns': [callsign]}})
294 if len(query[b'data']) == 0:
294 if len(query[b'data']) == 0:
295 return None
295 return None
296 repophid = query[b'data'][0][b'phid']
296 repophid = query[b'data'][0][b'phid']
297 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
297 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
298 return repophid
298 return repophid
299
299
300 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
300 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
301 _differentialrevisiondescre = re.compile(
301 _differentialrevisiondescre = re.compile(
302 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
302 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
303
303
304 def getoldnodedrevmap(repo, nodelist):
304 def getoldnodedrevmap(repo, nodelist):
305 """find previous nodes that has been sent to Phabricator
305 """find previous nodes that has been sent to Phabricator
306
306
307 return {node: (oldnode, Differential diff, Differential Revision ID)}
307 return {node: (oldnode, Differential diff, Differential Revision ID)}
308 for node in nodelist with known previous sent versions, or associated
308 for node in nodelist with known previous sent versions, or associated
309 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
309 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
310 be ``None``.
310 be ``None``.
311
311
312 Examines commit messages like "Differential Revision:" to get the
312 Examines commit messages like "Differential Revision:" to get the
313 association information.
313 association information.
314
314
315 If such commit message line is not found, examines all precursors and their
315 If such commit message line is not found, examines all precursors and their
316 tags. Tags with format like "D1234" are considered a match and the node
316 tags. Tags with format like "D1234" are considered a match and the node
317 with that tag, and the number after "D" (ex. 1234) will be returned.
317 with that tag, and the number after "D" (ex. 1234) will be returned.
318
318
319 The ``old node``, if not None, is guaranteed to be the last diff of
319 The ``old node``, if not None, is guaranteed to be the last diff of
320 corresponding Differential Revision, and exist in the repo.
320 corresponding Differential Revision, and exist in the repo.
321 """
321 """
322 unfi = repo.unfiltered()
322 unfi = repo.unfiltered()
323 nodemap = unfi.changelog.nodemap
323 nodemap = unfi.changelog.nodemap
324
324
325 result = {} # {node: (oldnode?, lastdiff?, drev)}
325 result = {} # {node: (oldnode?, lastdiff?, drev)}
326 toconfirm = {} # {node: (force, {precnode}, drev)}
326 toconfirm = {} # {node: (force, {precnode}, drev)}
327 for node in nodelist:
327 for node in nodelist:
328 ctx = unfi[node]
328 ctx = unfi[node]
329 # For tags like "D123", put them into "toconfirm" to verify later
329 # For tags like "D123", put them into "toconfirm" to verify later
330 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
330 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
331 for n in precnodes:
331 for n in precnodes:
332 if n in nodemap:
332 if n in nodemap:
333 for tag in unfi.nodetags(n):
333 for tag in unfi.nodetags(n):
334 m = _differentialrevisiontagre.match(tag)
334 m = _differentialrevisiontagre.match(tag)
335 if m:
335 if m:
336 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
336 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
337 continue
337 continue
338
338
339 # Check commit message
339 # Check commit message
340 m = _differentialrevisiondescre.search(ctx.description())
340 m = _differentialrevisiondescre.search(ctx.description())
341 if m:
341 if m:
342 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
342 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
343
343
344 # Double check if tags are genuine by collecting all old nodes from
344 # Double check if tags are genuine by collecting all old nodes from
345 # Phabricator, and expect precursors overlap with it.
345 # Phabricator, and expect precursors overlap with it.
346 if toconfirm:
346 if toconfirm:
347 drevs = [drev for force, precs, drev in toconfirm.values()]
347 drevs = [drev for force, precs, drev in toconfirm.values()]
348 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
348 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
349 {b'revisionIDs': drevs})
349 {b'revisionIDs': drevs})
350 getnode = lambda d: bin(
350 getnode = lambda d: bin(
351 getdiffmeta(d).get(b'node', b'')) or None
351 getdiffmeta(d).get(b'node', b'')) or None
352 for newnode, (force, precset, drev) in toconfirm.items():
352 for newnode, (force, precset, drev) in toconfirm.items():
353 diffs = [d for d in alldiffs.values()
353 diffs = [d for d in alldiffs.values()
354 if int(d[b'revisionID']) == drev]
354 if int(d[b'revisionID']) == drev]
355
355
356 # "precursors" as known by Phabricator
356 # "precursors" as known by Phabricator
357 phprecset = set(getnode(d) for d in diffs)
357 phprecset = set(getnode(d) for d in diffs)
358
358
359 # Ignore if precursors (Phabricator and local repo) do not overlap,
359 # Ignore if precursors (Phabricator and local repo) do not overlap,
360 # and force is not set (when commit message says nothing)
360 # and force is not set (when commit message says nothing)
361 if not force and not bool(phprecset & precset):
361 if not force and not bool(phprecset & precset):
362 tagname = b'D%d' % drev
362 tagname = b'D%d' % drev
363 tags.tag(repo, tagname, nullid, message=None, user=None,
363 tags.tag(repo, tagname, nullid, message=None, user=None,
364 date=None, local=True)
364 date=None, local=True)
365 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
365 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
366 b'Differential history\n') % drev)
366 b'Differential history\n') % drev)
367 continue
367 continue
368
368
369 # Find the last node using Phabricator metadata, and make sure it
369 # Find the last node using Phabricator metadata, and make sure it
370 # exists in the repo
370 # exists in the repo
371 oldnode = lastdiff = None
371 oldnode = lastdiff = None
372 if diffs:
372 if diffs:
373 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
373 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
374 oldnode = getnode(lastdiff)
374 oldnode = getnode(lastdiff)
375 if oldnode and oldnode not in nodemap:
375 if oldnode and oldnode not in nodemap:
376 oldnode = None
376 oldnode = None
377
377
378 result[newnode] = (oldnode, lastdiff, drev)
378 result[newnode] = (oldnode, lastdiff, drev)
379
379
380 return result
380 return result
381
381
382 def getdiff(ctx, diffopts):
382 def getdiff(ctx, diffopts):
383 """plain-text diff without header (user, commit message, etc)"""
383 """plain-text diff without header (user, commit message, etc)"""
384 output = util.stringio()
384 output = util.stringio()
385 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
385 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
386 None, opts=diffopts):
386 None, opts=diffopts):
387 output.write(chunk)
387 output.write(chunk)
388 return output.getvalue()
388 return output.getvalue()
389
389
390 def creatediff(ctx):
390 def creatediff(ctx):
391 """create a Differential Diff"""
391 """create a Differential Diff"""
392 repo = ctx.repo()
392 repo = ctx.repo()
393 repophid = getrepophid(repo)
393 repophid = getrepophid(repo)
394 # Create a "Differential Diff" via "differential.createrawdiff" API
394 # Create a "Differential Diff" via "differential.createrawdiff" API
395 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
395 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
396 if repophid:
396 if repophid:
397 params[b'repositoryPHID'] = repophid
397 params[b'repositoryPHID'] = repophid
398 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
398 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
399 if not diff:
399 if not diff:
400 raise error.Abort(_(b'cannot create diff for %s') % ctx)
400 raise error.Abort(_(b'cannot create diff for %s') % ctx)
401 return diff
401 return diff
402
402
403 def writediffproperties(ctx, diff):
403 def writediffproperties(ctx, diff):
404 """write metadata to diff so patches could be applied losslessly"""
404 """write metadata to diff so patches could be applied losslessly"""
405 params = {
405 params = {
406 b'diff_id': diff[b'id'],
406 b'diff_id': diff[b'id'],
407 b'name': b'hg:meta',
407 b'name': b'hg:meta',
408 b'data': templatefilters.json({
408 b'data': templatefilters.json({
409 b'user': ctx.user(),
409 b'user': ctx.user(),
410 b'date': b'%d %d' % ctx.date(),
410 b'date': b'%d %d' % ctx.date(),
411 b'branch': ctx.branch(),
411 b'branch': ctx.branch(),
412 b'node': ctx.hex(),
412 b'node': ctx.hex(),
413 b'parent': ctx.p1().hex(),
413 b'parent': ctx.p1().hex(),
414 }),
414 }),
415 }
415 }
416 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
416 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
417
417
418 params = {
418 params = {
419 b'diff_id': diff[b'id'],
419 b'diff_id': diff[b'id'],
420 b'name': b'local:commits',
420 b'name': b'local:commits',
421 b'data': templatefilters.json({
421 b'data': templatefilters.json({
422 ctx.hex(): {
422 ctx.hex(): {
423 b'author': stringutil.person(ctx.user()),
423 b'author': stringutil.person(ctx.user()),
424 b'authorEmail': stringutil.email(ctx.user()),
424 b'authorEmail': stringutil.email(ctx.user()),
425 b'time': int(ctx.date()[0]),
425 b'time': int(ctx.date()[0]),
426 b'commit': ctx.hex(),
426 b'commit': ctx.hex(),
427 b'parents': [ctx.p1().hex()],
427 b'parents': [ctx.p1().hex()],
428 b'branch': ctx.branch(),
428 b'branch': ctx.branch(),
429 },
429 },
430 }),
430 }),
431 }
431 }
432 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
432 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
433
433
434 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
434 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
435 olddiff=None, actions=None, comment=None):
435 olddiff=None, actions=None, comment=None):
436 """create or update a Differential Revision
436 """create or update a Differential Revision
437
437
438 If revid is None, create a new Differential Revision, otherwise update
438 If revid is None, create a new Differential Revision, otherwise update
439 revid. If parentrevid is not None, set it as a dependency.
439 revid. If parentrevid is not None, set it as a dependency.
440
440
441 If oldnode is not None, check if the patch content (without commit message
441 If oldnode is not None, check if the patch content (without commit message
442 and metadata) has changed before creating another diff.
442 and metadata) has changed before creating another diff.
443
443
444 If actions is not None, they will be appended to the transaction.
444 If actions is not None, they will be appended to the transaction.
445 """
445 """
446 repo = ctx.repo()
446 repo = ctx.repo()
447 if oldnode:
447 if oldnode:
448 diffopts = mdiff.diffopts(git=True, context=32767)
448 diffopts = mdiff.diffopts(git=True, context=32767)
449 oldctx = repo.unfiltered()[oldnode]
449 oldctx = repo.unfiltered()[oldnode]
450 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
450 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
451 else:
451 else:
452 neednewdiff = True
452 neednewdiff = True
453
453
454 transactions = []
454 transactions = []
455 if neednewdiff:
455 if neednewdiff:
456 diff = creatediff(ctx)
456 diff = creatediff(ctx)
457 transactions.append({b'type': b'update', b'value': diff[b'phid']})
457 transactions.append({b'type': b'update', b'value': diff[b'phid']})
458 if comment:
458 if comment:
459 transactions.append({b'type': b'comment', b'value': comment})
459 transactions.append({b'type': b'comment', b'value': comment})
460 else:
460 else:
461 # Even if we don't need to upload a new diff because the patch content
461 # Even if we don't need to upload a new diff because the patch content
462 # does not change. We might still need to update its metadata so
462 # does not change. We might still need to update its metadata so
463 # pushers could know the correct node metadata.
463 # pushers could know the correct node metadata.
464 assert olddiff
464 assert olddiff
465 diff = olddiff
465 diff = olddiff
466 writediffproperties(ctx, diff)
466 writediffproperties(ctx, diff)
467
467
468 # Use a temporary summary to set dependency. There might be better ways but
468 # Use a temporary summary to set dependency. There might be better ways but
469 # I cannot find them for now. But do not do that if we are updating an
469 # I cannot find them for now. But do not do that if we are updating an
470 # existing revision (revid is not None) since that introduces visible
470 # existing revision (revid is not None) since that introduces visible
471 # churns (someone edited "Summary" twice) on the web page.
471 # churns (someone edited "Summary" twice) on the web page.
472 if parentrevid and revid is None:
472 if parentrevid and revid is None:
473 summary = b'Depends on D%d' % parentrevid
473 summary = b'Depends on D%d' % parentrevid
474 transactions += [{b'type': b'summary', b'value': summary},
474 transactions += [{b'type': b'summary', b'value': summary},
475 {b'type': b'summary', b'value': b' '}]
475 {b'type': b'summary', b'value': b' '}]
476
476
477 if actions:
477 if actions:
478 transactions += actions
478 transactions += actions
479
479
480 # Parse commit message and update related fields.
480 # Parse commit message and update related fields.
481 desc = ctx.description()
481 desc = ctx.description()
482 info = callconduit(repo.ui, b'differential.parsecommitmessage',
482 info = callconduit(repo.ui, b'differential.parsecommitmessage',
483 {b'corpus': desc})
483 {b'corpus': desc})
484 for k, v in info[b'fields'].items():
484 for k, v in info[b'fields'].items():
485 if k in [b'title', b'summary', b'testPlan']:
485 if k in [b'title', b'summary', b'testPlan']:
486 transactions.append({b'type': k, b'value': v})
486 transactions.append({b'type': k, b'value': v})
487
487
488 params = {b'transactions': transactions}
488 params = {b'transactions': transactions}
489 if revid is not None:
489 if revid is not None:
490 # Update an existing Differential Revision
490 # Update an existing Differential Revision
491 params[b'objectIdentifier'] = revid
491 params[b'objectIdentifier'] = revid
492
492
493 revision = callconduit(repo.ui, b'differential.revision.edit', params)
493 revision = callconduit(repo.ui, b'differential.revision.edit', params)
494 if not revision:
494 if not revision:
495 raise error.Abort(_(b'cannot create revision for %s') % ctx)
495 raise error.Abort(_(b'cannot create revision for %s') % ctx)
496
496
497 return revision, diff
497 return revision, diff
498
498
499 def userphids(repo, names):
499 def userphids(repo, names):
500 """convert user names to PHIDs"""
500 """convert user names to PHIDs"""
501 names = [name.lower() for name in names]
501 names = [name.lower() for name in names]
502 query = {b'constraints': {b'usernames': names}}
502 query = {b'constraints': {b'usernames': names}}
503 result = callconduit(repo.ui, b'user.search', query)
503 result = callconduit(repo.ui, b'user.search', query)
504 # username not found is not an error of the API. So check if we have missed
504 # username not found is not an error of the API. So check if we have missed
505 # some names here.
505 # some names here.
506 data = result[b'data']
506 data = result[b'data']
507 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
507 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
508 unresolved = set(names) - resolved
508 unresolved = set(names) - resolved
509 if unresolved:
509 if unresolved:
510 raise error.Abort(_(b'unknown username: %s')
510 raise error.Abort(_(b'unknown username: %s')
511 % b' '.join(sorted(unresolved)))
511 % b' '.join(sorted(unresolved)))
512 return [entry[b'phid'] for entry in data]
512 return [entry[b'phid'] for entry in data]
513
513
514 @vcrcommand(b'phabsend',
514 @vcrcommand(b'phabsend',
515 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
515 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
516 (b'', b'amend', True, _(b'update commit messages')),
516 (b'', b'amend', True, _(b'update commit messages')),
517 (b'', b'reviewer', [], _(b'specify reviewers')),
517 (b'', b'reviewer', [], _(b'specify reviewers')),
518 (b'', b'blocker', [], _(b'specify blocking reviewers')),
518 (b'm', b'comment', b'',
519 (b'm', b'comment', b'',
519 _(b'add a comment to Revisions with new/updated Diffs')),
520 _(b'add a comment to Revisions with new/updated Diffs')),
520 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
521 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
521 _(b'REV [OPTIONS]'),
522 _(b'REV [OPTIONS]'),
522 helpcategory=command.CATEGORY_IMPORT_EXPORT)
523 helpcategory=command.CATEGORY_IMPORT_EXPORT)
523 def phabsend(ui, repo, *revs, **opts):
524 def phabsend(ui, repo, *revs, **opts):
524 """upload changesets to Phabricator
525 """upload changesets to Phabricator
525
526
526 If there are multiple revisions specified, they will be send as a stack
527 If there are multiple revisions specified, they will be send as a stack
527 with a linear dependencies relationship using the order specified by the
528 with a linear dependencies relationship using the order specified by the
528 revset.
529 revset.
529
530
530 For the first time uploading changesets, local tags will be created to
531 For the first time uploading changesets, local tags will be created to
531 maintain the association. After the first time, phabsend will check
532 maintain the association. After the first time, phabsend will check
532 obsstore and tags information so it can figure out whether to update an
533 obsstore and tags information so it can figure out whether to update an
533 existing Differential Revision, or create a new one.
534 existing Differential Revision, or create a new one.
534
535
535 If --amend is set, update commit messages so they have the
536 If --amend is set, update commit messages so they have the
536 ``Differential Revision`` URL, remove related tags. This is similar to what
537 ``Differential Revision`` URL, remove related tags. This is similar to what
537 arcanist will do, and is more desired in author-push workflows. Otherwise,
538 arcanist will do, and is more desired in author-push workflows. Otherwise,
538 use local tags to record the ``Differential Revision`` association.
539 use local tags to record the ``Differential Revision`` association.
539
540
540 The --confirm option lets you confirm changesets before sending them. You
541 The --confirm option lets you confirm changesets before sending them. You
541 can also add following to your configuration file to make it default
542 can also add following to your configuration file to make it default
542 behaviour::
543 behaviour::
543
544
544 [phabsend]
545 [phabsend]
545 confirm = true
546 confirm = true
546
547
547 phabsend will check obsstore and the above association to decide whether to
548 phabsend will check obsstore and the above association to decide whether to
548 update an existing Differential Revision, or create a new one.
549 update an existing Differential Revision, or create a new one.
549 """
550 """
550 opts = pycompat.byteskwargs(opts)
551 opts = pycompat.byteskwargs(opts)
551 revs = list(revs) + opts.get(b'rev', [])
552 revs = list(revs) + opts.get(b'rev', [])
552 revs = scmutil.revrange(repo, revs)
553 revs = scmutil.revrange(repo, revs)
553
554
554 if not revs:
555 if not revs:
555 raise error.Abort(_(b'phabsend requires at least one changeset'))
556 raise error.Abort(_(b'phabsend requires at least one changeset'))
556 if opts.get(b'amend'):
557 if opts.get(b'amend'):
557 cmdutil.checkunfinished(repo)
558 cmdutil.checkunfinished(repo)
558
559
559 # {newnode: (oldnode, olddiff, olddrev}
560 # {newnode: (oldnode, olddiff, olddrev}
560 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
561 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
561
562
562 confirm = ui.configbool(b'phabsend', b'confirm')
563 confirm = ui.configbool(b'phabsend', b'confirm')
563 confirm |= bool(opts.get(b'confirm'))
564 confirm |= bool(opts.get(b'confirm'))
564 if confirm:
565 if confirm:
565 confirmed = _confirmbeforesend(repo, revs, oldmap)
566 confirmed = _confirmbeforesend(repo, revs, oldmap)
566 if not confirmed:
567 if not confirmed:
567 raise error.Abort(_(b'phabsend cancelled'))
568 raise error.Abort(_(b'phabsend cancelled'))
568
569
569 actions = []
570 actions = []
570 reviewers = opts.get(b'reviewer', [])
571 reviewers = opts.get(b'reviewer', [])
572 blockers = opts.get(b'blocker', [])
573 phids = []
571 if reviewers:
574 if reviewers:
572 phids = userphids(repo, reviewers)
575 phids.extend(userphids(repo, reviewers))
576 if blockers:
577 phids.extend(map(
578 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
579 ))
580 if phids:
573 actions.append({b'type': b'reviewers.add', b'value': phids})
581 actions.append({b'type': b'reviewers.add', b'value': phids})
574
582
575 drevids = [] # [int]
583 drevids = [] # [int]
576 diffmap = {} # {newnode: diff}
584 diffmap = {} # {newnode: diff}
577
585
578 # Send patches one by one so we know their Differential Revision IDs and
586 # Send patches one by one so we know their Differential Revision IDs and
579 # can provide dependency relationship
587 # can provide dependency relationship
580 lastrevid = None
588 lastrevid = None
581 for rev in revs:
589 for rev in revs:
582 ui.debug(b'sending rev %d\n' % rev)
590 ui.debug(b'sending rev %d\n' % rev)
583 ctx = repo[rev]
591 ctx = repo[rev]
584
592
585 # Get Differential Revision ID
593 # Get Differential Revision ID
586 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
594 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
587 if oldnode != ctx.node() or opts.get(b'amend'):
595 if oldnode != ctx.node() or opts.get(b'amend'):
588 # Create or update Differential Revision
596 # Create or update Differential Revision
589 revision, diff = createdifferentialrevision(
597 revision, diff = createdifferentialrevision(
590 ctx, revid, lastrevid, oldnode, olddiff, actions,
598 ctx, revid, lastrevid, oldnode, olddiff, actions,
591 opts.get(b'comment'))
599 opts.get(b'comment'))
592 diffmap[ctx.node()] = diff
600 diffmap[ctx.node()] = diff
593 newrevid = int(revision[b'object'][b'id'])
601 newrevid = int(revision[b'object'][b'id'])
594 if revid:
602 if revid:
595 action = b'updated'
603 action = b'updated'
596 else:
604 else:
597 action = b'created'
605 action = b'created'
598
606
599 # Create a local tag to note the association, if commit message
607 # Create a local tag to note the association, if commit message
600 # does not have it already
608 # does not have it already
601 m = _differentialrevisiondescre.search(ctx.description())
609 m = _differentialrevisiondescre.search(ctx.description())
602 if not m or int(m.group(r'id')) != newrevid:
610 if not m or int(m.group(r'id')) != newrevid:
603 tagname = b'D%d' % newrevid
611 tagname = b'D%d' % newrevid
604 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
612 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
605 date=None, local=True)
613 date=None, local=True)
606 else:
614 else:
607 # Nothing changed. But still set "newrevid" so the next revision
615 # Nothing changed. But still set "newrevid" so the next revision
608 # could depend on this one.
616 # could depend on this one.
609 newrevid = revid
617 newrevid = revid
610 action = b'skipped'
618 action = b'skipped'
611
619
612 actiondesc = ui.label(
620 actiondesc = ui.label(
613 {b'created': _(b'created'),
621 {b'created': _(b'created'),
614 b'skipped': _(b'skipped'),
622 b'skipped': _(b'skipped'),
615 b'updated': _(b'updated')}[action],
623 b'updated': _(b'updated')}[action],
616 b'phabricator.action.%s' % action)
624 b'phabricator.action.%s' % action)
617 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
625 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
618 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
626 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
619 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
627 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
620 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
628 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
621 desc))
629 desc))
622 drevids.append(newrevid)
630 drevids.append(newrevid)
623 lastrevid = newrevid
631 lastrevid = newrevid
624
632
625 # Update commit messages and remove tags
633 # Update commit messages and remove tags
626 if opts.get(b'amend'):
634 if opts.get(b'amend'):
627 unfi = repo.unfiltered()
635 unfi = repo.unfiltered()
628 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
636 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
629 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
637 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
630 wnode = unfi[b'.'].node()
638 wnode = unfi[b'.'].node()
631 mapping = {} # {oldnode: [newnode]}
639 mapping = {} # {oldnode: [newnode]}
632 for i, rev in enumerate(revs):
640 for i, rev in enumerate(revs):
633 old = unfi[rev]
641 old = unfi[rev]
634 drevid = drevids[i]
642 drevid = drevids[i]
635 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
643 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
636 newdesc = getdescfromdrev(drev)
644 newdesc = getdescfromdrev(drev)
637 # Make sure commit message contain "Differential Revision"
645 # Make sure commit message contain "Differential Revision"
638 if old.description() != newdesc:
646 if old.description() != newdesc:
639 if old.phase() == phases.public:
647 if old.phase() == phases.public:
640 ui.warn(_("warning: not updating public commit %s\n")
648 ui.warn(_("warning: not updating public commit %s\n")
641 % scmutil.formatchangeid(old))
649 % scmutil.formatchangeid(old))
642 continue
650 continue
643 parents = [
651 parents = [
644 mapping.get(old.p1().node(), (old.p1(),))[0],
652 mapping.get(old.p1().node(), (old.p1(),))[0],
645 mapping.get(old.p2().node(), (old.p2(),))[0],
653 mapping.get(old.p2().node(), (old.p2(),))[0],
646 ]
654 ]
647 new = context.metadataonlyctx(
655 new = context.metadataonlyctx(
648 repo, old, parents=parents, text=newdesc,
656 repo, old, parents=parents, text=newdesc,
649 user=old.user(), date=old.date(), extra=old.extra())
657 user=old.user(), date=old.date(), extra=old.extra())
650
658
651 newnode = new.commit()
659 newnode = new.commit()
652
660
653 mapping[old.node()] = [newnode]
661 mapping[old.node()] = [newnode]
654 # Update diff property
662 # Update diff property
655 writediffproperties(unfi[newnode], diffmap[old.node()])
663 writediffproperties(unfi[newnode], diffmap[old.node()])
656 # Remove local tags since it's no longer necessary
664 # Remove local tags since it's no longer necessary
657 tagname = b'D%d' % drevid
665 tagname = b'D%d' % drevid
658 if tagname in repo.tags():
666 if tagname in repo.tags():
659 tags.tag(repo, tagname, nullid, message=None, user=None,
667 tags.tag(repo, tagname, nullid, message=None, user=None,
660 date=None, local=True)
668 date=None, local=True)
661 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
669 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
662 if wnode in mapping:
670 if wnode in mapping:
663 unfi.setparents(mapping[wnode][0])
671 unfi.setparents(mapping[wnode][0])
664
672
665 # Map from "hg:meta" keys to header understood by "hg import". The order is
673 # Map from "hg:meta" keys to header understood by "hg import". The order is
666 # consistent with "hg export" output.
674 # consistent with "hg export" output.
667 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
675 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
668 (b'branch', b'Branch'), (b'node', b'Node ID'),
676 (b'branch', b'Branch'), (b'node', b'Node ID'),
669 (b'parent', b'Parent ')])
677 (b'parent', b'Parent ')])
670
678
671 def _confirmbeforesend(repo, revs, oldmap):
679 def _confirmbeforesend(repo, revs, oldmap):
672 url, token = readurltoken(repo.ui)
680 url, token = readurltoken(repo.ui)
673 ui = repo.ui
681 ui = repo.ui
674 for rev in revs:
682 for rev in revs:
675 ctx = repo[rev]
683 ctx = repo[rev]
676 desc = ctx.description().splitlines()[0]
684 desc = ctx.description().splitlines()[0]
677 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
685 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
678 if drevid:
686 if drevid:
679 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
687 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
680 else:
688 else:
681 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
689 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
682
690
683 ui.write(_(b'%s - %s: %s\n')
691 ui.write(_(b'%s - %s: %s\n')
684 % (drevdesc,
692 % (drevdesc,
685 ui.label(bytes(ctx), b'phabricator.node'),
693 ui.label(bytes(ctx), b'phabricator.node'),
686 ui.label(desc, b'phabricator.desc')))
694 ui.label(desc, b'phabricator.desc')))
687
695
688 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
696 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
689 b'$$ &Yes $$ &No') % url):
697 b'$$ &Yes $$ &No') % url):
690 return False
698 return False
691
699
692 return True
700 return True
693
701
694 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
702 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
695 b'abandoned'}
703 b'abandoned'}
696
704
697 def _getstatusname(drev):
705 def _getstatusname(drev):
698 """get normalized status name from a Differential Revision"""
706 """get normalized status name from a Differential Revision"""
699 return drev[b'statusName'].replace(b' ', b'').lower()
707 return drev[b'statusName'].replace(b' ', b'').lower()
700
708
701 # Small language to specify differential revisions. Support symbols: (), :X,
709 # Small language to specify differential revisions. Support symbols: (), :X,
702 # +, and -.
710 # +, and -.
703
711
704 _elements = {
712 _elements = {
705 # token-type: binding-strength, primary, prefix, infix, suffix
713 # token-type: binding-strength, primary, prefix, infix, suffix
706 b'(': (12, None, (b'group', 1, b')'), None, None),
714 b'(': (12, None, (b'group', 1, b')'), None, None),
707 b':': (8, None, (b'ancestors', 8), None, None),
715 b':': (8, None, (b'ancestors', 8), None, None),
708 b'&': (5, None, None, (b'and_', 5), None),
716 b'&': (5, None, None, (b'and_', 5), None),
709 b'+': (4, None, None, (b'add', 4), None),
717 b'+': (4, None, None, (b'add', 4), None),
710 b'-': (4, None, None, (b'sub', 4), None),
718 b'-': (4, None, None, (b'sub', 4), None),
711 b')': (0, None, None, None, None),
719 b')': (0, None, None, None, None),
712 b'symbol': (0, b'symbol', None, None, None),
720 b'symbol': (0, b'symbol', None, None, None),
713 b'end': (0, None, None, None, None),
721 b'end': (0, None, None, None, None),
714 }
722 }
715
723
716 def _tokenize(text):
724 def _tokenize(text):
717 view = memoryview(text) # zero-copy slice
725 view = memoryview(text) # zero-copy slice
718 special = b'():+-& '
726 special = b'():+-& '
719 pos = 0
727 pos = 0
720 length = len(text)
728 length = len(text)
721 while pos < length:
729 while pos < length:
722 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
730 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
723 pycompat.iterbytestr(view[pos:])))
731 pycompat.iterbytestr(view[pos:])))
724 if symbol:
732 if symbol:
725 yield (b'symbol', symbol, pos)
733 yield (b'symbol', symbol, pos)
726 pos += len(symbol)
734 pos += len(symbol)
727 else: # special char, ignore space
735 else: # special char, ignore space
728 if text[pos] != b' ':
736 if text[pos] != b' ':
729 yield (text[pos], None, pos)
737 yield (text[pos], None, pos)
730 pos += 1
738 pos += 1
731 yield (b'end', None, pos)
739 yield (b'end', None, pos)
732
740
733 def _parse(text):
741 def _parse(text):
734 tree, pos = parser.parser(_elements).parse(_tokenize(text))
742 tree, pos = parser.parser(_elements).parse(_tokenize(text))
735 if pos != len(text):
743 if pos != len(text):
736 raise error.ParseError(b'invalid token', pos)
744 raise error.ParseError(b'invalid token', pos)
737 return tree
745 return tree
738
746
739 def _parsedrev(symbol):
747 def _parsedrev(symbol):
740 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
748 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
741 if symbol.startswith(b'D') and symbol[1:].isdigit():
749 if symbol.startswith(b'D') and symbol[1:].isdigit():
742 return int(symbol[1:])
750 return int(symbol[1:])
743 if symbol.isdigit():
751 if symbol.isdigit():
744 return int(symbol)
752 return int(symbol)
745
753
746 def _prefetchdrevs(tree):
754 def _prefetchdrevs(tree):
747 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
755 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
748 drevs = set()
756 drevs = set()
749 ancestordrevs = set()
757 ancestordrevs = set()
750 op = tree[0]
758 op = tree[0]
751 if op == b'symbol':
759 if op == b'symbol':
752 r = _parsedrev(tree[1])
760 r = _parsedrev(tree[1])
753 if r:
761 if r:
754 drevs.add(r)
762 drevs.add(r)
755 elif op == b'ancestors':
763 elif op == b'ancestors':
756 r, a = _prefetchdrevs(tree[1])
764 r, a = _prefetchdrevs(tree[1])
757 drevs.update(r)
765 drevs.update(r)
758 ancestordrevs.update(r)
766 ancestordrevs.update(r)
759 ancestordrevs.update(a)
767 ancestordrevs.update(a)
760 else:
768 else:
761 for t in tree[1:]:
769 for t in tree[1:]:
762 r, a = _prefetchdrevs(t)
770 r, a = _prefetchdrevs(t)
763 drevs.update(r)
771 drevs.update(r)
764 ancestordrevs.update(a)
772 ancestordrevs.update(a)
765 return drevs, ancestordrevs
773 return drevs, ancestordrevs
766
774
767 def querydrev(repo, spec):
775 def querydrev(repo, spec):
768 """return a list of "Differential Revision" dicts
776 """return a list of "Differential Revision" dicts
769
777
770 spec is a string using a simple query language, see docstring in phabread
778 spec is a string using a simple query language, see docstring in phabread
771 for details.
779 for details.
772
780
773 A "Differential Revision dict" looks like:
781 A "Differential Revision dict" looks like:
774
782
775 {
783 {
776 "id": "2",
784 "id": "2",
777 "phid": "PHID-DREV-672qvysjcczopag46qty",
785 "phid": "PHID-DREV-672qvysjcczopag46qty",
778 "title": "example",
786 "title": "example",
779 "uri": "https://phab.example.com/D2",
787 "uri": "https://phab.example.com/D2",
780 "dateCreated": "1499181406",
788 "dateCreated": "1499181406",
781 "dateModified": "1499182103",
789 "dateModified": "1499182103",
782 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
790 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
783 "status": "0",
791 "status": "0",
784 "statusName": "Needs Review",
792 "statusName": "Needs Review",
785 "properties": [],
793 "properties": [],
786 "branch": null,
794 "branch": null,
787 "summary": "",
795 "summary": "",
788 "testPlan": "",
796 "testPlan": "",
789 "lineCount": "2",
797 "lineCount": "2",
790 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
798 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
791 "diffs": [
799 "diffs": [
792 "3",
800 "3",
793 "4",
801 "4",
794 ],
802 ],
795 "commits": [],
803 "commits": [],
796 "reviewers": [],
804 "reviewers": [],
797 "ccs": [],
805 "ccs": [],
798 "hashes": [],
806 "hashes": [],
799 "auxiliary": {
807 "auxiliary": {
800 "phabricator:projects": [],
808 "phabricator:projects": [],
801 "phabricator:depends-on": [
809 "phabricator:depends-on": [
802 "PHID-DREV-gbapp366kutjebt7agcd"
810 "PHID-DREV-gbapp366kutjebt7agcd"
803 ]
811 ]
804 },
812 },
805 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
813 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
806 "sourcePath": null
814 "sourcePath": null
807 }
815 }
808 """
816 """
809 def fetch(params):
817 def fetch(params):
810 """params -> single drev or None"""
818 """params -> single drev or None"""
811 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
819 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
812 if key in prefetched:
820 if key in prefetched:
813 return prefetched[key]
821 return prefetched[key]
814 drevs = callconduit(repo.ui, b'differential.query', params)
822 drevs = callconduit(repo.ui, b'differential.query', params)
815 # Fill prefetched with the result
823 # Fill prefetched with the result
816 for drev in drevs:
824 for drev in drevs:
817 prefetched[drev[b'phid']] = drev
825 prefetched[drev[b'phid']] = drev
818 prefetched[int(drev[b'id'])] = drev
826 prefetched[int(drev[b'id'])] = drev
819 if key not in prefetched:
827 if key not in prefetched:
820 raise error.Abort(_(b'cannot get Differential Revision %r')
828 raise error.Abort(_(b'cannot get Differential Revision %r')
821 % params)
829 % params)
822 return prefetched[key]
830 return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)
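    # A hedged illustration (revision numbers are hypothetical): for a chain
    # where D3 depends on D2 and D2 depends on D1, getstack([3]) follows the
    # "phabricator:depends-on" edges and yields baseset([1, 2, 3]), i.e. the
    # stack from the bottom up.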

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
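    # A hedged illustration (numbers are hypothetical): with batchsize = 12,
    # an ancestor revision r = 120 adds ids 108 through 120 to tofetch, so
    # they can be requested in a single differential.query call rather than
    # one call per dependency edge.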

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
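# A hedged illustration (revision numbers are hypothetical): with the spec
# b':D6+8-(2+D4)', querydrev() returns the drev dicts for the stack up to D6
# plus D8, minus D2 and D4, in the order produced by walk() above.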

def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
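# A hedged illustration, reusing the sample dict from the querydrev
# docstring: with title "example", empty summary and test plan, and uri
# "https://phab.example.com/D2", getdescfromdrev() returns
# b'example\n\nDifferential Revision: https://phab.example.com/D2'.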

def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                              commit[b'authorEmail'])
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % commit[b'time']
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
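# A hedged illustration: fed the "hg:meta" example from the docstring above,
# getdiffmeta() returns that mapping with its b'date', b'node', b'user' and
# b'parent' keys intact; fed the "local:commits" example instead, b'date'
# becomes b'1499546314 0', i.e. the original time zone offset is lost.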

def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo.ui, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
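# A hedged sketch of the output: for a diff carrying the hg:meta example
# above, readpatch() writes something like
#
#   # HG changeset patch
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   ...
#
# followed by the commit message from getdescfromdrev() and the raw diff
# body; the exact header names come from _metanamemap, defined earlier in
# this file.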

@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
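# A hedged usage sketch (revision numbers are hypothetical):
#
#   hg phabread D123 > d123.patch             # print a single revision
#   hg phabread --stack D123 | hg import -    # import D123 and its dependencies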

@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(ui, b'differential.revision.edit', params)
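# A hedged usage sketch (revision number is hypothetical):
#
#   hg phabupdate --accept :D123 -m 'queued for landing'
#
# accepts every revision in the stack up to D123 and leaves the comment on
# the last one.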

templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
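# A hedged usage sketch: the keyword is meant for templates such as
#
#   hg log -r . -T '{phabreview.url}\n'
#
# which prints the associated Differential Revision URL for the working
# directory parent, if one is recorded.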