py3: fix phabricator's use of json.loads() for py3.5...
Ian Moody
r43317:0f90c2d2 default
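The change below addresses a Python 3.5 incompatibility: json.loads() only accepts bytes (and bytearray) from Python 3.6 onwards, so on 3.5 the Conduit response body has to be decoded to a str before parsing. A minimal standalone sketch of the failure mode and the decode-first fix (the UTF-8 decode here is illustrative; the patch itself decodes with Mercurial's encoding.unifromlocal()):

    import json

    body = b'{"result": {"id": "2"}, "error_code": null}'

    try:
        # Accepted directly on Python 3.6+.
        parsed = json.loads(body)
    except TypeError:
        # On Python 3.5 the call above raises TypeError ("the JSON object
        # must be str, not 'bytes'"), so decode the response body first.
        parsed = json.loads(body.decode('utf-8'))

    print(parsed['result']['id'])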
@@ -1,1093 +1,1094 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 changesets from being sent. The requirement can be disabled by changing the
15 changesets from being sent. The requirement can be disabled by changing the
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 exthelper,
57 exthelper,
58 httpconnection as httpconnectionmod,
58 httpconnection as httpconnectionmod,
59 mdiff,
59 mdiff,
60 obsutil,
60 obsutil,
61 parser,
61 parser,
62 patch,
62 patch,
63 phases,
63 phases,
64 pycompat,
64 pycompat,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 # be specifying the version(s) of Mercurial they are tested with, or
80 # be specifying the version(s) of Mercurial they are tested with, or
81 # leave the attribute unspecified.
81 # leave the attribute unspecified.
82 testedwith = 'ships-with-hg-core'
82 testedwith = 'ships-with-hg-core'
83
83
84 eh = exthelper.exthelper()
84 eh = exthelper.exthelper()
85
85
86 cmdtable = eh.cmdtable
86 cmdtable = eh.cmdtable
87 command = eh.command
87 command = eh.command
88 configtable = eh.configtable
88 configtable = eh.configtable
89 templatekeyword = eh.templatekeyword
89 templatekeyword = eh.templatekeyword
90
90
91 # developer config: phabricator.batchsize
91 # developer config: phabricator.batchsize
92 eh.configitem(b'phabricator', b'batchsize',
92 eh.configitem(b'phabricator', b'batchsize',
93 default=12,
93 default=12,
94 )
94 )
95 eh.configitem(b'phabricator', b'callsign',
95 eh.configitem(b'phabricator', b'callsign',
96 default=None,
96 default=None,
97 )
97 )
98 eh.configitem(b'phabricator', b'curlcmd',
98 eh.configitem(b'phabricator', b'curlcmd',
99 default=None,
99 default=None,
100 )
100 )
101 # developer config: phabricator.repophid
101 # developer config: phabricator.repophid
102 eh.configitem(b'phabricator', b'repophid',
102 eh.configitem(b'phabricator', b'repophid',
103 default=None,
103 default=None,
104 )
104 )
105 eh.configitem(b'phabricator', b'url',
105 eh.configitem(b'phabricator', b'url',
106 default=None,
106 default=None,
107 )
107 )
108 eh.configitem(b'phabsend', b'confirm',
108 eh.configitem(b'phabsend', b'confirm',
109 default=False,
109 default=False,
110 )
110 )
111
111
112 colortable = {
112 colortable = {
113 b'phabricator.action.created': b'green',
113 b'phabricator.action.created': b'green',
114 b'phabricator.action.skipped': b'magenta',
114 b'phabricator.action.skipped': b'magenta',
115 b'phabricator.action.updated': b'magenta',
115 b'phabricator.action.updated': b'magenta',
116 b'phabricator.desc': b'',
116 b'phabricator.desc': b'',
117 b'phabricator.drev': b'bold',
117 b'phabricator.drev': b'bold',
118 b'phabricator.node': b'',
118 b'phabricator.node': b'',
119 }
119 }
120
120
121 _VCR_FLAGS = [
121 _VCR_FLAGS = [
122 (b'', b'test-vcr', b'',
122 (b'', b'test-vcr', b'',
123 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
124 b', otherwise will mock all http requests using the specified vcr file.'
124 b', otherwise will mock all http requests using the specified vcr file.'
125 b' (ADVANCED)'
125 b' (ADVANCED)'
126 )),
126 )),
127 ]
127 ]
128
128
129 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
129 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
130 fullflags = flags + _VCR_FLAGS
130 fullflags = flags + _VCR_FLAGS
131 def hgmatcher(r1, r2):
131 def hgmatcher(r1, r2):
132 if r1.uri != r2.uri or r1.method != r2.method:
132 if r1.uri != r2.uri or r1.method != r2.method:
133 return False
133 return False
134 r1params = r1.body.split(b'&')
134 r1params = r1.body.split(b'&')
135 r2params = r2.body.split(b'&')
135 r2params = r2.body.split(b'&')
136 return set(r1params) == set(r2params)
136 return set(r1params) == set(r2params)
137
137
138 def sanitiserequest(request):
138 def sanitiserequest(request):
139 request.body = re.sub(
139 request.body = re.sub(
140 br'cli-[a-z0-9]+',
140 br'cli-[a-z0-9]+',
141 br'cli-hahayouwish',
141 br'cli-hahayouwish',
142 request.body
142 request.body
143 )
143 )
144 return request
144 return request
145
145
146 def sanitiseresponse(response):
146 def sanitiseresponse(response):
147 if r'set-cookie' in response[r'headers']:
147 if r'set-cookie' in response[r'headers']:
148 del response[r'headers'][r'set-cookie']
148 del response[r'headers'][r'set-cookie']
149 return response
149 return response
150
150
151 def decorate(fn):
151 def decorate(fn):
152 def inner(*args, **kwargs):
152 def inner(*args, **kwargs):
153 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
153 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
154 if cassette:
154 if cassette:
155 import hgdemandimport
155 import hgdemandimport
156 with hgdemandimport.deactivated():
156 with hgdemandimport.deactivated():
157 import vcr as vcrmod
157 import vcr as vcrmod
158 import vcr.stubs as stubs
158 import vcr.stubs as stubs
159 vcr = vcrmod.VCR(
159 vcr = vcrmod.VCR(
160 serializer=r'json',
160 serializer=r'json',
161 before_record_request=sanitiserequest,
161 before_record_request=sanitiserequest,
162 before_record_response=sanitiseresponse,
162 before_record_response=sanitiseresponse,
163 custom_patches=[
163 custom_patches=[
164 (urlmod, r'httpconnection',
164 (urlmod, r'httpconnection',
165 stubs.VCRHTTPConnection),
165 stubs.VCRHTTPConnection),
166 (urlmod, r'httpsconnection',
166 (urlmod, r'httpsconnection',
167 stubs.VCRHTTPSConnection),
167 stubs.VCRHTTPSConnection),
168 ])
168 ])
169 vcr.register_matcher(r'hgmatcher', hgmatcher)
169 vcr.register_matcher(r'hgmatcher', hgmatcher)
170 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
170 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
171 return fn(*args, **kwargs)
171 return fn(*args, **kwargs)
172 return fn(*args, **kwargs)
172 return fn(*args, **kwargs)
173 inner.__name__ = fn.__name__
173 inner.__name__ = fn.__name__
174 inner.__doc__ = fn.__doc__
174 inner.__doc__ = fn.__doc__
175 return command(name, fullflags, spec, helpcategory=helpcategory,
175 return command(name, fullflags, spec, helpcategory=helpcategory,
176 optionalrepo=optionalrepo)(inner)
176 optionalrepo=optionalrepo)(inner)
177 return decorate
177 return decorate
178
178
179 def urlencodenested(params):
179 def urlencodenested(params):
180 """like urlencode, but works with nested parameters.
180 """like urlencode, but works with nested parameters.
181
181
182 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
182 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
183 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
183 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
184 urlencode. Note: the encoding is consistent with PHP's http_build_query.
184 urlencode. Note: the encoding is consistent with PHP's http_build_query.
185 """
185 """
186 flatparams = util.sortdict()
186 flatparams = util.sortdict()
187 def process(prefix, obj):
187 def process(prefix, obj):
188 if isinstance(obj, bool):
188 if isinstance(obj, bool):
189 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
189 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
190 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
190 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
191 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
191 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
192 if items is None:
192 if items is None:
193 flatparams[prefix] = obj
193 flatparams[prefix] = obj
194 else:
194 else:
195 for k, v in items(obj):
195 for k, v in items(obj):
196 if prefix:
196 if prefix:
197 process(b'%s[%s]' % (prefix, k), v)
197 process(b'%s[%s]' % (prefix, k), v)
198 else:
198 else:
199 process(k, v)
199 process(k, v)
200 process(b'', params)
200 process(b'', params)
201 return util.urlreq.urlencode(flatparams)
201 return util.urlreq.urlencode(flatparams)
202
202
203 def readurltoken(ui):
203 def readurltoken(ui):
204 """return conduit url, token and make sure they exist
204 """return conduit url, token and make sure they exist
205
205
206 Currently read from [auth] config section. In the future, it might
206 Currently read from [auth] config section. In the future, it might
207 make sense to read from .arcconfig and .arcrc as well.
207 make sense to read from .arcconfig and .arcrc as well.
208 """
208 """
209 url = ui.config(b'phabricator', b'url')
209 url = ui.config(b'phabricator', b'url')
210 if not url:
210 if not url:
211 raise error.Abort(_(b'config %s.%s is required')
211 raise error.Abort(_(b'config %s.%s is required')
212 % (b'phabricator', b'url'))
212 % (b'phabricator', b'url'))
213
213
214 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
214 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
215 token = None
215 token = None
216
216
217 if res:
217 if res:
218 group, auth = res
218 group, auth = res
219
219
220 ui.debug(b"using auth.%s.* for authentication\n" % group)
220 ui.debug(b"using auth.%s.* for authentication\n" % group)
221
221
222 token = auth.get(b'phabtoken')
222 token = auth.get(b'phabtoken')
223
223
224 if not token:
224 if not token:
225 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
225 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
226 % (url,))
226 % (url,))
227
227
228 return url, token
228 return url, token
229
229
230 def callconduit(ui, name, params):
230 def callconduit(ui, name, params):
231 """call Conduit API, params is a dict. return json.loads result, or None"""
231 """call Conduit API, params is a dict. return json.loads result, or None"""
232 host, token = readurltoken(ui)
232 host, token = readurltoken(ui)
233 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
233 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
234 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
234 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
235 params = params.copy()
235 params = params.copy()
236 params[b'api.token'] = token
236 params[b'api.token'] = token
237 data = urlencodenested(params)
237 data = urlencodenested(params)
238 curlcmd = ui.config(b'phabricator', b'curlcmd')
238 curlcmd = ui.config(b'phabricator', b'curlcmd')
239 if curlcmd:
239 if curlcmd:
240 sin, sout = procutil.popen2(b'%s -d @- %s'
240 sin, sout = procutil.popen2(b'%s -d @- %s'
241 % (curlcmd, procutil.shellquote(url)))
241 % (curlcmd, procutil.shellquote(url)))
242 sin.write(data)
242 sin.write(data)
243 sin.close()
243 sin.close()
244 body = sout.read()
244 body = sout.read()
245 else:
245 else:
246 urlopener = urlmod.opener(ui, authinfo)
246 urlopener = urlmod.opener(ui, authinfo)
247 request = util.urlreq.request(pycompat.strurl(url), data=data)
247 request = util.urlreq.request(pycompat.strurl(url), data=data)
248 with contextlib.closing(urlopener.open(request)) as rsp:
248 with contextlib.closing(urlopener.open(request)) as rsp:
249 body = rsp.read()
249 body = rsp.read()
250 ui.debug(b'Conduit Response: %s\n' % body)
250 ui.debug(b'Conduit Response: %s\n' % body)
251 parsed = pycompat.rapply(
251 parsed = pycompat.rapply(
252 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
252 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
253 else x,
253 else x,
254 json.loads(body)
254 # json.loads only accepts bytes from py3.6+
255 json.loads(encoding.unifromlocal(body))
255 )
256 )
256 if parsed.get(b'error_code'):
257 if parsed.get(b'error_code'):
257 msg = (_(b'Conduit Error (%s): %s')
258 msg = (_(b'Conduit Error (%s): %s')
258 % (parsed[b'error_code'], parsed[b'error_info']))
259 % (parsed[b'error_code'], parsed[b'error_info']))
259 raise error.Abort(msg)
260 raise error.Abort(msg)
260 return parsed[b'result']
261 return parsed[b'result']
261
262
262 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
263 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
263 def debugcallconduit(ui, repo, name):
264 def debugcallconduit(ui, repo, name):
264 """call Conduit API
265 """call Conduit API
265
266
266 Call parameters are read from stdin as a JSON blob. Result will be written
267 Call parameters are read from stdin as a JSON blob. Result will be written
267 to stdout as a JSON blob.
268 to stdout as a JSON blob.
268 """
269 """
269 # json.loads only accepts bytes from 3.6+
270 # json.loads only accepts bytes from 3.6+
270 rawparams = encoding.unifromlocal(ui.fin.read())
271 rawparams = encoding.unifromlocal(ui.fin.read())
271 # json.loads only returns unicode strings
272 # json.loads only returns unicode strings
272 params = pycompat.rapply(lambda x:
273 params = pycompat.rapply(lambda x:
273 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
274 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
274 json.loads(rawparams)
275 json.loads(rawparams)
275 )
276 )
276 # json.dumps only accepts unicode strings
277 # json.dumps only accepts unicode strings
277 result = pycompat.rapply(lambda x:
278 result = pycompat.rapply(lambda x:
278 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
279 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
279 callconduit(ui, name, params)
280 callconduit(ui, name, params)
280 )
281 )
281 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
282 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
282 ui.write(b'%s\n' % encoding.unitolocal(s))
283 ui.write(b'%s\n' % encoding.unitolocal(s))
283
284
284 def getrepophid(repo):
285 def getrepophid(repo):
285 """given callsign, return repository PHID or None"""
286 """given callsign, return repository PHID or None"""
286 # developer config: phabricator.repophid
287 # developer config: phabricator.repophid
287 repophid = repo.ui.config(b'phabricator', b'repophid')
288 repophid = repo.ui.config(b'phabricator', b'repophid')
288 if repophid:
289 if repophid:
289 return repophid
290 return repophid
290 callsign = repo.ui.config(b'phabricator', b'callsign')
291 callsign = repo.ui.config(b'phabricator', b'callsign')
291 if not callsign:
292 if not callsign:
292 return None
293 return None
293 query = callconduit(repo.ui, b'diffusion.repository.search',
294 query = callconduit(repo.ui, b'diffusion.repository.search',
294 {b'constraints': {b'callsigns': [callsign]}})
295 {b'constraints': {b'callsigns': [callsign]}})
295 if len(query[b'data']) == 0:
296 if len(query[b'data']) == 0:
296 return None
297 return None
297 repophid = query[b'data'][0][b'phid']
298 repophid = query[b'data'][0][b'phid']
298 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
299 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
299 return repophid
300 return repophid
300
301
301 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
302 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
302 _differentialrevisiondescre = re.compile(
303 _differentialrevisiondescre = re.compile(
303 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
304 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
304
305
305 def getoldnodedrevmap(repo, nodelist):
306 def getoldnodedrevmap(repo, nodelist):
306 """find previous nodes that has been sent to Phabricator
307 """find previous nodes that has been sent to Phabricator
307
308
308 return {node: (oldnode, Differential diff, Differential Revision ID)}
309 return {node: (oldnode, Differential diff, Differential Revision ID)}
309 for node in nodelist with known previous sent versions, or associated
310 for node in nodelist with known previous sent versions, or associated
310 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
311 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
311 be ``None``.
312 be ``None``.
312
313
313 Examines commit messages like "Differential Revision:" to get the
314 Examines commit messages like "Differential Revision:" to get the
314 association information.
315 association information.
315
316
316 If no such commit message line is found, examine all precursors and their
317 If no such commit message line is found, examine all precursors and their
317 tags. Tags with a format like "D1234" are considered a match, and the node
318 tags. Tags with a format like "D1234" are considered a match, and the node
318 with that tag, and the number after "D" (ex. 1234), will be returned.
319 with that tag, and the number after "D" (ex. 1234), will be returned.
319
320
320 The ``old node``, if not None, is guaranteed to be the last diff of the
321 The ``old node``, if not None, is guaranteed to be the last diff of the
321 corresponding Differential Revision, and to exist in the repo.
322 corresponding Differential Revision, and to exist in the repo.
322 """
323 """
323 unfi = repo.unfiltered()
324 unfi = repo.unfiltered()
324 nodemap = unfi.changelog.nodemap
325 nodemap = unfi.changelog.nodemap
325
326
326 result = {} # {node: (oldnode?, lastdiff?, drev)}
327 result = {} # {node: (oldnode?, lastdiff?, drev)}
327 toconfirm = {} # {node: (force, {precnode}, drev)}
328 toconfirm = {} # {node: (force, {precnode}, drev)}
328 for node in nodelist:
329 for node in nodelist:
329 ctx = unfi[node]
330 ctx = unfi[node]
330 # For tags like "D123", put them into "toconfirm" to verify later
331 # For tags like "D123", put them into "toconfirm" to verify later
331 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
332 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
332 for n in precnodes:
333 for n in precnodes:
333 if n in nodemap:
334 if n in nodemap:
334 for tag in unfi.nodetags(n):
335 for tag in unfi.nodetags(n):
335 m = _differentialrevisiontagre.match(tag)
336 m = _differentialrevisiontagre.match(tag)
336 if m:
337 if m:
337 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
338 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
338 continue
339 continue
339
340
340 # Check commit message
341 # Check commit message
341 m = _differentialrevisiondescre.search(ctx.description())
342 m = _differentialrevisiondescre.search(ctx.description())
342 if m:
343 if m:
343 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
344 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
344
345
345 # Double check if tags are genuine by collecting all old nodes from
346 # Double check if tags are genuine by collecting all old nodes from
346 # Phabricator, and expect precursors to overlap with them.
347 # Phabricator, and expect precursors to overlap with them.
347 if toconfirm:
348 if toconfirm:
348 drevs = [drev for force, precs, drev in toconfirm.values()]
349 drevs = [drev for force, precs, drev in toconfirm.values()]
349 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
350 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
350 {b'revisionIDs': drevs})
351 {b'revisionIDs': drevs})
351 getnode = lambda d: bin(
352 getnode = lambda d: bin(
352 getdiffmeta(d).get(b'node', b'')) or None
353 getdiffmeta(d).get(b'node', b'')) or None
353 for newnode, (force, precset, drev) in toconfirm.items():
354 for newnode, (force, precset, drev) in toconfirm.items():
354 diffs = [d for d in alldiffs.values()
355 diffs = [d for d in alldiffs.values()
355 if int(d[b'revisionID']) == drev]
356 if int(d[b'revisionID']) == drev]
356
357
357 # "precursors" as known by Phabricator
358 # "precursors" as known by Phabricator
358 phprecset = set(getnode(d) for d in diffs)
359 phprecset = set(getnode(d) for d in diffs)
359
360
360 # Ignore if precursors (Phabricator and local repo) do not overlap,
361 # Ignore if precursors (Phabricator and local repo) do not overlap,
361 # and force is not set (when commit message says nothing)
362 # and force is not set (when commit message says nothing)
362 if not force and not bool(phprecset & precset):
363 if not force and not bool(phprecset & precset):
363 tagname = b'D%d' % drev
364 tagname = b'D%d' % drev
364 tags.tag(repo, tagname, nullid, message=None, user=None,
365 tags.tag(repo, tagname, nullid, message=None, user=None,
365 date=None, local=True)
366 date=None, local=True)
366 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
367 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
367 b'Differential history\n') % drev)
368 b'Differential history\n') % drev)
368 continue
369 continue
369
370
370 # Find the last node using Phabricator metadata, and make sure it
371 # Find the last node using Phabricator metadata, and make sure it
371 # exists in the repo
372 # exists in the repo
372 oldnode = lastdiff = None
373 oldnode = lastdiff = None
373 if diffs:
374 if diffs:
374 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
375 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
375 oldnode = getnode(lastdiff)
376 oldnode = getnode(lastdiff)
376 if oldnode and oldnode not in nodemap:
377 if oldnode and oldnode not in nodemap:
377 oldnode = None
378 oldnode = None
378
379
379 result[newnode] = (oldnode, lastdiff, drev)
380 result[newnode] = (oldnode, lastdiff, drev)
380
381
381 return result
382 return result
382
383
383 def getdiff(ctx, diffopts):
384 def getdiff(ctx, diffopts):
384 """plain-text diff without header (user, commit message, etc)"""
385 """plain-text diff without header (user, commit message, etc)"""
385 output = util.stringio()
386 output = util.stringio()
386 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
387 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
387 None, opts=diffopts):
388 None, opts=diffopts):
388 output.write(chunk)
389 output.write(chunk)
389 return output.getvalue()
390 return output.getvalue()
390
391
391 def creatediff(ctx):
392 def creatediff(ctx):
392 """create a Differential Diff"""
393 """create a Differential Diff"""
393 repo = ctx.repo()
394 repo = ctx.repo()
394 repophid = getrepophid(repo)
395 repophid = getrepophid(repo)
395 # Create a "Differential Diff" via "differential.createrawdiff" API
396 # Create a "Differential Diff" via "differential.createrawdiff" API
396 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
397 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
397 if repophid:
398 if repophid:
398 params[b'repositoryPHID'] = repophid
399 params[b'repositoryPHID'] = repophid
399 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
400 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
400 if not diff:
401 if not diff:
401 raise error.Abort(_(b'cannot create diff for %s') % ctx)
402 raise error.Abort(_(b'cannot create diff for %s') % ctx)
402 return diff
403 return diff
403
404
404 def writediffproperties(ctx, diff):
405 def writediffproperties(ctx, diff):
405 """write metadata to diff so patches could be applied losslessly"""
406 """write metadata to diff so patches could be applied losslessly"""
406 params = {
407 params = {
407 b'diff_id': diff[b'id'],
408 b'diff_id': diff[b'id'],
408 b'name': b'hg:meta',
409 b'name': b'hg:meta',
409 b'data': templatefilters.json({
410 b'data': templatefilters.json({
410 b'user': ctx.user(),
411 b'user': ctx.user(),
411 b'date': b'%d %d' % ctx.date(),
412 b'date': b'%d %d' % ctx.date(),
412 b'branch': ctx.branch(),
413 b'branch': ctx.branch(),
413 b'node': ctx.hex(),
414 b'node': ctx.hex(),
414 b'parent': ctx.p1().hex(),
415 b'parent': ctx.p1().hex(),
415 }),
416 }),
416 }
417 }
417 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
418 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
418
419
419 params = {
420 params = {
420 b'diff_id': diff[b'id'],
421 b'diff_id': diff[b'id'],
421 b'name': b'local:commits',
422 b'name': b'local:commits',
422 b'data': templatefilters.json({
423 b'data': templatefilters.json({
423 ctx.hex(): {
424 ctx.hex(): {
424 b'author': stringutil.person(ctx.user()),
425 b'author': stringutil.person(ctx.user()),
425 b'authorEmail': stringutil.email(ctx.user()),
426 b'authorEmail': stringutil.email(ctx.user()),
426 b'time': int(ctx.date()[0]),
427 b'time': int(ctx.date()[0]),
427 b'commit': ctx.hex(),
428 b'commit': ctx.hex(),
428 b'parents': [ctx.p1().hex()],
429 b'parents': [ctx.p1().hex()],
429 b'branch': ctx.branch(),
430 b'branch': ctx.branch(),
430 },
431 },
431 }),
432 }),
432 }
433 }
433 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
434 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
434
435
435 def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
436 def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
436 oldnode=None, olddiff=None, actions=None,
437 oldnode=None, olddiff=None, actions=None,
437 comment=None):
438 comment=None):
438 """create or update a Differential Revision
439 """create or update a Differential Revision
439
440
440 If revid is None, create a new Differential Revision, otherwise update
441 If revid is None, create a new Differential Revision, otherwise update
441 revid. If parentrevphid is not None, set it as a dependency.
442 revid. If parentrevphid is not None, set it as a dependency.
442
443
443 If oldnode is not None, check if the patch content (without commit message
444 If oldnode is not None, check if the patch content (without commit message
444 and metadata) has changed before creating another diff.
445 and metadata) has changed before creating another diff.
445
446
446 If actions is not None, they will be appended to the transaction.
447 If actions is not None, they will be appended to the transaction.
447 """
448 """
448 repo = ctx.repo()
449 repo = ctx.repo()
449 if oldnode:
450 if oldnode:
450 diffopts = mdiff.diffopts(git=True, context=32767)
451 diffopts = mdiff.diffopts(git=True, context=32767)
451 oldctx = repo.unfiltered()[oldnode]
452 oldctx = repo.unfiltered()[oldnode]
452 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
453 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
453 else:
454 else:
454 neednewdiff = True
455 neednewdiff = True
455
456
456 transactions = []
457 transactions = []
457 if neednewdiff:
458 if neednewdiff:
458 diff = creatediff(ctx)
459 diff = creatediff(ctx)
459 transactions.append({b'type': b'update', b'value': diff[b'phid']})
460 transactions.append({b'type': b'update', b'value': diff[b'phid']})
460 if comment:
461 if comment:
461 transactions.append({b'type': b'comment', b'value': comment})
462 transactions.append({b'type': b'comment', b'value': comment})
462 else:
463 else:
463 # Even if we don't need to upload a new diff because the patch content
464 # Even if we don't need to upload a new diff because the patch content
464 # has not changed, we might still need to update its metadata so
465 # has not changed, we might still need to update its metadata so
465 # pushers can know the correct node metadata.
466 # pushers can know the correct node metadata.
466 assert olddiff
467 assert olddiff
467 diff = olddiff
468 diff = olddiff
468 writediffproperties(ctx, diff)
469 writediffproperties(ctx, diff)
469
470
470 # Set the parent Revision every time, so commit re-ordering is picked-up
471 # Set the parent Revision every time, so commit re-ordering is picked-up
471 if parentrevphid:
472 if parentrevphid:
472 transactions.append({b'type': b'parents.set',
473 transactions.append({b'type': b'parents.set',
473 b'value': [parentrevphid]})
474 b'value': [parentrevphid]})
474
475
475 if actions:
476 if actions:
476 transactions += actions
477 transactions += actions
477
478
478 # Parse commit message and update related fields.
479 # Parse commit message and update related fields.
479 desc = ctx.description()
480 desc = ctx.description()
480 info = callconduit(repo.ui, b'differential.parsecommitmessage',
481 info = callconduit(repo.ui, b'differential.parsecommitmessage',
481 {b'corpus': desc})
482 {b'corpus': desc})
482 for k, v in info[b'fields'].items():
483 for k, v in info[b'fields'].items():
483 if k in [b'title', b'summary', b'testPlan']:
484 if k in [b'title', b'summary', b'testPlan']:
484 transactions.append({b'type': k, b'value': v})
485 transactions.append({b'type': k, b'value': v})
485
486
486 params = {b'transactions': transactions}
487 params = {b'transactions': transactions}
487 if revid is not None:
488 if revid is not None:
488 # Update an existing Differential Revision
489 # Update an existing Differential Revision
489 params[b'objectIdentifier'] = revid
490 params[b'objectIdentifier'] = revid
490
491
491 revision = callconduit(repo.ui, b'differential.revision.edit', params)
492 revision = callconduit(repo.ui, b'differential.revision.edit', params)
492 if not revision:
493 if not revision:
493 raise error.Abort(_(b'cannot create revision for %s') % ctx)
494 raise error.Abort(_(b'cannot create revision for %s') % ctx)
494
495
495 return revision, diff
496 return revision, diff
496
497
497 def userphids(repo, names):
498 def userphids(repo, names):
498 """convert user names to PHIDs"""
499 """convert user names to PHIDs"""
499 names = [name.lower() for name in names]
500 names = [name.lower() for name in names]
500 query = {b'constraints': {b'usernames': names}}
501 query = {b'constraints': {b'usernames': names}}
501 result = callconduit(repo.ui, b'user.search', query)
502 result = callconduit(repo.ui, b'user.search', query)
502 # An unknown username is not an API error, so check whether we have missed
503 # An unknown username is not an API error, so check whether we have missed
503 # some names here.
504 # some names here.
504 data = result[b'data']
505 data = result[b'data']
505 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
506 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
506 unresolved = set(names) - resolved
507 unresolved = set(names) - resolved
507 if unresolved:
508 if unresolved:
508 raise error.Abort(_(b'unknown username: %s')
509 raise error.Abort(_(b'unknown username: %s')
509 % b' '.join(sorted(unresolved)))
510 % b' '.join(sorted(unresolved)))
510 return [entry[b'phid'] for entry in data]
511 return [entry[b'phid'] for entry in data]
511
512
512 @vcrcommand(b'phabsend',
513 @vcrcommand(b'phabsend',
513 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
514 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
514 (b'', b'amend', True, _(b'update commit messages')),
515 (b'', b'amend', True, _(b'update commit messages')),
515 (b'', b'reviewer', [], _(b'specify reviewers')),
516 (b'', b'reviewer', [], _(b'specify reviewers')),
516 (b'', b'blocker', [], _(b'specify blocking reviewers')),
517 (b'', b'blocker', [], _(b'specify blocking reviewers')),
517 (b'm', b'comment', b'',
518 (b'm', b'comment', b'',
518 _(b'add a comment to Revisions with new/updated Diffs')),
519 _(b'add a comment to Revisions with new/updated Diffs')),
519 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
520 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
520 _(b'REV [OPTIONS]'),
521 _(b'REV [OPTIONS]'),
521 helpcategory=command.CATEGORY_IMPORT_EXPORT)
522 helpcategory=command.CATEGORY_IMPORT_EXPORT)
522 def phabsend(ui, repo, *revs, **opts):
523 def phabsend(ui, repo, *revs, **opts):
523 """upload changesets to Phabricator
524 """upload changesets to Phabricator
524
525
525 If multiple revisions are specified, they will be sent as a stack
526 If multiple revisions are specified, they will be sent as a stack
526 with a linear dependency relationship, using the order specified by the
527 with a linear dependency relationship, using the order specified by the
527 revset.
528 revset.
528
529
529 The first time changesets are uploaded, local tags will be created to
530 The first time changesets are uploaded, local tags will be created to
530 maintain the association. After the first time, phabsend will check
531 maintain the association. After the first time, phabsend will check
531 obsstore and tags information so it can figure out whether to update an
532 obsstore and tags information so it can figure out whether to update an
532 existing Differential Revision, or create a new one.
533 existing Differential Revision, or create a new one.
533
534
534 If --amend is set, update commit messages so they have the
535 If --amend is set, update commit messages so they have the
535 ``Differential Revision`` URL, and remove related tags. This is similar to what
536 ``Differential Revision`` URL, and remove related tags. This is similar to what
536 arcanist does, and is preferred in author-push workflows. Otherwise,
537 arcanist does, and is preferred in author-push workflows. Otherwise,
537 use local tags to record the ``Differential Revision`` association.
538 use local tags to record the ``Differential Revision`` association.
538
539
539 The --confirm option lets you confirm changesets before sending them. You
540 The --confirm option lets you confirm changesets before sending them. You
540 can also add the following to your configuration file to make it the default
541 can also add the following to your configuration file to make it the default
541 behaviour::
542 behaviour::
542
543
543 [phabsend]
544 [phabsend]
544 confirm = true
545 confirm = true
545
546
546 phabsend will check obsstore and the above association to decide whether to
547 phabsend will check obsstore and the above association to decide whether to
547 update an existing Differential Revision, or create a new one.
548 update an existing Differential Revision, or create a new one.
548 """
549 """
549 opts = pycompat.byteskwargs(opts)
550 opts = pycompat.byteskwargs(opts)
550 revs = list(revs) + opts.get(b'rev', [])
551 revs = list(revs) + opts.get(b'rev', [])
551 revs = scmutil.revrange(repo, revs)
552 revs = scmutil.revrange(repo, revs)
552
553
553 if not revs:
554 if not revs:
554 raise error.Abort(_(b'phabsend requires at least one changeset'))
555 raise error.Abort(_(b'phabsend requires at least one changeset'))
555 if opts.get(b'amend'):
556 if opts.get(b'amend'):
556 cmdutil.checkunfinished(repo)
557 cmdutil.checkunfinished(repo)
557
558
558 # {newnode: (oldnode, olddiff, olddrev}
559 # {newnode: (oldnode, olddiff, olddrev}
559 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
560 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
560
561
561 confirm = ui.configbool(b'phabsend', b'confirm')
562 confirm = ui.configbool(b'phabsend', b'confirm')
562 confirm |= bool(opts.get(b'confirm'))
563 confirm |= bool(opts.get(b'confirm'))
563 if confirm:
564 if confirm:
564 confirmed = _confirmbeforesend(repo, revs, oldmap)
565 confirmed = _confirmbeforesend(repo, revs, oldmap)
565 if not confirmed:
566 if not confirmed:
566 raise error.Abort(_(b'phabsend cancelled'))
567 raise error.Abort(_(b'phabsend cancelled'))
567
568
568 actions = []
569 actions = []
569 reviewers = opts.get(b'reviewer', [])
570 reviewers = opts.get(b'reviewer', [])
570 blockers = opts.get(b'blocker', [])
571 blockers = opts.get(b'blocker', [])
571 phids = []
572 phids = []
572 if reviewers:
573 if reviewers:
573 phids.extend(userphids(repo, reviewers))
574 phids.extend(userphids(repo, reviewers))
574 if blockers:
575 if blockers:
575 phids.extend(map(
576 phids.extend(map(
576 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
577 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
577 ))
578 ))
578 if phids:
579 if phids:
579 actions.append({b'type': b'reviewers.add', b'value': phids})
580 actions.append({b'type': b'reviewers.add', b'value': phids})
580
581
581 drevids = [] # [int]
582 drevids = [] # [int]
582 diffmap = {} # {newnode: diff}
583 diffmap = {} # {newnode: diff}
583
584
584 # Send patches one by one so we know their Differential Revision PHIDs and
585 # Send patches one by one so we know their Differential Revision PHIDs and
585 # can provide dependency relationship
586 # can provide dependency relationship
586 lastrevphid = None
587 lastrevphid = None
587 for rev in revs:
588 for rev in revs:
588 ui.debug(b'sending rev %d\n' % rev)
589 ui.debug(b'sending rev %d\n' % rev)
589 ctx = repo[rev]
590 ctx = repo[rev]
590
591
591 # Get Differential Revision ID
592 # Get Differential Revision ID
592 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
593 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
593 if oldnode != ctx.node() or opts.get(b'amend'):
594 if oldnode != ctx.node() or opts.get(b'amend'):
594 # Create or update Differential Revision
595 # Create or update Differential Revision
595 revision, diff = createdifferentialrevision(
596 revision, diff = createdifferentialrevision(
596 ctx, revid, lastrevphid, oldnode, olddiff, actions,
597 ctx, revid, lastrevphid, oldnode, olddiff, actions,
597 opts.get(b'comment'))
598 opts.get(b'comment'))
598 diffmap[ctx.node()] = diff
599 diffmap[ctx.node()] = diff
599 newrevid = int(revision[b'object'][b'id'])
600 newrevid = int(revision[b'object'][b'id'])
600 newrevphid = revision[b'object'][b'phid']
601 newrevphid = revision[b'object'][b'phid']
601 if revid:
602 if revid:
602 action = b'updated'
603 action = b'updated'
603 else:
604 else:
604 action = b'created'
605 action = b'created'
605
606
606 # Create a local tag to note the association, if the commit message
607 # Create a local tag to note the association, if the commit message
607 # does not have it already
608 # does not have it already
608 m = _differentialrevisiondescre.search(ctx.description())
609 m = _differentialrevisiondescre.search(ctx.description())
609 if not m or int(m.group(r'id')) != newrevid:
610 if not m or int(m.group(r'id')) != newrevid:
610 tagname = b'D%d' % newrevid
611 tagname = b'D%d' % newrevid
611 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
612 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
612 date=None, local=True)
613 date=None, local=True)
613 else:
614 else:
614 # Nothing changed. But still set "newrevphid" so the next revision
615 # Nothing changed. But still set "newrevphid" so the next revision
615 # could depend on this one and "newrevid" for the summary line.
616 # could depend on this one and "newrevid" for the summary line.
616 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
617 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
617 newrevid = revid
618 newrevid = revid
618 action = b'skipped'
619 action = b'skipped'
619
620
620 actiondesc = ui.label(
621 actiondesc = ui.label(
621 {b'created': _(b'created'),
622 {b'created': _(b'created'),
622 b'skipped': _(b'skipped'),
623 b'skipped': _(b'skipped'),
623 b'updated': _(b'updated')}[action],
624 b'updated': _(b'updated')}[action],
624 b'phabricator.action.%s' % action)
625 b'phabricator.action.%s' % action)
625 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
626 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
626 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
627 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
627 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
628 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
628 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
629 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
629 desc))
630 desc))
630 drevids.append(newrevid)
631 drevids.append(newrevid)
631 lastrevphid = newrevphid
632 lastrevphid = newrevphid
632
633
633 # Update commit messages and remove tags
634 # Update commit messages and remove tags
634 if opts.get(b'amend'):
635 if opts.get(b'amend'):
635 unfi = repo.unfiltered()
636 unfi = repo.unfiltered()
636 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
637 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
637 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
638 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
638 wnode = unfi[b'.'].node()
639 wnode = unfi[b'.'].node()
639 mapping = {} # {oldnode: [newnode]}
640 mapping = {} # {oldnode: [newnode]}
640 for i, rev in enumerate(revs):
641 for i, rev in enumerate(revs):
641 old = unfi[rev]
642 old = unfi[rev]
642 drevid = drevids[i]
643 drevid = drevids[i]
643 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
644 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
644 newdesc = getdescfromdrev(drev)
645 newdesc = getdescfromdrev(drev)
645 # Make sure the commit message contains "Differential Revision"
646 # Make sure the commit message contains "Differential Revision"
646 if old.description() != newdesc:
647 if old.description() != newdesc:
647 if old.phase() == phases.public:
648 if old.phase() == phases.public:
648 ui.warn(_("warning: not updating public commit %s\n")
649 ui.warn(_("warning: not updating public commit %s\n")
649 % scmutil.formatchangeid(old))
650 % scmutil.formatchangeid(old))
650 continue
651 continue
651 parents = [
652 parents = [
652 mapping.get(old.p1().node(), (old.p1(),))[0],
653 mapping.get(old.p1().node(), (old.p1(),))[0],
653 mapping.get(old.p2().node(), (old.p2(),))[0],
654 mapping.get(old.p2().node(), (old.p2(),))[0],
654 ]
655 ]
655 new = context.metadataonlyctx(
656 new = context.metadataonlyctx(
656 repo, old, parents=parents, text=newdesc,
657 repo, old, parents=parents, text=newdesc,
657 user=old.user(), date=old.date(), extra=old.extra())
658 user=old.user(), date=old.date(), extra=old.extra())
658
659
659 newnode = new.commit()
660 newnode = new.commit()
660
661
661 mapping[old.node()] = [newnode]
662 mapping[old.node()] = [newnode]
662 # Update diff property
663 # Update diff property
663 # If it fails just warn and keep going, otherwise the DREV
664 # If it fails just warn and keep going, otherwise the DREV
664 # associations will be lost
665 # associations will be lost
665 try:
666 try:
666 writediffproperties(unfi[newnode], diffmap[old.node()])
667 writediffproperties(unfi[newnode], diffmap[old.node()])
667 except util.urlerr.urlerror:
668 except util.urlerr.urlerror:
668 ui.warn(b'Failed to update metadata for D%s\n' % drevid)
669 ui.warn(b'Failed to update metadata for D%s\n' % drevid)
669 # Remove the local tag since it's no longer necessary
670 # Remove the local tag since it's no longer necessary
670 tagname = b'D%d' % drevid
671 tagname = b'D%d' % drevid
671 if tagname in repo.tags():
672 if tagname in repo.tags():
672 tags.tag(repo, tagname, nullid, message=None, user=None,
673 tags.tag(repo, tagname, nullid, message=None, user=None,
673 date=None, local=True)
674 date=None, local=True)
674 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
675 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
675 if wnode in mapping:
676 if wnode in mapping:
676 unfi.setparents(mapping[wnode][0])
677 unfi.setparents(mapping[wnode][0])
677
678
678 # Map from "hg:meta" keys to header understood by "hg import". The order is
679 # Map from "hg:meta" keys to header understood by "hg import". The order is
679 # consistent with "hg export" output.
680 # consistent with "hg export" output.
680 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
681 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
681 (b'branch', b'Branch'), (b'node', b'Node ID'),
682 (b'branch', b'Branch'), (b'node', b'Node ID'),
682 (b'parent', b'Parent ')])
683 (b'parent', b'Parent ')])
683
684
684 def _confirmbeforesend(repo, revs, oldmap):
685 def _confirmbeforesend(repo, revs, oldmap):
685 url, token = readurltoken(repo.ui)
686 url, token = readurltoken(repo.ui)
686 ui = repo.ui
687 ui = repo.ui
687 for rev in revs:
688 for rev in revs:
688 ctx = repo[rev]
689 ctx = repo[rev]
689 desc = ctx.description().splitlines()[0]
690 desc = ctx.description().splitlines()[0]
690 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
691 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
691 if drevid:
692 if drevid:
692 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
693 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
693 else:
694 else:
694 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
695 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
695
696
696 ui.write(_(b'%s - %s: %s\n')
697 ui.write(_(b'%s - %s: %s\n')
697 % (drevdesc,
698 % (drevdesc,
698 ui.label(bytes(ctx), b'phabricator.node'),
699 ui.label(bytes(ctx), b'phabricator.node'),
699 ui.label(desc, b'phabricator.desc')))
700 ui.label(desc, b'phabricator.desc')))
700
701
701 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
702 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
702 b'$$ &Yes $$ &No') % url):
703 b'$$ &Yes $$ &No') % url):
703 return False
704 return False
704
705
705 return True
706 return True
706
707
707 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
708 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
708 b'abandoned'}
709 b'abandoned'}
709
710
710 def _getstatusname(drev):
711 def _getstatusname(drev):
711 """get normalized status name from a Differential Revision"""
712 """get normalized status name from a Differential Revision"""
712 return drev[b'statusName'].replace(b' ', b'').lower()
713 return drev[b'statusName'].replace(b' ', b'').lower()
713
714
714 # Small language to specify differential revisions. Support symbols: (), :X,
715 # Small language to specify differential revisions. Support symbols: (), :X,
715 # +, and -.
716 # +, and -.
716
717
717 _elements = {
718 _elements = {
718 # token-type: binding-strength, primary, prefix, infix, suffix
719 # token-type: binding-strength, primary, prefix, infix, suffix
719 b'(': (12, None, (b'group', 1, b')'), None, None),
720 b'(': (12, None, (b'group', 1, b')'), None, None),
720 b':': (8, None, (b'ancestors', 8), None, None),
721 b':': (8, None, (b'ancestors', 8), None, None),
721 b'&': (5, None, None, (b'and_', 5), None),
722 b'&': (5, None, None, (b'and_', 5), None),
722 b'+': (4, None, None, (b'add', 4), None),
723 b'+': (4, None, None, (b'add', 4), None),
723 b'-': (4, None, None, (b'sub', 4), None),
724 b'-': (4, None, None, (b'sub', 4), None),
724 b')': (0, None, None, None, None),
725 b')': (0, None, None, None, None),
725 b'symbol': (0, b'symbol', None, None, None),
726 b'symbol': (0, b'symbol', None, None, None),
726 b'end': (0, None, None, None, None),
727 b'end': (0, None, None, None, None),
727 }
728 }
728
729
729 def _tokenize(text):
730 def _tokenize(text):
730 view = memoryview(text) # zero-copy slice
731 view = memoryview(text) # zero-copy slice
731 special = b'():+-& '
732 special = b'():+-& '
732 pos = 0
733 pos = 0
733 length = len(text)
734 length = len(text)
734 while pos < length:
735 while pos < length:
735 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
736 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
736 pycompat.iterbytestr(view[pos:])))
737 pycompat.iterbytestr(view[pos:])))
737 if symbol:
738 if symbol:
738 yield (b'symbol', symbol, pos)
739 yield (b'symbol', symbol, pos)
739 pos += len(symbol)
740 pos += len(symbol)
740 else: # special char, ignore space
741 else: # special char, ignore space
741 if text[pos] != b' ':
742 if text[pos] != b' ':
742 yield (text[pos], None, pos)
743 yield (text[pos], None, pos)
743 pos += 1
744 pos += 1
744 yield (b'end', None, pos)
745 yield (b'end', None, pos)
745
746
746 def _parse(text):
747 def _parse(text):
747 tree, pos = parser.parser(_elements).parse(_tokenize(text))
748 tree, pos = parser.parser(_elements).parse(_tokenize(text))
748 if pos != len(text):
749 if pos != len(text):
749 raise error.ParseError(b'invalid token', pos)
750 raise error.ParseError(b'invalid token', pos)
750 return tree
751 return tree
751
752
752 def _parsedrev(symbol):
753 def _parsedrev(symbol):
753 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
754 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
754 if symbol.startswith(b'D') and symbol[1:].isdigit():
755 if symbol.startswith(b'D') and symbol[1:].isdigit():
755 return int(symbol[1:])
756 return int(symbol[1:])
756 if symbol.isdigit():
757 if symbol.isdigit():
757 return int(symbol)
758 return int(symbol)
758
759
759 def _prefetchdrevs(tree):
760 def _prefetchdrevs(tree):
760 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
761 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
761 drevs = set()
762 drevs = set()
762 ancestordrevs = set()
763 ancestordrevs = set()
763 op = tree[0]
764 op = tree[0]
764 if op == b'symbol':
765 if op == b'symbol':
765 r = _parsedrev(tree[1])
766 r = _parsedrev(tree[1])
766 if r:
767 if r:
767 drevs.add(r)
768 drevs.add(r)
768 elif op == b'ancestors':
769 elif op == b'ancestors':
769 r, a = _prefetchdrevs(tree[1])
770 r, a = _prefetchdrevs(tree[1])
770 drevs.update(r)
771 drevs.update(r)
771 ancestordrevs.update(r)
772 ancestordrevs.update(r)
772 ancestordrevs.update(a)
773 ancestordrevs.update(a)
773 else:
774 else:
774 for t in tree[1:]:
775 for t in tree[1:]:
775 r, a = _prefetchdrevs(t)
776 r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs

def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
                "3",
                "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
                "phabricator:projects": [],
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)
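
    # Illustration: getstack() walks "phabricator:depends-on" edges from the
    # given tops down to their roots, then reverses the order.  Assuming a
    # hypothetical chain where D3 depends on D2 and D2 depends on D1:
    #
    #     getstack([3])  ->  baseset([1, 2, 3])
    #
    # so ancestors come first, matching the order the patches have to be
    # applied in.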

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
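
# Illustration of the spec language consumed above (see the phabread docstring
# for the user-facing description).  A spec such as b':D4+D7' is parsed by
# _parse() into a prefix tree, roughly:
#
#     (b'add', (b'ancestors', (b'symbol', b'D4')), (b'symbol', b'D7'))
#
# The exact node shapes are an implementation detail of _parse(); walk()
# evaluates such a tree into smartsets, so querydrev(repo, b':D4+D7') returns
# the drev dicts for D4, everything D4 depends on, and D7.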

def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
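
# Illustration with assumed field values: for a drev whose title is
# "example", whose summary is empty, whose test plan is "ran the tests", and
# whose uri is "https://phab.example.com/D2", getdescfromdrev() yields:
#
#     example
#
#     Test Plan:
#     ran the tests
#
#     Differential Revision: https://phab.example.com/D2
#
# Empty fields (the summary here) are dropped by the filter(None, ...) call.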

def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                              commit[b'authorEmail'])
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
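
# Illustration: for the "local:commits" example in the docstring above,
# getdiffmeta() would return roughly
#
#     {b'user': b'Foo Bar <foo@example.com>',
#      b'date': b'1499546314 0',
#      b'branch': b'default',
#      b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#      b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}
#
# The hard-coded "0" offset in "date" is the time zone loss noted above.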

def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo.ui, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
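
# Illustration: with hg:meta populated as in getdiffmeta()'s docstring, each
# emitted patch starts with headers along the lines of
#
#     # HG changeset patch
#     # User Foo Bar <foo@example.com>
#     # Date 1499571514 25200
#     # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#     # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#
# (the exact set and order of headers comes from _metanamemap), followed by
# the commit message from getdescfromdrev() and the raw diff body.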

@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also use common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` could be
    used to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
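
# Example invocations, using the query syntax described in the docstring:
#
#     hg phabread D123                 # print the patch for D123
#     hg phabread --stack D123         # D123 plus everything it depends on
#     hg phabread ':D9 & needsreview'  # "Needs Review" part of a stack
#
# Since the output is a plain-text patch, it can be piped straight into
# import, e.g. ``hg phabread --stack D123 | hg import -``.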

@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
            ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(ui, b'differential.revision.edit', params)
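
# Example invocations (illustrative values):
#
#     hg phabupdate --accept :D123 -m 'LGTM, queued for landing'
#     hg phabupdate --abandon D456
#
# Only one of --accept/--reject/--abandon/--reclaim may be used per run, and
# the optional comment is attached to the last revision in the selection.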

@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
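
# Example template usage: the keyword exposes `url` and `id`, so something
# like
#
#     hg log -r . -T '{phabreview.url}\n'
#
# prints the Differential Revision URL associated with the working directory
# parent, falling back to a matching local tag (e.g. D123) when the commit
# message carries no "Differential Revision:" line.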