##// END OF EJS Templates
phabricator: change conduit data format to match arcanist...
Ian Moody -
r43555:f5aa4a53 default
parent child Browse files
Show More
@@ -1,1600 +1,1607 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points so the extension loader
# finds them at module level.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect labels used by phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag added to every vcrcommand so tests can record/replay HTTP
# traffic with the `vcr` library.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139
139
140
140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``command(...)`` but appends the ``--test-vcr`` flag; when
    that flag is given, all conduit HTTP requests are recorded to (or mocked
    from) the named cassette file using the ``vcr`` library.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match requests by URI, method and the *set* of form parameters,
        # since parameter ordering is not stable.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        # Strip the real API token from recorded cassettes.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Don't persist session cookies in recorded cassettes.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does import-time magic that is incompatible with
                # Mercurial's demand importer; disable it while loading.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
205
205
206
206
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        # PHP form encoding spells booleans as 'true'/'false'.
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            # Scalar leaf: record under the accumulated bracketed key.
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
232
232
233
233
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises error.Abort when phabricator.url is unset or no matching
    phabtoken can be found in the [auth] section.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
262
262
263
263
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # Match arcanist's wire format: the token travels inside the
    # JSON-encoded params under __conduit__ rather than as api.token.
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Send the request body through the user-configured curl command
        # on stdin (-d @-).
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert every unicode string in the decoded response back to local
    # bytes so the rest of the extension works with bytes only.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
300
307
301
308
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
325
332
326
333
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache the looked-up PHID in config so later calls in this process
    # skip the conduit round-trip.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
346
353
347
354
# Matches local tags of the form "D123" (a Differential Revision ID).
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>D<id>" association line that
# phabsend appends to commit messages.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
352
359
353
360
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: the commit message is authoritative, no need for
            # precursor overlap with Phabricator's records.
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag so it no longer misleads.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
444
451
445
452
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    # diffui yields (chunk, label) pairs; labels are for terminal color
    # only, so they are discarded here.
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()
454
461
455
462
class DiffChangeType(object):
    """Constants for Differential's per-file change type field."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
465
472
466
473
class DiffFileType(object):
    """Constants for Differential's per-file content type field."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
471
478
472
479
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
486
493
487
494
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # Duplicate every "new:"-prefixed metadata key under "old:".
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # Record the pre-change unix file mode.
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # Record the post-change unix file mode.
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
527
534
528
535
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange on this diff, keyed by its currentPath.

        Raises error.Abort if *change* is not a phabchange instance.
        """
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
555
562
556
563
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs *fname* between ctx.p1() and ctx with a huge context (32767 lines)
    so the whole file is effectively included, then adds one phabhunk per
    diff hunk to *pchange*.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Only one file is matched, so a single diffhunks entry is expected.
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is the "@@ ..." marker; the corpus is everything after it.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Derive add/del line counts for this hunk from diffstat.
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
585
592
586
593
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    *fphid* is the file PHID returned by an earlier ``file.allocate`` call.
    Chunks the server already marked complete are skipped.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(fctx.data()[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
613
620
614
621
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID. Raises error.Abort if no PHID was obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID yet: single-shot upload of the whole content.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID allocated but content missing: chunked upload.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
650
657
651
658
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    *originalfname* is the path in ctx.p1() (differs from fctx.path() for
    renames/copies).
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
673
680
674
681
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file

    Uploads the file content, records PHID/size/mime-type metadata, and
    marks image files as IMAGE so the web UI can render them.
    """
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
687
694
688
695
# Copied from mercurial/patch.py
# Maps hg file flags (b'l' symlink, b'x' executable, b'' regular) to the
# corresponding git mode strings used in diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
691
698
692
699
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look up the parent filectx once for both the mode and the content.
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not fctx.isbinary():
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
705
712
706
713
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        # Record a mode change only when the flags actually differ.
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary():
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
725
732
726
733
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates *removed* in place — a rename's source is taken out of it
    so the caller does not also emit it as a plain delete.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source no longer exists: this is a move.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary():
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the synthesized source-side changes after all destinations.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
790
797
791
798
def creatediff(ctx):
    """create a Differential Diff

    Returns the conduit response dict. Raises error.Abort on failure.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
804
811
805
812
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Sets two properties on the given diff: ``hg:meta`` (commit identity) and
    ``local:commits`` (per-node author/time info, keyed by hex node).
    """
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
840
847
841
848
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair of conduit response dicts.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
911
918
912
919
def userphids(repo, names):
    """convert user names to PHIDs

    Raises error.Abort listing any names the server did not resolve.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
928
935
929
936
930 @vcrcommand(
937 @vcrcommand(
931 b'phabsend',
938 b'phabsend',
932 [
939 [
933 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
940 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
934 (b'', b'amend', True, _(b'update commit messages')),
941 (b'', b'amend', True, _(b'update commit messages')),
935 (b'', b'reviewer', [], _(b'specify reviewers')),
942 (b'', b'reviewer', [], _(b'specify reviewers')),
936 (b'', b'blocker', [], _(b'specify blocking reviewers')),
943 (b'', b'blocker', [], _(b'specify blocking reviewers')),
937 (
944 (
938 b'm',
945 b'm',
939 b'comment',
946 b'comment',
940 b'',
947 b'',
941 _(b'add a comment to Revisions with new/updated Diffs'),
948 _(b'add a comment to Revisions with new/updated Diffs'),
942 ),
949 ),
943 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
950 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
944 ],
951 ],
945 _(b'REV [OPTIONS]'),
952 _(b'REV [OPTIONS]'),
946 helpcategory=command.CATEGORY_IMPORT_EXPORT,
953 helpcategory=command.CATEGORY_IMPORT_EXPORT,
947 )
954 )
948 def phabsend(ui, repo, *revs, **opts):
955 def phabsend(ui, repo, *revs, **opts):
949 """upload changesets to Phabricator
956 """upload changesets to Phabricator
950
957
951 If there are multiple revisions specified, they will be send as a stack
958 If there are multiple revisions specified, they will be send as a stack
952 with a linear dependencies relationship using the order specified by the
959 with a linear dependencies relationship using the order specified by the
953 revset.
960 revset.
954
961
955 For the first time uploading changesets, local tags will be created to
962 For the first time uploading changesets, local tags will be created to
956 maintain the association. After the first time, phabsend will check
963 maintain the association. After the first time, phabsend will check
957 obsstore and tags information so it can figure out whether to update an
964 obsstore and tags information so it can figure out whether to update an
958 existing Differential Revision, or create a new one.
965 existing Differential Revision, or create a new one.
959
966
960 If --amend is set, update commit messages so they have the
967 If --amend is set, update commit messages so they have the
961 ``Differential Revision`` URL, remove related tags. This is similar to what
968 ``Differential Revision`` URL, remove related tags. This is similar to what
962 arcanist will do, and is more desired in author-push workflows. Otherwise,
969 arcanist will do, and is more desired in author-push workflows. Otherwise,
963 use local tags to record the ``Differential Revision`` association.
970 use local tags to record the ``Differential Revision`` association.
964
971
965 The --confirm option lets you confirm changesets before sending them. You
972 The --confirm option lets you confirm changesets before sending them. You
966 can also add following to your configuration file to make it default
973 can also add following to your configuration file to make it default
967 behaviour::
974 behaviour::
968
975
969 [phabsend]
976 [phabsend]
970 confirm = true
977 confirm = true
971
978
972 phabsend will check obsstore and the above association to decide whether to
979 phabsend will check obsstore and the above association to decide whether to
973 update an existing Differential Revision, or create a new one.
980 update an existing Differential Revision, or create a new one.
974 """
981 """
975 opts = pycompat.byteskwargs(opts)
982 opts = pycompat.byteskwargs(opts)
976 revs = list(revs) + opts.get(b'rev', [])
983 revs = list(revs) + opts.get(b'rev', [])
977 revs = scmutil.revrange(repo, revs)
984 revs = scmutil.revrange(repo, revs)
978
985
979 if not revs:
986 if not revs:
980 raise error.Abort(_(b'phabsend requires at least one changeset'))
987 raise error.Abort(_(b'phabsend requires at least one changeset'))
981 if opts.get(b'amend'):
988 if opts.get(b'amend'):
982 cmdutil.checkunfinished(repo)
989 cmdutil.checkunfinished(repo)
983
990
984 # {newnode: (oldnode, olddiff, olddrev}
991 # {newnode: (oldnode, olddiff, olddrev}
985 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
992 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
986
993
987 confirm = ui.configbool(b'phabsend', b'confirm')
994 confirm = ui.configbool(b'phabsend', b'confirm')
988 confirm |= bool(opts.get(b'confirm'))
995 confirm |= bool(opts.get(b'confirm'))
989 if confirm:
996 if confirm:
990 confirmed = _confirmbeforesend(repo, revs, oldmap)
997 confirmed = _confirmbeforesend(repo, revs, oldmap)
991 if not confirmed:
998 if not confirmed:
992 raise error.Abort(_(b'phabsend cancelled'))
999 raise error.Abort(_(b'phabsend cancelled'))
993
1000
994 actions = []
1001 actions = []
995 reviewers = opts.get(b'reviewer', [])
1002 reviewers = opts.get(b'reviewer', [])
996 blockers = opts.get(b'blocker', [])
1003 blockers = opts.get(b'blocker', [])
997 phids = []
1004 phids = []
998 if reviewers:
1005 if reviewers:
999 phids.extend(userphids(repo, reviewers))
1006 phids.extend(userphids(repo, reviewers))
1000 if blockers:
1007 if blockers:
1001 phids.extend(
1008 phids.extend(
1002 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1009 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1003 )
1010 )
1004 if phids:
1011 if phids:
1005 actions.append({b'type': b'reviewers.add', b'value': phids})
1012 actions.append({b'type': b'reviewers.add', b'value': phids})
1006
1013
1007 drevids = [] # [int]
1014 drevids = [] # [int]
1008 diffmap = {} # {newnode: diff}
1015 diffmap = {} # {newnode: diff}
1009
1016
1010 # Send patches one by one so we know their Differential Revision PHIDs and
1017 # Send patches one by one so we know their Differential Revision PHIDs and
1011 # can provide dependency relationship
1018 # can provide dependency relationship
1012 lastrevphid = None
1019 lastrevphid = None
1013 for rev in revs:
1020 for rev in revs:
1014 ui.debug(b'sending rev %d\n' % rev)
1021 ui.debug(b'sending rev %d\n' % rev)
1015 ctx = repo[rev]
1022 ctx = repo[rev]
1016
1023
1017 # Get Differential Revision ID
1024 # Get Differential Revision ID
1018 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1025 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1019 if oldnode != ctx.node() or opts.get(b'amend'):
1026 if oldnode != ctx.node() or opts.get(b'amend'):
1020 # Create or update Differential Revision
1027 # Create or update Differential Revision
1021 revision, diff = createdifferentialrevision(
1028 revision, diff = createdifferentialrevision(
1022 ctx,
1029 ctx,
1023 revid,
1030 revid,
1024 lastrevphid,
1031 lastrevphid,
1025 oldnode,
1032 oldnode,
1026 olddiff,
1033 olddiff,
1027 actions,
1034 actions,
1028 opts.get(b'comment'),
1035 opts.get(b'comment'),
1029 )
1036 )
1030 diffmap[ctx.node()] = diff
1037 diffmap[ctx.node()] = diff
1031 newrevid = int(revision[b'object'][b'id'])
1038 newrevid = int(revision[b'object'][b'id'])
1032 newrevphid = revision[b'object'][b'phid']
1039 newrevphid = revision[b'object'][b'phid']
1033 if revid:
1040 if revid:
1034 action = b'updated'
1041 action = b'updated'
1035 else:
1042 else:
1036 action = b'created'
1043 action = b'created'
1037
1044
1038 # Create a local tag to note the association, if commit message
1045 # Create a local tag to note the association, if commit message
1039 # does not have it already
1046 # does not have it already
1040 m = _differentialrevisiondescre.search(ctx.description())
1047 m = _differentialrevisiondescre.search(ctx.description())
1041 if not m or int(m.group(r'id')) != newrevid:
1048 if not m or int(m.group(r'id')) != newrevid:
1042 tagname = b'D%d' % newrevid
1049 tagname = b'D%d' % newrevid
1043 tags.tag(
1050 tags.tag(
1044 repo,
1051 repo,
1045 tagname,
1052 tagname,
1046 ctx.node(),
1053 ctx.node(),
1047 message=None,
1054 message=None,
1048 user=None,
1055 user=None,
1049 date=None,
1056 date=None,
1050 local=True,
1057 local=True,
1051 )
1058 )
1052 else:
1059 else:
1053 # Nothing changed. But still set "newrevphid" so the next revision
1060 # Nothing changed. But still set "newrevphid" so the next revision
1054 # could depend on this one and "newrevid" for the summary line.
1061 # could depend on this one and "newrevid" for the summary line.
1055 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1062 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1056 newrevid = revid
1063 newrevid = revid
1057 action = b'skipped'
1064 action = b'skipped'
1058
1065
1059 actiondesc = ui.label(
1066 actiondesc = ui.label(
1060 {
1067 {
1061 b'created': _(b'created'),
1068 b'created': _(b'created'),
1062 b'skipped': _(b'skipped'),
1069 b'skipped': _(b'skipped'),
1063 b'updated': _(b'updated'),
1070 b'updated': _(b'updated'),
1064 }[action],
1071 }[action],
1065 b'phabricator.action.%s' % action,
1072 b'phabricator.action.%s' % action,
1066 )
1073 )
1067 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1074 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1068 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1075 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1069 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1076 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1070 ui.write(
1077 ui.write(
1071 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1078 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1072 )
1079 )
1073 drevids.append(newrevid)
1080 drevids.append(newrevid)
1074 lastrevphid = newrevphid
1081 lastrevphid = newrevphid
1075
1082
1076 # Update commit messages and remove tags
1083 # Update commit messages and remove tags
1077 if opts.get(b'amend'):
1084 if opts.get(b'amend'):
1078 unfi = repo.unfiltered()
1085 unfi = repo.unfiltered()
1079 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1086 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1080 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1087 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1081 wnode = unfi[b'.'].node()
1088 wnode = unfi[b'.'].node()
1082 mapping = {} # {oldnode: [newnode]}
1089 mapping = {} # {oldnode: [newnode]}
1083 for i, rev in enumerate(revs):
1090 for i, rev in enumerate(revs):
1084 old = unfi[rev]
1091 old = unfi[rev]
1085 drevid = drevids[i]
1092 drevid = drevids[i]
1086 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1093 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1087 newdesc = getdescfromdrev(drev)
1094 newdesc = getdescfromdrev(drev)
1088 # Make sure commit message contain "Differential Revision"
1095 # Make sure commit message contain "Differential Revision"
1089 if old.description() != newdesc:
1096 if old.description() != newdesc:
1090 if old.phase() == phases.public:
1097 if old.phase() == phases.public:
1091 ui.warn(
1098 ui.warn(
1092 _(b"warning: not updating public commit %s\n")
1099 _(b"warning: not updating public commit %s\n")
1093 % scmutil.formatchangeid(old)
1100 % scmutil.formatchangeid(old)
1094 )
1101 )
1095 continue
1102 continue
1096 parents = [
1103 parents = [
1097 mapping.get(old.p1().node(), (old.p1(),))[0],
1104 mapping.get(old.p1().node(), (old.p1(),))[0],
1098 mapping.get(old.p2().node(), (old.p2(),))[0],
1105 mapping.get(old.p2().node(), (old.p2(),))[0],
1099 ]
1106 ]
1100 new = context.metadataonlyctx(
1107 new = context.metadataonlyctx(
1101 repo,
1108 repo,
1102 old,
1109 old,
1103 parents=parents,
1110 parents=parents,
1104 text=newdesc,
1111 text=newdesc,
1105 user=old.user(),
1112 user=old.user(),
1106 date=old.date(),
1113 date=old.date(),
1107 extra=old.extra(),
1114 extra=old.extra(),
1108 )
1115 )
1109
1116
1110 newnode = new.commit()
1117 newnode = new.commit()
1111
1118
1112 mapping[old.node()] = [newnode]
1119 mapping[old.node()] = [newnode]
1113 # Update diff property
1120 # Update diff property
1114 # If it fails just warn and keep going, otherwise the DREV
1121 # If it fails just warn and keep going, otherwise the DREV
1115 # associations will be lost
1122 # associations will be lost
1116 try:
1123 try:
1117 writediffproperties(unfi[newnode], diffmap[old.node()])
1124 writediffproperties(unfi[newnode], diffmap[old.node()])
1118 except util.urlerr.urlerror:
1125 except util.urlerr.urlerror:
1119 ui.warnnoi18n(
1126 ui.warnnoi18n(
1120 b'Failed to update metadata for D%s\n' % drevid
1127 b'Failed to update metadata for D%s\n' % drevid
1121 )
1128 )
1122 # Remove local tags since it's no longer necessary
1129 # Remove local tags since it's no longer necessary
1123 tagname = b'D%d' % drevid
1130 tagname = b'D%d' % drevid
1124 if tagname in repo.tags():
1131 if tagname in repo.tags():
1125 tags.tag(
1132 tags.tag(
1126 repo,
1133 repo,
1127 tagname,
1134 tagname,
1128 nullid,
1135 nullid,
1129 message=None,
1136 message=None,
1130 user=None,
1137 user=None,
1131 date=None,
1138 date=None,
1132 local=True,
1139 local=True,
1133 )
1140 )
1134 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1141 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1135 if wnode in mapping:
1142 if wnode in mapping:
1136 unfi.setparents(mapping[wnode][0])
1143 unfi.setparents(mapping[wnode][0])
1137
1144
1138
1145
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): the trailing space appears intended to reproduce the
        # "# Parent  <node>" spacing of "hg export" headers — confirm before
        # removing it.
        (b'parent', b'Parent '),
    ]
)
1150
1157
1151
1158
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per revision and prompt before sending.

    oldmap maps a node to (oldnode, olddiff, drevid) for revisions that
    already have a Differential Revision.  Returns True when the user
    confirms sending to the Phabricator URL, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # An existing Differential id means this would be an update, not a
        # new revision.
        drevdesc = (
            ui.label(b'D%s' % drevid, b'phabricator.drev')
            if drevid
            else ui.label(_(b'NEW'), b'phabricator.drev')
        )
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice() returns 0 for the first choice (&Yes).
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1179
1186
1180
1187
# Status names (normalized by _getstatusname) that the DREVSPEC query
# language accepts as filters, e.g. ":D9 & needsreview".
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1188
1195
1189
1196
1190 def _getstatusname(drev):
1197 def _getstatusname(drev):
1191 """get normalized status name from a Differential Revision"""
1198 """get normalized status name from a Differential Revision"""
1192 return drev[b'statusName'].replace(b' ', b'').lower()
1199 return drev[b'statusName'].replace(b' ', b'').lower()
1193
1200
1194
1201
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# This table is consumed by parser.parser() in _parse(); the tuple entries
# name the AST node produced for each operator.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1209
1216
1210
1217
def _tokenize(text):
    """Tokenize a DREVSPEC bytes string.

    Yields (token-type, token-value, position) tuples for the parser built
    from _elements: (b'symbol', <bytes>, pos) for runs of non-special
    characters, (<operator-byte>, None, pos) for operators, and a final
    (b'end', None, pos).  Spaces separate tokens but produce none.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice (text[pos : pos + 1]) instead of indexing: on Python 3
            # indexing bytes returns an int, which never compares equal to
            # b' ' and would yield token types absent from the parser
            # table.  Slicing returns a 1-byte bytes object on both
            # Python 2 and 3.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1230
1237
1231
1238
def _parse(text):
    """Parse a DREVSPEC bytes string into a parse tree.

    Raises error.ParseError if the whole input cannot be consumed.
    """
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos == len(text):
        return tree
    raise error.ParseError(b'invalid token', pos)
1237
1244
1238
1245
1239 def _parsedrev(symbol):
1246 def _parsedrev(symbol):
1240 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1247 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1241 if symbol.startswith(b'D') and symbol[1:].isdigit():
1248 if symbol.startswith(b'D') and symbol[1:].isdigit():
1242 return int(symbol[1:])
1249 return int(symbol[1:])
1243 if symbol.isdigit():
1250 if symbol.isdigit():
1244 return int(symbol)
1251 return int(symbol)
1245
1252
1246
1253
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif op == b'ancestors':
        # The operand of b':' is needed both directly and as the top of a
        # dependency stack.
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles |= subsingles
        ancestors |= subsingles | subancestors
    else:
        # Binary/group operators: merge what each operand needs.
        for subtree in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(subtree)
            singles |= subsingles
            ancestors |= subancestors
    return singles, ancestors
1267
1274
1268
1275
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # "params" carries either numeric ids or PHIDs; "key" is the first
        # one and identifies the drev the caller actually wants.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result; index by both PHID and int id so
        # later lookups of either form hit the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk of "phabricator:depends-on" edges, collecting
        # each revision once; the result is reversed so dependencies come
        # before dependents (bottom of the stack first).
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache; shared by the closures above and below.
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestor query,
    # speculatively fetch a window of `batchsize` consecutive ids below it,
    # betting that stacks use nearby ids — this turns many single fetches
    # into one conduit call.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter within the already-selected ids only
                # (see phabread docstring: they cannot be used alone).
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set algebra delegated to smartset via operator.and_/add/sub.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1391
1398
1392
1399
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. a blank summary) are dropped entirely.
    return b'\n\n'.join(p for p in parts if p)
1406
1413
1407
1414
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

    "properties": {
      "hg:meta": {
        "date": "1499571514 25200",
        "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
        "user": "Foo Bar <foo@example.com>",
        "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
      }
    }

    Or converted from "local:commits", sent by "arc", like:

    "properties": {
      "local:commits": {
        "98c08acae292b2faf60a279b4189beb6cff1414d": {
          "author": "Foo Bar",
          "time": 1499546314,
          "branch": "default",
          "tag": "",
          "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "local": "1000",
          "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
          "summary": "...",
          "message": "...",
          "authorEmail": "foo@example.com"
        }
      }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commits = props[b'local:commits']
            # Pick a deterministic commit when several are present: choose
            # the smallest node hash (the dict key).  The previous
            # sorted(commits.values())[0] raised TypeError on Python 3
            # with more than one commit, because dicts are unorderable.
            commit = commits[min(commits)]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc sends an integer timestamp with no offset; assume
                # UTC ("0") since the zone is lost (see docstring note).
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1474
1481
1475
1482
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Prefetch hg:meta property for all diffs in a single conduit call.
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # The latest diff of the revision is the one we import.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for key, label in _metanamemap.items():
            if key in meta:
                headerlines.append(b'# %s %s' % (label, meta[key]))
        header = b'\n'.join(headerlines) + b'\n'

        write(b'%s%s\n%s' % (header, desc, body))
1507
1514
1508
1515
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for the b':' (ancestors) prefix operator.
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1539
1546
1540
1547
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive: at most one may be given.
    statusflags = b'accept reject abandon reclaim'.split()
    flags = [name for name in statusflags if opts.get(name)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # Each selected flag becomes one Conduit transaction applied to every
    # selected revision.
    actions = [{b'type': flag, b'value': b'true'} for flag in flags]

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The optional comment is attached only to the last revision of the
        # selection.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1577
1584
1578
1585
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Preferred source: the "Differential Revision: <url>" line embedded in
    # the commit description by phabsend.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group(r'url'),
                b'id': b"D%s" % match.group(r'id'),
            }
        )

    # Fallback: a local D* tag; rebuild the URL from the configured
    # Phabricator base URL.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        base = repo.ui.config(b'phabricator', b'url')
        if not base.endswith(b'/'):
            base += b'/'
        return templateutil.hybriddict({b'url': base + tag, b'id': tag})

    return None
General Comments 0
You need to be logged in to leave comments. Login now