##// END OF EJS Templates
phabricator: add a "phabstatus" show view...
Denis Laxalde -
r44291:70060915 default
parent child Browse files
Show More
@@ -1,1653 +1,1723 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
16 changesets.
17
14 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
17
21
18 Config::
22 Config::
19
23
20 [phabricator]
24 [phabricator]
21 # Phabricator URL
25 # Phabricator URL
22 url = https://phab.example.com/
26 url = https://phab.example.com/
23
27
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
29 # callsign is "FOO".
26 callsign = FOO
30 callsign = FOO
27
31
28 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
35 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
37
34 [auth]
38 [auth]
35 example.schemes = https
39 example.schemes = https
36 example.prefix = phab.example.com
40 example.prefix = phab.example.com
37
41
38 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
44 """
41
45
42 from __future__ import absolute_import
46 from __future__ import absolute_import
43
47
44 import base64
48 import base64
45 import contextlib
49 import contextlib
46 import hashlib
50 import hashlib
47 import itertools
51 import itertools
48 import json
52 import json
49 import mimetypes
53 import mimetypes
50 import operator
54 import operator
51 import re
55 import re
52
56
53 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
58 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
57 from mercurial import (
61 from mercurial import (
58 cmdutil,
62 cmdutil,
59 context,
63 context,
60 encoding,
64 encoding,
61 error,
65 error,
62 exthelper,
66 exthelper,
67 graphmod,
63 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 logcmdutil,
64 match,
70 match,
65 mdiff,
71 mdiff,
66 obsutil,
72 obsutil,
67 parser,
73 parser,
68 patch,
74 patch,
69 phases,
75 phases,
70 pycompat,
76 pycompat,
71 scmutil,
77 scmutil,
72 smartset,
78 smartset,
73 tags,
79 tags,
74 templatefilters,
80 templatefilters,
75 templateutil,
81 templateutil,
76 url as urlmod,
82 url as urlmod,
77 util,
83 util,
78 )
84 )
79 from mercurial.utils import (
85 from mercurial.utils import (
80 procutil,
86 procutil,
81 stringutil,
87 stringutil,
82 )
88 )
89 from . import show
90
83
91
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points under the module-level names
# that Mercurial's extension loader looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Colour/effect labels applied to phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every @vcrcommand so the test suite can record and
# replay Conduit HTTP traffic (see vcrcommand below).
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139
147
140
148
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded or replayed.

    Behaves like ``@command`` but appends the hidden ``--test-vcr PATH``
    flag (from ``_VCR_FLAGS``).  When that flag is given, the ``vcr``
    library records the command's HTTP requests to PATH, or mocks them
    from PATH if the file already exists.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: two requests match when URI, method and the
        # decoded form parameters agree.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Strip real API tokens before they land in a recorded cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop session cookies so they are never persisted in cassettes.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                # vcr is an optional test-only dependency; import it with
                # demandimport disabled so its own imports resolve eagerly.
                import hgdemandimport

                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's url module so its HTTP(S)
                        # connections go through vcr's stubs.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command normally.
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/registration.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
217
225
218
226
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        # PHP's form encoding spells booleans as literal words.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Dispatch on the exact type (subclasses are treated as scalars,
        # matching the historical behaviour of this helper).
        if type(value) is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                _flatten(b'%s[%s]' % (prefix, key), item)
            else:
                _flatten(key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flat)
244
252
245
253
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look for an [auth] group whose prefix/schemes match the conduit URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        groupname, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
274
282
275
283
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before injecting the auth token so the caller's dict is untouched.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    # Conduit expects the JSON-encoded call under a "params" form field.
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured external curl: feed the form data on stdin
        # (the "-d @-" argument) and read the response from stdout.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default: builtin HTTP library, honouring [auth] credentials.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        # Convert unicode strings in the decoded JSON back to local bytes.
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    # Conduit reports failures in-band rather than via HTTP status codes.
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
319
327
320
328
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # sort_keys keeps the output deterministic for the test suite.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
344
352
345
353
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign to a PHID via Conduit.
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Cache the answer so later lookups in this process skip the round trip.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
365
373
366
374
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
375 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
368 _differentialrevisiondescre = re.compile(
376 _differentialrevisiondescre = re.compile(
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
377 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
370 )
378 )
371
379
372
380
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # No predecessor carried a D-tag (loop ran to completion):
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is authoritative, so the
                # precursor cross-check below is skipped for this node.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Local node recorded in a diff's metadata, or None when absent.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging nullid removes the stale local tag.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
466
474
467
475
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevof(ctx):
        # The commit message association takes precedence over local tags.
        found = _differentialrevisiondescre.search(ctx.description())
        if found:
            return int(found.group('id'))
        # Fall back to a "D123"-style local tag on the node itself.
        for tag in repo.nodetags(ctx.node()):
            found = _differentialrevisiontagre.match(tag)
            if found:
                return int(found.group(1))
        return None

    return {rev: _drevof(repo[rev]) for rev in revs}
497
498
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # diffui yields (chunk, label) pairs; we only care about the raw bytes.
    pieces = []
    for piece, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        pieces.append(piece)
    return b''.join(pieces)
476
507
477
508
class DiffChangeType(object):
    # Numeric codes describing how a file changed within a Differential
    # diff (names mirror Phabricator's own change-type constants).
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
487
518
488
519
class DiffFileType(object):
    # Numeric codes for a file's content kind within a Differential diff
    # (names mirror Phabricator's own file-type constants).
    TEXT = 1
    IMAGE = 2
    BINARY = 3
493
524
494
525
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names deliberately use camelCase (see the markers below)
    because they are sent to Phabricator as-is.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
508
539
509
540
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Attribute names deliberately use camelCase (see the markers below)
    because they are sent to Phabricator as-is.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))
528
559
529 def copynewmetadatatoold(self):
560 def copynewmetadatatoold(self):
530 for key in list(self.metadata.keys()):
561 for key in list(self.metadata.keys()):
531 newkey = key.replace(b'new:', b'old:')
562 newkey = key.replace(b'new:', b'old:')
532 self.metadata[newkey] = self.metadata[key]
563 self.metadata[newkey] = self.metadata[key]
533
564
534 def addoldmode(self, value):
565 def addoldmode(self, value):
535 self.oldProperties[b'unix:filemode'] = value
566 self.oldProperties[b'unix:filemode'] = value
536
567
537 def addnewmode(self, value):
568 def addnewmode(self, value):
538 self.newProperties[b'unix:filemode'] = value
569 self.newProperties[b'unix:filemode'] = value
539
570
540 def addhunk(self, hunk):
571 def addhunk(self, hunk):
541 if not isinstance(hunk, phabhunk):
572 if not isinstance(hunk, phabhunk):
542 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
573 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
543 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
574 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
544 # It's useful to include these stats since the Phab web UI shows them,
575 # It's useful to include these stats since the Phab web UI shows them,
545 # and uses them to estimate how large a change a Revision is. Also used
576 # and uses them to estimate how large a change a Revision is. Also used
546 # in email subjects for the [+++--] bit.
577 # in email subjects for the [+++--] bit.
547 self.addLines += hunk.addLines
578 self.addLines += hunk.addLines
548 self.delLines += hunk.delLines
579 self.delLines += hunk.delLines
549
580
550
581
@attr.s
class phabdiff(object):
    """A whole Differential diff; corresponds to a single commit.

    Owns one phabchange per touched file.  Attribute names are camelCase
    because they are serialized verbatim for the Conduit API.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register *change* in this diff, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
577
608
578
609
def maketext(pchange, ctx, fname):
    """Populate *pchange* with hunks for text file *fname* of changeset *ctx*."""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # Use an enormous context so the whole file shows up in the hunks.
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, lines in fhunks:
        oldstart, oldlen, newstart, newlen = ranges
        # Drop the "@@ ..." line; Phabricator wants only the hunk body.
        body = b''.join(lines[1:])
        # Re-assemble a standalone patch so diffstat can count the lines.
        statlines = list(header) + list(lines)
        _mf, _mt, added, deleted, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldstart,
                oldlen,
                newstart,
                newlen,
                body,
                added,
                deleted,
            )
        )
607
638
608
639
def uploadchunks(fctx, fphid):
    """Upload a large binary file to Phabricator as separate chunks.

    Phab requests chunking over 8MiB, and splits into 4MiB chunks.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            # Count every chunk, including ones the server already has.
            progress.increment()
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
634
665
635
666
def uploadfile(fctx):
    """Upload a binary file to Phabricator and return its file PHID.

    Raises error.Abort if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was allocated but content is missing: chunked upload.
            uploadchunks(fctx, fphid)
        else:
            # Small enough for a single-shot upload.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
671
702
672
703
def addoldbinary(pchange, fctx, originalfname):
    """Attach metadata for the previous version of a binary file.

    *pchange* describes the new version held in *fctx*; *originalfname* is
    the file's path in the first parent (pre-rename where applicable).
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents identical.  If it's left as IMAGE/BINARY the web UI might
        # try to display it, so mark it TEXT and reuse the new metadata.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
694
725
695
726
def makebinary(pchange, fctx):
    """Populate *pchange* for a binary file: upload it and record metadata."""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            # Images get their own file type so the web UI renders them.
            pchange.fileType = DiffFileType.IMAGE
708
739
709
740
# Copied from mercurial/patch.py: map hg file flags to git mode strings.
gitmode = {
    b'l': b'120000',  # symlink
    b'x': b'100755',  # executable
    b'': b'100644',  # regular file
}
712
743
713
744
def notutf8(fctx):
    """Return True if *fctx* (or its p1 content) is not valid UTF-8.

    Phabricator requires such text files to be marked as binary; a note is
    written to the ui when that happens.
    """
    try:
        fctx.data().decode('utf-8')
        # The parent's content must decode too, or the old side of the diff
        # could not be represented as text.
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
729
760
730
761
def addremoved(pdiff, ctx, removed):
    """Add one DELETE phabchange per removed file.  Shouldn't include moves."""
    for fname in removed:
        # The file only exists in the parent; take everything from there.
        fctx = ctx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[fctx.flags()])
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
743
774
744
775
def addmodified(pdiff, ctx, modified):
    """Add one phabchange per modified file to *pdiff*."""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only send mode properties when the mode actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
763
794
764
795
def addadded(pdiff, ctx, added, removed):
    """Add file adds to *pdiff*: brand-new files as well as copies/moves.

    Sources of moves are removed from *removed* in place, so a subsequent
    addremoved() call won't see them.
    """
    # Track files already recorded as moved/copied, so additional copies of
    # the same source can be marked as such (moves get dropped from removed).
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]
        renamed = fctx.renamed()

        if not renamed:
            # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD
        else:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source vanished in this changeset: this is a move.
                movedchanges[originalfname] = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A moved source copied again becomes a multicopy.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname in copiedchanges:
                    origpchange = copiedchanges[originalfname]
                else:
                    origpchange = phabchange(
                        currentPath=originalfname,
                        type=DiffChangeType.COPY_AWAY,
                    )
                    copiedchanges[originalfname] = origpchange
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the away-side records after all adds have been examined.
    for copiedchange in copiedchanges.values():
        pdiff.addchange(copiedchange)
    for movedchange in movedchanges.values():
        pdiff.addchange(movedchange)
828
859
829
860
def creatediff(ctx):
    """Create a Differential Diff for changeset *ctx* and return it.

    Raises error.Abort if the Conduit call yields nothing.
    """
    repo = ctx.repo()
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    repophid = getrepophid(repo)
    if repophid:
        pdiff.repositoryPHID = repophid

    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
855
886
856
887
def writediffproperties(ctx, diff):
    """Attach hg metadata to *diff* so patches can be re-applied losslessly.

    Two properties are written: b'hg:meta' (commit-level metadata) and
    b'local:commits' (per-node details keyed by hex node).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    ui = ctx.repo().ui

    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }

    # hg:meta must be written first to keep the original call order.
    for name, data in [(b'hg:meta', hgmeta), (b'local:commits', localcommits)]:
        callconduit(
            ui,
            b'differential.setdiffproperty',
            {
                b'diff_id': diffid,
                b'name': name,
                b'data': templatefilters.json(data),
            },
        )
893
924
894
925
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """Create or update a Differential Revision for changeset *ctx*.

    If *revid* is None a new Differential Revision is created; otherwise
    *revid* is updated.  If *parentrevphid* is not None it is set as a
    dependency.

    If *oldnode* is not None, check whether the patch content (without
    commit message and metadata) changed before creating another diff;
    *olddiff* must then be the previous diff to fall back on.

    If *actions* is not None, they are appended to the transaction.

    Returns a (revision, diff) pair.  Raises error.Abort on failure.
    """
    repo = ctx.repo()
    neednewdiff = True
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)

    transactions = []
    if not neednewdiff:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)
    else:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions.extend(actions)

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for field, value in info[b'fields'].items():
        if field in (b'title', b'summary', b'testPlan'):
            transactions.append({b'type': field, b'value': value})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
964
995
965
996
def userphids(repo, names):
    """Resolve a list of user names to their PHIDs.

    Raises error.Abort listing any names the server does not know.
    """
    names = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
981
1012
982
1013
983 @vcrcommand(
1014 @vcrcommand(
984 b'phabsend',
1015 b'phabsend',
985 [
1016 [
986 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1017 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
987 (b'', b'amend', True, _(b'update commit messages')),
1018 (b'', b'amend', True, _(b'update commit messages')),
988 (b'', b'reviewer', [], _(b'specify reviewers')),
1019 (b'', b'reviewer', [], _(b'specify reviewers')),
989 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1020 (b'', b'blocker', [], _(b'specify blocking reviewers')),
990 (
1021 (
991 b'm',
1022 b'm',
992 b'comment',
1023 b'comment',
993 b'',
1024 b'',
994 _(b'add a comment to Revisions with new/updated Diffs'),
1025 _(b'add a comment to Revisions with new/updated Diffs'),
995 ),
1026 ),
996 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1027 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
997 ],
1028 ],
998 _(b'REV [OPTIONS]'),
1029 _(b'REV [OPTIONS]'),
999 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1030 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1000 )
1031 )
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        # --amend rewrites changesets; refuse if an interrupted operation
        # (rebase, histedit, ...) is in progress.
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Either the config or the command-line flag can request confirmation.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Resolve reviewer/blocker names to PHIDs once, up front; the same
    # "reviewers.add" action is attached to every revision sent.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        # Blocking reviewers use the "blocking(PHID)" marker syntax.
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One summary line per changeset: "D123 - created - 1:abcdef: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Remap parents through already-rewritten ancestors so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1190
1221
1191
1222
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output. A sortdict preserves insertion order so
# readpatch() emits the headers deterministically.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1203
1234
1204
1235
def _confirmbeforesend(repo, revs, oldmap):
    """Show the changesets about to be sent and ask the user to confirm.

    Returns True when the user answers yes, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # A known Differential Revision id means this send is an update;
        # otherwise a brand-new revision would be created.
        if drevid:
            drevlabel = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevlabel = ui.label(_(b'NEW'), b'phabricator.drev')
        nodelabel = ui.label(bytes(ctx), b'phabricator.node')
        desclabel = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevlabel, nodelabel, desclabel))

    # promptchoice() returns the 0-based index of the chosen answer, so
    # 0 (&Yes) maps to True and 1 (&No) maps to False.
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1232
1263
1233
1264
# The Differential Revision statuses the drev query language accepts as
# symbols, in the normalized form produced by _getstatusname() (lower-cased,
# spaces removed).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1241
1272
1242
1273
1243 def _getstatusname(drev):
1274 def _getstatusname(drev):
1244 """get normalized status name from a Differential Revision"""
1275 """get normalized status name from a Differential Revision"""
1245 return drev[b'statusName'].replace(b' ', b'').lower()
1276 return drev[b'statusName'].replace(b' ', b'').lower()
1246
1277
1247
1278
1248 # Small language to specify differential revisions. Support symbols: (), :X,
1279 # Small language to specify differential revisions. Support symbols: (), :X,
1249 # +, and -.
1280 # +, and -.
1250
1281
# Grammar of the small drev-spec language, in the table format consumed by
# mercurial's generic parser.parser(): higher binding strength binds tighter.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1262
1293
1263
1294
def _tokenize(text):
    """Yield (token-type, value, position) triples for the drev language."""
    delimiters = b'():+-& '
    buf = memoryview(text)  # slicing a memoryview copies nothing
    i = 0
    end = len(text)
    while i < end:
        # Greedily collect the run of non-delimiter bytes starting at i.
        word = b''.join(
            itertools.takewhile(
                lambda ch: ch not in delimiters, pycompat.iterbytestr(buf[i:])
            )
        )
        if word:
            yield (b'symbol', word, i)
            i += len(word)
        else:
            # A delimiter byte: spaces are dropped, everything else is
            # emitted as its own token type.
            ch = text[i : i + 1]
            if ch != b' ':
                yield (ch, None, i)
            i += 1
    yield (b'end', None, i)
1283
1314
1284
1315
def _parse(text):
    """Parse a drev spec into a tree; abort on trailing garbage."""
    ast, consumed = parser.parser(_elements).parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return ast
1290
1321
1291
1322
1292 def _parsedrev(symbol):
1323 def _parsedrev(symbol):
1293 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1324 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1294 if symbol.startswith(b'D') and symbol[1:].isdigit():
1325 if symbol.startswith(b'D') and symbol[1:].isdigit():
1295 return int(symbol[1:])
1326 return int(symbol[1:])
1296 if symbol.isdigit():
1327 if symbol.isdigit():
1297 return int(symbol)
1328 return int(symbol)
1298
1329
1299
1330
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles |= subsingles
        # Everything under an "ancestors" (":") operator needs its whole
        # dependency stack fetched, so its singles become ancestors too.
        ancestors |= subsingles
        ancestors |= subancestors
    else:
        # Binary operators and groups: just merge what the children need.
        for child in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(child)
            singles |= subsingles
            ancestors |= subancestors
    return singles, ancestors
1320
1351
1321
1352
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        # Serve from the prefetch cache when possible; a cache miss costs a
        # conduit round-trip.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            # Cache under both the PHID and the numeric id so either kind of
            # lookup hits.
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Walk "phabricator:depends-on" links from the given tops down to the
        # bottom of the stack, then reverse so the result is bottom-first.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For each ancestor query,
    # speculatively fetch a window of batchsize ids below it, betting that a
    # stack's revisions have nearby ids; getstack() falls back to one-by-one
    # fetches for anything the window missed.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status symbol selects, among the prefetched ids, those
                # whose normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set operations map directly onto smartset's &, + and -.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1444
1475
1445
1476
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    # Assemble the paragraphs in order, dropping any that are empty.
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        parts.append(b'Test Plan:\n%s' % plan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(p for p in parts if p)
1459
1490
1460
1491
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # "local:commits" maps node hash -> commit info dict. Pick the
            # commit with the lexicographically smallest node so the choice
            # is deterministic. The previous code sorted the info dicts
            # themselves, which raises TypeError on Python 3 whenever more
            # than one commit is present, because dicts are not orderable.
            localcommits = props[b'local:commits']
            commit = localcommits[min(localcommits)]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc only records a UNIX timestamp; time zone is lost.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything the commit info lacked.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1527
1558
1528
1559
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Look up the latest diff of every drev in one conduit round-trip so
    # their hg:meta properties are available below.
    latestdiffids = sorted(
        {max(int(v) for v in drev[b'diffs']) for drev in drevs}
    )
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': latestdiffids})

    # Emit one importable patch per drev.
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Write "hg import"-compatible headers, preserving whatever metadata
        # the diff's hg:meta property carried. See patchheadermap and extract
        # in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for key, label in _metanamemap.items():
            if key in meta:
                headerlines.append(b'# %s %s\n' % (label, meta[key]))
        header = b''.join(headerlines)

        write(b'%s%s\n%s' % (header, desc, body))
1560
1591
1561
1592
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for the ":" (ancestor stack) revspec operator.
    fullspec = b':(%s)' % spec if opts.get(b'stack') else spec
    readpatch(repo, querydrev(repo, fullspec), ui.write)
1592
1623
1593
1624
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Collect the status-changing transactions requested on the command line;
    # the four status flags are mutually exclusive.
    transactions = [
        {b'type': flag, b'value': True}
        for flag in b'accept reject abandon reclaim'.split()
        if opts.get(flag)
    ]
    if len(transactions) > 1:
        names = b', '.join(t[b'type'] for t in transactions)
        raise error.Abort(_(b'%s cannot be used together') % names)

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The optional comment is attached to the last revision only.
        if idx == lastidx and opts.get(b'comment'):
            transactions.append(
                {b'type': b'comment', b'value': opts[b'comment']}
            )
        if transactions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': transactions,
                },
            )
1630
1661
1631
1662
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the "Differential Revision: <url>" line that phabsend
    # leaves in the commit message.
    descmatch = _differentialrevisiondescre.search(ctx.description())
    if descmatch:
        return templateutil.hybriddict(
            {
                b'url': descmatch.group('url'),
                b'id': b"D%s" % descmatch.group('id'),
            }
        )
    # Fall back to a local "D123"-style tag pointing at this node; rebuild
    # the URL from the configured Phabricator base URL.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        baseurl = repo.ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict({b'url': baseurl + tag, b'id': tag,})
    return None
1685
1686
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    # All unfinished (underway) changesets, in topological order.
    revs = repo.revs('sort(_underway(), topo)')
    # Map each rev to its Differential Revision id (None when no D-number
    # can be found for the changeset).
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # Skip the conduit round-trip entirely when nothing maps to a
    # Differential; querying with an empty id list is pointless.
    if drevids:
        drevs = callconduit(
            ui, b'differential.query', {b'ids': list(drevids)}
        )
    else:
        drevs = []
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Extension hook for the displayer: print the Differential URI and
        # status under each changeset.
        drev = drevsbyrev[ctx.rev()]
        ui.write(b"\n%(uri)s %(statusName)s\n" % drev)

    # Only graph the revs we actually have status information for.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now