##// END OF EJS Templates
phabricator: add addadded function...
Ian Moody -
r43552:d5d1edf6 default
parent child Browse files
Show More
@@ -1,1533 +1,1598 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 # be specifying the version(s) of Mercurial they are tested with, or
86 # be specifying the version(s) of Mercurial they are tested with, or
87 # leave the attribute unspecified.
87 # leave the attribute unspecified.
88 testedwith = b'ships-with-hg-core'
88 testedwith = b'ships-with-hg-core'
89
89
90 eh = exthelper.exthelper()
90 eh = exthelper.exthelper()
91
91
92 cmdtable = eh.cmdtable
92 cmdtable = eh.cmdtable
93 command = eh.command
93 command = eh.command
94 configtable = eh.configtable
94 configtable = eh.configtable
95 templatekeyword = eh.templatekeyword
95 templatekeyword = eh.templatekeyword
96
96
97 # developer config: phabricator.batchsize
97 # developer config: phabricator.batchsize
98 eh.configitem(
98 eh.configitem(
99 b'phabricator', b'batchsize', default=12,
99 b'phabricator', b'batchsize', default=12,
100 )
100 )
101 eh.configitem(
101 eh.configitem(
102 b'phabricator', b'callsign', default=None,
102 b'phabricator', b'callsign', default=None,
103 )
103 )
104 eh.configitem(
104 eh.configitem(
105 b'phabricator', b'curlcmd', default=None,
105 b'phabricator', b'curlcmd', default=None,
106 )
106 )
107 # developer config: phabricator.repophid
107 # developer config: phabricator.repophid
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'repophid', default=None,
109 b'phabricator', b'repophid', default=None,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'url', default=None,
112 b'phabricator', b'url', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabsend', b'confirm', default=False,
115 b'phabsend', b'confirm', default=False,
116 )
116 )
117
117
118 colortable = {
118 colortable = {
119 b'phabricator.action.created': b'green',
119 b'phabricator.action.created': b'green',
120 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.skipped': b'magenta',
121 b'phabricator.action.updated': b'magenta',
121 b'phabricator.action.updated': b'magenta',
122 b'phabricator.desc': b'',
122 b'phabricator.desc': b'',
123 b'phabricator.drev': b'bold',
123 b'phabricator.drev': b'bold',
124 b'phabricator.node': b'',
124 b'phabricator.node': b'',
125 }
125 }
126
126
127 _VCR_FLAGS = [
127 _VCR_FLAGS = [
128 (
128 (
129 b'',
129 b'',
130 b'test-vcr',
130 b'test-vcr',
131 b'',
131 b'',
132 _(
132 _(
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 b', otherwise will mock all http requests using the specified vcr file.'
134 b', otherwise will mock all http requests using the specified vcr file.'
135 b' (ADVANCED)'
135 b' (ADVANCED)'
136 ),
136 ),
137 ),
137 ),
138 ]
138 ]
139
139
140
140
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
142 fullflags = flags + _VCR_FLAGS
142 fullflags = flags + _VCR_FLAGS
143
143
144 def hgmatcher(r1, r2):
144 def hgmatcher(r1, r2):
145 if r1.uri != r2.uri or r1.method != r2.method:
145 if r1.uri != r2.uri or r1.method != r2.method:
146 return False
146 return False
147 r1params = r1.body.split(b'&')
147 r1params = r1.body.split(b'&')
148 r2params = r2.body.split(b'&')
148 r2params = r2.body.split(b'&')
149 return set(r1params) == set(r2params)
149 return set(r1params) == set(r2params)
150
150
151 def sanitiserequest(request):
151 def sanitiserequest(request):
152 request.body = re.sub(
152 request.body = re.sub(
153 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
153 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
154 )
154 )
155 return request
155 return request
156
156
157 def sanitiseresponse(response):
157 def sanitiseresponse(response):
158 if r'set-cookie' in response[r'headers']:
158 if r'set-cookie' in response[r'headers']:
159 del response[r'headers'][r'set-cookie']
159 del response[r'headers'][r'set-cookie']
160 return response
160 return response
161
161
162 def decorate(fn):
162 def decorate(fn):
163 def inner(*args, **kwargs):
163 def inner(*args, **kwargs):
164 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
164 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
165 if cassette:
165 if cassette:
166 import hgdemandimport
166 import hgdemandimport
167
167
168 with hgdemandimport.deactivated():
168 with hgdemandimport.deactivated():
169 import vcr as vcrmod
169 import vcr as vcrmod
170 import vcr.stubs as stubs
170 import vcr.stubs as stubs
171
171
172 vcr = vcrmod.VCR(
172 vcr = vcrmod.VCR(
173 serializer=r'json',
173 serializer=r'json',
174 before_record_request=sanitiserequest,
174 before_record_request=sanitiserequest,
175 before_record_response=sanitiseresponse,
175 before_record_response=sanitiseresponse,
176 custom_patches=[
176 custom_patches=[
177 (
177 (
178 urlmod,
178 urlmod,
179 r'httpconnection',
179 r'httpconnection',
180 stubs.VCRHTTPConnection,
180 stubs.VCRHTTPConnection,
181 ),
181 ),
182 (
182 (
183 urlmod,
183 urlmod,
184 r'httpsconnection',
184 r'httpsconnection',
185 stubs.VCRHTTPSConnection,
185 stubs.VCRHTTPSConnection,
186 ),
186 ),
187 ],
187 ],
188 )
188 )
189 vcr.register_matcher(r'hgmatcher', hgmatcher)
189 vcr.register_matcher(r'hgmatcher', hgmatcher)
190 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
190 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
191 return fn(*args, **kwargs)
191 return fn(*args, **kwargs)
192 return fn(*args, **kwargs)
192 return fn(*args, **kwargs)
193
193
194 inner.__name__ = fn.__name__
194 inner.__name__ = fn.__name__
195 inner.__doc__ = fn.__doc__
195 inner.__doc__ = fn.__doc__
196 return command(
196 return command(
197 name,
197 name,
198 fullflags,
198 fullflags,
199 spec,
199 spec,
200 helpcategory=helpcategory,
200 helpcategory=helpcategory,
201 optionalrepo=optionalrepo,
201 optionalrepo=optionalrepo,
202 )(inner)
202 )(inner)
203
203
204 return decorate
204 return decorate
205
205
206
206
207 def urlencodenested(params):
207 def urlencodenested(params):
208 """like urlencode, but works with nested parameters.
208 """like urlencode, but works with nested parameters.
209
209
210 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
210 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
211 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
211 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
212 urlencode. Note: the encoding is consistent with PHP's http_build_query.
212 urlencode. Note: the encoding is consistent with PHP's http_build_query.
213 """
213 """
214 flatparams = util.sortdict()
214 flatparams = util.sortdict()
215
215
216 def process(prefix, obj):
216 def process(prefix, obj):
217 if isinstance(obj, bool):
217 if isinstance(obj, bool):
218 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
218 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
219 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
219 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
220 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
220 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
221 if items is None:
221 if items is None:
222 flatparams[prefix] = obj
222 flatparams[prefix] = obj
223 else:
223 else:
224 for k, v in items(obj):
224 for k, v in items(obj):
225 if prefix:
225 if prefix:
226 process(b'%s[%s]' % (prefix, k), v)
226 process(b'%s[%s]' % (prefix, k), v)
227 else:
227 else:
228 process(k, v)
228 process(k, v)
229
229
230 process(b'', params)
230 process(b'', params)
231 return util.urlreq.urlencode(flatparams)
231 return util.urlreq.urlencode(flatparams)
232
232
233
233
234 def readurltoken(ui):
234 def readurltoken(ui):
235 """return conduit url, token and make sure they exist
235 """return conduit url, token and make sure they exist
236
236
237 Currently read from [auth] config section. In the future, it might
237 Currently read from [auth] config section. In the future, it might
238 make sense to read from .arcconfig and .arcrc as well.
238 make sense to read from .arcconfig and .arcrc as well.
239 """
239 """
240 url = ui.config(b'phabricator', b'url')
240 url = ui.config(b'phabricator', b'url')
241 if not url:
241 if not url:
242 raise error.Abort(
242 raise error.Abort(
243 _(b'config %s.%s is required') % (b'phabricator', b'url')
243 _(b'config %s.%s is required') % (b'phabricator', b'url')
244 )
244 )
245
245
246 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
246 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
247 token = None
247 token = None
248
248
249 if res:
249 if res:
250 group, auth = res
250 group, auth = res
251
251
252 ui.debug(b"using auth.%s.* for authentication\n" % group)
252 ui.debug(b"using auth.%s.* for authentication\n" % group)
253
253
254 token = auth.get(b'phabtoken')
254 token = auth.get(b'phabtoken')
255
255
256 if not token:
256 if not token:
257 raise error.Abort(
257 raise error.Abort(
258 _(b'Can\'t find conduit token associated to %s') % (url,)
258 _(b'Can\'t find conduit token associated to %s') % (url,)
259 )
259 )
260
260
261 return url, token
261 return url, token
262
262
263
263
264 def callconduit(ui, name, params):
264 def callconduit(ui, name, params):
265 """call Conduit API, params is a dict. return json.loads result, or None"""
265 """call Conduit API, params is a dict. return json.loads result, or None"""
266 host, token = readurltoken(ui)
266 host, token = readurltoken(ui)
267 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
267 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
268 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
268 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
269 params = params.copy()
269 params = params.copy()
270 params[b'api.token'] = token
270 params[b'api.token'] = token
271 data = urlencodenested(params)
271 data = urlencodenested(params)
272 curlcmd = ui.config(b'phabricator', b'curlcmd')
272 curlcmd = ui.config(b'phabricator', b'curlcmd')
273 if curlcmd:
273 if curlcmd:
274 sin, sout = procutil.popen2(
274 sin, sout = procutil.popen2(
275 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
275 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
276 )
276 )
277 sin.write(data)
277 sin.write(data)
278 sin.close()
278 sin.close()
279 body = sout.read()
279 body = sout.read()
280 else:
280 else:
281 urlopener = urlmod.opener(ui, authinfo)
281 urlopener = urlmod.opener(ui, authinfo)
282 request = util.urlreq.request(pycompat.strurl(url), data=data)
282 request = util.urlreq.request(pycompat.strurl(url), data=data)
283 with contextlib.closing(urlopener.open(request)) as rsp:
283 with contextlib.closing(urlopener.open(request)) as rsp:
284 body = rsp.read()
284 body = rsp.read()
285 ui.debug(b'Conduit Response: %s\n' % body)
285 ui.debug(b'Conduit Response: %s\n' % body)
286 parsed = pycompat.rapply(
286 parsed = pycompat.rapply(
287 lambda x: encoding.unitolocal(x)
287 lambda x: encoding.unitolocal(x)
288 if isinstance(x, pycompat.unicode)
288 if isinstance(x, pycompat.unicode)
289 else x,
289 else x,
290 # json.loads only accepts bytes from py3.6+
290 # json.loads only accepts bytes from py3.6+
291 json.loads(encoding.unifromlocal(body)),
291 json.loads(encoding.unifromlocal(body)),
292 )
292 )
293 if parsed.get(b'error_code'):
293 if parsed.get(b'error_code'):
294 msg = _(b'Conduit Error (%s): %s') % (
294 msg = _(b'Conduit Error (%s): %s') % (
295 parsed[b'error_code'],
295 parsed[b'error_code'],
296 parsed[b'error_info'],
296 parsed[b'error_info'],
297 )
297 )
298 raise error.Abort(msg)
298 raise error.Abort(msg)
299 return parsed[b'result']
299 return parsed[b'result']
300
300
301
301
302 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
302 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
303 def debugcallconduit(ui, repo, name):
303 def debugcallconduit(ui, repo, name):
304 """call Conduit API
304 """call Conduit API
305
305
306 Call parameters are read from stdin as a JSON blob. Result will be written
306 Call parameters are read from stdin as a JSON blob. Result will be written
307 to stdout as a JSON blob.
307 to stdout as a JSON blob.
308 """
308 """
309 # json.loads only accepts bytes from 3.6+
309 # json.loads only accepts bytes from 3.6+
310 rawparams = encoding.unifromlocal(ui.fin.read())
310 rawparams = encoding.unifromlocal(ui.fin.read())
311 # json.loads only returns unicode strings
311 # json.loads only returns unicode strings
312 params = pycompat.rapply(
312 params = pycompat.rapply(
313 lambda x: encoding.unitolocal(x)
313 lambda x: encoding.unitolocal(x)
314 if isinstance(x, pycompat.unicode)
314 if isinstance(x, pycompat.unicode)
315 else x,
315 else x,
316 json.loads(rawparams),
316 json.loads(rawparams),
317 )
317 )
318 # json.dumps only accepts unicode strings
318 # json.dumps only accepts unicode strings
319 result = pycompat.rapply(
319 result = pycompat.rapply(
320 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
320 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
321 callconduit(ui, name, params),
321 callconduit(ui, name, params),
322 )
322 )
323 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
323 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
324 ui.write(b'%s\n' % encoding.unitolocal(s))
324 ui.write(b'%s\n' % encoding.unitolocal(s))
325
325
326
326
327 def getrepophid(repo):
327 def getrepophid(repo):
328 """given callsign, return repository PHID or None"""
328 """given callsign, return repository PHID or None"""
329 # developer config: phabricator.repophid
329 # developer config: phabricator.repophid
330 repophid = repo.ui.config(b'phabricator', b'repophid')
330 repophid = repo.ui.config(b'phabricator', b'repophid')
331 if repophid:
331 if repophid:
332 return repophid
332 return repophid
333 callsign = repo.ui.config(b'phabricator', b'callsign')
333 callsign = repo.ui.config(b'phabricator', b'callsign')
334 if not callsign:
334 if not callsign:
335 return None
335 return None
336 query = callconduit(
336 query = callconduit(
337 repo.ui,
337 repo.ui,
338 b'diffusion.repository.search',
338 b'diffusion.repository.search',
339 {b'constraints': {b'callsigns': [callsign]}},
339 {b'constraints': {b'callsigns': [callsign]}},
340 )
340 )
341 if len(query[b'data']) == 0:
341 if len(query[b'data']) == 0:
342 return None
342 return None
343 repophid = query[b'data'][0][b'phid']
343 repophid = query[b'data'][0][b'phid']
344 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
344 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
345 return repophid
345 return repophid
346
346
347
347
348 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
348 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
349 _differentialrevisiondescre = re.compile(
349 _differentialrevisiondescre = re.compile(
350 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
350 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
351 )
351 )
352
352
353
353
354 def getoldnodedrevmap(repo, nodelist):
354 def getoldnodedrevmap(repo, nodelist):
355 """find previous nodes that has been sent to Phabricator
355 """find previous nodes that has been sent to Phabricator
356
356
357 return {node: (oldnode, Differential diff, Differential Revision ID)}
357 return {node: (oldnode, Differential diff, Differential Revision ID)}
358 for node in nodelist with known previous sent versions, or associated
358 for node in nodelist with known previous sent versions, or associated
359 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
359 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
360 be ``None``.
360 be ``None``.
361
361
362 Examines commit messages like "Differential Revision:" to get the
362 Examines commit messages like "Differential Revision:" to get the
363 association information.
363 association information.
364
364
365 If such commit message line is not found, examines all precursors and their
365 If such commit message line is not found, examines all precursors and their
366 tags. Tags with format like "D1234" are considered a match and the node
366 tags. Tags with format like "D1234" are considered a match and the node
367 with that tag, and the number after "D" (ex. 1234) will be returned.
367 with that tag, and the number after "D" (ex. 1234) will be returned.
368
368
369 The ``old node``, if not None, is guaranteed to be the last diff of
369 The ``old node``, if not None, is guaranteed to be the last diff of
370 corresponding Differential Revision, and exist in the repo.
370 corresponding Differential Revision, and exist in the repo.
371 """
371 """
372 unfi = repo.unfiltered()
372 unfi = repo.unfiltered()
373 nodemap = unfi.changelog.nodemap
373 nodemap = unfi.changelog.nodemap
374
374
375 result = {} # {node: (oldnode?, lastdiff?, drev)}
375 result = {} # {node: (oldnode?, lastdiff?, drev)}
376 toconfirm = {} # {node: (force, {precnode}, drev)}
376 toconfirm = {} # {node: (force, {precnode}, drev)}
377 for node in nodelist:
377 for node in nodelist:
378 ctx = unfi[node]
378 ctx = unfi[node]
379 # For tags like "D123", put them into "toconfirm" to verify later
379 # For tags like "D123", put them into "toconfirm" to verify later
380 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
380 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
381 for n in precnodes:
381 for n in precnodes:
382 if n in nodemap:
382 if n in nodemap:
383 for tag in unfi.nodetags(n):
383 for tag in unfi.nodetags(n):
384 m = _differentialrevisiontagre.match(tag)
384 m = _differentialrevisiontagre.match(tag)
385 if m:
385 if m:
386 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
386 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
387 continue
387 continue
388
388
389 # Check commit message
389 # Check commit message
390 m = _differentialrevisiondescre.search(ctx.description())
390 m = _differentialrevisiondescre.search(ctx.description())
391 if m:
391 if m:
392 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
392 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
393
393
394 # Double check if tags are genuine by collecting all old nodes from
394 # Double check if tags are genuine by collecting all old nodes from
395 # Phabricator, and expect precursors overlap with it.
395 # Phabricator, and expect precursors overlap with it.
396 if toconfirm:
396 if toconfirm:
397 drevs = [drev for force, precs, drev in toconfirm.values()]
397 drevs = [drev for force, precs, drev in toconfirm.values()]
398 alldiffs = callconduit(
398 alldiffs = callconduit(
399 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
399 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
400 )
400 )
401 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
401 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
402 for newnode, (force, precset, drev) in toconfirm.items():
402 for newnode, (force, precset, drev) in toconfirm.items():
403 diffs = [
403 diffs = [
404 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
404 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
405 ]
405 ]
406
406
407 # "precursors" as known by Phabricator
407 # "precursors" as known by Phabricator
408 phprecset = set(getnode(d) for d in diffs)
408 phprecset = set(getnode(d) for d in diffs)
409
409
410 # Ignore if precursors (Phabricator and local repo) do not overlap,
410 # Ignore if precursors (Phabricator and local repo) do not overlap,
411 # and force is not set (when commit message says nothing)
411 # and force is not set (when commit message says nothing)
412 if not force and not bool(phprecset & precset):
412 if not force and not bool(phprecset & precset):
413 tagname = b'D%d' % drev
413 tagname = b'D%d' % drev
414 tags.tag(
414 tags.tag(
415 repo,
415 repo,
416 tagname,
416 tagname,
417 nullid,
417 nullid,
418 message=None,
418 message=None,
419 user=None,
419 user=None,
420 date=None,
420 date=None,
421 local=True,
421 local=True,
422 )
422 )
423 unfi.ui.warn(
423 unfi.ui.warn(
424 _(
424 _(
425 b'D%s: local tag removed - does not match '
425 b'D%s: local tag removed - does not match '
426 b'Differential history\n'
426 b'Differential history\n'
427 )
427 )
428 % drev
428 % drev
429 )
429 )
430 continue
430 continue
431
431
432 # Find the last node using Phabricator metadata, and make sure it
432 # Find the last node using Phabricator metadata, and make sure it
433 # exists in the repo
433 # exists in the repo
434 oldnode = lastdiff = None
434 oldnode = lastdiff = None
435 if diffs:
435 if diffs:
436 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
436 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
437 oldnode = getnode(lastdiff)
437 oldnode = getnode(lastdiff)
438 if oldnode and oldnode not in nodemap:
438 if oldnode and oldnode not in nodemap:
439 oldnode = None
439 oldnode = None
440
440
441 result[newnode] = (oldnode, lastdiff, drev)
441 result[newnode] = (oldnode, lastdiff, drev)
442
442
443 return result
443 return result
444
444
445
445
446 def getdiff(ctx, diffopts):
446 def getdiff(ctx, diffopts):
447 """plain-text diff without header (user, commit message, etc)"""
447 """plain-text diff without header (user, commit message, etc)"""
448 output = util.stringio()
448 output = util.stringio()
449 for chunk, _label in patch.diffui(
449 for chunk, _label in patch.diffui(
450 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
450 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
451 ):
451 ):
452 output.write(chunk)
452 output.write(chunk)
453 return output.getvalue()
453 return output.getvalue()
454
454
455
455
456 class DiffChangeType(object):
456 class DiffChangeType(object):
457 ADD = 1
457 ADD = 1
458 CHANGE = 2
458 CHANGE = 2
459 DELETE = 3
459 DELETE = 3
460 MOVE_AWAY = 4
460 MOVE_AWAY = 4
461 COPY_AWAY = 5
461 COPY_AWAY = 5
462 MOVE_HERE = 6
462 MOVE_HERE = 6
463 COPY_HERE = 7
463 COPY_HERE = 7
464 MULTICOPY = 8
464 MULTICOPY = 8
465
465
466
466
467 class DiffFileType(object):
467 class DiffFileType(object):
468 TEXT = 1
468 TEXT = 1
469 IMAGE = 2
469 IMAGE = 2
470 BINARY = 3
470 BINARY = 3
471
471
472
472
473 @attr.s
473 @attr.s
474 class phabhunk(dict):
474 class phabhunk(dict):
475 """Represents a Differential hunk, which is owned by a Differential change
475 """Represents a Differential hunk, which is owned by a Differential change
476 """
476 """
477
477
478 oldOffset = attr.ib(default=0) # camelcase-required
478 oldOffset = attr.ib(default=0) # camelcase-required
479 oldLength = attr.ib(default=0) # camelcase-required
479 oldLength = attr.ib(default=0) # camelcase-required
480 newOffset = attr.ib(default=0) # camelcase-required
480 newOffset = attr.ib(default=0) # camelcase-required
481 newLength = attr.ib(default=0) # camelcase-required
481 newLength = attr.ib(default=0) # camelcase-required
482 corpus = attr.ib(default='')
482 corpus = attr.ib(default='')
483 # These get added to the phabchange's equivalents
483 # These get added to the phabchange's equivalents
484 addLines = attr.ib(default=0) # camelcase-required
484 addLines = attr.ib(default=0) # camelcase-required
485 delLines = attr.ib(default=0) # camelcase-required
485 delLines = attr.ib(default=0) # camelcase-required
486
486
487
487
488 @attr.s
488 @attr.s
489 class phabchange(object):
489 class phabchange(object):
490 """Represents a Differential change, owns Differential hunks and owned by a
490 """Represents a Differential change, owns Differential hunks and owned by a
491 Differential diff. Each one represents one file in a diff.
491 Differential diff. Each one represents one file in a diff.
492 """
492 """
493
493
494 currentPath = attr.ib(default=None) # camelcase-required
494 currentPath = attr.ib(default=None) # camelcase-required
495 oldPath = attr.ib(default=None) # camelcase-required
495 oldPath = attr.ib(default=None) # camelcase-required
496 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
496 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
497 metadata = attr.ib(default=attr.Factory(dict))
497 metadata = attr.ib(default=attr.Factory(dict))
498 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
498 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
499 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
499 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
500 type = attr.ib(default=DiffChangeType.CHANGE)
500 type = attr.ib(default=DiffChangeType.CHANGE)
501 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
501 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
502 commitHash = attr.ib(default=None) # camelcase-required
502 commitHash = attr.ib(default=None) # camelcase-required
503 addLines = attr.ib(default=0) # camelcase-required
503 addLines = attr.ib(default=0) # camelcase-required
504 delLines = attr.ib(default=0) # camelcase-required
504 delLines = attr.ib(default=0) # camelcase-required
505 hunks = attr.ib(default=attr.Factory(list))
505 hunks = attr.ib(default=attr.Factory(list))
506
506
507 def copynewmetadatatoold(self):
507 def copynewmetadatatoold(self):
508 for key in list(self.metadata.keys()):
508 for key in list(self.metadata.keys()):
509 newkey = key.replace(b'new:', b'old:')
509 newkey = key.replace(b'new:', b'old:')
510 self.metadata[newkey] = self.metadata[key]
510 self.metadata[newkey] = self.metadata[key]
511
511
512 def addoldmode(self, value):
512 def addoldmode(self, value):
513 self.oldProperties[b'unix:filemode'] = value
513 self.oldProperties[b'unix:filemode'] = value
514
514
515 def addnewmode(self, value):
515 def addnewmode(self, value):
516 self.newProperties[b'unix:filemode'] = value
516 self.newProperties[b'unix:filemode'] = value
517
517
518 def addhunk(self, hunk):
518 def addhunk(self, hunk):
519 if not isinstance(hunk, phabhunk):
519 if not isinstance(hunk, phabhunk):
520 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
520 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
521 self.hunks.append(hunk)
521 self.hunks.append(hunk)
522 # It's useful to include these stats since the Phab web UI shows them,
522 # It's useful to include these stats since the Phab web UI shows them,
523 # and uses them to estimate how large a change a Revision is. Also used
523 # and uses them to estimate how large a change a Revision is. Also used
524 # in email subjects for the [+++--] bit.
524 # in email subjects for the [+++--] bit.
525 self.addLines += hunk.addLines
525 self.addLines += hunk.addLines
526 self.delLines += hunk.delLines
526 self.delLines += hunk.delLines
527
527
528
528
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register *change* (a phabchange) keyed by its current path.

        Raises error.Abort for anything that is not a phabchange.
        """
        if isinstance(change, phabchange):
            self.changes[change.currentPath] = change
        else:
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
553
553
554
554
def maketext(pchange, ctx, fname):
    """Fill *pchange* with hunks for the text file *fname* in *ctx*.

    Diffs fname against ctx.p1() with effectively unlimited context
    (32767 lines) and appends one phabhunk per diff hunk to pchange.
    """
    repo = ctx.repo()
    matcher = match.exact([fname])
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, hunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=opts)
    )

    for hunk in hunks:
        (oldOffset, oldLength, newOffset, newLength), lines = hunk
        # Drop the leading "@@ ..." marker: Phabricator only wants the body.
        corpus = b''.join(lines[1:])
        # Run diffstat over header + hunk to count added/removed lines.
        stathunk = list(header) + list(lines)
        _mf, _mt, added, deleted, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(stathunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                added,
                deleted,
            )
        )
583
583
584
584
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    Queries the server's chunk table for *fphid* and uploads only the
    chunks not already marked complete, base64-encoding each slice.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    # fctx.data() materializes the entire file content; fetch it at most
    # once instead of once per incomplete chunk (and not at all when the
    # server already has every chunk).
    data = None
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            continue
        if data is None:
            data = fctx.data()
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(data[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
611
611
612
612
def uploadfile(fctx):
    """Upload a binary file to Phabricator and return its file PHID.

    file.allocate tells us whether an upload is needed at all (the server
    may already hold identical content, matched by SHA-256) and whether it
    must be chunked; small files go up in a single file.upload call, large
    ones via uploadchunks(). Aborts if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A pre-allocated PHID means the server wants chunked upload.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
648
648
649
649
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    When old and new contents are identical the change is downgraded to
    TEXT and the new-side metadata is mirrored under ``old:`` keys;
    otherwise the old version is uploaded and its size/mime/PHID recorded.
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Identical contents: if it's left as IMAGE/BINARY the web UI might
        # try to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, so describe and upload the old revision as well.
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
671
671
672
672
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file

    Uploads the file, records its PHID/size/mime-type under ``new:``
    metadata keys, and promotes the file type to IMAGE for image mimes.
    """
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get their own file type so the web UI renders them inline.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
685
685
686
686
# Copied from mercurial/patch.py
# Maps a manifest flag character ('l' symlink, 'x' executable, '' regular)
# to the corresponding git file-mode string.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
689
689
690
690
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        # The file only exists in the parent, so mode and content both come
        # from there. Look it up once (the original did this lookup twice).
        fctx = ctx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[fctx.flags()])
        # Binary removals carry no hunks; text removals get a full diff.
        if not fctx.isbinary():
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
703
703
704
704
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[ctx[fname].flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only report modes when they actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary():
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
723
723
724
724
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Note: claims moves by removing their source from the *removed* list
    in place, so callers must pass a mutable list they no longer need.
    """
    # Keep track of files that've been recorded as moved/copied, so if there
    # are additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source disappeared in this commit: it's a move.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Another copy of a file that already moved away.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname,
                        type=DiffChangeType.COPY_AWAY,
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary():
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The away-side records are flushed after the loop so MULTICOPY
    # promotions above are reflected. Iterate values directly; the keys
    # were unused in the original .items() loops.
    for copiedchange in copiedchanges.values():
        pdiff.addchange(copiedchange)
    for movedchange in movedchanges.values():
        pdiff.addchange(movedchange)
788
789
def creatediff(ctx):
    """Create a "Differential Diff" for changeset *ctx*.

    Sends the raw git-style diff through the differential.createrawdiff
    API, tagging it with the repository PHID when one is configured.
    Aborts when the server returns nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    diffopts = mdiff.diffopts(git=True, context=32767)
    params = {b'diff': getdiff(ctx, diffopts)}
    if repophid:
        params[b'repositoryPHID'] = repophid
    result = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not result:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return result
737
802
738
803
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Stores two custom properties on the Differential diff: ``hg:meta``
    (user/date/branch/node/parent of the commit) and ``local:commits``
    (a per-node commit description).
    """
    ui = ctx.repo().ui
    diffid = diff[b'id']

    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(localcommits),
        },
    )
773
838
774
839
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()

    # Only upload a fresh diff when the patch content actually changed
    # relative to oldnode (or when there is nothing to compare against).
    neednewdiff = True
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)

    transactions = []
    if not neednewdiff:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)
    else:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for field, value in info[b'fields'].items():
        if field in (b'title', b'summary', b'testPlan'):
            transactions.append({b'type': field, b'value': value})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
844
909
845
910
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts when any of the given names is unknown to the server, because
    user.search silently omits unmatched usernames rather than erroring.
    """
    wanted = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    data = result[b'data']
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
861
926
862
927
863 @vcrcommand(
928 @vcrcommand(
864 b'phabsend',
929 b'phabsend',
865 [
930 [
866 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
931 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
867 (b'', b'amend', True, _(b'update commit messages')),
932 (b'', b'amend', True, _(b'update commit messages')),
868 (b'', b'reviewer', [], _(b'specify reviewers')),
933 (b'', b'reviewer', [], _(b'specify reviewers')),
869 (b'', b'blocker', [], _(b'specify blocking reviewers')),
934 (b'', b'blocker', [], _(b'specify blocking reviewers')),
870 (
935 (
871 b'm',
936 b'm',
872 b'comment',
937 b'comment',
873 b'',
938 b'',
874 _(b'add a comment to Revisions with new/updated Diffs'),
939 _(b'add a comment to Revisions with new/updated Diffs'),
875 ),
940 ),
876 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
941 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
877 ],
942 ],
878 _(b'REV [OPTIONS]'),
943 _(b'REV [OPTIONS]'),
879 helpcategory=command.CATEGORY_IMPORT_EXPORT,
944 helpcategory=command.CATEGORY_IMPORT_EXPORT,
880 )
945 )
881 def phabsend(ui, repo, *revs, **opts):
946 def phabsend(ui, repo, *revs, **opts):
882 """upload changesets to Phabricator
947 """upload changesets to Phabricator
883
948
884 If there are multiple revisions specified, they will be send as a stack
949 If there are multiple revisions specified, they will be send as a stack
885 with a linear dependencies relationship using the order specified by the
950 with a linear dependencies relationship using the order specified by the
886 revset.
951 revset.
887
952
888 For the first time uploading changesets, local tags will be created to
953 For the first time uploading changesets, local tags will be created to
889 maintain the association. After the first time, phabsend will check
954 maintain the association. After the first time, phabsend will check
890 obsstore and tags information so it can figure out whether to update an
955 obsstore and tags information so it can figure out whether to update an
891 existing Differential Revision, or create a new one.
956 existing Differential Revision, or create a new one.
892
957
893 If --amend is set, update commit messages so they have the
958 If --amend is set, update commit messages so they have the
894 ``Differential Revision`` URL, remove related tags. This is similar to what
959 ``Differential Revision`` URL, remove related tags. This is similar to what
895 arcanist will do, and is more desired in author-push workflows. Otherwise,
960 arcanist will do, and is more desired in author-push workflows. Otherwise,
896 use local tags to record the ``Differential Revision`` association.
961 use local tags to record the ``Differential Revision`` association.
897
962
898 The --confirm option lets you confirm changesets before sending them. You
963 The --confirm option lets you confirm changesets before sending them. You
899 can also add following to your configuration file to make it default
964 can also add following to your configuration file to make it default
900 behaviour::
965 behaviour::
901
966
902 [phabsend]
967 [phabsend]
903 confirm = true
968 confirm = true
904
969
905 phabsend will check obsstore and the above association to decide whether to
970 phabsend will check obsstore and the above association to decide whether to
906 update an existing Differential Revision, or create a new one.
971 update an existing Differential Revision, or create a new one.
907 """
972 """
908 opts = pycompat.byteskwargs(opts)
973 opts = pycompat.byteskwargs(opts)
909 revs = list(revs) + opts.get(b'rev', [])
974 revs = list(revs) + opts.get(b'rev', [])
910 revs = scmutil.revrange(repo, revs)
975 revs = scmutil.revrange(repo, revs)
911
976
912 if not revs:
977 if not revs:
913 raise error.Abort(_(b'phabsend requires at least one changeset'))
978 raise error.Abort(_(b'phabsend requires at least one changeset'))
914 if opts.get(b'amend'):
979 if opts.get(b'amend'):
915 cmdutil.checkunfinished(repo)
980 cmdutil.checkunfinished(repo)
916
981
917 # {newnode: (oldnode, olddiff, olddrev}
982 # {newnode: (oldnode, olddiff, olddrev}
918 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
983 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
919
984
920 confirm = ui.configbool(b'phabsend', b'confirm')
985 confirm = ui.configbool(b'phabsend', b'confirm')
921 confirm |= bool(opts.get(b'confirm'))
986 confirm |= bool(opts.get(b'confirm'))
922 if confirm:
987 if confirm:
923 confirmed = _confirmbeforesend(repo, revs, oldmap)
988 confirmed = _confirmbeforesend(repo, revs, oldmap)
924 if not confirmed:
989 if not confirmed:
925 raise error.Abort(_(b'phabsend cancelled'))
990 raise error.Abort(_(b'phabsend cancelled'))
926
991
927 actions = []
992 actions = []
928 reviewers = opts.get(b'reviewer', [])
993 reviewers = opts.get(b'reviewer', [])
929 blockers = opts.get(b'blocker', [])
994 blockers = opts.get(b'blocker', [])
930 phids = []
995 phids = []
931 if reviewers:
996 if reviewers:
932 phids.extend(userphids(repo, reviewers))
997 phids.extend(userphids(repo, reviewers))
933 if blockers:
998 if blockers:
934 phids.extend(
999 phids.extend(
935 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1000 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
936 )
1001 )
937 if phids:
1002 if phids:
938 actions.append({b'type': b'reviewers.add', b'value': phids})
1003 actions.append({b'type': b'reviewers.add', b'value': phids})
939
1004
940 drevids = [] # [int]
1005 drevids = [] # [int]
941 diffmap = {} # {newnode: diff}
1006 diffmap = {} # {newnode: diff}
942
1007
943 # Send patches one by one so we know their Differential Revision PHIDs and
1008 # Send patches one by one so we know their Differential Revision PHIDs and
944 # can provide dependency relationship
1009 # can provide dependency relationship
945 lastrevphid = None
1010 lastrevphid = None
946 for rev in revs:
1011 for rev in revs:
947 ui.debug(b'sending rev %d\n' % rev)
1012 ui.debug(b'sending rev %d\n' % rev)
948 ctx = repo[rev]
1013 ctx = repo[rev]
949
1014
950 # Get Differential Revision ID
1015 # Get Differential Revision ID
951 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1016 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
952 if oldnode != ctx.node() or opts.get(b'amend'):
1017 if oldnode != ctx.node() or opts.get(b'amend'):
953 # Create or update Differential Revision
1018 # Create or update Differential Revision
954 revision, diff = createdifferentialrevision(
1019 revision, diff = createdifferentialrevision(
955 ctx,
1020 ctx,
956 revid,
1021 revid,
957 lastrevphid,
1022 lastrevphid,
958 oldnode,
1023 oldnode,
959 olddiff,
1024 olddiff,
960 actions,
1025 actions,
961 opts.get(b'comment'),
1026 opts.get(b'comment'),
962 )
1027 )
963 diffmap[ctx.node()] = diff
1028 diffmap[ctx.node()] = diff
964 newrevid = int(revision[b'object'][b'id'])
1029 newrevid = int(revision[b'object'][b'id'])
965 newrevphid = revision[b'object'][b'phid']
1030 newrevphid = revision[b'object'][b'phid']
966 if revid:
1031 if revid:
967 action = b'updated'
1032 action = b'updated'
968 else:
1033 else:
969 action = b'created'
1034 action = b'created'
970
1035
971 # Create a local tag to note the association, if commit message
1036 # Create a local tag to note the association, if commit message
972 # does not have it already
1037 # does not have it already
973 m = _differentialrevisiondescre.search(ctx.description())
1038 m = _differentialrevisiondescre.search(ctx.description())
974 if not m or int(m.group(r'id')) != newrevid:
1039 if not m or int(m.group(r'id')) != newrevid:
975 tagname = b'D%d' % newrevid
1040 tagname = b'D%d' % newrevid
976 tags.tag(
1041 tags.tag(
977 repo,
1042 repo,
978 tagname,
1043 tagname,
979 ctx.node(),
1044 ctx.node(),
980 message=None,
1045 message=None,
981 user=None,
1046 user=None,
982 date=None,
1047 date=None,
983 local=True,
1048 local=True,
984 )
1049 )
985 else:
1050 else:
986 # Nothing changed. But still set "newrevphid" so the next revision
1051 # Nothing changed. But still set "newrevphid" so the next revision
987 # could depend on this one and "newrevid" for the summary line.
1052 # could depend on this one and "newrevid" for the summary line.
988 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1053 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
989 newrevid = revid
1054 newrevid = revid
990 action = b'skipped'
1055 action = b'skipped'
991
1056
992 actiondesc = ui.label(
1057 actiondesc = ui.label(
993 {
1058 {
994 b'created': _(b'created'),
1059 b'created': _(b'created'),
995 b'skipped': _(b'skipped'),
1060 b'skipped': _(b'skipped'),
996 b'updated': _(b'updated'),
1061 b'updated': _(b'updated'),
997 }[action],
1062 }[action],
998 b'phabricator.action.%s' % action,
1063 b'phabricator.action.%s' % action,
999 )
1064 )
1000 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1065 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1001 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1066 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1002 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1067 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1003 ui.write(
1068 ui.write(
1004 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1069 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1005 )
1070 )
1006 drevids.append(newrevid)
1071 drevids.append(newrevid)
1007 lastrevphid = newrevphid
1072 lastrevphid = newrevphid
1008
1073
1009 # Update commit messages and remove tags
1074 # Update commit messages and remove tags
1010 if opts.get(b'amend'):
1075 if opts.get(b'amend'):
1011 unfi = repo.unfiltered()
1076 unfi = repo.unfiltered()
1012 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1077 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1013 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1078 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1014 wnode = unfi[b'.'].node()
1079 wnode = unfi[b'.'].node()
1015 mapping = {} # {oldnode: [newnode]}
1080 mapping = {} # {oldnode: [newnode]}
1016 for i, rev in enumerate(revs):
1081 for i, rev in enumerate(revs):
1017 old = unfi[rev]
1082 old = unfi[rev]
1018 drevid = drevids[i]
1083 drevid = drevids[i]
1019 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1084 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1020 newdesc = getdescfromdrev(drev)
1085 newdesc = getdescfromdrev(drev)
1021 # Make sure commit message contain "Differential Revision"
1086 # Make sure commit message contain "Differential Revision"
1022 if old.description() != newdesc:
1087 if old.description() != newdesc:
1023 if old.phase() == phases.public:
1088 if old.phase() == phases.public:
1024 ui.warn(
1089 ui.warn(
1025 _(b"warning: not updating public commit %s\n")
1090 _(b"warning: not updating public commit %s\n")
1026 % scmutil.formatchangeid(old)
1091 % scmutil.formatchangeid(old)
1027 )
1092 )
1028 continue
1093 continue
1029 parents = [
1094 parents = [
1030 mapping.get(old.p1().node(), (old.p1(),))[0],
1095 mapping.get(old.p1().node(), (old.p1(),))[0],
1031 mapping.get(old.p2().node(), (old.p2(),))[0],
1096 mapping.get(old.p2().node(), (old.p2(),))[0],
1032 ]
1097 ]
1033 new = context.metadataonlyctx(
1098 new = context.metadataonlyctx(
1034 repo,
1099 repo,
1035 old,
1100 old,
1036 parents=parents,
1101 parents=parents,
1037 text=newdesc,
1102 text=newdesc,
1038 user=old.user(),
1103 user=old.user(),
1039 date=old.date(),
1104 date=old.date(),
1040 extra=old.extra(),
1105 extra=old.extra(),
1041 )
1106 )
1042
1107
1043 newnode = new.commit()
1108 newnode = new.commit()
1044
1109
1045 mapping[old.node()] = [newnode]
1110 mapping[old.node()] = [newnode]
1046 # Update diff property
1111 # Update diff property
1047 # If it fails just warn and keep going, otherwise the DREV
1112 # If it fails just warn and keep going, otherwise the DREV
1048 # associations will be lost
1113 # associations will be lost
1049 try:
1114 try:
1050 writediffproperties(unfi[newnode], diffmap[old.node()])
1115 writediffproperties(unfi[newnode], diffmap[old.node()])
1051 except util.urlerr.urlerror:
1116 except util.urlerr.urlerror:
1052 ui.warnnoi18n(
1117 ui.warnnoi18n(
1053 b'Failed to update metadata for D%s\n' % drevid
1118 b'Failed to update metadata for D%s\n' % drevid
1054 )
1119 )
1055 # Remove local tags since it's no longer necessary
1120 # Remove local tags since it's no longer necessary
1056 tagname = b'D%d' % drevid
1121 tagname = b'D%d' % drevid
1057 if tagname in repo.tags():
1122 if tagname in repo.tags():
1058 tags.tag(
1123 tags.tag(
1059 repo,
1124 repo,
1060 tagname,
1125 tagname,
1061 nullid,
1126 nullid,
1062 message=None,
1127 message=None,
1063 user=None,
1128 user=None,
1064 date=None,
1129 date=None,
1065 local=True,
1130 local=True,
1066 )
1131 )
1067 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1132 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1068 if wnode in mapping:
1133 if wnode in mapping:
1069 unfi.setparents(mapping[wnode][0])
1134 unfi.setparents(mapping[wnode][0])
1070
1135
1071
1136
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # The trailing space matches "hg export"'s "# Parent " header exactly.
        (b'parent', b'Parent '),
    ]
)
1083
1148
1084
1149
def _confirmbeforesend(repo, revs, oldmap):
    """show a summary of the revisions to be sent and prompt for confirmation

    Returns True when the user confirms sending, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # An existing drevid means this changeset updates a known revision;
        # otherwise a new Differential Revision would be created.
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice() returns the index of the chosen answer; 0 is "Yes".
    return ui.promptchoice(prompt) == 0
1112
1177
1113
1178
# Normalized Differential Revision status names (see _getstatusname) that the
# DREVSPEC query language accepts as status filters.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1121
1186
1122
1187
1123 def _getstatusname(drev):
1188 def _getstatusname(drev):
1124 """get normalized status name from a Differential Revision"""
1189 """get normalized status name from a Differential Revision"""
1125 return drev[b'statusName'].replace(b' ', b'').lower()
1190 return drev[b'statusName'].replace(b' ', b'').lower()
1126
1191
1127
1192
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1142
1207
1143
1208
1144 def _tokenize(text):
1209 def _tokenize(text):
1145 view = memoryview(text) # zero-copy slice
1210 view = memoryview(text) # zero-copy slice
1146 special = b'():+-& '
1211 special = b'():+-& '
1147 pos = 0
1212 pos = 0
1148 length = len(text)
1213 length = len(text)
1149 while pos < length:
1214 while pos < length:
1150 symbol = b''.join(
1215 symbol = b''.join(
1151 itertools.takewhile(
1216 itertools.takewhile(
1152 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1217 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1153 )
1218 )
1154 )
1219 )
1155 if symbol:
1220 if symbol:
1156 yield (b'symbol', symbol, pos)
1221 yield (b'symbol', symbol, pos)
1157 pos += len(symbol)
1222 pos += len(symbol)
1158 else: # special char, ignore space
1223 else: # special char, ignore space
1159 if text[pos] != b' ':
1224 if text[pos] != b' ':
1160 yield (text[pos], None, pos)
1225 yield (text[pos], None, pos)
1161 pos += 1
1226 pos += 1
1162 yield (b'end', None, pos)
1227 yield (b'end', None, pos)
1163
1228
1164
1229
def _parse(text):
    """parse a DREVSPEC bytestring into a query tree

    Raises ParseError if trailing input is left after a complete expression.
    """
    p = parser.parser(_elements)
    tree, pos = p.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1170
1235
1171
1236
1172 def _parsedrev(symbol):
1237 def _parsedrev(symbol):
1173 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1238 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1174 if symbol.startswith(b'D') and symbol[1:].isdigit():
1239 if symbol.startswith(b'D') and symbol[1:].isdigit():
1175 return int(symbol[1:])
1240 return int(symbol[1:])
1176 if symbol.isdigit():
1241 if symbol.isdigit():
1177 return int(symbol)
1242 return int(symbol)
1178
1243
1179
1244
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    op = tree[0]
    if op == b'symbol':
        # A bare symbol contributes at most one explicit revision id.
        rev = _parsedrev(tree[1])
        return ({rev} if rev else set()), set()
    if op == b'ancestors':
        # ":X" needs X itself plus everything X transitively depends on,
        # so its ids appear in both result sets.
        subdrevs, subancestors = _prefetchdrevs(tree[1])
        return set(subdrevs), subdrevs | subancestors
    # Binary operators and groups: merge results from every operand.
    drevs = set()
    ancestordrevs = set()
    for subtree in tree[1:]:
        subdrevs, subancestors = _prefetchdrevs(subtree)
        drevs |= subdrevs
        ancestordrevs |= subancestors
    return drevs, ancestordrevs
1200
1265
1201
1266
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # The cache key is the first requested id or phid, if any.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result, keyed both by phid and by int id.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" links, de-duplicated
        # via `visited`; the collected order is reversed so the result runs
        # bottom (oldest dependency) to top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestor query,
    # speculatively fetch up to `batchsize` ids below it in one conduit call.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status name selects every prefetched revision in that
                # state; it cannot stand alone (validids bounds the scan).
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set algebra is delegated to smartset's &, + and - operators.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1324
1389
1325
1390
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. a blank summary) are dropped entirely.
    return b'\n\n'.join(p for p in parts if p)
1339
1404
1340
1405
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

    "properties": {
      "hg:meta": {
        "date": "1499571514 25200",
        "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
        "user": "Foo Bar <foo@example.com>",
        "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
      }
    }

    Or converted from "local:commits", sent by "arc", like:

    "properties": {
      "local:commits": {
        "98c08acae292b2faf60a279b4189beb6cff1414d": {
          "author": "Foo Bar",
          "time": 1499546314,
          "branch": "default",
          "tag": "",
          "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "local": "1000",
          "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
          "summary": "...",
          "message": "...",
          "authorEmail": "foo@example.com"
        }
      }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit deterministically. Sorting the dict *values*
            # raises TypeError on Python 3 (dicts are unorderable), so sort
            # the node-hash keys instead and take the first entry.
            localcommits = props[b'local:commits']
            commit = localcommits[sorted(localcommits)[0]]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone information is not available here; assume UTC.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1407
1472
1408
1473
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Prefetch hg:meta property for all diffs (one conduit call for the
    # latest diff of every revision).
    diffids = sorted(
        set(max(int(v) for v in drev[b'diffs']) for drev in drevs)
    )
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[k], meta[k]))
        header = b'\n'.join(headerlines) + b'\n'

        write(b'%s%s\n%s' % (header, desc, body))
1440
1505
1441
1506
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ":" (ancestors) operator.
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1472
1537
1473
1538
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': b'true'} for f in flags]

    drevs = querydrev(repo, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        if i == lastindex and opts.get(b'comment'):
            # The comment applies to the last selected revision only.
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1510
1575
1511
1576
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision:" URL embedded in the description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    # Fall back to a local "D123" tag left behind by phabsend.
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        baseurl = ctx.repo().ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict({b'url': baseurl + t, b'id': t,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now