##// END OF EJS Templates
phabricator: treat non-utf-8 text files as binary as phabricator requires...
Ian Moody -
r43557:06a33a50 default
parent child Browse files
Show More
@@ -1,1622 +1,1639 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 # be specifying the version(s) of Mercurial they are tested with, or
86 # be specifying the version(s) of Mercurial they are tested with, or
87 # leave the attribute unspecified.
87 # leave the attribute unspecified.
88 testedwith = b'ships-with-hg-core'
88 testedwith = b'ships-with-hg-core'
89
89
90 eh = exthelper.exthelper()
90 eh = exthelper.exthelper()
91
91
92 cmdtable = eh.cmdtable
92 cmdtable = eh.cmdtable
93 command = eh.command
93 command = eh.command
94 configtable = eh.configtable
94 configtable = eh.configtable
95 templatekeyword = eh.templatekeyword
95 templatekeyword = eh.templatekeyword
96
96
97 # developer config: phabricator.batchsize
97 # developer config: phabricator.batchsize
98 eh.configitem(
98 eh.configitem(
99 b'phabricator', b'batchsize', default=12,
99 b'phabricator', b'batchsize', default=12,
100 )
100 )
101 eh.configitem(
101 eh.configitem(
102 b'phabricator', b'callsign', default=None,
102 b'phabricator', b'callsign', default=None,
103 )
103 )
104 eh.configitem(
104 eh.configitem(
105 b'phabricator', b'curlcmd', default=None,
105 b'phabricator', b'curlcmd', default=None,
106 )
106 )
107 # developer config: phabricator.repophid
107 # developer config: phabricator.repophid
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'repophid', default=None,
109 b'phabricator', b'repophid', default=None,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'url', default=None,
112 b'phabricator', b'url', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabsend', b'confirm', default=False,
115 b'phabsend', b'confirm', default=False,
116 )
116 )
117
117
118 colortable = {
118 colortable = {
119 b'phabricator.action.created': b'green',
119 b'phabricator.action.created': b'green',
120 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.skipped': b'magenta',
121 b'phabricator.action.updated': b'magenta',
121 b'phabricator.action.updated': b'magenta',
122 b'phabricator.desc': b'',
122 b'phabricator.desc': b'',
123 b'phabricator.drev': b'bold',
123 b'phabricator.drev': b'bold',
124 b'phabricator.node': b'',
124 b'phabricator.node': b'',
125 }
125 }
126
126
127 _VCR_FLAGS = [
127 _VCR_FLAGS = [
128 (
128 (
129 b'',
129 b'',
130 b'test-vcr',
130 b'test-vcr',
131 b'',
131 b'',
132 _(
132 _(
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 b', otherwise will mock all http requests using the specified vcr file.'
134 b', otherwise will mock all http requests using the specified vcr file.'
135 b' (ADVANCED)'
135 b' (ADVANCED)'
136 ),
136 ),
137 ),
137 ),
138 ]
138 ]
139
139
140
140
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
142 fullflags = flags + _VCR_FLAGS
142 fullflags = flags + _VCR_FLAGS
143
143
144 def hgmatcher(r1, r2):
144 def hgmatcher(r1, r2):
145 if r1.uri != r2.uri or r1.method != r2.method:
145 if r1.uri != r2.uri or r1.method != r2.method:
146 return False
146 return False
147 r1params = r1.body.split(b'&')
147 r1params = r1.body.split(b'&')
148 r2params = r2.body.split(b'&')
148 r2params = r2.body.split(b'&')
149 return set(r1params) == set(r2params)
149 return set(r1params) == set(r2params)
150
150
151 def sanitiserequest(request):
151 def sanitiserequest(request):
152 request.body = re.sub(
152 request.body = re.sub(
153 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
153 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
154 )
154 )
155 return request
155 return request
156
156
157 def sanitiseresponse(response):
157 def sanitiseresponse(response):
158 if r'set-cookie' in response[r'headers']:
158 if r'set-cookie' in response[r'headers']:
159 del response[r'headers'][r'set-cookie']
159 del response[r'headers'][r'set-cookie']
160 return response
160 return response
161
161
162 def decorate(fn):
162 def decorate(fn):
163 def inner(*args, **kwargs):
163 def inner(*args, **kwargs):
164 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
164 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
165 if cassette:
165 if cassette:
166 import hgdemandimport
166 import hgdemandimport
167
167
168 with hgdemandimport.deactivated():
168 with hgdemandimport.deactivated():
169 import vcr as vcrmod
169 import vcr as vcrmod
170 import vcr.stubs as stubs
170 import vcr.stubs as stubs
171
171
172 vcr = vcrmod.VCR(
172 vcr = vcrmod.VCR(
173 serializer=r'json',
173 serializer=r'json',
174 before_record_request=sanitiserequest,
174 before_record_request=sanitiserequest,
175 before_record_response=sanitiseresponse,
175 before_record_response=sanitiseresponse,
176 custom_patches=[
176 custom_patches=[
177 (
177 (
178 urlmod,
178 urlmod,
179 r'httpconnection',
179 r'httpconnection',
180 stubs.VCRHTTPConnection,
180 stubs.VCRHTTPConnection,
181 ),
181 ),
182 (
182 (
183 urlmod,
183 urlmod,
184 r'httpsconnection',
184 r'httpsconnection',
185 stubs.VCRHTTPSConnection,
185 stubs.VCRHTTPSConnection,
186 ),
186 ),
187 ],
187 ],
188 )
188 )
189 vcr.register_matcher(r'hgmatcher', hgmatcher)
189 vcr.register_matcher(r'hgmatcher', hgmatcher)
190 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
190 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
191 return fn(*args, **kwargs)
191 return fn(*args, **kwargs)
192 return fn(*args, **kwargs)
192 return fn(*args, **kwargs)
193
193
194 inner.__name__ = fn.__name__
194 inner.__name__ = fn.__name__
195 inner.__doc__ = fn.__doc__
195 inner.__doc__ = fn.__doc__
196 return command(
196 return command(
197 name,
197 name,
198 fullflags,
198 fullflags,
199 spec,
199 spec,
200 helpcategory=helpcategory,
200 helpcategory=helpcategory,
201 optionalrepo=optionalrepo,
201 optionalrepo=optionalrepo,
202 )(inner)
202 )(inner)
203
203
204 return decorate
204 return decorate
205
205
206
206
207 def urlencodenested(params):
207 def urlencodenested(params):
208 """like urlencode, but works with nested parameters.
208 """like urlencode, but works with nested parameters.
209
209
210 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
210 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
211 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
211 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
212 urlencode. Note: the encoding is consistent with PHP's http_build_query.
212 urlencode. Note: the encoding is consistent with PHP's http_build_query.
213 """
213 """
214 flatparams = util.sortdict()
214 flatparams = util.sortdict()
215
215
216 def process(prefix, obj):
216 def process(prefix, obj):
217 if isinstance(obj, bool):
217 if isinstance(obj, bool):
218 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
218 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
219 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
219 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
220 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
220 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
221 if items is None:
221 if items is None:
222 flatparams[prefix] = obj
222 flatparams[prefix] = obj
223 else:
223 else:
224 for k, v in items(obj):
224 for k, v in items(obj):
225 if prefix:
225 if prefix:
226 process(b'%s[%s]' % (prefix, k), v)
226 process(b'%s[%s]' % (prefix, k), v)
227 else:
227 else:
228 process(k, v)
228 process(k, v)
229
229
230 process(b'', params)
230 process(b'', params)
231 return util.urlreq.urlencode(flatparams)
231 return util.urlreq.urlencode(flatparams)
232
232
233
233
234 def readurltoken(ui):
234 def readurltoken(ui):
235 """return conduit url, token and make sure they exist
235 """return conduit url, token and make sure they exist
236
236
237 Currently read from [auth] config section. In the future, it might
237 Currently read from [auth] config section. In the future, it might
238 make sense to read from .arcconfig and .arcrc as well.
238 make sense to read from .arcconfig and .arcrc as well.
239 """
239 """
240 url = ui.config(b'phabricator', b'url')
240 url = ui.config(b'phabricator', b'url')
241 if not url:
241 if not url:
242 raise error.Abort(
242 raise error.Abort(
243 _(b'config %s.%s is required') % (b'phabricator', b'url')
243 _(b'config %s.%s is required') % (b'phabricator', b'url')
244 )
244 )
245
245
246 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
246 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
247 token = None
247 token = None
248
248
249 if res:
249 if res:
250 group, auth = res
250 group, auth = res
251
251
252 ui.debug(b"using auth.%s.* for authentication\n" % group)
252 ui.debug(b"using auth.%s.* for authentication\n" % group)
253
253
254 token = auth.get(b'phabtoken')
254 token = auth.get(b'phabtoken')
255
255
256 if not token:
256 if not token:
257 raise error.Abort(
257 raise error.Abort(
258 _(b'Can\'t find conduit token associated to %s') % (url,)
258 _(b'Can\'t find conduit token associated to %s') % (url,)
259 )
259 )
260
260
261 return url, token
261 return url, token
262
262
263
263
264 def callconduit(ui, name, params):
264 def callconduit(ui, name, params):
265 """call Conduit API, params is a dict. return json.loads result, or None"""
265 """call Conduit API, params is a dict. return json.loads result, or None"""
266 host, token = readurltoken(ui)
266 host, token = readurltoken(ui)
267 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
267 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
268 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
268 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
269 params = params.copy()
269 params = params.copy()
270 params[b'__conduit__'] = {
270 params[b'__conduit__'] = {
271 b'token': token,
271 b'token': token,
272 }
272 }
273 rawdata = {
273 rawdata = {
274 b'params': templatefilters.json(params),
274 b'params': templatefilters.json(params),
275 b'output': b'json',
275 b'output': b'json',
276 b'__conduit__': 1,
276 b'__conduit__': 1,
277 }
277 }
278 data = urlencodenested(rawdata)
278 data = urlencodenested(rawdata)
279 curlcmd = ui.config(b'phabricator', b'curlcmd')
279 curlcmd = ui.config(b'phabricator', b'curlcmd')
280 if curlcmd:
280 if curlcmd:
281 sin, sout = procutil.popen2(
281 sin, sout = procutil.popen2(
282 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
282 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
283 )
283 )
284 sin.write(data)
284 sin.write(data)
285 sin.close()
285 sin.close()
286 body = sout.read()
286 body = sout.read()
287 else:
287 else:
288 urlopener = urlmod.opener(ui, authinfo)
288 urlopener = urlmod.opener(ui, authinfo)
289 request = util.urlreq.request(pycompat.strurl(url), data=data)
289 request = util.urlreq.request(pycompat.strurl(url), data=data)
290 with contextlib.closing(urlopener.open(request)) as rsp:
290 with contextlib.closing(urlopener.open(request)) as rsp:
291 body = rsp.read()
291 body = rsp.read()
292 ui.debug(b'Conduit Response: %s\n' % body)
292 ui.debug(b'Conduit Response: %s\n' % body)
293 parsed = pycompat.rapply(
293 parsed = pycompat.rapply(
294 lambda x: encoding.unitolocal(x)
294 lambda x: encoding.unitolocal(x)
295 if isinstance(x, pycompat.unicode)
295 if isinstance(x, pycompat.unicode)
296 else x,
296 else x,
297 # json.loads only accepts bytes from py3.6+
297 # json.loads only accepts bytes from py3.6+
298 json.loads(encoding.unifromlocal(body)),
298 json.loads(encoding.unifromlocal(body)),
299 )
299 )
300 if parsed.get(b'error_code'):
300 if parsed.get(b'error_code'):
301 msg = _(b'Conduit Error (%s): %s') % (
301 msg = _(b'Conduit Error (%s): %s') % (
302 parsed[b'error_code'],
302 parsed[b'error_code'],
303 parsed[b'error_info'],
303 parsed[b'error_info'],
304 )
304 )
305 raise error.Abort(msg)
305 raise error.Abort(msg)
306 return parsed[b'result']
306 return parsed[b'result']
307
307
308
308
309 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
309 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
310 def debugcallconduit(ui, repo, name):
310 def debugcallconduit(ui, repo, name):
311 """call Conduit API
311 """call Conduit API
312
312
313 Call parameters are read from stdin as a JSON blob. Result will be written
313 Call parameters are read from stdin as a JSON blob. Result will be written
314 to stdout as a JSON blob.
314 to stdout as a JSON blob.
315 """
315 """
316 # json.loads only accepts bytes from 3.6+
316 # json.loads only accepts bytes from 3.6+
317 rawparams = encoding.unifromlocal(ui.fin.read())
317 rawparams = encoding.unifromlocal(ui.fin.read())
318 # json.loads only returns unicode strings
318 # json.loads only returns unicode strings
319 params = pycompat.rapply(
319 params = pycompat.rapply(
320 lambda x: encoding.unitolocal(x)
320 lambda x: encoding.unitolocal(x)
321 if isinstance(x, pycompat.unicode)
321 if isinstance(x, pycompat.unicode)
322 else x,
322 else x,
323 json.loads(rawparams),
323 json.loads(rawparams),
324 )
324 )
325 # json.dumps only accepts unicode strings
325 # json.dumps only accepts unicode strings
326 result = pycompat.rapply(
326 result = pycompat.rapply(
327 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
327 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
328 callconduit(ui, name, params),
328 callconduit(ui, name, params),
329 )
329 )
330 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
330 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
331 ui.write(b'%s\n' % encoding.unitolocal(s))
331 ui.write(b'%s\n' % encoding.unitolocal(s))
332
332
333
333
334 def getrepophid(repo):
334 def getrepophid(repo):
335 """given callsign, return repository PHID or None"""
335 """given callsign, return repository PHID or None"""
336 # developer config: phabricator.repophid
336 # developer config: phabricator.repophid
337 repophid = repo.ui.config(b'phabricator', b'repophid')
337 repophid = repo.ui.config(b'phabricator', b'repophid')
338 if repophid:
338 if repophid:
339 return repophid
339 return repophid
340 callsign = repo.ui.config(b'phabricator', b'callsign')
340 callsign = repo.ui.config(b'phabricator', b'callsign')
341 if not callsign:
341 if not callsign:
342 return None
342 return None
343 query = callconduit(
343 query = callconduit(
344 repo.ui,
344 repo.ui,
345 b'diffusion.repository.search',
345 b'diffusion.repository.search',
346 {b'constraints': {b'callsigns': [callsign]}},
346 {b'constraints': {b'callsigns': [callsign]}},
347 )
347 )
348 if len(query[b'data']) == 0:
348 if len(query[b'data']) == 0:
349 return None
349 return None
350 repophid = query[b'data'][0][b'phid']
350 repophid = query[b'data'][0][b'phid']
351 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
351 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
352 return repophid
352 return repophid
353
353
354
354
355 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
355 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
356 _differentialrevisiondescre = re.compile(
356 _differentialrevisiondescre = re.compile(
357 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
357 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
358 )
358 )
359
359
360
360
361 def getoldnodedrevmap(repo, nodelist):
361 def getoldnodedrevmap(repo, nodelist):
362 """find previous nodes that has been sent to Phabricator
362 """find previous nodes that has been sent to Phabricator
363
363
364 return {node: (oldnode, Differential diff, Differential Revision ID)}
364 return {node: (oldnode, Differential diff, Differential Revision ID)}
365 for node in nodelist with known previous sent versions, or associated
365 for node in nodelist with known previous sent versions, or associated
366 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
366 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
367 be ``None``.
367 be ``None``.
368
368
369 Examines commit messages like "Differential Revision:" to get the
369 Examines commit messages like "Differential Revision:" to get the
370 association information.
370 association information.
371
371
372 If such commit message line is not found, examines all precursors and their
372 If such commit message line is not found, examines all precursors and their
373 tags. Tags with format like "D1234" are considered a match and the node
373 tags. Tags with format like "D1234" are considered a match and the node
374 with that tag, and the number after "D" (ex. 1234) will be returned.
374 with that tag, and the number after "D" (ex. 1234) will be returned.
375
375
376 The ``old node``, if not None, is guaranteed to be the last diff of
376 The ``old node``, if not None, is guaranteed to be the last diff of
377 corresponding Differential Revision, and exist in the repo.
377 corresponding Differential Revision, and exist in the repo.
378 """
378 """
379 unfi = repo.unfiltered()
379 unfi = repo.unfiltered()
380 nodemap = unfi.changelog.nodemap
380 nodemap = unfi.changelog.nodemap
381
381
382 result = {} # {node: (oldnode?, lastdiff?, drev)}
382 result = {} # {node: (oldnode?, lastdiff?, drev)}
383 toconfirm = {} # {node: (force, {precnode}, drev)}
383 toconfirm = {} # {node: (force, {precnode}, drev)}
384 for node in nodelist:
384 for node in nodelist:
385 ctx = unfi[node]
385 ctx = unfi[node]
386 # For tags like "D123", put them into "toconfirm" to verify later
386 # For tags like "D123", put them into "toconfirm" to verify later
387 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
387 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
388 for n in precnodes:
388 for n in precnodes:
389 if n in nodemap:
389 if n in nodemap:
390 for tag in unfi.nodetags(n):
390 for tag in unfi.nodetags(n):
391 m = _differentialrevisiontagre.match(tag)
391 m = _differentialrevisiontagre.match(tag)
392 if m:
392 if m:
393 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
393 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
394 continue
394 continue
395
395
396 # Check commit message
396 # Check commit message
397 m = _differentialrevisiondescre.search(ctx.description())
397 m = _differentialrevisiondescre.search(ctx.description())
398 if m:
398 if m:
399 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
399 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
400
400
401 # Double check if tags are genuine by collecting all old nodes from
401 # Double check if tags are genuine by collecting all old nodes from
402 # Phabricator, and expect precursors overlap with it.
402 # Phabricator, and expect precursors overlap with it.
403 if toconfirm:
403 if toconfirm:
404 drevs = [drev for force, precs, drev in toconfirm.values()]
404 drevs = [drev for force, precs, drev in toconfirm.values()]
405 alldiffs = callconduit(
405 alldiffs = callconduit(
406 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
406 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
407 )
407 )
408 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
408 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
409 for newnode, (force, precset, drev) in toconfirm.items():
409 for newnode, (force, precset, drev) in toconfirm.items():
410 diffs = [
410 diffs = [
411 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
411 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
412 ]
412 ]
413
413
414 # "precursors" as known by Phabricator
414 # "precursors" as known by Phabricator
415 phprecset = set(getnode(d) for d in diffs)
415 phprecset = set(getnode(d) for d in diffs)
416
416
417 # Ignore if precursors (Phabricator and local repo) do not overlap,
417 # Ignore if precursors (Phabricator and local repo) do not overlap,
418 # and force is not set (when commit message says nothing)
418 # and force is not set (when commit message says nothing)
419 if not force and not bool(phprecset & precset):
419 if not force and not bool(phprecset & precset):
420 tagname = b'D%d' % drev
420 tagname = b'D%d' % drev
421 tags.tag(
421 tags.tag(
422 repo,
422 repo,
423 tagname,
423 tagname,
424 nullid,
424 nullid,
425 message=None,
425 message=None,
426 user=None,
426 user=None,
427 date=None,
427 date=None,
428 local=True,
428 local=True,
429 )
429 )
430 unfi.ui.warn(
430 unfi.ui.warn(
431 _(
431 _(
432 b'D%s: local tag removed - does not match '
432 b'D%s: local tag removed - does not match '
433 b'Differential history\n'
433 b'Differential history\n'
434 )
434 )
435 % drev
435 % drev
436 )
436 )
437 continue
437 continue
438
438
439 # Find the last node using Phabricator metadata, and make sure it
439 # Find the last node using Phabricator metadata, and make sure it
440 # exists in the repo
440 # exists in the repo
441 oldnode = lastdiff = None
441 oldnode = lastdiff = None
442 if diffs:
442 if diffs:
443 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
443 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
444 oldnode = getnode(lastdiff)
444 oldnode = getnode(lastdiff)
445 if oldnode and oldnode not in nodemap:
445 if oldnode and oldnode not in nodemap:
446 oldnode = None
446 oldnode = None
447
447
448 result[newnode] = (oldnode, lastdiff, drev)
448 result[newnode] = (oldnode, lastdiff, drev)
449
449
450 return result
450 return result
451
451
452
452
453 def getdiff(ctx, diffopts):
453 def getdiff(ctx, diffopts):
454 """plain-text diff without header (user, commit message, etc)"""
454 """plain-text diff without header (user, commit message, etc)"""
455 output = util.stringio()
455 output = util.stringio()
456 for chunk, _label in patch.diffui(
456 for chunk, _label in patch.diffui(
457 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
457 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
458 ):
458 ):
459 output.write(chunk)
459 output.write(chunk)
460 return output.getvalue()
460 return output.getvalue()
461
461
462
462
463 class DiffChangeType(object):
463 class DiffChangeType(object):
464 ADD = 1
464 ADD = 1
465 CHANGE = 2
465 CHANGE = 2
466 DELETE = 3
466 DELETE = 3
467 MOVE_AWAY = 4
467 MOVE_AWAY = 4
468 COPY_AWAY = 5
468 COPY_AWAY = 5
469 MOVE_HERE = 6
469 MOVE_HERE = 6
470 COPY_HERE = 7
470 COPY_HERE = 7
471 MULTICOPY = 8
471 MULTICOPY = 8
472
472
473
473
474 class DiffFileType(object):
474 class DiffFileType(object):
475 TEXT = 1
475 TEXT = 1
476 IMAGE = 2
476 IMAGE = 2
477 BINARY = 3
477 BINARY = 3
478
478
479
479
480 @attr.s
480 @attr.s
481 class phabhunk(dict):
481 class phabhunk(dict):
482 """Represents a Differential hunk, which is owned by a Differential change
482 """Represents a Differential hunk, which is owned by a Differential change
483 """
483 """
484
484
485 oldOffset = attr.ib(default=0) # camelcase-required
485 oldOffset = attr.ib(default=0) # camelcase-required
486 oldLength = attr.ib(default=0) # camelcase-required
486 oldLength = attr.ib(default=0) # camelcase-required
487 newOffset = attr.ib(default=0) # camelcase-required
487 newOffset = attr.ib(default=0) # camelcase-required
488 newLength = attr.ib(default=0) # camelcase-required
488 newLength = attr.ib(default=0) # camelcase-required
489 corpus = attr.ib(default='')
489 corpus = attr.ib(default='')
490 # These get added to the phabchange's equivalents
490 # These get added to the phabchange's equivalents
491 addLines = attr.ib(default=0) # camelcase-required
491 addLines = attr.ib(default=0) # camelcase-required
492 delLines = attr.ib(default=0) # camelcase-required
492 delLines = attr.ib(default=0) # camelcase-required
493
493
494
494
495 @attr.s
495 @attr.s
496 class phabchange(object):
496 class phabchange(object):
497 """Represents a Differential change, owns Differential hunks and owned by a
497 """Represents a Differential change, owns Differential hunks and owned by a
498 Differential diff. Each one represents one file in a diff.
498 Differential diff. Each one represents one file in a diff.
499 """
499 """
500
500
501 currentPath = attr.ib(default=None) # camelcase-required
501 currentPath = attr.ib(default=None) # camelcase-required
502 oldPath = attr.ib(default=None) # camelcase-required
502 oldPath = attr.ib(default=None) # camelcase-required
503 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
503 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
504 metadata = attr.ib(default=attr.Factory(dict))
504 metadata = attr.ib(default=attr.Factory(dict))
505 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
505 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
506 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
506 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
507 type = attr.ib(default=DiffChangeType.CHANGE)
507 type = attr.ib(default=DiffChangeType.CHANGE)
508 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
508 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
509 commitHash = attr.ib(default=None) # camelcase-required
509 commitHash = attr.ib(default=None) # camelcase-required
510 addLines = attr.ib(default=0) # camelcase-required
510 addLines = attr.ib(default=0) # camelcase-required
511 delLines = attr.ib(default=0) # camelcase-required
511 delLines = attr.ib(default=0) # camelcase-required
512 hunks = attr.ib(default=attr.Factory(list))
512 hunks = attr.ib(default=attr.Factory(list))
513
513
514 def copynewmetadatatoold(self):
514 def copynewmetadatatoold(self):
515 for key in list(self.metadata.keys()):
515 for key in list(self.metadata.keys()):
516 newkey = key.replace(b'new:', b'old:')
516 newkey = key.replace(b'new:', b'old:')
517 self.metadata[newkey] = self.metadata[key]
517 self.metadata[newkey] = self.metadata[key]
518
518
519 def addoldmode(self, value):
519 def addoldmode(self, value):
520 self.oldProperties[b'unix:filemode'] = value
520 self.oldProperties[b'unix:filemode'] = value
521
521
522 def addnewmode(self, value):
522 def addnewmode(self, value):
523 self.newProperties[b'unix:filemode'] = value
523 self.newProperties[b'unix:filemode'] = value
524
524
525 def addhunk(self, hunk):
525 def addhunk(self, hunk):
526 if not isinstance(hunk, phabhunk):
526 if not isinstance(hunk, phabhunk):
527 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
527 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
528 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
528 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
529 # It's useful to include these stats since the Phab web UI shows them,
529 # It's useful to include these stats since the Phab web UI shows them,
530 # and uses them to estimate how large a change a Revision is. Also used
530 # and uses them to estimate how large a change a Revision is. Also used
531 # in email subjects for the [+++--] bit.
531 # in email subjects for the [+++--] bit.
532 self.addLines += hunk.addLines
532 self.addLines += hunk.addLines
533 self.delLines += hunk.delLines
533 self.delLines += hunk.delLines
534
534
535
535
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Field names match what the Conduit API expects; several therefore use
    camelCase rather than the usual Mercurial naming.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the path it affects."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        serialized = pycompat.byteskwargs(attr.asdict(change))
        self.changes[change.currentPath] = serialized
562
562
563
563
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Runs a single-file diff of ``fname`` between ``ctx.p1()`` and ``ctx``
    with effectively unlimited context, and records each hunk (plus its
    add/delete line counts) on *pchange*.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Huge context so Phabricator gets the whole file in one hunk's context.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # First line is the @@ range marker; the corpus is everything after.
        corpus = b''.join(lines[1:])
        # Rebuild a complete patch fragment so diffstat can count +/- lines.
        statlines = list(header)
        statlines.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        hunk = phabhunk(
            oldOffset,
            oldLength,
            newOffset,
            newLength,
            corpus,
            addLines,
            delLines,
        )
        pchange.addhunk(hunk)
592
592
593
593
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``file.querychunks`` tells us which chunks the server is still missing;
    only those are uploaded, each as base64 via ``file.uploadchunk``.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    # Fetch the file content once instead of re-reading it for every chunk.
    data = fctx.data()
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            # Server already has this chunk (e.g. a resumed upload).
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(data[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
620
620
621
621
def uploadfile(fctx):
    """upload binary files to Phabricator

    Allocates the file first (``file.allocate``); depending on the server's
    answer either uploads it whole, uploads it in chunks, or skips the upload
    entirely because the server already holds identical content. Returns the
    file PHID, aborting if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID without content means the server wants chunked upload.
            uploadchunks(fctx, fphid)
        else:
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
657
657
658
658
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    If the old and new contents are identical, no upload happens; the new
    metadata is simply mirrored under the ``old:`` keys instead.
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents match.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
            mimeguess
        )
    fphid = uploadfile(oldfctx)
    pchange.metadata[b'old:binary-phid'] = fphid
680
680
681
681
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file

    Uploads the content, records size/mime metadata, and switches the file
    type to IMAGE when the guessed mime type says so (so the web UI renders
    a preview).
    """
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        pchange.fileType = DiffFileType.IMAGE
694
694
695
695
# Copied from mercurial/patch.py
# Maps a filectx flag character to the git-style octal mode string.
gitmode = {b'': b'100644', b'x': b'100755', b'l': b'120000'}
698
698
699
699
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary

    Checks both the current content and (when a parent exists) the previous
    content, since either side of the diff being undecodable forces the
    binary path. Emits a note to the user when the file is flagged.
    """
    try:
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
715
716
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    Each removed file gets a DELETE change carrying its old mode; text
    content is attached unless the file is binary or not valid UTF-8
    (Phabricator requires such files to be treated as binary).
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look the parent filectx up once; the original resolved it twice.
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
712
729
713
730
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    Records a mode change when the permissions differ from the parent, and
    attaches either text hunks or binary upload metadata depending on
    whether the file is binary/non-UTF-8.
    """
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        # Reuse fctx rather than re-resolving ctx[fname] as the original did.
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            # Non-UTF-8 text must also go down the binary path for Phab.
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
732
749
733
750
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    A rename whose source is in ``removed`` becomes a MOVE_AWAY/MOVE_HERE
    pair (and the source is dropped from ``removed`` so addremoved() skips
    it); further copies of an already-moved source upgrade it to MULTICOPY.
    Renames whose source survives become COPY_AWAY/COPY_HERE pairs.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        # Reuse fctx instead of re-resolving ctx[fname] as the original did.
        filemode = gitmode[fctx.flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                # addremoved() must not see this file again.
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            # Non-UTF-8 text must also go down the binary path for Phab.
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The away-side changes go in last; only the values are needed.
    for copiedchange in copiedchanges.values():
        pdiff.addchange(copiedchange)
    for movedchange in movedchanges.values():
        pdiff.addchange(movedchange)
797
814
798
815
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff describing the changes between ``ctx`` and its first
    parent and submits it via ``differential.creatediff``. Returns the
    server's diff object, aborting on failure.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    params = pycompat.byteskwargs(attr.asdict(pdiff))
    diff = callconduit(repo.ui, b'differential.creatediff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
824
841
825
842
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Attaches two custom properties to the diff: ``hg:meta`` (commit-level
    metadata) and ``local:commits`` (per-node author/date info), so
    ``phabread``/``phabimport`` can reconstruct the changeset exactly.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    hgmeta = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', hgmeta)

    localcommits = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', localcommits)
862
879
863
880
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()

    # Decide whether a fresh diff upload is needed: only skip when the patch
    # content is byte-identical to the previously sent node.
    neednewdiff = True
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions.extend(actions)

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for field, value in info[b'fields'].items():
        if field in (b'title', b'summary', b'testPlan'):
            transactions.append({b'type': field, b'value': value})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
933
950
934
951
def userphids(repo, names):
    """convert user names to PHIDs

    Looks usernames up case-insensitively via ``user.search`` and aborts if
    any name cannot be resolved (the API itself does not treat unknown
    usernames as an error).
    """
    wanted = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': wanted}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(wanted) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
950
967
951
968
952 @vcrcommand(
969 @vcrcommand(
953 b'phabsend',
970 b'phabsend',
954 [
971 [
955 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
972 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
956 (b'', b'amend', True, _(b'update commit messages')),
973 (b'', b'amend', True, _(b'update commit messages')),
957 (b'', b'reviewer', [], _(b'specify reviewers')),
974 (b'', b'reviewer', [], _(b'specify reviewers')),
958 (b'', b'blocker', [], _(b'specify blocking reviewers')),
975 (b'', b'blocker', [], _(b'specify blocking reviewers')),
959 (
976 (
960 b'm',
977 b'm',
961 b'comment',
978 b'comment',
962 b'',
979 b'',
963 _(b'add a comment to Revisions with new/updated Diffs'),
980 _(b'add a comment to Revisions with new/updated Diffs'),
964 ),
981 ),
965 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
982 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
966 ],
983 ],
967 _(b'REV [OPTIONS]'),
984 _(b'REV [OPTIONS]'),
968 helpcategory=command.CATEGORY_IMPORT_EXPORT,
985 helpcategory=command.CATEGORY_IMPORT_EXPORT,
969 )
986 )
970 def phabsend(ui, repo, *revs, **opts):
987 def phabsend(ui, repo, *revs, **opts):
971 """upload changesets to Phabricator
988 """upload changesets to Phabricator
972
989
973 If there are multiple revisions specified, they will be send as a stack
990 If there are multiple revisions specified, they will be send as a stack
974 with a linear dependencies relationship using the order specified by the
991 with a linear dependencies relationship using the order specified by the
975 revset.
992 revset.
976
993
977 For the first time uploading changesets, local tags will be created to
994 For the first time uploading changesets, local tags will be created to
978 maintain the association. After the first time, phabsend will check
995 maintain the association. After the first time, phabsend will check
979 obsstore and tags information so it can figure out whether to update an
996 obsstore and tags information so it can figure out whether to update an
980 existing Differential Revision, or create a new one.
997 existing Differential Revision, or create a new one.
981
998
982 If --amend is set, update commit messages so they have the
999 If --amend is set, update commit messages so they have the
983 ``Differential Revision`` URL, remove related tags. This is similar to what
1000 ``Differential Revision`` URL, remove related tags. This is similar to what
984 arcanist will do, and is more desired in author-push workflows. Otherwise,
1001 arcanist will do, and is more desired in author-push workflows. Otherwise,
985 use local tags to record the ``Differential Revision`` association.
1002 use local tags to record the ``Differential Revision`` association.
986
1003
987 The --confirm option lets you confirm changesets before sending them. You
1004 The --confirm option lets you confirm changesets before sending them. You
988 can also add following to your configuration file to make it default
1005 can also add following to your configuration file to make it default
989 behaviour::
1006 behaviour::
990
1007
991 [phabsend]
1008 [phabsend]
992 confirm = true
1009 confirm = true
993
1010
994 phabsend will check obsstore and the above association to decide whether to
1011 phabsend will check obsstore and the above association to decide whether to
995 update an existing Differential Revision, or create a new one.
1012 update an existing Differential Revision, or create a new one.
996 """
1013 """
997 opts = pycompat.byteskwargs(opts)
1014 opts = pycompat.byteskwargs(opts)
998 revs = list(revs) + opts.get(b'rev', [])
1015 revs = list(revs) + opts.get(b'rev', [])
999 revs = scmutil.revrange(repo, revs)
1016 revs = scmutil.revrange(repo, revs)
1000
1017
1001 if not revs:
1018 if not revs:
1002 raise error.Abort(_(b'phabsend requires at least one changeset'))
1019 raise error.Abort(_(b'phabsend requires at least one changeset'))
1003 if opts.get(b'amend'):
1020 if opts.get(b'amend'):
1004 cmdutil.checkunfinished(repo)
1021 cmdutil.checkunfinished(repo)
1005
1022
1006 # {newnode: (oldnode, olddiff, olddrev}
1023 # {newnode: (oldnode, olddiff, olddrev}
1007 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1024 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1008
1025
1009 confirm = ui.configbool(b'phabsend', b'confirm')
1026 confirm = ui.configbool(b'phabsend', b'confirm')
1010 confirm |= bool(opts.get(b'confirm'))
1027 confirm |= bool(opts.get(b'confirm'))
1011 if confirm:
1028 if confirm:
1012 confirmed = _confirmbeforesend(repo, revs, oldmap)
1029 confirmed = _confirmbeforesend(repo, revs, oldmap)
1013 if not confirmed:
1030 if not confirmed:
1014 raise error.Abort(_(b'phabsend cancelled'))
1031 raise error.Abort(_(b'phabsend cancelled'))
1015
1032
1016 actions = []
1033 actions = []
1017 reviewers = opts.get(b'reviewer', [])
1034 reviewers = opts.get(b'reviewer', [])
1018 blockers = opts.get(b'blocker', [])
1035 blockers = opts.get(b'blocker', [])
1019 phids = []
1036 phids = []
1020 if reviewers:
1037 if reviewers:
1021 phids.extend(userphids(repo, reviewers))
1038 phids.extend(userphids(repo, reviewers))
1022 if blockers:
1039 if blockers:
1023 phids.extend(
1040 phids.extend(
1024 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1041 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1025 )
1042 )
1026 if phids:
1043 if phids:
1027 actions.append({b'type': b'reviewers.add', b'value': phids})
1044 actions.append({b'type': b'reviewers.add', b'value': phids})
1028
1045
1029 drevids = [] # [int]
1046 drevids = [] # [int]
1030 diffmap = {} # {newnode: diff}
1047 diffmap = {} # {newnode: diff}
1031
1048
1032 # Send patches one by one so we know their Differential Revision PHIDs and
1049 # Send patches one by one so we know their Differential Revision PHIDs and
1033 # can provide dependency relationship
1050 # can provide dependency relationship
1034 lastrevphid = None
1051 lastrevphid = None
1035 for rev in revs:
1052 for rev in revs:
1036 ui.debug(b'sending rev %d\n' % rev)
1053 ui.debug(b'sending rev %d\n' % rev)
1037 ctx = repo[rev]
1054 ctx = repo[rev]
1038
1055
1039 # Get Differential Revision ID
1056 # Get Differential Revision ID
1040 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1057 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1041 if oldnode != ctx.node() or opts.get(b'amend'):
1058 if oldnode != ctx.node() or opts.get(b'amend'):
1042 # Create or update Differential Revision
1059 # Create or update Differential Revision
1043 revision, diff = createdifferentialrevision(
1060 revision, diff = createdifferentialrevision(
1044 ctx,
1061 ctx,
1045 revid,
1062 revid,
1046 lastrevphid,
1063 lastrevphid,
1047 oldnode,
1064 oldnode,
1048 olddiff,
1065 olddiff,
1049 actions,
1066 actions,
1050 opts.get(b'comment'),
1067 opts.get(b'comment'),
1051 )
1068 )
1052 diffmap[ctx.node()] = diff
1069 diffmap[ctx.node()] = diff
1053 newrevid = int(revision[b'object'][b'id'])
1070 newrevid = int(revision[b'object'][b'id'])
1054 newrevphid = revision[b'object'][b'phid']
1071 newrevphid = revision[b'object'][b'phid']
1055 if revid:
1072 if revid:
1056 action = b'updated'
1073 action = b'updated'
1057 else:
1074 else:
1058 action = b'created'
1075 action = b'created'
1059
1076
1060 # Create a local tag to note the association, if commit message
1077 # Create a local tag to note the association, if commit message
1061 # does not have it already
1078 # does not have it already
1062 m = _differentialrevisiondescre.search(ctx.description())
1079 m = _differentialrevisiondescre.search(ctx.description())
1063 if not m or int(m.group(r'id')) != newrevid:
1080 if not m or int(m.group(r'id')) != newrevid:
1064 tagname = b'D%d' % newrevid
1081 tagname = b'D%d' % newrevid
1065 tags.tag(
1082 tags.tag(
1066 repo,
1083 repo,
1067 tagname,
1084 tagname,
1068 ctx.node(),
1085 ctx.node(),
1069 message=None,
1086 message=None,
1070 user=None,
1087 user=None,
1071 date=None,
1088 date=None,
1072 local=True,
1089 local=True,
1073 )
1090 )
1074 else:
1091 else:
1075 # Nothing changed. But still set "newrevphid" so the next revision
1092 # Nothing changed. But still set "newrevphid" so the next revision
1076 # could depend on this one and "newrevid" for the summary line.
1093 # could depend on this one and "newrevid" for the summary line.
1077 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1094 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1078 newrevid = revid
1095 newrevid = revid
1079 action = b'skipped'
1096 action = b'skipped'
1080
1097
1081 actiondesc = ui.label(
1098 actiondesc = ui.label(
1082 {
1099 {
1083 b'created': _(b'created'),
1100 b'created': _(b'created'),
1084 b'skipped': _(b'skipped'),
1101 b'skipped': _(b'skipped'),
1085 b'updated': _(b'updated'),
1102 b'updated': _(b'updated'),
1086 }[action],
1103 }[action],
1087 b'phabricator.action.%s' % action,
1104 b'phabricator.action.%s' % action,
1088 )
1105 )
1089 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1106 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1090 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1107 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1091 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1108 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1092 ui.write(
1109 ui.write(
1093 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1110 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1094 )
1111 )
1095 drevids.append(newrevid)
1112 drevids.append(newrevid)
1096 lastrevphid = newrevphid
1113 lastrevphid = newrevphid
1097
1114
1098 # Update commit messages and remove tags
1115 # Update commit messages and remove tags
1099 if opts.get(b'amend'):
1116 if opts.get(b'amend'):
1100 unfi = repo.unfiltered()
1117 unfi = repo.unfiltered()
1101 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1118 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1102 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1119 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1103 wnode = unfi[b'.'].node()
1120 wnode = unfi[b'.'].node()
1104 mapping = {} # {oldnode: [newnode]}
1121 mapping = {} # {oldnode: [newnode]}
1105 for i, rev in enumerate(revs):
1122 for i, rev in enumerate(revs):
1106 old = unfi[rev]
1123 old = unfi[rev]
1107 drevid = drevids[i]
1124 drevid = drevids[i]
1108 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1125 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1109 newdesc = getdescfromdrev(drev)
1126 newdesc = getdescfromdrev(drev)
1110 # Make sure commit message contain "Differential Revision"
1127 # Make sure commit message contain "Differential Revision"
1111 if old.description() != newdesc:
1128 if old.description() != newdesc:
1112 if old.phase() == phases.public:
1129 if old.phase() == phases.public:
1113 ui.warn(
1130 ui.warn(
1114 _(b"warning: not updating public commit %s\n")
1131 _(b"warning: not updating public commit %s\n")
1115 % scmutil.formatchangeid(old)
1132 % scmutil.formatchangeid(old)
1116 )
1133 )
1117 continue
1134 continue
1118 parents = [
1135 parents = [
1119 mapping.get(old.p1().node(), (old.p1(),))[0],
1136 mapping.get(old.p1().node(), (old.p1(),))[0],
1120 mapping.get(old.p2().node(), (old.p2(),))[0],
1137 mapping.get(old.p2().node(), (old.p2(),))[0],
1121 ]
1138 ]
1122 new = context.metadataonlyctx(
1139 new = context.metadataonlyctx(
1123 repo,
1140 repo,
1124 old,
1141 old,
1125 parents=parents,
1142 parents=parents,
1126 text=newdesc,
1143 text=newdesc,
1127 user=old.user(),
1144 user=old.user(),
1128 date=old.date(),
1145 date=old.date(),
1129 extra=old.extra(),
1146 extra=old.extra(),
1130 )
1147 )
1131
1148
1132 newnode = new.commit()
1149 newnode = new.commit()
1133
1150
1134 mapping[old.node()] = [newnode]
1151 mapping[old.node()] = [newnode]
1135 # Update diff property
1152 # Update diff property
1136 # If it fails just warn and keep going, otherwise the DREV
1153 # If it fails just warn and keep going, otherwise the DREV
1137 # associations will be lost
1154 # associations will be lost
1138 try:
1155 try:
1139 writediffproperties(unfi[newnode], diffmap[old.node()])
1156 writediffproperties(unfi[newnode], diffmap[old.node()])
1140 except util.urlerr.urlerror:
1157 except util.urlerr.urlerror:
1141 ui.warnnoi18n(
1158 ui.warnnoi18n(
1142 b'Failed to update metadata for D%s\n' % drevid
1159 b'Failed to update metadata for D%s\n' % drevid
1143 )
1160 )
1144 # Remove local tags since it's no longer necessary
1161 # Remove local tags since it's no longer necessary
1145 tagname = b'D%d' % drevid
1162 tagname = b'D%d' % drevid
1146 if tagname in repo.tags():
1163 if tagname in repo.tags():
1147 tags.tag(
1164 tags.tag(
1148 repo,
1165 repo,
1149 tagname,
1166 tagname,
1150 nullid,
1167 nullid,
1151 message=None,
1168 message=None,
1152 user=None,
1169 user=None,
1153 date=None,
1170 date=None,
1154 local=True,
1171 local=True,
1155 )
1172 )
1156 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1173 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1157 if wnode in mapping:
1174 if wnode in mapping:
1158 unfi.setparents(mapping[wnode][0])
1175 unfi.setparents(mapping[wnode][0])
1159
1176
1160
1177
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # the trailing space in b'Parent ' appears deliberate (header
        # alignment in exported patches) -- preserve it
        (b'parent', b'Parent '),
    ]
)
1172
1189
1173
1190
def _confirmbeforesend(repo, revs, oldmap):
    """Print the stack about to be sent and ask the user to confirm.

    revs are the local revisions to send; oldmap maps a node to its
    previously-sent (oldnode, olddiff, drevid) triple, with drevid None
    when the changeset has no Differential Revision yet.

    Returns True to proceed, False when the user declines.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Existing revisions show their D-number; new ones show "NEW"
        if drevid:
            drevtext = b'D%s' % drevid
        else:
            drevtext = _(b'NEW')
        ui.write(
            _(b'%s - %s: %s\n')
            % (
                ui.label(drevtext, b'phabricator.drev'),
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(firstline, b'phabricator.desc'),
            )
        )

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice returns the 0-based choice index: 0 for Yes, 1 for No
    return not ui.promptchoice(prompt)
1201
1218
1202
1219
# Normalized Differential Revision status names (as produced by
# _getstatusname) that may be used as filter symbols in a DREVSPEC.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1210
1227
1211
1228
1212 def _getstatusname(drev):
1229 def _getstatusname(drev):
1213 """get normalized status name from a Differential Revision"""
1230 """get normalized status name from a Differential Revision"""
1214 return drev[b'statusName'].replace(b' ', b'').lower()
1231 return drev[b'statusName'].replace(b' ', b'').lower()
1215
1232
1216
1233
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Parser element table consumed by the generic parser; each entry is
# (binding-strength, primary, prefix, infix, suffix).
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1231
1248
1232
1249
def _tokenize(text):
    """Tokenize a DREVSPEC bytes string.

    Yields (token-type, token-value, position) tuples, where token-type is
    a key of _elements; a final (b'end', None, pos) token is always
    emitted. Spaces are skipped.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Longest run of non-special bytes starting at pos is a symbol
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice rather than index: indexing a bytes object yields an
            # int on Python 3, which would never compare equal to b' '
            # (spaces would leak through as bogus tokens). A length-1
            # slice yields bytes on both Python 2 and 3.
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1252
1269
1253
1270
def _parse(text):
    """Parse a DREVSPEC bytes string into an AST.

    Raises ParseError when the whole input is not consumed.
    """
    p = parser.parser(_elements)
    tree, consumed = p.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
1259
1276
1260
1277
1261 def _parsedrev(symbol):
1278 def _parsedrev(symbol):
1262 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1279 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1263 if symbol.startswith(b'D') and symbol[1:].isdigit():
1280 if symbol.startswith(b'D') and symbol[1:].isdigit():
1264 return int(symbol[1:])
1281 return int(symbol[1:])
1265 if symbol.isdigit():
1282 if symbol.isdigit():
1266 return int(symbol)
1283 return int(symbol)
1267
1284
1268
1285
1269 def _prefetchdrevs(tree):
1286 def _prefetchdrevs(tree):
1270 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1287 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1271 drevs = set()
1288 drevs = set()
1272 ancestordrevs = set()
1289 ancestordrevs = set()
1273 op = tree[0]
1290 op = tree[0]
1274 if op == b'symbol':
1291 if op == b'symbol':
1275 r = _parsedrev(tree[1])
1292 r = _parsedrev(tree[1])
1276 if r:
1293 if r:
1277 drevs.add(r)
1294 drevs.add(r)
1278 elif op == b'ancestors':
1295 elif op == b'ancestors':
1279 r, a = _prefetchdrevs(tree[1])
1296 r, a = _prefetchdrevs(tree[1])
1280 drevs.update(r)
1297 drevs.update(r)
1281 ancestordrevs.update(r)
1298 ancestordrevs.update(r)
1282 ancestordrevs.update(a)
1299 ancestordrevs.update(a)
1283 else:
1300 else:
1284 for t in tree[1:]:
1301 for t in tree[1:]:
1285 r, a = _prefetchdrevs(t)
1302 r, a = _prefetchdrevs(t)
1286 drevs.update(r)
1303 drevs.update(r)
1287 ancestordrevs.update(a)
1304 ancestordrevs.update(a)
1288 return drevs, ancestordrevs
1305 return drevs, ancestordrevs
1289
1306
1290
1307
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # A query is keyed by the first id or phid it asks for; the cache
        # is consulted first so repeated lookups hit the server only once.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" links; result is
        # reversed at the end so it reads bottom (oldest) to top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For each ancestor root,
    # speculatively fetch the preceding `batchsize` ids in one request on
    # the assumption that a stack's ids are roughly contiguous.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter within the already-selected ids only,
                # hence "cannot be used alone" in the phabread docstring.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Operator name doubles as the smartset method name
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1413
1430
1414
1431
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # drop empty sections so no doubled blank lines appear in the message
    return b'\n\n'.join(s for s in sections if s)
1428
1445
1429
1446
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        # Fall back to reconstructing metadata from arc's "local:commits"
        if props.get(b'local:commits'):
            # NOTE(review): sorting dict values raises TypeError on
            # Python 3 when there is more than one commit -- presumably
            # diffs only carry a single local commit here; confirm.
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc stores a bare epoch; timezone offset is lost (see
                # docstring), so a zero offset is recorded
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Backfill still-missing fields from top-level diff attributes
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1496
1513
1497
1514
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs. Only the latest (max id)
    # diff of each Differential Revision is used.
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
1529
1546
1530
1547
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    # Kwargs arrive as str; the rest of the extension works in bytes
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is shorthand for wrapping the spec in the ":" operator
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1561
1578
1562
1579
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Exactly one status-changing flag may be active at a time.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # Each selected flag becomes one Conduit transaction.
    actions = [{b'type': flag, b'value': b'true'} for flag in flags]

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The optional comment is attached only to the last revision of the
        # batch (it stays in ``actions`` for just that final iteration).
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1599
1616
1600
1617
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line from the commit message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group(r'url'), b'id': b"D%s" % match.group(r'id'),}
        )
    # Otherwise, fall back to a local tag of the form matched by
    # _differentialrevisiontagre; the first matching tag wins.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = repo.ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag,})
    # No Differential information associated with this changeset.
    return None
General Comments 0
You need to be logged in to leave comments. Login now