phabricator: add debug logging to show previous node values in `phabsend`...
Matt Harbison
r45209:38f7b2f0 default
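In short, this change adds a developer config knob ``phabricator.debug`` (default ``False``) and a small ``_debug()`` helper, and ``getoldnodedrevmap()`` uses the helper so ``phabsend`` can report which previously submitted nodes each local node maps to. One way to enable the new output (a sketch; it assumes the repository is already configured for Phabricator as described in the module docstring below):

    [phabricator]
    debug = True

or, for a one-off run:

    $ hg phabsend -r . --config phabricator.debug=True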
@@ -1,2070 +1,2104
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires a ``Test Plan``, which might prevent some
18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 changesets from being sent. The requirement can be disabled by changing the
19 changesets from being sent. The requirement can be disabled by changing the
20 ``differential.require-test-plan-field`` config on the server side.
20 ``differential.require-test-plan-field`` config on the server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that are not easily supported by
34 # if you need to specify advanced options that are not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
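For orientation, the commands described in the docstring are typically driven like this (an illustrative sketch only; see ``hg help`` for each command's full option set):

    $ hg phabsend -r .                   # send the current changeset to Phabricator
    $ hg phabread D1234 | hg import -    # apply a Differential Revision locally
    $ hg phabupdate --accept D1234       # update its status in batch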
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
97 # leave the attribute unspecified.
98 testedwith = b'ships-with-hg-core'
98 testedwith = b'ships-with-hg-core'
99
99
100 eh = exthelper.exthelper()
100 eh = exthelper.exthelper()
101
101
102 cmdtable = eh.cmdtable
102 cmdtable = eh.cmdtable
103 command = eh.command
103 command = eh.command
104 configtable = eh.configtable
104 configtable = eh.configtable
105 templatekeyword = eh.templatekeyword
105 templatekeyword = eh.templatekeyword
106 uisetup = eh.finaluisetup
106 uisetup = eh.finaluisetup
107
107
108 # developer config: phabricator.batchsize
108 # developer config: phabricator.batchsize
109 eh.configitem(
109 eh.configitem(
110 b'phabricator', b'batchsize', default=12,
110 b'phabricator', b'batchsize', default=12,
111 )
111 )
112 eh.configitem(
112 eh.configitem(
113 b'phabricator', b'callsign', default=None,
113 b'phabricator', b'callsign', default=None,
114 )
114 )
115 eh.configitem(
115 eh.configitem(
116 b'phabricator', b'curlcmd', default=None,
116 b'phabricator', b'curlcmd', default=None,
117 )
117 )
118 # developer config: phabricator.debug
119 eh.configitem(
120 b'phabricator', b'debug', default=False,
121 )
118 # developer config: phabricator.repophid
122 # developer config: phabricator.repophid
119 eh.configitem(
123 eh.configitem(
120 b'phabricator', b'repophid', default=None,
124 b'phabricator', b'repophid', default=None,
121 )
125 )
122 eh.configitem(
126 eh.configitem(
123 b'phabricator', b'url', default=None,
127 b'phabricator', b'url', default=None,
124 )
128 )
125 eh.configitem(
129 eh.configitem(
126 b'phabsend', b'confirm', default=False,
130 b'phabsend', b'confirm', default=False,
127 )
131 )
128 eh.configitem(
132 eh.configitem(
129 b'phabimport', b'secret', default=False,
133 b'phabimport', b'secret', default=False,
130 )
134 )
131 eh.configitem(
135 eh.configitem(
132 b'phabimport', b'obsolete', default=False,
136 b'phabimport', b'obsolete', default=False,
133 )
137 )
134
138
135 colortable = {
139 colortable = {
136 b'phabricator.action.created': b'green',
140 b'phabricator.action.created': b'green',
137 b'phabricator.action.skipped': b'magenta',
141 b'phabricator.action.skipped': b'magenta',
138 b'phabricator.action.updated': b'magenta',
142 b'phabricator.action.updated': b'magenta',
139 b'phabricator.desc': b'',
143 b'phabricator.desc': b'',
140 b'phabricator.drev': b'bold',
144 b'phabricator.drev': b'bold',
141 b'phabricator.node': b'',
145 b'phabricator.node': b'',
142 b'phabricator.status.abandoned': b'magenta dim',
146 b'phabricator.status.abandoned': b'magenta dim',
143 b'phabricator.status.accepted': b'green bold',
147 b'phabricator.status.accepted': b'green bold',
144 b'phabricator.status.closed': b'green',
148 b'phabricator.status.closed': b'green',
145 b'phabricator.status.needsreview': b'yellow',
149 b'phabricator.status.needsreview': b'yellow',
146 b'phabricator.status.needsrevision': b'red',
150 b'phabricator.status.needsrevision': b'red',
147 b'phabricator.status.changesplanned': b'red',
151 b'phabricator.status.changesplanned': b'red',
148 }
152 }
149
153
150 _VCR_FLAGS = [
154 _VCR_FLAGS = [
151 (
155 (
152 b'',
156 b'',
153 b'test-vcr',
157 b'test-vcr',
154 b'',
158 b'',
155 _(
159 _(
156 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
157 b', otherwise will mock all http requests using the specified vcr file.'
161 b', otherwise will mock all http requests using the specified vcr file.'
158 b' (ADVANCED)'
162 b' (ADVANCED)'
159 ),
163 ),
160 ),
164 ),
161 ]
165 ]
162
166
163
167
164 @eh.wrapfunction(localrepo, "loadhgrc")
168 @eh.wrapfunction(localrepo, "loadhgrc")
165 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
166 """Load ``.arcconfig`` content into a ui instance on repository open.
170 """Load ``.arcconfig`` content into a ui instance on repository open.
167 """
171 """
168 result = False
172 result = False
169 arcconfig = {}
173 arcconfig = {}
170
174
171 try:
175 try:
172 # json.loads only accepts bytes from 3.6+
176 # json.loads only accepts bytes from 3.6+
173 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
174 # json.loads only returns unicode strings
178 # json.loads only returns unicode strings
175 arcconfig = pycompat.rapply(
179 arcconfig = pycompat.rapply(
176 lambda x: encoding.unitolocal(x)
180 lambda x: encoding.unitolocal(x)
177 if isinstance(x, pycompat.unicode)
181 if isinstance(x, pycompat.unicode)
178 else x,
182 else x,
179 pycompat.json_loads(rawparams),
183 pycompat.json_loads(rawparams),
180 )
184 )
181
185
182 result = True
186 result = True
183 except ValueError:
187 except ValueError:
184 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
185 except IOError:
189 except IOError:
186 pass
190 pass
187
191
188 cfg = util.sortdict()
192 cfg = util.sortdict()
189
193
190 if b"repository.callsign" in arcconfig:
194 if b"repository.callsign" in arcconfig:
191 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
192
196
193 if b"phabricator.uri" in arcconfig:
197 if b"phabricator.uri" in arcconfig:
194 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
195
199
196 if cfg:
200 if cfg:
197 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
198
202
199 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
200
204
201
205
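As the wrapper above shows, only two ``.arcconfig`` keys are consulted, ``repository.callsign`` and ``phabricator.uri``, which are mapped onto ``phabricator.callsign`` and ``phabricator.url``. A minimal example file that would be picked up on repository open (placeholder values):

    {
        "repository.callsign": "FOO",
        "phabricator.uri": "https://phab.example.com/"
    }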
202 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
203 fullflags = flags + _VCR_FLAGS
207 fullflags = flags + _VCR_FLAGS
204
208
205 def hgmatcher(r1, r2):
209 def hgmatcher(r1, r2):
206 if r1.uri != r2.uri or r1.method != r2.method:
210 if r1.uri != r2.uri or r1.method != r2.method:
207 return False
211 return False
208 r1params = util.urlreq.parseqs(r1.body)
212 r1params = util.urlreq.parseqs(r1.body)
209 r2params = util.urlreq.parseqs(r2.body)
213 r2params = util.urlreq.parseqs(r2.body)
210 for key in r1params:
214 for key in r1params:
211 if key not in r2params:
215 if key not in r2params:
212 return False
216 return False
213 value = r1params[key][0]
217 value = r1params[key][0]
214 # we want to compare json payloads without worrying about ordering
218 # we want to compare json payloads without worrying about ordering
215 if value.startswith(b'{') and value.endswith(b'}'):
219 if value.startswith(b'{') and value.endswith(b'}'):
216 r1json = pycompat.json_loads(value)
220 r1json = pycompat.json_loads(value)
217 r2json = pycompat.json_loads(r2params[key][0])
221 r2json = pycompat.json_loads(r2params[key][0])
218 if r1json != r2json:
222 if r1json != r2json:
219 return False
223 return False
220 elif r2params[key][0] != value:
224 elif r2params[key][0] != value:
221 return False
225 return False
222 return True
226 return True
223
227
224 def sanitiserequest(request):
228 def sanitiserequest(request):
225 request.body = re.sub(
229 request.body = re.sub(
226 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
227 )
231 )
228 return request
232 return request
229
233
230 def sanitiseresponse(response):
234 def sanitiseresponse(response):
231 if 'set-cookie' in response['headers']:
235 if 'set-cookie' in response['headers']:
232 del response['headers']['set-cookie']
236 del response['headers']['set-cookie']
233 return response
237 return response
234
238
235 def decorate(fn):
239 def decorate(fn):
236 def inner(*args, **kwargs):
240 def inner(*args, **kwargs):
237 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
241 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
238 if cassette:
242 if cassette:
239 import hgdemandimport
243 import hgdemandimport
240
244
241 with hgdemandimport.deactivated():
245 with hgdemandimport.deactivated():
242 import vcr as vcrmod
246 import vcr as vcrmod
243 import vcr.stubs as stubs
247 import vcr.stubs as stubs
244
248
245 vcr = vcrmod.VCR(
249 vcr = vcrmod.VCR(
246 serializer='json',
250 serializer='json',
247 before_record_request=sanitiserequest,
251 before_record_request=sanitiserequest,
248 before_record_response=sanitiseresponse,
252 before_record_response=sanitiseresponse,
249 custom_patches=[
253 custom_patches=[
250 (
254 (
251 urlmod,
255 urlmod,
252 'httpconnection',
256 'httpconnection',
253 stubs.VCRHTTPConnection,
257 stubs.VCRHTTPConnection,
254 ),
258 ),
255 (
259 (
256 urlmod,
260 urlmod,
257 'httpsconnection',
261 'httpsconnection',
258 stubs.VCRHTTPSConnection,
262 stubs.VCRHTTPSConnection,
259 ),
263 ),
260 ],
264 ],
261 )
265 )
262 vcr.register_matcher('hgmatcher', hgmatcher)
266 vcr.register_matcher('hgmatcher', hgmatcher)
263 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
264 return fn(*args, **kwargs)
268 return fn(*args, **kwargs)
265 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
266
270
267 cmd = util.checksignature(inner, depth=2)
271 cmd = util.checksignature(inner, depth=2)
268 cmd.__name__ = fn.__name__
272 cmd.__name__ = fn.__name__
269 cmd.__doc__ = fn.__doc__
273 cmd.__doc__ = fn.__doc__
270
274
271 return command(
275 return command(
272 name,
276 name,
273 fullflags,
277 fullflags,
274 spec,
278 spec,
275 helpcategory=helpcategory,
279 helpcategory=helpcategory,
276 optionalrepo=optionalrepo,
280 optionalrepo=optionalrepo,
277 )(cmd)
281 )(cmd)
278
282
279 return decorate
283 return decorate
280
284
281
285
286 def _debug(ui, *msg, **opts):
287 """write debug output for Phabricator if ``phabricator.debug`` is set
288
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
290 printed with the --debug argument.
291 """
292 if ui.configbool(b"phabricator", b"debug"):
293 flag = ui.debugflag
294 try:
295 ui.debugflag = True
296 ui.write(*msg, **opts)
297 finally:
298 ui.debugflag = flag
299
300
282 def urlencodenested(params):
301 def urlencodenested(params):
283 """like urlencode, but works with nested parameters.
302 """like urlencode, but works with nested parameters.
284
303
285 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
286 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
287 urlencode. Note: the encoding is consistent with PHP's http_build_query.
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
288 """
307 """
289 flatparams = util.sortdict()
308 flatparams = util.sortdict()
290
309
291 def process(prefix, obj):
310 def process(prefix, obj):
292 if isinstance(obj, bool):
311 if isinstance(obj, bool):
293 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
294 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
295 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
296 if items is None:
315 if items is None:
297 flatparams[prefix] = obj
316 flatparams[prefix] = obj
298 else:
317 else:
299 for k, v in items(obj):
318 for k, v in items(obj):
300 if prefix:
319 if prefix:
301 process(b'%s[%s]' % (prefix, k), v)
320 process(b'%s[%s]' % (prefix, k), v)
302 else:
321 else:
303 process(k, v)
322 process(k, v)
304
323
305 process(b'', params)
324 process(b'', params)
306 return util.urlreq.urlencode(flatparams)
325 return util.urlreq.urlencode(flatparams)
307
326
308
327
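As a concrete illustration of the docstring above, the flattening plus URL-encoding turns a nested dict into PHP-style bracketed keys, roughly like this (sketch):

    urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
    # -> a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f   (brackets are percent-encoded)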
309 def readurltoken(ui):
328 def readurltoken(ui):
310 """return conduit url, token and make sure they exist
329 """return conduit url, token and make sure they exist
311
330
312 Currently read from [auth] config section. In the future, it might
331 Currently read from [auth] config section. In the future, it might
313 make sense to read from .arcconfig and .arcrc as well.
332 make sense to read from .arcconfig and .arcrc as well.
314 """
333 """
315 url = ui.config(b'phabricator', b'url')
334 url = ui.config(b'phabricator', b'url')
316 if not url:
335 if not url:
317 raise error.Abort(
336 raise error.Abort(
318 _(b'config %s.%s is required') % (b'phabricator', b'url')
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
319 )
338 )
320
339
321 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
322 token = None
341 token = None
323
342
324 if res:
343 if res:
325 group, auth = res
344 group, auth = res
326
345
327 ui.debug(b"using auth.%s.* for authentication\n" % group)
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
328
347
329 token = auth.get(b'phabtoken')
348 token = auth.get(b'phabtoken')
330
349
331 if not token:
350 if not token:
332 raise error.Abort(
351 raise error.Abort(
333 _(b'Can\'t find conduit token associated to %s') % (url,)
352 _(b'Can\'t find conduit token associated to %s') % (url,)
334 )
353 )
335
354
336 return url, token
355 return url, token
337
356
338
357
339 def callconduit(ui, name, params):
358 def callconduit(ui, name, params):
340 """call Conduit API, params is a dict. return json.loads result, or None"""
359 """call Conduit API, params is a dict. return json.loads result, or None"""
341 host, token = readurltoken(ui)
360 host, token = readurltoken(ui)
342 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
343 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
344 params = params.copy()
363 params = params.copy()
345 params[b'__conduit__'] = {
364 params[b'__conduit__'] = {
346 b'token': token,
365 b'token': token,
347 }
366 }
348 rawdata = {
367 rawdata = {
349 b'params': templatefilters.json(params),
368 b'params': templatefilters.json(params),
350 b'output': b'json',
369 b'output': b'json',
351 b'__conduit__': 1,
370 b'__conduit__': 1,
352 }
371 }
353 data = urlencodenested(rawdata)
372 data = urlencodenested(rawdata)
354 curlcmd = ui.config(b'phabricator', b'curlcmd')
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
355 if curlcmd:
374 if curlcmd:
356 sin, sout = procutil.popen2(
375 sin, sout = procutil.popen2(
357 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
358 )
377 )
359 sin.write(data)
378 sin.write(data)
360 sin.close()
379 sin.close()
361 body = sout.read()
380 body = sout.read()
362 else:
381 else:
363 urlopener = urlmod.opener(ui, authinfo)
382 urlopener = urlmod.opener(ui, authinfo)
364 request = util.urlreq.request(pycompat.strurl(url), data=data)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
365 with contextlib.closing(urlopener.open(request)) as rsp:
384 with contextlib.closing(urlopener.open(request)) as rsp:
366 body = rsp.read()
385 body = rsp.read()
367 ui.debug(b'Conduit Response: %s\n' % body)
386 ui.debug(b'Conduit Response: %s\n' % body)
368 parsed = pycompat.rapply(
387 parsed = pycompat.rapply(
369 lambda x: encoding.unitolocal(x)
388 lambda x: encoding.unitolocal(x)
370 if isinstance(x, pycompat.unicode)
389 if isinstance(x, pycompat.unicode)
371 else x,
390 else x,
372 # json.loads only accepts bytes from py3.6+
391 # json.loads only accepts bytes from py3.6+
373 pycompat.json_loads(encoding.unifromlocal(body)),
392 pycompat.json_loads(encoding.unifromlocal(body)),
374 )
393 )
375 if parsed.get(b'error_code'):
394 if parsed.get(b'error_code'):
376 msg = _(b'Conduit Error (%s): %s') % (
395 msg = _(b'Conduit Error (%s): %s') % (
377 parsed[b'error_code'],
396 parsed[b'error_code'],
378 parsed[b'error_info'],
397 parsed[b'error_info'],
379 )
398 )
380 raise error.Abort(msg)
399 raise error.Abort(msg)
381 return parsed[b'result']
400 return parsed[b'result']
382
401
383
402
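When ``phabricator.curlcmd`` is set, the call above is handed off to curl with the form-encoded parameters on stdin. With the sample value from the module docstring, the effective invocation is roughly the following (a sketch; the URL is a placeholder):

    curl --connect-timeout 2 --retry 3 --silent -d @- 'https://phab.example.com/api/differential.querydiffs'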
384 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
385 def debugcallconduit(ui, repo, name):
404 def debugcallconduit(ui, repo, name):
386 """call Conduit API
405 """call Conduit API
387
406
388 Call parameters are read from stdin as a JSON blob. Result will be written
407 Call parameters are read from stdin as a JSON blob. Result will be written
389 to stdout as a JSON blob.
408 to stdout as a JSON blob.
390 """
409 """
391 # json.loads only accepts bytes from 3.6+
410 # json.loads only accepts bytes from 3.6+
392 rawparams = encoding.unifromlocal(ui.fin.read())
411 rawparams = encoding.unifromlocal(ui.fin.read())
393 # json.loads only returns unicode strings
412 # json.loads only returns unicode strings
394 params = pycompat.rapply(
413 params = pycompat.rapply(
395 lambda x: encoding.unitolocal(x)
414 lambda x: encoding.unitolocal(x)
396 if isinstance(x, pycompat.unicode)
415 if isinstance(x, pycompat.unicode)
397 else x,
416 else x,
398 pycompat.json_loads(rawparams),
417 pycompat.json_loads(rawparams),
399 )
418 )
400 # json.dumps only accepts unicode strings
419 # json.dumps only accepts unicode strings
401 result = pycompat.rapply(
420 result = pycompat.rapply(
402 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
403 callconduit(ui, name, params),
422 callconduit(ui, name, params),
404 )
423 )
405 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
406 ui.write(b'%s\n' % encoding.unitolocal(s))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
407
426
408
427
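A quick way to exercise the plumbing above is to feed it an empty parameter blob for a trivial Conduit method (a sketch; ``conduit.ping`` is a standard upstream method, not something defined in this file):

    $ echo '{}' | hg debugcallconduit conduit.ping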
409 def getrepophid(repo):
428 def getrepophid(repo):
410 """given callsign, return repository PHID or None"""
429 """given callsign, return repository PHID or None"""
411 # developer config: phabricator.repophid
430 # developer config: phabricator.repophid
412 repophid = repo.ui.config(b'phabricator', b'repophid')
431 repophid = repo.ui.config(b'phabricator', b'repophid')
413 if repophid:
432 if repophid:
414 return repophid
433 return repophid
415 callsign = repo.ui.config(b'phabricator', b'callsign')
434 callsign = repo.ui.config(b'phabricator', b'callsign')
416 if not callsign:
435 if not callsign:
417 return None
436 return None
418 query = callconduit(
437 query = callconduit(
419 repo.ui,
438 repo.ui,
420 b'diffusion.repository.search',
439 b'diffusion.repository.search',
421 {b'constraints': {b'callsigns': [callsign]}},
440 {b'constraints': {b'callsigns': [callsign]}},
422 )
441 )
423 if len(query[b'data']) == 0:
442 if len(query[b'data']) == 0:
424 return None
443 return None
425 repophid = query[b'data'][0][b'phid']
444 repophid = query[b'data'][0][b'phid']
426 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
427 return repophid
446 return repophid
428
447
429
448
430 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
431 _differentialrevisiondescre = re.compile(
450 _differentialrevisiondescre = re.compile(
432 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
433 )
452 )
434
453
435
454
436 def getoldnodedrevmap(repo, nodelist):
455 def getoldnodedrevmap(repo, nodelist):
437 """find previous nodes that have been sent to Phabricator
456 """find previous nodes that have been sent to Phabricator
438
457
439 return {node: (oldnode, Differential diff, Differential Revision ID)}
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
440 for node in nodelist with known previous sent versions, or associated
459 for node in nodelist with known previous sent versions, or associated
441 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
442 be ``None``.
461 be ``None``.
443
462
444 Examines commit messages like "Differential Revision:" to get the
463 Examines commit messages like "Differential Revision:" to get the
445 association information.
464 association information.
446
465
447 If no such commit message line is found, examines all precursors and their
466 If no such commit message line is found, examines all precursors and their
448 tags. Tags of the form "D1234" are considered a match, and the node with
467 tags. Tags of the form "D1234" are considered a match, and the node with
449 that tag, plus the number after "D" (e.g. 1234), will be returned.
468 that tag, plus the number after "D" (e.g. 1234), will be returned.
450
469
451 The ``old node``, if not None, is guaranteed to be the last diff of
470 The ``old node``, if not None, is guaranteed to be the last diff of
452 the corresponding Differential Revision, and to exist in the repo.
471 the corresponding Differential Revision, and to exist in the repo.
453 """
472 """
454 unfi = repo.unfiltered()
473 unfi = repo.unfiltered()
455 has_node = unfi.changelog.index.has_node
474 has_node = unfi.changelog.index.has_node
456
475
457 result = {} # {node: (oldnode?, lastdiff?, drev)}
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
458 toconfirm = {} # {node: (force, {precnode}, drev)}
477 # ordered for test stability when printing new -> old mapping below
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
459 for node in nodelist:
479 for node in nodelist:
460 ctx = unfi[node]
480 ctx = unfi[node]
461 # For tags like "D123", put them into "toconfirm" to verify later
481 # For tags like "D123", put them into "toconfirm" to verify later
462 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
463 for n in precnodes:
483 for n in precnodes:
464 if has_node(n):
484 if has_node(n):
465 for tag in unfi.nodetags(n):
485 for tag in unfi.nodetags(n):
466 m = _differentialrevisiontagre.match(tag)
486 m = _differentialrevisiontagre.match(tag)
467 if m:
487 if m:
468 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
469 break
489 break
470 else:
490 else:
471 continue # move to next predecessor
491 continue # move to next predecessor
472 break # found a tag, stop
492 break # found a tag, stop
473 else:
493 else:
474 # Check commit message
494 # Check commit message
475 m = _differentialrevisiondescre.search(ctx.description())
495 m = _differentialrevisiondescre.search(ctx.description())
476 if m:
496 if m:
477 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
478
498
479 # Double check if tags are genuine by collecting all old nodes from
499 # Double check if tags are genuine by collecting all old nodes from
480 # Phabricator, and expect precursors overlap with it.
500 # Phabricator, and expect precursors overlap with it.
481 if toconfirm:
501 if toconfirm:
482 drevs = [drev for force, precs, drev in toconfirm.values()]
502 drevs = [drev for force, precs, drev in toconfirm.values()]
483 alldiffs = callconduit(
503 alldiffs = callconduit(
484 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
485 )
505 )
486
506
487 def getnodes(d, precset):
507 def getnodes(d, precset):
488 # Ignore other nodes that were combined into the Differential
508 # Ignore other nodes that were combined into the Differential
489 # that aren't predecessors of the current local node.
509 # that aren't predecessors of the current local node.
490 return [n for n in getlocalcommits(d) if n in precset]
510 return [n for n in getlocalcommits(d) if n in precset]
491
511
492 for newnode, (force, precset, drev) in toconfirm.items():
512 for newnode, (force, precset, drev) in toconfirm.items():
493 diffs = [
513 diffs = [
494 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
495 ]
515 ]
496
516
497 # local predecessors known by Phabricator
517 # local predecessors known by Phabricator
498 phprecset = {n for d in diffs for n in getnodes(d, precset)}
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
499
519
500 # Ignore if precursors (Phabricator and local repo) do not overlap,
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
501 # and force is not set (when commit message says nothing)
521 # and force is not set (when commit message says nothing)
502 if not force and not phprecset:
522 if not force and not phprecset:
503 tagname = b'D%d' % drev
523 tagname = b'D%d' % drev
504 tags.tag(
524 tags.tag(
505 repo,
525 repo,
506 tagname,
526 tagname,
507 nullid,
527 nullid,
508 message=None,
528 message=None,
509 user=None,
529 user=None,
510 date=None,
530 date=None,
511 local=True,
531 local=True,
512 )
532 )
513 unfi.ui.warn(
533 unfi.ui.warn(
514 _(
534 _(
515 b'D%d: local tag removed - does not match '
535 b'D%d: local tag removed - does not match '
516 b'Differential history\n'
536 b'Differential history\n'
517 )
537 )
518 % drev
538 % drev
519 )
539 )
520 continue
540 continue
521
541
522 # Find the last node using Phabricator metadata, and make sure it
542 # Find the last node using Phabricator metadata, and make sure it
523 # exists in the repo
543 # exists in the repo
524 oldnode = lastdiff = None
544 oldnode = lastdiff = None
525 if diffs:
545 if diffs:
526 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
527 oldnodes = getnodes(lastdiff, precset)
547 oldnodes = getnodes(lastdiff, precset)
528
548
549 _debug(
550 unfi.ui,
551 b"%s mapped to old nodes %s\n"
552 % (
553 short(newnode),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
555 ),
556 )
557
529 # If this commit was the result of `hg fold` after submission,
558 # If this commit was the result of `hg fold` after submission,
530 # and now resubmitted with --fold, the easiest thing to do is
559 # and now resubmitted with --fold, the easiest thing to do is
531 # to leave the node clear. This only results in creating a new
560 # to leave the node clear. This only results in creating a new
532 # diff for the _same_ Differential Revision if this commit is
561 # diff for the _same_ Differential Revision if this commit is
533 # the first or last in the selected range.
562 # the first or last in the selected range.
534 # If this commit is the result of `hg split` in the same
563 # If this commit is the result of `hg split` in the same
535 # scenario, there is a single oldnode here (and multiple
564 # scenario, there is a single oldnode here (and multiple
536 # newnodes mapped to it). That makes it the same as the normal
565 # newnodes mapped to it). That makes it the same as the normal
537 # case, as the edges of the newnode range cleanly maps to one
566 # case, as the edges of the newnode range cleanly maps to one
538 # oldnode each.
567 # oldnode each.
539 if len(oldnodes) == 1:
568 if len(oldnodes) == 1:
540 oldnode = oldnodes[0]
569 oldnode = oldnodes[0]
541 if oldnode and not has_node(oldnode):
570 if oldnode and not has_node(oldnode):
542 oldnode = None
571 oldnode = None
543
572
544 result[newnode] = (oldnode, lastdiff, drev)
573 result[newnode] = (oldnode, lastdiff, drev)
545
574
546 return result
575 return result
547
576
548
577
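With ``phabricator.debug`` enabled, the ``_debug()`` call added above emits one line for each node that maps to a known Differential diff, in roughly this shape (illustrative values only):

    1a2b3c4d5e6f mapped to old nodes ['0f1e2d3c4b5a']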
549 def getdrevmap(repo, revs):
578 def getdrevmap(repo, revs):
550 """Return a dict mapping each rev in `revs` to its Differential Revision
579 """Return a dict mapping each rev in `revs` to its Differential Revision
551 ID or None.
580 ID or None.
552 """
581 """
553 result = {}
582 result = {}
554 for rev in revs:
583 for rev in revs:
555 result[rev] = None
584 result[rev] = None
556 ctx = repo[rev]
585 ctx = repo[rev]
557 # Check commit message
586 # Check commit message
558 m = _differentialrevisiondescre.search(ctx.description())
587 m = _differentialrevisiondescre.search(ctx.description())
559 if m:
588 if m:
560 result[rev] = int(m.group('id'))
589 result[rev] = int(m.group('id'))
561 continue
590 continue
562 # Check tags
591 # Check tags
563 for tag in repo.nodetags(ctx.node()):
592 for tag in repo.nodetags(ctx.node()):
564 m = _differentialrevisiontagre.match(tag)
593 m = _differentialrevisiontagre.match(tag)
565 if m:
594 if m:
566 result[rev] = int(m.group(1))
595 result[rev] = int(m.group(1))
567 break
596 break
568
597
569 return result
598 return result
570
599
571
600
572 def getdiff(basectx, ctx, diffopts):
601 def getdiff(basectx, ctx, diffopts):
573 """plain-text diff without header (user, commit message, etc)"""
602 """plain-text diff without header (user, commit message, etc)"""
574 output = util.stringio()
603 output = util.stringio()
575 for chunk, _label in patch.diffui(
604 for chunk, _label in patch.diffui(
576 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
605 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
577 ):
606 ):
578 output.write(chunk)
607 output.write(chunk)
579 return output.getvalue()
608 return output.getvalue()
580
609
581
610
582 class DiffChangeType(object):
611 class DiffChangeType(object):
583 ADD = 1
612 ADD = 1
584 CHANGE = 2
613 CHANGE = 2
585 DELETE = 3
614 DELETE = 3
586 MOVE_AWAY = 4
615 MOVE_AWAY = 4
587 COPY_AWAY = 5
616 COPY_AWAY = 5
588 MOVE_HERE = 6
617 MOVE_HERE = 6
589 COPY_HERE = 7
618 COPY_HERE = 7
590 MULTICOPY = 8
619 MULTICOPY = 8
591
620
592
621
593 class DiffFileType(object):
622 class DiffFileType(object):
594 TEXT = 1
623 TEXT = 1
595 IMAGE = 2
624 IMAGE = 2
596 BINARY = 3
625 BINARY = 3
597
626
598
627
599 @attr.s
628 @attr.s
600 class phabhunk(dict):
629 class phabhunk(dict):
601 """Represents a Differential hunk, which is owned by a Differential change
630 """Represents a Differential hunk, which is owned by a Differential change
602 """
631 """
603
632
604 oldOffset = attr.ib(default=0) # camelcase-required
633 oldOffset = attr.ib(default=0) # camelcase-required
605 oldLength = attr.ib(default=0) # camelcase-required
634 oldLength = attr.ib(default=0) # camelcase-required
606 newOffset = attr.ib(default=0) # camelcase-required
635 newOffset = attr.ib(default=0) # camelcase-required
607 newLength = attr.ib(default=0) # camelcase-required
636 newLength = attr.ib(default=0) # camelcase-required
608 corpus = attr.ib(default='')
637 corpus = attr.ib(default='')
609 # These get added to the phabchange's equivalents
638 # These get added to the phabchange's equivalents
610 addLines = attr.ib(default=0) # camelcase-required
639 addLines = attr.ib(default=0) # camelcase-required
611 delLines = attr.ib(default=0) # camelcase-required
640 delLines = attr.ib(default=0) # camelcase-required
612
641
613
642
614 @attr.s
643 @attr.s
615 class phabchange(object):
644 class phabchange(object):
616 """Represents a Differential change, owns Differential hunks and is owned by a
645 """Represents a Differential change, owns Differential hunks and is owned by a
617 Differential diff. Each one represents one file in a diff.
646 Differential diff. Each one represents one file in a diff.
618 """
647 """
619
648
620 currentPath = attr.ib(default=None) # camelcase-required
649 currentPath = attr.ib(default=None) # camelcase-required
621 oldPath = attr.ib(default=None) # camelcase-required
650 oldPath = attr.ib(default=None) # camelcase-required
622 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
651 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
623 metadata = attr.ib(default=attr.Factory(dict))
652 metadata = attr.ib(default=attr.Factory(dict))
624 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
653 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
625 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
654 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
626 type = attr.ib(default=DiffChangeType.CHANGE)
655 type = attr.ib(default=DiffChangeType.CHANGE)
627 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
656 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
628 commitHash = attr.ib(default=None) # camelcase-required
657 commitHash = attr.ib(default=None) # camelcase-required
629 addLines = attr.ib(default=0) # camelcase-required
658 addLines = attr.ib(default=0) # camelcase-required
630 delLines = attr.ib(default=0) # camelcase-required
659 delLines = attr.ib(default=0) # camelcase-required
631 hunks = attr.ib(default=attr.Factory(list))
660 hunks = attr.ib(default=attr.Factory(list))
632
661
633 def copynewmetadatatoold(self):
662 def copynewmetadatatoold(self):
634 for key in list(self.metadata.keys()):
663 for key in list(self.metadata.keys()):
635 newkey = key.replace(b'new:', b'old:')
664 newkey = key.replace(b'new:', b'old:')
636 self.metadata[newkey] = self.metadata[key]
665 self.metadata[newkey] = self.metadata[key]
637
666
638 def addoldmode(self, value):
667 def addoldmode(self, value):
639 self.oldProperties[b'unix:filemode'] = value
668 self.oldProperties[b'unix:filemode'] = value
640
669
641 def addnewmode(self, value):
670 def addnewmode(self, value):
642 self.newProperties[b'unix:filemode'] = value
671 self.newProperties[b'unix:filemode'] = value
643
672
644 def addhunk(self, hunk):
673 def addhunk(self, hunk):
645 if not isinstance(hunk, phabhunk):
674 if not isinstance(hunk, phabhunk):
646 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
675 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
647 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
676 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
648 # It's useful to include these stats since the Phab web UI shows them,
677 # It's useful to include these stats since the Phab web UI shows them,
649 # and uses them to estimate how large a change a Revision is. Also used
678 # and uses them to estimate how large a change a Revision is. Also used
650 # in email subjects for the [+++--] bit.
679 # in email subjects for the [+++--] bit.
651 self.addLines += hunk.addLines
680 self.addLines += hunk.addLines
652 self.delLines += hunk.delLines
681 self.delLines += hunk.delLines
653
682
654
683
655 @attr.s
684 @attr.s
656 class phabdiff(object):
685 class phabdiff(object):
657 """Represents a Differential diff, owns Differential changes. Corresponds
686 """Represents a Differential diff, owns Differential changes. Corresponds
658 to a commit.
687 to a commit.
659 """
688 """
660
689
661 # Doesn't seem to be any reason to send this (output of uname -n)
690 # Doesn't seem to be any reason to send this (output of uname -n)
662 sourceMachine = attr.ib(default=b'') # camelcase-required
691 sourceMachine = attr.ib(default=b'') # camelcase-required
663 sourcePath = attr.ib(default=b'/') # camelcase-required
692 sourcePath = attr.ib(default=b'/') # camelcase-required
664 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
693 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
665 sourceControlPath = attr.ib(default=b'/') # camelcase-required
694 sourceControlPath = attr.ib(default=b'/') # camelcase-required
666 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
695 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
667 branch = attr.ib(default=b'default')
696 branch = attr.ib(default=b'default')
668 bookmark = attr.ib(default=None)
697 bookmark = attr.ib(default=None)
669 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
698 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
670 lintStatus = attr.ib(default=b'none') # camelcase-required
699 lintStatus = attr.ib(default=b'none') # camelcase-required
671 unitStatus = attr.ib(default=b'none') # camelcase-required
700 unitStatus = attr.ib(default=b'none') # camelcase-required
672 changes = attr.ib(default=attr.Factory(dict))
701 changes = attr.ib(default=attr.Factory(dict))
673 repositoryPHID = attr.ib(default=None) # camelcase-required
702 repositoryPHID = attr.ib(default=None) # camelcase-required
674
703
675 def addchange(self, change):
704 def addchange(self, change):
676 if not isinstance(change, phabchange):
705 if not isinstance(change, phabchange):
677 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
706 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
678 self.changes[change.currentPath] = pycompat.byteskwargs(
707 self.changes[change.currentPath] = pycompat.byteskwargs(
679 attr.asdict(change)
708 attr.asdict(change)
680 )
709 )
681
710
682
711
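To make the relationship between the three classes concrete, here is a minimal sketch of how they compose (it assumes Mercurial is installed and this extension is importable as ``hgext.phabricator``; values are placeholders and nothing is sent anywhere):

    from hgext.phabricator import phabdiff, phabchange, phabhunk

    hunk = phabhunk(
        oldOffset=1, oldLength=1, newOffset=1, newLength=2,
        corpus=b' context\n+added line\n', addLines=1, delLines=0,
    )
    change = phabchange(currentPath=b'foo.txt', oldPath=b'foo.txt')
    change.addhunk(hunk)      # also accumulates addLines/delLines on the change
    diff = phabdiff()
    diff.addchange(change)    # stored in diff.changes, keyed by currentPath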
683 def maketext(pchange, basectx, ctx, fname):
712 def maketext(pchange, basectx, ctx, fname):
684 """populate the phabchange for a text file"""
713 """populate the phabchange for a text file"""
685 repo = ctx.repo()
714 repo = ctx.repo()
686 fmatcher = match.exact([fname])
715 fmatcher = match.exact([fname])
687 diffopts = mdiff.diffopts(git=True, context=32767)
716 diffopts = mdiff.diffopts(git=True, context=32767)
688 _pfctx, _fctx, header, fhunks = next(
717 _pfctx, _fctx, header, fhunks = next(
689 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
718 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
690 )
719 )
691
720
692 for fhunk in fhunks:
721 for fhunk in fhunks:
693 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
722 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
694 corpus = b''.join(lines[1:])
723 corpus = b''.join(lines[1:])
695 shunk = list(header)
724 shunk = list(header)
696 shunk.extend(lines)
725 shunk.extend(lines)
697 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
726 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
698 patch.diffstatdata(util.iterlines(shunk))
727 patch.diffstatdata(util.iterlines(shunk))
699 )
728 )
700 pchange.addhunk(
729 pchange.addhunk(
701 phabhunk(
730 phabhunk(
702 oldOffset,
731 oldOffset,
703 oldLength,
732 oldLength,
704 newOffset,
733 newOffset,
705 newLength,
734 newLength,
706 corpus,
735 corpus,
707 addLines,
736 addLines,
708 delLines,
737 delLines,
709 )
738 )
710 )
739 )
711
740
712
741
713 def uploadchunks(fctx, fphid):
742 def uploadchunks(fctx, fphid):
714 """upload large binary files as separate chunks.
743 """upload large binary files as separate chunks.
715 Phab requests chunking over 8MiB, and splits into 4MiB chunks
744 Phab requests chunking over 8MiB, and splits into 4MiB chunks
716 """
745 """
717 ui = fctx.repo().ui
746 ui = fctx.repo().ui
718 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
747 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
719 with ui.makeprogress(
748 with ui.makeprogress(
720 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
749 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
721 ) as progress:
750 ) as progress:
722 for chunk in chunks:
751 for chunk in chunks:
723 progress.increment()
752 progress.increment()
724 if chunk[b'complete']:
753 if chunk[b'complete']:
725 continue
754 continue
726 bstart = int(chunk[b'byteStart'])
755 bstart = int(chunk[b'byteStart'])
727 bend = int(chunk[b'byteEnd'])
756 bend = int(chunk[b'byteEnd'])
728 callconduit(
757 callconduit(
729 ui,
758 ui,
730 b'file.uploadchunk',
759 b'file.uploadchunk',
731 {
760 {
732 b'filePHID': fphid,
761 b'filePHID': fphid,
733 b'byteStart': bstart,
762 b'byteStart': bstart,
734 b'data': base64.b64encode(fctx.data()[bstart:bend]),
763 b'data': base64.b64encode(fctx.data()[bstart:bend]),
735 b'dataEncoding': b'base64',
764 b'dataEncoding': b'base64',
736 },
765 },
737 )
766 )
738
767
739
768
740 def uploadfile(fctx):
769 def uploadfile(fctx):
741 """upload binary files to Phabricator"""
770 """upload binary files to Phabricator"""
742 repo = fctx.repo()
771 repo = fctx.repo()
743 ui = repo.ui
772 ui = repo.ui
744 fname = fctx.path()
773 fname = fctx.path()
745 size = fctx.size()
774 size = fctx.size()
746 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
775 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
747
776
748 # an allocate call is required first to see if an upload is even required
777 # an allocate call is required first to see if an upload is even required
749 # (Phab might already have it) and to determine if chunking is needed
778 # (Phab might already have it) and to determine if chunking is needed
750 allocateparams = {
779 allocateparams = {
751 b'name': fname,
780 b'name': fname,
752 b'contentLength': size,
781 b'contentLength': size,
753 b'contentHash': fhash,
782 b'contentHash': fhash,
754 }
783 }
755 filealloc = callconduit(ui, b'file.allocate', allocateparams)
784 filealloc = callconduit(ui, b'file.allocate', allocateparams)
756 fphid = filealloc[b'filePHID']
785 fphid = filealloc[b'filePHID']
757
786
758 if filealloc[b'upload']:
787 if filealloc[b'upload']:
759 ui.write(_(b'uploading %s\n') % bytes(fctx))
788 ui.write(_(b'uploading %s\n') % bytes(fctx))
760 if not fphid:
789 if not fphid:
761 uploadparams = {
790 uploadparams = {
762 b'name': fname,
791 b'name': fname,
763 b'data_base64': base64.b64encode(fctx.data()),
792 b'data_base64': base64.b64encode(fctx.data()),
764 }
793 }
765 fphid = callconduit(ui, b'file.upload', uploadparams)
794 fphid = callconduit(ui, b'file.upload', uploadparams)
766 else:
795 else:
767 uploadchunks(fctx, fphid)
796 uploadchunks(fctx, fphid)
768 else:
797 else:
769 ui.debug(b'server already has %s\n' % bytes(fctx))
798 ui.debug(b'server already has %s\n' % bytes(fctx))
770
799
771 if not fphid:
800 if not fphid:
772 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
801 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
773
802
774 return fphid
803 return fphid
775
804
776
805
777 def addoldbinary(pchange, oldfctx, fctx):
806 def addoldbinary(pchange, oldfctx, fctx):
778 """add the metadata for the previous version of a binary file to the
807 """add the metadata for the previous version of a binary file to the
779 phabchange for the new version
808 phabchange for the new version
780
809
781 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
810 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
782 version of the file, or None if the file is being removed.
811 version of the file, or None if the file is being removed.
783 """
812 """
784 if not fctx or fctx.cmp(oldfctx):
813 if not fctx or fctx.cmp(oldfctx):
785 # Files differ, add the old one
814 # Files differ, add the old one
786 pchange.metadata[b'old:file:size'] = oldfctx.size()
815 pchange.metadata[b'old:file:size'] = oldfctx.size()
787 mimeguess, _enc = mimetypes.guess_type(
816 mimeguess, _enc = mimetypes.guess_type(
788 encoding.unifromlocal(oldfctx.path())
817 encoding.unifromlocal(oldfctx.path())
789 )
818 )
790 if mimeguess:
819 if mimeguess:
791 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
820 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
792 mimeguess
821 mimeguess
793 )
822 )
794 fphid = uploadfile(oldfctx)
823 fphid = uploadfile(oldfctx)
795 pchange.metadata[b'old:binary-phid'] = fphid
824 pchange.metadata[b'old:binary-phid'] = fphid
796 else:
825 else:
797 # If it's left as IMAGE/BINARY web UI might try to display it
826 # If it's left as IMAGE/BINARY web UI might try to display it
798 pchange.fileType = DiffFileType.TEXT
827 pchange.fileType = DiffFileType.TEXT
799 pchange.copynewmetadatatoold()
828 pchange.copynewmetadatatoold()
800
829
801
830
802 def makebinary(pchange, fctx):
831 def makebinary(pchange, fctx):
803 """populate the phabchange for a binary file"""
832 """populate the phabchange for a binary file"""
804 pchange.fileType = DiffFileType.BINARY
833 pchange.fileType = DiffFileType.BINARY
805 fphid = uploadfile(fctx)
834 fphid = uploadfile(fctx)
806 pchange.metadata[b'new:binary-phid'] = fphid
835 pchange.metadata[b'new:binary-phid'] = fphid
807 pchange.metadata[b'new:file:size'] = fctx.size()
836 pchange.metadata[b'new:file:size'] = fctx.size()
808 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
837 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
809 if mimeguess:
838 if mimeguess:
810 mimeguess = pycompat.bytestr(mimeguess)
839 mimeguess = pycompat.bytestr(mimeguess)
811 pchange.metadata[b'new:file:mime-type'] = mimeguess
840 pchange.metadata[b'new:file:mime-type'] = mimeguess
812 if mimeguess.startswith(b'image/'):
841 if mimeguess.startswith(b'image/'):
813 pchange.fileType = DiffFileType.IMAGE
842 pchange.fileType = DiffFileType.IMAGE
814
843
815
844
816 # Copied from mercurial/patch.py
845 # Copied from mercurial/patch.py
817 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
846 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
818
847
819
848
820 def notutf8(fctx):
849 def notutf8(fctx):
821 """detect non-UTF-8 text files since Phabricator requires them to be marked
850 """detect non-UTF-8 text files since Phabricator requires them to be marked
822 as binary
851 as binary
823 """
852 """
824 try:
853 try:
825 fctx.data().decode('utf-8')
854 fctx.data().decode('utf-8')
826 return False
855 return False
827 except UnicodeDecodeError:
856 except UnicodeDecodeError:
828 fctx.repo().ui.write(
857 fctx.repo().ui.write(
829 _(b'file %s detected as non-UTF-8, marked as binary\n')
858 _(b'file %s detected as non-UTF-8, marked as binary\n')
830 % fctx.path()
859 % fctx.path()
831 )
860 )
832 return True
861 return True
833
862
834
863
835 def addremoved(pdiff, basectx, ctx, removed):
864 def addremoved(pdiff, basectx, ctx, removed):
836 """add removed files to the phabdiff. Shouldn't include moves"""
865 """add removed files to the phabdiff. Shouldn't include moves"""
837 for fname in removed:
866 for fname in removed:
838 pchange = phabchange(
867 pchange = phabchange(
839 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
868 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
840 )
869 )
841 oldfctx = basectx.p1()[fname]
870 oldfctx = basectx.p1()[fname]
842 pchange.addoldmode(gitmode[oldfctx.flags()])
871 pchange.addoldmode(gitmode[oldfctx.flags()])
843 if not (oldfctx.isbinary() or notutf8(oldfctx)):
872 if not (oldfctx.isbinary() or notutf8(oldfctx)):
844 maketext(pchange, basectx, ctx, fname)
873 maketext(pchange, basectx, ctx, fname)
845
874
846 pdiff.addchange(pchange)
875 pdiff.addchange(pchange)
847
876
848
877
849 def addmodified(pdiff, basectx, ctx, modified):
878 def addmodified(pdiff, basectx, ctx, modified):
850 """add modified files to the phabdiff"""
879 """add modified files to the phabdiff"""
851 for fname in modified:
880 for fname in modified:
852 fctx = ctx[fname]
881 fctx = ctx[fname]
853 oldfctx = basectx.p1()[fname]
882 oldfctx = basectx.p1()[fname]
854 pchange = phabchange(currentPath=fname, oldPath=fname)
883 pchange = phabchange(currentPath=fname, oldPath=fname)
855 filemode = gitmode[fctx.flags()]
884 filemode = gitmode[fctx.flags()]
856 originalmode = gitmode[oldfctx.flags()]
885 originalmode = gitmode[oldfctx.flags()]
857 if filemode != originalmode:
886 if filemode != originalmode:
858 pchange.addoldmode(originalmode)
887 pchange.addoldmode(originalmode)
859 pchange.addnewmode(filemode)
888 pchange.addnewmode(filemode)
860
889
861 if (
890 if (
862 fctx.isbinary()
891 fctx.isbinary()
863 or notutf8(fctx)
892 or notutf8(fctx)
864 or oldfctx.isbinary()
893 or oldfctx.isbinary()
865 or notutf8(oldfctx)
894 or notutf8(oldfctx)
866 ):
895 ):
867 makebinary(pchange, fctx)
896 makebinary(pchange, fctx)
868 addoldbinary(pchange, oldfctx, fctx)
897 addoldbinary(pchange, oldfctx, fctx)
869 else:
898 else:
870 maketext(pchange, basectx, ctx, fname)
899 maketext(pchange, basectx, ctx, fname)
871
900
872 pdiff.addchange(pchange)
901 pdiff.addchange(pchange)
873
902
874
903
875 def addadded(pdiff, basectx, ctx, added, removed):
904 def addadded(pdiff, basectx, ctx, added, removed):
876 """add file adds to the phabdiff, both new files and copies/moves"""
905 """add file adds to the phabdiff, both new files and copies/moves"""
877 # Keep track of files that've been recorded as moved/copied, so if there are
906 # Keep track of files that've been recorded as moved/copied, so if there are
878 # additional copies we can mark them (moves get removed from removed)
907 # additional copies we can mark them (moves get removed from removed)
879 copiedchanges = {}
908 copiedchanges = {}
880 movedchanges = {}
909 movedchanges = {}
881
910
882 copy = {}
911 copy = {}
883 if basectx != ctx:
912 if basectx != ctx:
884 copy = copies.pathcopies(basectx.p1(), ctx)
913 copy = copies.pathcopies(basectx.p1(), ctx)
885
914
886 for fname in added:
915 for fname in added:
887 fctx = ctx[fname]
916 fctx = ctx[fname]
888 oldfctx = None
917 oldfctx = None
889 pchange = phabchange(currentPath=fname)
918 pchange = phabchange(currentPath=fname)
890
919
891 filemode = gitmode[fctx.flags()]
920 filemode = gitmode[fctx.flags()]
892
921
893 if copy:
922 if copy:
894 originalfname = copy.get(fname, fname)
923 originalfname = copy.get(fname, fname)
895 else:
924 else:
896 originalfname = fname
925 originalfname = fname
897 if fctx.renamed():
926 if fctx.renamed():
898 originalfname = fctx.renamed()[0]
927 originalfname = fctx.renamed()[0]
899
928
900 renamed = fname != originalfname
929 renamed = fname != originalfname
901
930
902 if renamed:
931 if renamed:
903 oldfctx = basectx.p1()[originalfname]
932 oldfctx = basectx.p1()[originalfname]
904 originalmode = gitmode[oldfctx.flags()]
933 originalmode = gitmode[oldfctx.flags()]
905 pchange.oldPath = originalfname
934 pchange.oldPath = originalfname
906
935
907 if originalfname in removed:
936 if originalfname in removed:
908 origpchange = phabchange(
937 origpchange = phabchange(
909 currentPath=originalfname,
938 currentPath=originalfname,
910 oldPath=originalfname,
939 oldPath=originalfname,
911 type=DiffChangeType.MOVE_AWAY,
940 type=DiffChangeType.MOVE_AWAY,
912 awayPaths=[fname],
941 awayPaths=[fname],
913 )
942 )
914 movedchanges[originalfname] = origpchange
943 movedchanges[originalfname] = origpchange
915 removed.remove(originalfname)
944 removed.remove(originalfname)
916 pchange.type = DiffChangeType.MOVE_HERE
945 pchange.type = DiffChangeType.MOVE_HERE
917 elif originalfname in movedchanges:
946 elif originalfname in movedchanges:
918 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
947 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
919 movedchanges[originalfname].awayPaths.append(fname)
948 movedchanges[originalfname].awayPaths.append(fname)
920 pchange.type = DiffChangeType.COPY_HERE
949 pchange.type = DiffChangeType.COPY_HERE
921 else: # pure copy
950 else: # pure copy
922 if originalfname not in copiedchanges:
951 if originalfname not in copiedchanges:
923 origpchange = phabchange(
952 origpchange = phabchange(
924 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
953 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
925 )
954 )
926 copiedchanges[originalfname] = origpchange
955 copiedchanges[originalfname] = origpchange
927 else:
956 else:
928 origpchange = copiedchanges[originalfname]
957 origpchange = copiedchanges[originalfname]
929 origpchange.awayPaths.append(fname)
958 origpchange.awayPaths.append(fname)
930 pchange.type = DiffChangeType.COPY_HERE
959 pchange.type = DiffChangeType.COPY_HERE
931
960
932 if filemode != originalmode:
961 if filemode != originalmode:
933 pchange.addoldmode(originalmode)
962 pchange.addoldmode(originalmode)
934 pchange.addnewmode(filemode)
963 pchange.addnewmode(filemode)
935 else: # Brand-new file
964 else: # Brand-new file
936 pchange.addnewmode(gitmode[fctx.flags()])
965 pchange.addnewmode(gitmode[fctx.flags()])
937 pchange.type = DiffChangeType.ADD
966 pchange.type = DiffChangeType.ADD
938
967
939 if (
968 if (
940 fctx.isbinary()
969 fctx.isbinary()
941 or notutf8(fctx)
970 or notutf8(fctx)
942 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
971 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
943 ):
972 ):
944 makebinary(pchange, fctx)
973 makebinary(pchange, fctx)
945 if renamed:
974 if renamed:
946 addoldbinary(pchange, oldfctx, fctx)
975 addoldbinary(pchange, oldfctx, fctx)
947 else:
976 else:
948 maketext(pchange, basectx, ctx, fname)
977 maketext(pchange, basectx, ctx, fname)
949
978
950 pdiff.addchange(pchange)
979 pdiff.addchange(pchange)
951
980
952 for _path, copiedchange in copiedchanges.items():
981 for _path, copiedchange in copiedchanges.items():
953 pdiff.addchange(copiedchange)
982 pdiff.addchange(copiedchange)
954 for _path, movedchange in movedchanges.items():
983 for _path, movedchange in movedchanges.items():
955 pdiff.addchange(movedchange)
984 pdiff.addchange(movedchange)
956
985
957
986
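# --- Illustrative sketch (not part of the extension): the move/copy
# bookkeeping in addadded() above boils down to the following decision tree.
# The helper name ``classify_add`` and the plain-dict bookkeeping are
# hypothetical; the real code records phabchange objects instead.
def classify_add(fname, originalfname, removed, movedchanges, copiedchanges):
    if originalfname == fname:
        return 'ADD'  # brand-new file
    if originalfname in removed:
        removed.remove(originalfname)
        movedchanges[originalfname] = [fname]  # source becomes MOVE_AWAY
        return 'MOVE_HERE'
    if originalfname in movedchanges:
        movedchanges[originalfname].append(fname)  # source upgraded to MULTICOPY
        return 'COPY_HERE'
    copiedchanges.setdefault(originalfname, []).append(fname)  # COPY_AWAY
    return 'COPY_HERE'

# Example: 'a' is moved to 'b' and additionally copied to 'c':
removed, moved, copied = {'a'}, {}, {}
assert classify_add('b', 'a', removed, moved, copied) == 'MOVE_HERE'
assert classify_add('c', 'a', removed, moved, copied) == 'COPY_HERE'
assert moved == {'a': ['b', 'c']} and not removed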
958 def creatediff(basectx, ctx):
987 def creatediff(basectx, ctx):
959 """create a Differential Diff"""
988 """create a Differential Diff"""
960 repo = ctx.repo()
989 repo = ctx.repo()
961 repophid = getrepophid(repo)
990 repophid = getrepophid(repo)
962 # Create a "Differential Diff" via "differential.creatediff" API
991 # Create a "Differential Diff" via "differential.creatediff" API
963 pdiff = phabdiff(
992 pdiff = phabdiff(
964 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
993 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
965 branch=b'%s' % ctx.branch(),
994 branch=b'%s' % ctx.branch(),
966 )
995 )
967 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
996 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
968 # addadded will remove moved files from removed, so addremoved won't get
997 # addadded will remove moved files from removed, so addremoved won't get
969 # them
998 # them
970 addadded(pdiff, basectx, ctx, added, removed)
999 addadded(pdiff, basectx, ctx, added, removed)
971 addmodified(pdiff, basectx, ctx, modified)
1000 addmodified(pdiff, basectx, ctx, modified)
972 addremoved(pdiff, basectx, ctx, removed)
1001 addremoved(pdiff, basectx, ctx, removed)
973 if repophid:
1002 if repophid:
974 pdiff.repositoryPHID = repophid
1003 pdiff.repositoryPHID = repophid
975 diff = callconduit(
1004 diff = callconduit(
976 repo.ui,
1005 repo.ui,
977 b'differential.creatediff',
1006 b'differential.creatediff',
978 pycompat.byteskwargs(attr.asdict(pdiff)),
1007 pycompat.byteskwargs(attr.asdict(pdiff)),
979 )
1008 )
980 if not diff:
1009 if not diff:
981 if basectx != ctx:
1010 if basectx != ctx:
982 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
1011 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
983 else:
1012 else:
984 msg = _(b'cannot create diff for %s') % ctx
1013 msg = _(b'cannot create diff for %s') % ctx
985 raise error.Abort(msg)
1014 raise error.Abort(msg)
986 return diff
1015 return diff
987
1016
988
1017
989 def writediffproperties(ctxs, diff):
1018 def writediffproperties(ctxs, diff):
990 """write metadata to diff so patches could be applied losslessly
1019 """write metadata to diff so patches could be applied losslessly
991
1020
992 ``ctxs`` is the list of commits that created the diff, in ascending order.
1021 ``ctxs`` is the list of commits that created the diff, in ascending order.
993 The list is generally a single commit, but may be several when using
1022 The list is generally a single commit, but may be several when using
994 ``phabsend --fold``.
1023 ``phabsend --fold``.
995 """
1024 """
996 # creatediff returns with a diffid but query returns with an id
1025 # creatediff returns with a diffid but query returns with an id
997 diffid = diff.get(b'diffid', diff.get(b'id'))
1026 diffid = diff.get(b'diffid', diff.get(b'id'))
998 basectx = ctxs[0]
1027 basectx = ctxs[0]
999 tipctx = ctxs[-1]
1028 tipctx = ctxs[-1]
1000
1029
1001 params = {
1030 params = {
1002 b'diff_id': diffid,
1031 b'diff_id': diffid,
1003 b'name': b'hg:meta',
1032 b'name': b'hg:meta',
1004 b'data': templatefilters.json(
1033 b'data': templatefilters.json(
1005 {
1034 {
1006 b'user': tipctx.user(),
1035 b'user': tipctx.user(),
1007 b'date': b'%d %d' % tipctx.date(),
1036 b'date': b'%d %d' % tipctx.date(),
1008 b'branch': tipctx.branch(),
1037 b'branch': tipctx.branch(),
1009 b'node': tipctx.hex(),
1038 b'node': tipctx.hex(),
1010 b'parent': basectx.p1().hex(),
1039 b'parent': basectx.p1().hex(),
1011 }
1040 }
1012 ),
1041 ),
1013 }
1042 }
1014 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1043 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1015
1044
1016 commits = {}
1045 commits = {}
1017 for ctx in ctxs:
1046 for ctx in ctxs:
1018 commits[ctx.hex()] = {
1047 commits[ctx.hex()] = {
1019 b'author': stringutil.person(ctx.user()),
1048 b'author': stringutil.person(ctx.user()),
1020 b'authorEmail': stringutil.email(ctx.user()),
1049 b'authorEmail': stringutil.email(ctx.user()),
1021 b'time': int(ctx.date()[0]),
1050 b'time': int(ctx.date()[0]),
1022 b'commit': ctx.hex(),
1051 b'commit': ctx.hex(),
1023 b'parents': [ctx.p1().hex()],
1052 b'parents': [ctx.p1().hex()],
1024 b'branch': ctx.branch(),
1053 b'branch': ctx.branch(),
1025 }
1054 }
1026 params = {
1055 params = {
1027 b'diff_id': diffid,
1056 b'diff_id': diffid,
1028 b'name': b'local:commits',
1057 b'name': b'local:commits',
1029 b'data': templatefilters.json(commits),
1058 b'data': templatefilters.json(commits),
1030 }
1059 }
1031 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1060 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1032
1061
1033
1062
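# For reference, a rough sketch of the two property payloads that
# writediffproperties() sends for a single commit. The diff id and the
# truncated hashes below are invented for illustration only.
hg_meta_params = {
    b'diff_id': 42,
    b'name': b'hg:meta',
    b'data': b'{"branch": "default", "date": "1499571514 25200",'
             b' "node": "98c08acae292...", "parent": "6d0abad76b30...",'
             b' "user": "Foo Bar <foo@example.com>"}',
}
local_commits_params = {
    b'diff_id': 42,
    b'name': b'local:commits',
    b'data': b'{"98c08acae292...": {"author": "Foo Bar", "branch": "default"}}',
}
# Each payload is sent via callconduit(ui, b'differential.setdiffproperty', ...).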
1034 def createdifferentialrevision(
1063 def createdifferentialrevision(
1035 ctxs,
1064 ctxs,
1036 revid=None,
1065 revid=None,
1037 parentrevphid=None,
1066 parentrevphid=None,
1038 oldbasenode=None,
1067 oldbasenode=None,
1039 oldnode=None,
1068 oldnode=None,
1040 olddiff=None,
1069 olddiff=None,
1041 actions=None,
1070 actions=None,
1042 comment=None,
1071 comment=None,
1043 ):
1072 ):
1044 """create or update a Differential Revision
1073 """create or update a Differential Revision
1045
1074
1046 If revid is None, create a new Differential Revision, otherwise update
1075 If revid is None, create a new Differential Revision, otherwise update
1047 revid. If parentrevphid is not None, set it as a dependency.
1076 revid. If parentrevphid is not None, set it as a dependency.
1048
1077
1049 If there is a single commit for the new Differential Revision, ``ctxs`` will
1078 If there is a single commit for the new Differential Revision, ``ctxs`` will
1050 be a list of that single context. Otherwise, it is a list that covers the
1079 be a list of that single context. Otherwise, it is a list that covers the
1051 range of changes for the differential, where ``ctxs[0]`` is the first change
1080 range of changes for the differential, where ``ctxs[0]`` is the first change
1052 to include and ``ctxs[-1]`` is the last.
1081 to include and ``ctxs[-1]`` is the last.
1053
1082
1054 If oldnode is not None, check if the patch content (without commit message
1083 If oldnode is not None, check if the patch content (without commit message
1055 and metadata) has changed before creating another diff. For a Revision with
1084 and metadata) has changed before creating another diff. For a Revision with
1056 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1085 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1057 Revision covering multiple commits, ``oldbasenode`` corresponds to
1086 Revision covering multiple commits, ``oldbasenode`` corresponds to
1058 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1087 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1059 corresponds to ``ctxs[-1]``.
1088 corresponds to ``ctxs[-1]``.
1060
1089
1061 If actions is not None, they will be appended to the transaction.
1090 If actions is not None, they will be appended to the transaction.
1062 """
1091 """
1063 ctx = ctxs[-1]
1092 ctx = ctxs[-1]
1064 basectx = ctxs[0]
1093 basectx = ctxs[0]
1065
1094
1066 repo = ctx.repo()
1095 repo = ctx.repo()
1067 if oldnode:
1096 if oldnode:
1068 diffopts = mdiff.diffopts(git=True, context=32767)
1097 diffopts = mdiff.diffopts(git=True, context=32767)
1069 unfi = repo.unfiltered()
1098 unfi = repo.unfiltered()
1070 oldctx = unfi[oldnode]
1099 oldctx = unfi[oldnode]
1071 oldbasectx = unfi[oldbasenode]
1100 oldbasectx = unfi[oldbasenode]
1072 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1101 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1073 oldbasectx, oldctx, diffopts
1102 oldbasectx, oldctx, diffopts
1074 )
1103 )
1075 else:
1104 else:
1076 neednewdiff = True
1105 neednewdiff = True
1077
1106
1078 transactions = []
1107 transactions = []
1079 if neednewdiff:
1108 if neednewdiff:
1080 diff = creatediff(basectx, ctx)
1109 diff = creatediff(basectx, ctx)
1081 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1110 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1082 if comment:
1111 if comment:
1083 transactions.append({b'type': b'comment', b'value': comment})
1112 transactions.append({b'type': b'comment', b'value': comment})
1084 else:
1113 else:
1085 # Even if we don't need to upload a new diff because the patch content
1114 # Even if we don't need to upload a new diff because the patch content
1086 # did not change, we might still need to update its metadata so
1115 # did not change, we might still need to update its metadata so
1087 # pushers know the correct node metadata.
1116 # pushers know the correct node metadata.
1088 assert olddiff
1117 assert olddiff
1089 diff = olddiff
1118 diff = olddiff
1090 writediffproperties(ctxs, diff)
1119 writediffproperties(ctxs, diff)
1091
1120
1092 # Set the parent Revision every time, so commit re-ordering is picked up
1121 # Set the parent Revision every time, so commit re-ordering is picked up
1093 if parentrevphid:
1122 if parentrevphid:
1094 transactions.append(
1123 transactions.append(
1095 {b'type': b'parents.set', b'value': [parentrevphid]}
1124 {b'type': b'parents.set', b'value': [parentrevphid]}
1096 )
1125 )
1097
1126
1098 if actions:
1127 if actions:
1099 transactions += actions
1128 transactions += actions
1100
1129
1101 # When folding multiple local commits into a single review, arcanist will
1130 # When folding multiple local commits into a single review, arcanist will
1102 # take the summary line of the first commit as the title, and then
1131 # take the summary line of the first commit as the title, and then
1103 # concatenate the rest of the remaining messages (including each of their
1132 # concatenate the rest of the remaining messages (including each of their
1104 # first lines) to the rest of the first commit message (each separated by
1133 # first lines) to the rest of the first commit message (each separated by
1105 # an empty line), and use that as the summary field. Do the same here.
1134 # an empty line), and use that as the summary field. Do the same here.
1106 # For commits with only a one line message, there is no summary field, as
1135 # For commits with only a one line message, there is no summary field, as
1107 # this gets assigned to the title.
1136 # this gets assigned to the title.
1108 fields = util.sortdict() # sorted for stable wire protocol in tests
1137 fields = util.sortdict() # sorted for stable wire protocol in tests
1109
1138
1110 for i, _ctx in enumerate(ctxs):
1139 for i, _ctx in enumerate(ctxs):
1111 # Parse commit message and update related fields.
1140 # Parse commit message and update related fields.
1112 desc = _ctx.description()
1141 desc = _ctx.description()
1113 info = callconduit(
1142 info = callconduit(
1114 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1143 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1115 )
1144 )
1116
1145
1117 for k in [b'title', b'summary', b'testPlan']:
1146 for k in [b'title', b'summary', b'testPlan']:
1118 v = info[b'fields'].get(k)
1147 v = info[b'fields'].get(k)
1119 if not v:
1148 if not v:
1120 continue
1149 continue
1121
1150
1122 if i == 0:
1151 if i == 0:
1123 # Title, summary and test plan (if present) are taken verbatim
1152 # Title, summary and test plan (if present) are taken verbatim
1124 # for the first commit.
1153 # for the first commit.
1125 fields[k] = v.rstrip()
1154 fields[k] = v.rstrip()
1126 continue
1155 continue
1127 elif k == b'title':
1156 elif k == b'title':
1128 # Add subsequent titles (i.e. the first line of the commit
1157 # Add subsequent titles (i.e. the first line of the commit
1129 # message) back to the summary.
1158 # message) back to the summary.
1130 k = b'summary'
1159 k = b'summary'
1131
1160
1132 # Append any current field to the existing composite field
1161 # Append any current field to the existing composite field
1133 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1162 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1134
1163
1135 for k, v in fields.items():
1164 for k, v in fields.items():
1136 transactions.append({b'type': k, b'value': v})
1165 transactions.append({b'type': k, b'value': v})
1137
1166
1138 params = {b'transactions': transactions}
1167 params = {b'transactions': transactions}
1139 if revid is not None:
1168 if revid is not None:
1140 # Update an existing Differential Revision
1169 # Update an existing Differential Revision
1141 params[b'objectIdentifier'] = revid
1170 params[b'objectIdentifier'] = revid
1142
1171
1143 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1172 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1144 if not revision:
1173 if not revision:
1145 if len(ctxs) == 1:
1174 if len(ctxs) == 1:
1146 msg = _(b'cannot create revision for %s') % ctx
1175 msg = _(b'cannot create revision for %s') % ctx
1147 else:
1176 else:
1148 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1177 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1149 raise error.Abort(msg)
1178 raise error.Abort(msg)
1150
1179
1151 return revision, diff
1180 return revision, diff
1152
1181
1153
1182
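# Illustration of the --fold message handling inside
# createdifferentialrevision() above, with invented commit messages:
#   commit 1: "add feature X\n\ndetails about X"
#   commit 2: "fix feature X\n\nmore details"
# the resulting transaction fields would be roughly:
folded_fields = {
    b'title': b'add feature X',
    b'summary': b'details about X\n\nfix feature X\n\nmore details',
}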
1154 def userphids(ui, names):
1183 def userphids(ui, names):
1155 """convert user names to PHIDs"""
1184 """convert user names to PHIDs"""
1156 names = [name.lower() for name in names]
1185 names = [name.lower() for name in names]
1157 query = {b'constraints': {b'usernames': names}}
1186 query = {b'constraints': {b'usernames': names}}
1158 result = callconduit(ui, b'user.search', query)
1187 result = callconduit(ui, b'user.search', query)
1159 # A username that is not found is not an API error, so check whether any
1188 # A username that is not found is not an API error, so check whether any
1160 # names were missed here.
1189 # names were missed here.
1161 data = result[b'data']
1190 data = result[b'data']
1162 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1191 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1163 unresolved = set(names) - resolved
1192 unresolved = set(names) - resolved
1164 if unresolved:
1193 if unresolved:
1165 raise error.Abort(
1194 raise error.Abort(
1166 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1195 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1167 )
1196 )
1168 return [entry[b'phid'] for entry in data]
1197 return [entry[b'phid'] for entry in data]
1169
1198
1170
1199
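# Usage sketch for userphids(), mirroring how phabsend builds its
# reviewers.add transaction below (the usernames are hypothetical):
phids = userphids(repo.ui, [b'alice', b'bob'])
phids += [b'blocking(%s)' % phid for phid in userphids(repo.ui, [b'carol'])]
actions = [{b'type': b'reviewers.add', b'value': phids}]
# An unknown username raises error.Abort with 'unknown username: ...'.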
1171 def _print_phabsend_action(ui, ctx, newrevid, action):
1200 def _print_phabsend_action(ui, ctx, newrevid, action):
1172 """print the ``action`` that occurred when posting ``ctx`` for review
1201 """print the ``action`` that occurred when posting ``ctx`` for review
1173
1202
1174 This is a utility function for the sending phase of ``phabsend``, which
1203 This is a utility function for the sending phase of ``phabsend``, which
1175 makes it easier to show a status for all local commits with ``--fold``.
1204 makes it easier to show a status for all local commits with ``--fold``.
1176 """
1205 """
1177 actiondesc = ui.label(
1206 actiondesc = ui.label(
1178 {
1207 {
1179 b'created': _(b'created'),
1208 b'created': _(b'created'),
1180 b'skipped': _(b'skipped'),
1209 b'skipped': _(b'skipped'),
1181 b'updated': _(b'updated'),
1210 b'updated': _(b'updated'),
1182 }[action],
1211 }[action],
1183 b'phabricator.action.%s' % action,
1212 b'phabricator.action.%s' % action,
1184 )
1213 )
1185 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1214 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1186 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1215 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1187 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1216 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1188 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1217 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1189
1218
1190
1219
1191 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1220 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1192 """update the local commit list for the ``diff`` associated with ``drevid``
1221 """update the local commit list for the ``diff`` associated with ``drevid``
1193
1222
1194 This is a utility function for the amend phase of ``phabsend``, which
1223 This is a utility function for the amend phase of ``phabsend``, which
1195 converts failures to warning messages.
1224 converts failures to warning messages.
1196 """
1225 """
1226 _debug(
1227 unfi.ui,
1228 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1229 )
1230
1197 try:
1231 try:
1198 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1232 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1199 except util.urlerr.urlerror:
1233 except util.urlerr.urlerror:
1200 # If it fails just warn and keep going, otherwise the DREV
1234 # If it fails just warn and keep going, otherwise the DREV
1201 # associations will be lost
1235 # associations will be lost
1202 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1236 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1203
1237
1204
1238
1205 @vcrcommand(
1239 @vcrcommand(
1206 b'phabsend',
1240 b'phabsend',
1207 [
1241 [
1208 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1242 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1209 (b'', b'amend', True, _(b'update commit messages')),
1243 (b'', b'amend', True, _(b'update commit messages')),
1210 (b'', b'reviewer', [], _(b'specify reviewers')),
1244 (b'', b'reviewer', [], _(b'specify reviewers')),
1211 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1245 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1212 (
1246 (
1213 b'm',
1247 b'm',
1214 b'comment',
1248 b'comment',
1215 b'',
1249 b'',
1216 _(b'add a comment to Revisions with new/updated Diffs'),
1250 _(b'add a comment to Revisions with new/updated Diffs'),
1217 ),
1251 ),
1218 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1252 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1219 ],
1253 ],
1220 _(b'REV [OPTIONS]'),
1254 _(b'REV [OPTIONS]'),
1221 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1255 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1222 )
1256 )
1223 def phabsend(ui, repo, *revs, **opts):
1257 def phabsend(ui, repo, *revs, **opts):
1224 """upload changesets to Phabricator
1258 """upload changesets to Phabricator
1225
1259
1226 If multiple revisions are specified, they will be sent as a stack
1260 If multiple revisions are specified, they will be sent as a stack
1227 with a linear dependency relationship, using the order specified by the
1261 with a linear dependency relationship, using the order specified by the
1228 revset.
1262 revset.
1229
1263
1230 The first time changesets are uploaded, local tags are created to
1264 The first time changesets are uploaded, local tags are created to
1231 maintain the association. On subsequent runs, phabsend checks the
1265 maintain the association. On subsequent runs, phabsend checks the
1232 obsstore and tag information to figure out whether to update an
1266 obsstore and tag information to figure out whether to update an
1233 existing Differential Revision, or create a new one.
1267 existing Differential Revision, or create a new one.
1234
1268
1235 If --amend is set, update commit messages so they have the
1269 If --amend is set, update commit messages so they have the
1236 ``Differential Revision`` URL, and remove the related tags. This is similar
1270 ``Differential Revision`` URL, and remove the related tags. This is similar
1237 to what arcanist does, and is preferable in author-push workflows. Otherwise,
1271 to what arcanist does, and is preferable in author-push workflows. Otherwise,
1238 local tags are used to record the ``Differential Revision`` association.
1272 local tags are used to record the ``Differential Revision`` association.
1239
1273
1240 The --confirm option lets you confirm changesets before sending them. You
1274 The --confirm option lets you confirm changesets before sending them. You
1241 can also add the following to your configuration file to make it the default
1275 can also add the following to your configuration file to make it the default
1242 behaviour::
1276 behaviour::
1243
1277
1244 [phabsend]
1278 [phabsend]
1245 confirm = true
1279 confirm = true
1246
1280
1247 phabsend will check obsstore and the above association to decide whether to
1281 phabsend will check obsstore and the above association to decide whether to
1248 update an existing Differential Revision, or create a new one.
1282 update an existing Differential Revision, or create a new one.
1249 """
1283 """
1250 opts = pycompat.byteskwargs(opts)
1284 opts = pycompat.byteskwargs(opts)
1251 revs = list(revs) + opts.get(b'rev', [])
1285 revs = list(revs) + opts.get(b'rev', [])
1252 revs = scmutil.revrange(repo, revs)
1286 revs = scmutil.revrange(repo, revs)
1253 revs.sort() # ascending order to preserve topological parent/child in phab
1287 revs.sort() # ascending order to preserve topological parent/child in phab
1254
1288
1255 if not revs:
1289 if not revs:
1256 raise error.Abort(_(b'phabsend requires at least one changeset'))
1290 raise error.Abort(_(b'phabsend requires at least one changeset'))
1257 if opts.get(b'amend'):
1291 if opts.get(b'amend'):
1258 cmdutil.checkunfinished(repo)
1292 cmdutil.checkunfinished(repo)
1259
1293
1260 # {newnode: (oldnode, olddiff, olddrev)}
1294 # {newnode: (oldnode, olddiff, olddrev)}
1261 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1295 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1262
1296
1263 confirm = ui.configbool(b'phabsend', b'confirm')
1297 confirm = ui.configbool(b'phabsend', b'confirm')
1264 confirm |= bool(opts.get(b'confirm'))
1298 confirm |= bool(opts.get(b'confirm'))
1265 if confirm:
1299 if confirm:
1266 confirmed = _confirmbeforesend(repo, revs, oldmap)
1300 confirmed = _confirmbeforesend(repo, revs, oldmap)
1267 if not confirmed:
1301 if not confirmed:
1268 raise error.Abort(_(b'phabsend cancelled'))
1302 raise error.Abort(_(b'phabsend cancelled'))
1269
1303
1270 actions = []
1304 actions = []
1271 reviewers = opts.get(b'reviewer', [])
1305 reviewers = opts.get(b'reviewer', [])
1272 blockers = opts.get(b'blocker', [])
1306 blockers = opts.get(b'blocker', [])
1273 phids = []
1307 phids = []
1274 if reviewers:
1308 if reviewers:
1275 phids.extend(userphids(repo.ui, reviewers))
1309 phids.extend(userphids(repo.ui, reviewers))
1276 if blockers:
1310 if blockers:
1277 phids.extend(
1311 phids.extend(
1278 map(
1312 map(
1279 lambda phid: b'blocking(%s)' % phid,
1313 lambda phid: b'blocking(%s)' % phid,
1280 userphids(repo.ui, blockers),
1314 userphids(repo.ui, blockers),
1281 )
1315 )
1282 )
1316 )
1283 if phids:
1317 if phids:
1284 actions.append({b'type': b'reviewers.add', b'value': phids})
1318 actions.append({b'type': b'reviewers.add', b'value': phids})
1285
1319
1286 drevids = [] # [int]
1320 drevids = [] # [int]
1287 diffmap = {} # {newnode: diff}
1321 diffmap = {} # {newnode: diff}
1288
1322
1289 # Send patches one by one so we know their Differential Revision PHIDs and
1323 # Send patches one by one so we know their Differential Revision PHIDs and
1290 # can provide dependency relationship
1324 # can provide dependency relationship
1291 lastrevphid = None
1325 lastrevphid = None
1292 for rev in revs:
1326 for rev in revs:
1293 ui.debug(b'sending rev %d\n' % rev)
1327 ui.debug(b'sending rev %d\n' % rev)
1294 ctx = repo[rev]
1328 ctx = repo[rev]
1295
1329
1296 # Get Differential Revision ID
1330 # Get Differential Revision ID
1297 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1331 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1298 oldbasenode = oldnode
1332 oldbasenode = oldnode
1299 if oldnode != ctx.node() or opts.get(b'amend'):
1333 if oldnode != ctx.node() or opts.get(b'amend'):
1300 # Create or update Differential Revision
1334 # Create or update Differential Revision
1301 revision, diff = createdifferentialrevision(
1335 revision, diff = createdifferentialrevision(
1302 [ctx],
1336 [ctx],
1303 revid,
1337 revid,
1304 lastrevphid,
1338 lastrevphid,
1305 oldbasenode,
1339 oldbasenode,
1306 oldnode,
1340 oldnode,
1307 olddiff,
1341 olddiff,
1308 actions,
1342 actions,
1309 opts.get(b'comment'),
1343 opts.get(b'comment'),
1310 )
1344 )
1311 diffmap[ctx.node()] = diff
1345 diffmap[ctx.node()] = diff
1312 newrevid = int(revision[b'object'][b'id'])
1346 newrevid = int(revision[b'object'][b'id'])
1313 newrevphid = revision[b'object'][b'phid']
1347 newrevphid = revision[b'object'][b'phid']
1314 if revid:
1348 if revid:
1315 action = b'updated'
1349 action = b'updated'
1316 else:
1350 else:
1317 action = b'created'
1351 action = b'created'
1318
1352
1319 # Create a local tag to note the association, if the commit message
1353 # Create a local tag to note the association, if the commit message
1320 # does not have it already
1354 # does not have it already
1321 m = _differentialrevisiondescre.search(ctx.description())
1355 m = _differentialrevisiondescre.search(ctx.description())
1322 if not m or int(m.group('id')) != newrevid:
1356 if not m or int(m.group('id')) != newrevid:
1323 tagname = b'D%d' % newrevid
1357 tagname = b'D%d' % newrevid
1324 tags.tag(
1358 tags.tag(
1325 repo,
1359 repo,
1326 tagname,
1360 tagname,
1327 ctx.node(),
1361 ctx.node(),
1328 message=None,
1362 message=None,
1329 user=None,
1363 user=None,
1330 date=None,
1364 date=None,
1331 local=True,
1365 local=True,
1332 )
1366 )
1333 else:
1367 else:
1334 # Nothing changed. But still set "newrevphid" so the next revision
1368 # Nothing changed. But still set "newrevphid" so the next revision
1335 # can depend on this one, and "newrevid" for the summary line.
1369 # can depend on this one, and "newrevid" for the summary line.
1336 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1370 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1337 newrevid = revid
1371 newrevid = revid
1338 action = b'skipped'
1372 action = b'skipped'
1339
1373
1340 drevids.append(newrevid)
1374 drevids.append(newrevid)
1341 lastrevphid = newrevphid
1375 lastrevphid = newrevphid
1342
1376
1343 _print_phabsend_action(ui, ctx, newrevid, action)
1377 _print_phabsend_action(ui, ctx, newrevid, action)
1344
1378
1345 # Update commit messages and remove tags
1379 # Update commit messages and remove tags
1346 if opts.get(b'amend'):
1380 if opts.get(b'amend'):
1347 unfi = repo.unfiltered()
1381 unfi = repo.unfiltered()
1348 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1382 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1349 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1383 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1350 wnode = unfi[b'.'].node()
1384 wnode = unfi[b'.'].node()
1351 mapping = {} # {oldnode: [newnode]}
1385 mapping = {} # {oldnode: [newnode]}
1352 for i, rev in enumerate(revs):
1386 for i, rev in enumerate(revs):
1353 old = unfi[rev]
1387 old = unfi[rev]
1354 drevid = drevids[i]
1388 drevid = drevids[i]
1355 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1389 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1356 newdesc = get_amended_desc(drev, old, False)
1390 newdesc = get_amended_desc(drev, old, False)
1357 # Make sure the commit message contains "Differential Revision"
1391 # Make sure the commit message contains "Differential Revision"
1358 if old.description() != newdesc:
1392 if old.description() != newdesc:
1359 if old.phase() == phases.public:
1393 if old.phase() == phases.public:
1360 ui.warn(
1394 ui.warn(
1361 _(b"warning: not updating public commit %s\n")
1395 _(b"warning: not updating public commit %s\n")
1362 % scmutil.formatchangeid(old)
1396 % scmutil.formatchangeid(old)
1363 )
1397 )
1364 continue
1398 continue
1365 parents = [
1399 parents = [
1366 mapping.get(old.p1().node(), (old.p1(),))[0],
1400 mapping.get(old.p1().node(), (old.p1(),))[0],
1367 mapping.get(old.p2().node(), (old.p2(),))[0],
1401 mapping.get(old.p2().node(), (old.p2(),))[0],
1368 ]
1402 ]
1369 new = context.metadataonlyctx(
1403 new = context.metadataonlyctx(
1370 repo,
1404 repo,
1371 old,
1405 old,
1372 parents=parents,
1406 parents=parents,
1373 text=newdesc,
1407 text=newdesc,
1374 user=old.user(),
1408 user=old.user(),
1375 date=old.date(),
1409 date=old.date(),
1376 extra=old.extra(),
1410 extra=old.extra(),
1377 )
1411 )
1378
1412
1379 newnode = new.commit()
1413 newnode = new.commit()
1380
1414
1381 mapping[old.node()] = [newnode]
1415 mapping[old.node()] = [newnode]
1382
1416
1383 _amend_diff_properties(
1417 _amend_diff_properties(
1384 unfi, drevid, [newnode], diffmap[old.node()]
1418 unfi, drevid, [newnode], diffmap[old.node()]
1385 )
1419 )
1386 # Remove the local tag since it is no longer necessary
1420 # Remove the local tag since it is no longer necessary
1387 tagname = b'D%d' % drevid
1421 tagname = b'D%d' % drevid
1388 if tagname in repo.tags():
1422 if tagname in repo.tags():
1389 tags.tag(
1423 tags.tag(
1390 repo,
1424 repo,
1391 tagname,
1425 tagname,
1392 nullid,
1426 nullid,
1393 message=None,
1427 message=None,
1394 user=None,
1428 user=None,
1395 date=None,
1429 date=None,
1396 local=True,
1430 local=True,
1397 )
1431 )
1398 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1432 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1399 if wnode in mapping:
1433 if wnode in mapping:
1400 unfi.setparents(mapping[wnode][0])
1434 unfi.setparents(mapping[wnode][0])
1401
1435
1402
1436
1403 # Map from "hg:meta" keys to header understood by "hg import". The order is
1403 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1437 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1438 # consistent with "hg export" output.
1405 _metanamemap = util.sortdict(
1439 _metanamemap = util.sortdict(
1406 [
1440 [
1407 (b'user', b'User'),
1441 (b'user', b'User'),
1408 (b'date', b'Date'),
1442 (b'date', b'Date'),
1409 (b'branch', b'Branch'),
1443 (b'branch', b'Branch'),
1410 (b'node', b'Node ID'),
1444 (b'node', b'Node ID'),
1411 (b'parent', b'Parent '),
1445 (b'parent', b'Parent '),
1412 ]
1446 ]
1413 )
1447 )
1414
1448
1415
1449
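# Illustration: readpatch() below combines _metanamemap with the hg:meta
# data shown in the getdiffmeta() docstring to emit patch headers like:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Branch default
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16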
1416 def _confirmbeforesend(repo, revs, oldmap):
1450 def _confirmbeforesend(repo, revs, oldmap):
1417 url, token = readurltoken(repo.ui)
1451 url, token = readurltoken(repo.ui)
1418 ui = repo.ui
1452 ui = repo.ui
1419 for rev in revs:
1453 for rev in revs:
1420 ctx = repo[rev]
1454 ctx = repo[rev]
1421 desc = ctx.description().splitlines()[0]
1455 desc = ctx.description().splitlines()[0]
1422 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1456 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1423 if drevid:
1457 if drevid:
1424 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1458 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1425 else:
1459 else:
1426 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1460 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1427
1461
1428 ui.write(
1462 ui.write(
1429 _(b'%s - %s: %s\n')
1463 _(b'%s - %s: %s\n')
1430 % (
1464 % (
1431 drevdesc,
1465 drevdesc,
1432 ui.label(bytes(ctx), b'phabricator.node'),
1466 ui.label(bytes(ctx), b'phabricator.node'),
1433 ui.label(desc, b'phabricator.desc'),
1467 ui.label(desc, b'phabricator.desc'),
1434 )
1468 )
1435 )
1469 )
1436
1470
1437 if ui.promptchoice(
1471 if ui.promptchoice(
1438 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1472 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1439 ):
1473 ):
1440 return False
1474 return False
1441
1475
1442 return True
1476 return True
1443
1477
1444
1478
1445 _knownstatusnames = {
1479 _knownstatusnames = {
1446 b'accepted',
1480 b'accepted',
1447 b'needsreview',
1481 b'needsreview',
1448 b'needsrevision',
1482 b'needsrevision',
1449 b'closed',
1483 b'closed',
1450 b'abandoned',
1484 b'abandoned',
1451 b'changesplanned',
1485 b'changesplanned',
1452 }
1486 }
1453
1487
1454
1488
1455 def _getstatusname(drev):
1489 def _getstatusname(drev):
1456 """get normalized status name from a Differential Revision"""
1490 """get normalized status name from a Differential Revision"""
1457 return drev[b'statusName'].replace(b' ', b'').lower()
1491 return drev[b'statusName'].replace(b' ', b'').lower()
1458
1492
1459
1493
1460 # Small language to specify differential revisions. Support symbols: (), :X,
1494 # Small language to specify differential revisions. Support symbols: (), :X,
1461 # +, and -.
1495 # +, and -.
1462
1496
1463 _elements = {
1497 _elements = {
1464 # token-type: binding-strength, primary, prefix, infix, suffix
1498 # token-type: binding-strength, primary, prefix, infix, suffix
1465 b'(': (12, None, (b'group', 1, b')'), None, None),
1499 b'(': (12, None, (b'group', 1, b')'), None, None),
1466 b':': (8, None, (b'ancestors', 8), None, None),
1500 b':': (8, None, (b'ancestors', 8), None, None),
1467 b'&': (5, None, None, (b'and_', 5), None),
1501 b'&': (5, None, None, (b'and_', 5), None),
1468 b'+': (4, None, None, (b'add', 4), None),
1502 b'+': (4, None, None, (b'add', 4), None),
1469 b'-': (4, None, None, (b'sub', 4), None),
1503 b'-': (4, None, None, (b'sub', 4), None),
1470 b')': (0, None, None, None, None),
1504 b')': (0, None, None, None, None),
1471 b'symbol': (0, b'symbol', None, None, None),
1505 b'symbol': (0, b'symbol', None, None, None),
1472 b'end': (0, None, None, None, None),
1506 b'end': (0, None, None, None, None),
1473 }
1507 }
1474
1508
1475
1509
1476 def _tokenize(text):
1510 def _tokenize(text):
1477 view = memoryview(text) # zero-copy slice
1511 view = memoryview(text) # zero-copy slice
1478 special = b'():+-& '
1512 special = b'():+-& '
1479 pos = 0
1513 pos = 0
1480 length = len(text)
1514 length = len(text)
1481 while pos < length:
1515 while pos < length:
1482 symbol = b''.join(
1516 symbol = b''.join(
1483 itertools.takewhile(
1517 itertools.takewhile(
1484 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1518 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1485 )
1519 )
1486 )
1520 )
1487 if symbol:
1521 if symbol:
1488 yield (b'symbol', symbol, pos)
1522 yield (b'symbol', symbol, pos)
1489 pos += len(symbol)
1523 pos += len(symbol)
1490 else: # special char, ignore space
1524 else: # special char, ignore space
1491 if text[pos : pos + 1] != b' ':
1525 if text[pos : pos + 1] != b' ':
1492 yield (text[pos : pos + 1], None, pos)
1526 yield (text[pos : pos + 1], None, pos)
1493 pos += 1
1527 pos += 1
1494 yield (b'end', None, pos)
1528 yield (b'end', None, pos)
1495
1529
1496
1530
1497 def _parse(text):
1531 def _parse(text):
1498 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1532 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1499 if pos != len(text):
1533 if pos != len(text):
1500 raise error.ParseError(b'invalid token', pos)
1534 raise error.ParseError(b'invalid token', pos)
1501 return tree
1535 return tree
1502
1536
1503
1537
1504 def _parsedrev(symbol):
1538 def _parsedrev(symbol):
1505 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1539 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1506 if symbol.startswith(b'D') and symbol[1:].isdigit():
1540 if symbol.startswith(b'D') and symbol[1:].isdigit():
1507 return int(symbol[1:])
1541 return int(symbol[1:])
1508 if symbol.isdigit():
1542 if symbol.isdigit():
1509 return int(symbol)
1543 return int(symbol)
1510
1544
1511
1545
1512 def _prefetchdrevs(tree):
1546 def _prefetchdrevs(tree):
1513 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1547 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1514 drevs = set()
1548 drevs = set()
1515 ancestordrevs = set()
1549 ancestordrevs = set()
1516 op = tree[0]
1550 op = tree[0]
1517 if op == b'symbol':
1551 if op == b'symbol':
1518 r = _parsedrev(tree[1])
1552 r = _parsedrev(tree[1])
1519 if r:
1553 if r:
1520 drevs.add(r)
1554 drevs.add(r)
1521 elif op == b'ancestors':
1555 elif op == b'ancestors':
1522 r, a = _prefetchdrevs(tree[1])
1556 r, a = _prefetchdrevs(tree[1])
1523 drevs.update(r)
1557 drevs.update(r)
1524 ancestordrevs.update(r)
1558 ancestordrevs.update(r)
1525 ancestordrevs.update(a)
1559 ancestordrevs.update(a)
1526 else:
1560 else:
1527 for t in tree[1:]:
1561 for t in tree[1:]:
1528 r, a = _prefetchdrevs(t)
1562 r, a = _prefetchdrevs(t)
1529 drevs.update(r)
1563 drevs.update(r)
1530 ancestordrevs.update(a)
1564 ancestordrevs.update(a)
1531 return drevs, ancestordrevs
1565 return drevs, ancestordrevs
1532
1566
1533
1567
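# Continuing the hypothetical spec above, the prefetch sets for b':D2+D5':
#   drevs, ancestordrevs = _prefetchdrevs(_parse(b':D2+D5'))
#   drevs == {2, 5}; ancestordrevs == {2}
# i.e. D2 and D5 are fetched directly, and D2's ancestors are prefetched too.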
1534 def querydrev(ui, spec):
1568 def querydrev(ui, spec):
1535 """return a list of "Differential Revision" dicts
1569 """return a list of "Differential Revision" dicts
1536
1570
1537 spec is a string using a simple query language, see docstring in phabread
1571 spec is a string using a simple query language, see docstring in phabread
1538 for details.
1572 for details.
1539
1573
1540 A "Differential Revision dict" looks like:
1574 A "Differential Revision dict" looks like:
1541
1575
1542 {
1576 {
1543 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1577 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1544 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1578 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1545 "auxiliary": {
1579 "auxiliary": {
1546 "phabricator:depends-on": [
1580 "phabricator:depends-on": [
1547 "PHID-DREV-gbapp366kutjebt7agcd"
1581 "PHID-DREV-gbapp366kutjebt7agcd"
1548 ]
1582 ]
1549 "phabricator:projects": [],
1583 "phabricator:projects": [],
1550 },
1584 },
1551 "branch": "default",
1585 "branch": "default",
1552 "ccs": [],
1586 "ccs": [],
1553 "commits": [],
1587 "commits": [],
1554 "dateCreated": "1499181406",
1588 "dateCreated": "1499181406",
1555 "dateModified": "1499182103",
1589 "dateModified": "1499182103",
1556 "diffs": [
1590 "diffs": [
1557 "3",
1591 "3",
1558 "4",
1592 "4",
1559 ],
1593 ],
1560 "hashes": [],
1594 "hashes": [],
1561 "id": "2",
1595 "id": "2",
1562 "lineCount": "2",
1596 "lineCount": "2",
1563 "phid": "PHID-DREV-672qvysjcczopag46qty",
1597 "phid": "PHID-DREV-672qvysjcczopag46qty",
1564 "properties": {},
1598 "properties": {},
1565 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1599 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1566 "reviewers": [],
1600 "reviewers": [],
1567 "sourcePath": null
1601 "sourcePath": null
1568 "status": "0",
1602 "status": "0",
1569 "statusName": "Needs Review",
1603 "statusName": "Needs Review",
1570 "summary": "",
1604 "summary": "",
1571 "testPlan": "",
1605 "testPlan": "",
1572 "title": "example",
1606 "title": "example",
1573 "uri": "https://phab.example.com/D2",
1607 "uri": "https://phab.example.com/D2",
1574 }
1608 }
1575 """
1609 """
1576 # TODO: replace differential.query and differential.querydiffs with
1610 # TODO: replace differential.query and differential.querydiffs with
1577 # differential.diff.search because the former (and their output) are
1611 # differential.diff.search because the former (and their output) are
1578 # frozen, and planned to be deprecated and removed.
1612 # frozen, and planned to be deprecated and removed.
1579
1613
1580 def fetch(params):
1614 def fetch(params):
1581 """params -> single drev or None"""
1615 """params -> single drev or None"""
1582 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1616 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1583 if key in prefetched:
1617 if key in prefetched:
1584 return prefetched[key]
1618 return prefetched[key]
1585 drevs = callconduit(ui, b'differential.query', params)
1619 drevs = callconduit(ui, b'differential.query', params)
1586 # Fill prefetched with the result
1620 # Fill prefetched with the result
1587 for drev in drevs:
1621 for drev in drevs:
1588 prefetched[drev[b'phid']] = drev
1622 prefetched[drev[b'phid']] = drev
1589 prefetched[int(drev[b'id'])] = drev
1623 prefetched[int(drev[b'id'])] = drev
1590 if key not in prefetched:
1624 if key not in prefetched:
1591 raise error.Abort(
1625 raise error.Abort(
1592 _(b'cannot get Differential Revision %r') % params
1626 _(b'cannot get Differential Revision %r') % params
1593 )
1627 )
1594 return prefetched[key]
1628 return prefetched[key]
1595
1629
1596 def getstack(topdrevids):
1630 def getstack(topdrevids):
1597 """given a top, get a stack from the bottom, [id] -> [id]"""
1631 """given a top, get a stack from the bottom, [id] -> [id]"""
1598 visited = set()
1632 visited = set()
1599 result = []
1633 result = []
1600 queue = [{b'ids': [i]} for i in topdrevids]
1634 queue = [{b'ids': [i]} for i in topdrevids]
1601 while queue:
1635 while queue:
1602 params = queue.pop()
1636 params = queue.pop()
1603 drev = fetch(params)
1637 drev = fetch(params)
1604 if drev[b'id'] in visited:
1638 if drev[b'id'] in visited:
1605 continue
1639 continue
1606 visited.add(drev[b'id'])
1640 visited.add(drev[b'id'])
1607 result.append(int(drev[b'id']))
1641 result.append(int(drev[b'id']))
1608 auxiliary = drev.get(b'auxiliary', {})
1642 auxiliary = drev.get(b'auxiliary', {})
1609 depends = auxiliary.get(b'phabricator:depends-on', [])
1643 depends = auxiliary.get(b'phabricator:depends-on', [])
1610 for phid in depends:
1644 for phid in depends:
1611 queue.append({b'phids': [phid]})
1645 queue.append({b'phids': [phid]})
1612 result.reverse()
1646 result.reverse()
1613 return smartset.baseset(result)
1647 return smartset.baseset(result)
1614
1648
1615 # Initialize prefetch cache
1649 # Initialize prefetch cache
1616 prefetched = {} # {id or phid: drev}
1650 prefetched = {} # {id or phid: drev}
1617
1651
1618 tree = _parse(spec)
1652 tree = _parse(spec)
1619 drevs, ancestordrevs = _prefetchdrevs(tree)
1653 drevs, ancestordrevs = _prefetchdrevs(tree)
1620
1654
1621 # developer config: phabricator.batchsize
1655 # developer config: phabricator.batchsize
1622 batchsize = ui.configint(b'phabricator', b'batchsize')
1656 batchsize = ui.configint(b'phabricator', b'batchsize')
1623
1657
1624 # Prefetch Differential Revisions in batch
1658 # Prefetch Differential Revisions in batch
1625 tofetch = set(drevs)
1659 tofetch = set(drevs)
1626 for r in ancestordrevs:
1660 for r in ancestordrevs:
1627 tofetch.update(range(max(1, r - batchsize), r + 1))
1661 tofetch.update(range(max(1, r - batchsize), r + 1))
1628 if drevs:
1662 if drevs:
1629 fetch({b'ids': list(tofetch)})
1663 fetch({b'ids': list(tofetch)})
1630 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1664 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1631
1665
1632 # Walk through the tree, return smartsets
1666 # Walk through the tree, return smartsets
1633 def walk(tree):
1667 def walk(tree):
1634 op = tree[0]
1668 op = tree[0]
1635 if op == b'symbol':
1669 if op == b'symbol':
1636 drev = _parsedrev(tree[1])
1670 drev = _parsedrev(tree[1])
1637 if drev:
1671 if drev:
1638 return smartset.baseset([drev])
1672 return smartset.baseset([drev])
1639 elif tree[1] in _knownstatusnames:
1673 elif tree[1] in _knownstatusnames:
1640 drevs = [
1674 drevs = [
1641 r
1675 r
1642 for r in validids
1676 for r in validids
1643 if _getstatusname(prefetched[r]) == tree[1]
1677 if _getstatusname(prefetched[r]) == tree[1]
1644 ]
1678 ]
1645 return smartset.baseset(drevs)
1679 return smartset.baseset(drevs)
1646 else:
1680 else:
1647 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1681 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1648 elif op in {b'and_', b'add', b'sub'}:
1682 elif op in {b'and_', b'add', b'sub'}:
1649 assert len(tree) == 3
1683 assert len(tree) == 3
1650 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1684 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1651 elif op == b'group':
1685 elif op == b'group':
1652 return walk(tree[1])
1686 return walk(tree[1])
1653 elif op == b'ancestors':
1687 elif op == b'ancestors':
1654 return getstack(walk(tree[1]))
1688 return getstack(walk(tree[1]))
1655 else:
1689 else:
1656 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1690 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1657
1691
1658 return [prefetched[r] for r in walk(tree)]
1692 return [prefetched[r] for r in walk(tree)]
1659
1693
1660
1694
1661 def getdescfromdrev(drev):
1695 def getdescfromdrev(drev):
1662 """get description (commit message) from "Differential Revision"
1696 """get description (commit message) from "Differential Revision"
1663
1697
1664 This is similar to the differential.getcommitmessage API, but we only care
1698 This is similar to the differential.getcommitmessage API, but we only care
1665 about a few fields: title, summary, test plan, and URL.
1699 about a few fields: title, summary, test plan, and URL.
1666 """
1700 """
1667 title = drev[b'title']
1701 title = drev[b'title']
1668 summary = drev[b'summary'].rstrip()
1702 summary = drev[b'summary'].rstrip()
1669 testplan = drev[b'testPlan'].rstrip()
1703 testplan = drev[b'testPlan'].rstrip()
1670 if testplan:
1704 if testplan:
1671 testplan = b'Test Plan:\n%s' % testplan
1705 testplan = b'Test Plan:\n%s' % testplan
1672 uri = b'Differential Revision: %s' % drev[b'uri']
1706 uri = b'Differential Revision: %s' % drev[b'uri']
1673 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1707 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1674
1708
1675
1709
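# Example, reusing the sample revision from the querydrev() docstring:
#   drev = {b'title': b'example', b'summary': b'', b'testPlan': b'',
#           b'uri': b'https://phab.example.com/D2'}
#   getdescfromdrev(drev)
#   -> b'example\n\nDifferential Revision: https://phab.example.com/D2'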
1676 def get_amended_desc(drev, ctx, folded):
1710 def get_amended_desc(drev, ctx, folded):
1677 """similar to ``getdescfromdrev``, but supports a folded series of commits
1711 """similar to ``getdescfromdrev``, but supports a folded series of commits
1678
1712
1679 This is used when determining if an individual commit needs to have its
1713 This is used when determining if an individual commit needs to have its
1680 message amended after posting it for review. The determination is made for
1714 message amended after posting it for review. The determination is made for
1681 each individual commit, even when they were folded into one review.
1715 each individual commit, even when they were folded into one review.
1682 """
1716 """
1683 if not folded:
1717 if not folded:
1684 return getdescfromdrev(drev)
1718 return getdescfromdrev(drev)
1685
1719
1686 uri = b'Differential Revision: %s' % drev[b'uri']
1720 uri = b'Differential Revision: %s' % drev[b'uri']
1687
1721
1688 # Since the commit messages were combined when posting multiple commits
1722 # Since the commit messages were combined when posting multiple commits
1689 # with --fold, the fields can't be read from Phabricator here, or *all*
1723 # with --fold, the fields can't be read from Phabricator here, or *all*
1690 # affected local revisions will end up with the same commit message after
1724 # affected local revisions will end up with the same commit message after
1691 # the URI is amended in. Append in the DREV line, or update it if it
1725 # the URI is amended in. Append in the DREV line, or update it if it
1692 # exists. At worst, this means commit message or test plan updates on
1726 # exists. At worst, this means commit message or test plan updates on
1693 # Phabricator aren't propagated back to the repository, but that seems
1727 # Phabricator aren't propagated back to the repository, but that seems
1694 # reasonable for the case where local commits are effectively combined
1728 # reasonable for the case where local commits are effectively combined
1695 # in Phabricator.
1729 # in Phabricator.
1696 m = _differentialrevisiondescre.search(ctx.description())
1730 m = _differentialrevisiondescre.search(ctx.description())
1697 if not m:
1731 if not m:
1698 return b'\n\n'.join([ctx.description(), uri])
1732 return b'\n\n'.join([ctx.description(), uri])
1699
1733
1700 return _differentialrevisiondescre.sub(uri, ctx.description())
1734 return _differentialrevisiondescre.sub(uri, ctx.description())
1701
1735
1702
1736
1703 def getlocalcommits(diff):
1737 def getlocalcommits(diff):
1704 """get the set of local commits from a diff object
1738 """get the set of local commits from a diff object
1705
1739
1706 See ``getdiffmeta()`` for an example diff object.
1740 See ``getdiffmeta()`` for an example diff object.
1707 """
1741 """
1708 props = diff.get(b'properties') or {}
1742 props = diff.get(b'properties') or {}
1709 commits = props.get(b'local:commits') or {}
1743 commits = props.get(b'local:commits') or {}
1710 if len(commits) > 1:
1744 if len(commits) > 1:
1711 return {bin(c) for c in commits.keys()}
1745 return {bin(c) for c in commits.keys()}
1712
1746
1713 # Storing the diff metadata predates storing `local:commits`, so continue
1747 # Storing the diff metadata predates storing `local:commits`, so continue
1714 # to use that in the --no-fold case.
1748 # to use that in the --no-fold case.
1715 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1749 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1716
1750
1717
1751
1718 def getdiffmeta(diff):
1752 def getdiffmeta(diff):
1719 """get commit metadata (date, node, user, p1) from a diff object
1753 """get commit metadata (date, node, user, p1) from a diff object
1720
1754
1721 The metadata could be "hg:meta", sent by phabsend, like:
1755 The metadata could be "hg:meta", sent by phabsend, like:
1722
1756
1723 "properties": {
1757 "properties": {
1724 "hg:meta": {
1758 "hg:meta": {
1725 "branch": "default",
1759 "branch": "default",
1726 "date": "1499571514 25200",
1760 "date": "1499571514 25200",
1727 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1761 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1728 "user": "Foo Bar <foo@example.com>",
1762 "user": "Foo Bar <foo@example.com>",
1729 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1763 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1730 }
1764 }
1731 }
1765 }
1732
1766
1733 Or converted from "local:commits", sent by "arc", like:
1767 Or converted from "local:commits", sent by "arc", like:
1734
1768
1735 "properties": {
1769 "properties": {
1736 "local:commits": {
1770 "local:commits": {
1737 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1771 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1738 "author": "Foo Bar",
1772 "author": "Foo Bar",
1739 "authorEmail": "foo@example.com"
1773 "authorEmail": "foo@example.com"
1740 "branch": "default",
1774 "branch": "default",
1741 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1775 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1742 "local": "1000",
1776 "local": "1000",
1743 "message": "...",
1777 "message": "...",
1744 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1778 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1745 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1779 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1746 "summary": "...",
1780 "summary": "...",
1747 "tag": "",
1781 "tag": "",
1748 "time": 1499546314,
1782 "time": 1499546314,
1749 }
1783 }
1750 }
1784 }
1751 }
1785 }
1752
1786
1753 Note: metadata extracted from "local:commits" will lose time zone
1787 Note: metadata extracted from "local:commits" will lose time zone
1754 information.
1788 information.
1755 """
1789 """
1756 props = diff.get(b'properties') or {}
1790 props = diff.get(b'properties') or {}
1757 meta = props.get(b'hg:meta')
1791 meta = props.get(b'hg:meta')
1758 if not meta:
1792 if not meta:
1759 if props.get(b'local:commits'):
1793 if props.get(b'local:commits'):
1760 commit = sorted(props[b'local:commits'].values())[0]
1794 commit = sorted(props[b'local:commits'].values())[0]
1761 meta = {}
1795 meta = {}
1762 if b'author' in commit and b'authorEmail' in commit:
1796 if b'author' in commit and b'authorEmail' in commit:
1763 meta[b'user'] = b'%s <%s>' % (
1797 meta[b'user'] = b'%s <%s>' % (
1764 commit[b'author'],
1798 commit[b'author'],
1765 commit[b'authorEmail'],
1799 commit[b'authorEmail'],
1766 )
1800 )
1767 if b'time' in commit:
1801 if b'time' in commit:
1768 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1802 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1769 if b'branch' in commit:
1803 if b'branch' in commit:
1770 meta[b'branch'] = commit[b'branch']
1804 meta[b'branch'] = commit[b'branch']
1771 node = commit.get(b'commit', commit.get(b'rev'))
1805 node = commit.get(b'commit', commit.get(b'rev'))
1772 if node:
1806 if node:
1773 meta[b'node'] = node
1807 meta[b'node'] = node
1774 if len(commit.get(b'parents', ())) >= 1:
1808 if len(commit.get(b'parents', ())) >= 1:
1775 meta[b'parent'] = commit[b'parents'][0]
1809 meta[b'parent'] = commit[b'parents'][0]
1776 else:
1810 else:
1777 meta = {}
1811 meta = {}
1778 if b'date' not in meta and b'dateCreated' in diff:
1812 if b'date' not in meta and b'dateCreated' in diff:
1779 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1813 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1780 if b'branch' not in meta and diff.get(b'branch'):
1814 if b'branch' not in meta and diff.get(b'branch'):
1781 meta[b'branch'] = diff[b'branch']
1815 meta[b'branch'] = diff[b'branch']
1782 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1816 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1783 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1817 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1784 return meta
1818 return meta
1785
1819
1786
1820
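# Tying the docstring examples together: the "local:commits" form above is
# converted into roughly the following metadata (time zone is lost, as noted):
#   {b'user': b'Foo Bar <foo@example.com>',
#    b'date': b'1499546314 0',
#    b'branch': b'default',
#    b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#    b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}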
1787 def _getdrevs(ui, stack, specs):
1821 def _getdrevs(ui, stack, specs):
1788 """convert user supplied DREVSPECs into "Differential Revision" dicts
1822 """convert user supplied DREVSPECs into "Differential Revision" dicts
1789
1823
1790 See ``hg help phabread`` for how to specify each DREVSPEC.
1824 See ``hg help phabread`` for how to specify each DREVSPEC.
1791 """
1825 """
1792 if len(specs) > 0:
1826 if len(specs) > 0:
1793
1827
1794 def _formatspec(s):
1828 def _formatspec(s):
1795 if stack:
1829 if stack:
1796 s = b':(%s)' % s
1830 s = b':(%s)' % s
1797 return b'(%s)' % s
1831 return b'(%s)' % s
1798
1832
1799 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1833 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1800
1834
1801 drevs = querydrev(ui, spec)
1835 drevs = querydrev(ui, spec)
1802 if drevs:
1836 if drevs:
1803 return drevs
1837 return drevs
1804
1838
1805 raise error.Abort(_(b"empty DREVSPEC set"))
1839 raise error.Abort(_(b"empty DREVSPEC set"))
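For illustration, with hypothetical DREVSPEC values, this is the spec string _getdrevs() assembles before handing it to querydrev():

specs = [b'D123', b'124']
# stack=False  ->  spec == b'(D123)+(124)'
# stack=True   ->  spec == b'(:(D123))+(:(124))'   # the ':' prefix selects each stack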
1806
1840
1807
1841
1808 def readpatch(ui, drevs, write):
1842 def readpatch(ui, drevs, write):
1809 """generate plain-text patch readable by 'hg import'
1843 """generate plain-text patch readable by 'hg import'
1810
1844
1811 write takes a list of (DREV, bytes), where DREV is the differential number
1845 write takes a list of (DREV, bytes), where DREV is the differential number
1812 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1846 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1813 to be imported. drevs is what "querydrev" returns, results of
1847 to be imported. drevs is what "querydrev" returns, results of
1814 "differential.query".
1848 "differential.query".
1815 """
1849 """
1816 # Prefetch hg:meta property for all diffs
1850 # Prefetch hg:meta property for all diffs
1817 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1851 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1818 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1852 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1819
1853
1820 patches = []
1854 patches = []
1821
1855
1822 # Generate patch for each drev
1856 # Generate patch for each drev
1823 for drev in drevs:
1857 for drev in drevs:
1824 ui.note(_(b'reading D%s\n') % drev[b'id'])
1858 ui.note(_(b'reading D%s\n') % drev[b'id'])
1825
1859
1826 diffid = max(int(v) for v in drev[b'diffs'])
1860 diffid = max(int(v) for v in drev[b'diffs'])
1827 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1861 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1828 desc = getdescfromdrev(drev)
1862 desc = getdescfromdrev(drev)
1829 header = b'# HG changeset patch\n'
1863 header = b'# HG changeset patch\n'
1830
1864
1831 # Try to preserve metadata from hg:meta property. Write hg patch
1865 # Try to preserve metadata from hg:meta property. Write hg patch
1832 # headers that can be read by the "import" command. See patchheadermap
1866 # headers that can be read by the "import" command. See patchheadermap
1833 # and extract in mercurial/patch.py for supported headers.
1867 # and extract in mercurial/patch.py for supported headers.
1834 meta = getdiffmeta(diffs[b'%d' % diffid])
1868 meta = getdiffmeta(diffs[b'%d' % diffid])
1835 for k in _metanamemap.keys():
1869 for k in _metanamemap.keys():
1836 if k in meta:
1870 if k in meta:
1837 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1871 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1838
1872
1839 content = b'%s%s\n%s' % (header, desc, body)
1873 content = b'%s%s\n%s' % (header, desc, body)
1840 patches.append((drev[b'id'], content))
1874 patches.append((drev[b'id'], content))
1841
1875
1842 # Write patches to the supplied callback
1876 # Write patches to the supplied callback
1843 write(patches)
1877 write(patches)
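A minimal sketch of a write() callback accepted by readpatch(); phabread below passes essentially this shape but writes to the ui, while this hypothetical variant writes one file per revision:

def write_to_files(patches):
    # patches is a list of (DREV, bytes) pairs,
    # e.g. (b'7915', b'# HG changeset patch\n...')
    for drev, content in patches:
        with open(b'D%s.patch' % drev, 'wb') as fp:
            fp.write(content)

# readpatch(ui, drevs, write_to_files) would then emit D7915.patch, D7916.patch, ...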
1844
1878
1845
1879
1846 @vcrcommand(
1880 @vcrcommand(
1847 b'phabread',
1881 b'phabread',
1848 [(b'', b'stack', False, _(b'read dependencies'))],
1882 [(b'', b'stack', False, _(b'read dependencies'))],
1849 _(b'DREVSPEC... [OPTIONS]'),
1883 _(b'DREVSPEC... [OPTIONS]'),
1850 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1884 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1851 optionalrepo=True,
1885 optionalrepo=True,
1852 )
1886 )
1853 def phabread(ui, repo, *specs, **opts):
1887 def phabread(ui, repo, *specs, **opts):
1854 """print patches from Phabricator suitable for importing
1888 """print patches from Phabricator suitable for importing
1855
1889
1856 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1890 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1857 the number ``123``. It could also have common operators like ``+``, ``-``,
1891 the number ``123``. It could also have common operators like ``+``, ``-``,
1858 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1892 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1859 select a stack. If multiple DREVSPEC values are given, the result is the
1893 select a stack. If multiple DREVSPEC values are given, the result is the
1860 union of each individually evaluated value. No attempt is currently made
1894 union of each individually evaluated value. No attempt is currently made
1861 to reorder the values to run from parent to child.
1895 to reorder the values to run from parent to child.
1862
1896
1863 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1897 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1864 could be used to filter patches by status. For performance reasons, they
1898 could be used to filter patches by status. For performance reasons, they
1865 only represent a subset of non-status selections and cannot be used alone.
1899 only represent a subset of non-status selections and cannot be used alone.
1866
1900
1867 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and excludes
1901 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and excludes
1868 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1902 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1869 stack up to D9.
1903 stack up to D9.
1870
1904
1871 If --stack is given, follow dependency information and read all patches.
1905 If --stack is given, follow dependency information and read all patches.
1872 It is equivalent to the ``:`` operator.
1906 It is equivalent to the ``:`` operator.
1873 """
1907 """
1874 opts = pycompat.byteskwargs(opts)
1908 opts = pycompat.byteskwargs(opts)
1875 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1909 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1876
1910
1877 def _write(patches):
1911 def _write(patches):
1878 for drev, content in patches:
1912 for drev, content in patches:
1879 ui.write(content)
1913 ui.write(content)
1880
1914
1881 readpatch(ui, drevs, _write)
1915 readpatch(ui, drevs, _write)
1882
1916
1883
1917
1884 @vcrcommand(
1918 @vcrcommand(
1885 b'phabimport',
1919 b'phabimport',
1886 [(b'', b'stack', False, _(b'import dependencies as well'))],
1920 [(b'', b'stack', False, _(b'import dependencies as well'))],
1887 _(b'DREVSPEC... [OPTIONS]'),
1921 _(b'DREVSPEC... [OPTIONS]'),
1888 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1922 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1889 )
1923 )
1890 def phabimport(ui, repo, *specs, **opts):
1924 def phabimport(ui, repo, *specs, **opts):
1891 """import patches from Phabricator for the specified Differential Revisions
1925 """import patches from Phabricator for the specified Differential Revisions
1892
1926
1893 The patches are read and applied starting at the parent of the working
1927 The patches are read and applied starting at the parent of the working
1894 directory.
1928 directory.
1895
1929
1896 See ``hg help phabread`` for how to specify DREVSPEC.
1930 See ``hg help phabread`` for how to specify DREVSPEC.
1897 """
1931 """
1898 opts = pycompat.byteskwargs(opts)
1932 opts = pycompat.byteskwargs(opts)
1899
1933
1900 # --bypass avoids losing exec and symlink bits when importing on Windows,
1934 # --bypass avoids losing exec and symlink bits when importing on Windows,
1901 # and allows importing with a dirty wdir. It also aborts instead of leaving
1935 # and allows importing with a dirty wdir. It also aborts instead of leaving
1902 # rejects.
1936 # rejects.
1903 opts[b'bypass'] = True
1937 opts[b'bypass'] = True
1904
1938
1905 # Mandatory default values, synced with commands.import
1939 # Mandatory default values, synced with commands.import
1906 opts[b'strip'] = 1
1940 opts[b'strip'] = 1
1907 opts[b'prefix'] = b''
1941 opts[b'prefix'] = b''
1908 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1942 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1909 opts[b'obsolete'] = False
1943 opts[b'obsolete'] = False
1910
1944
1911 if ui.configbool(b'phabimport', b'secret'):
1945 if ui.configbool(b'phabimport', b'secret'):
1912 opts[b'secret'] = True
1946 opts[b'secret'] = True
1913 if ui.configbool(b'phabimport', b'obsolete'):
1947 if ui.configbool(b'phabimport', b'obsolete'):
1914 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
1948 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
1915
1949
1916 def _write(patches):
1950 def _write(patches):
1917 parents = repo[None].parents()
1951 parents = repo[None].parents()
1918
1952
1919 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1953 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1920 for drev, contents in patches:
1954 for drev, contents in patches:
1921 ui.status(_(b'applying patch from D%s\n') % drev)
1955 ui.status(_(b'applying patch from D%s\n') % drev)
1922
1956
1923 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1957 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1924 msg, node, rej = cmdutil.tryimportone(
1958 msg, node, rej = cmdutil.tryimportone(
1925 ui,
1959 ui,
1926 repo,
1960 repo,
1927 patchdata,
1961 patchdata,
1928 parents,
1962 parents,
1929 opts,
1963 opts,
1930 [],
1964 [],
1931 None, # Never update wdir to another revision
1965 None, # Never update wdir to another revision
1932 )
1966 )
1933
1967
1934 if not node:
1968 if not node:
1935 raise error.Abort(_(b'D%s: no diffs found') % drev)
1969 raise error.Abort(_(b'D%s: no diffs found') % drev)
1936
1970
1937 ui.note(msg + b'\n')
1971 ui.note(msg + b'\n')
1938 parents = [repo[node]]
1972 parents = [repo[node]]
1939
1973
1940 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1974 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1941
1975
1942 readpatch(repo.ui, drevs, _write)
1976 readpatch(repo.ui, drevs, _write)
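Summarizing the settings above as a sketch, the options phabimport effectively hands to cmdutil.tryimportone() look roughly like this (whether secret/obsolete appear depends on the phabimport.* config):

opts = {
    b'bypass': True,     # keep exec/symlink bits on Windows, allow a dirty wdir
    b'strip': 1,         # synced with the commands.import defaults
    b'prefix': b'',
    b'obsolete': False,  # True only when phabimport.obsolete is set (handled by evolve)
    b'secret': True,     # only added when phabimport.secret is set
}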
1943
1977
1944
1978
1945 @vcrcommand(
1979 @vcrcommand(
1946 b'phabupdate',
1980 b'phabupdate',
1947 [
1981 [
1948 (b'', b'accept', False, _(b'accept revisions')),
1982 (b'', b'accept', False, _(b'accept revisions')),
1949 (b'', b'reject', False, _(b'reject revisions')),
1983 (b'', b'reject', False, _(b'reject revisions')),
1950 (b'', b'abandon', False, _(b'abandon revisions')),
1984 (b'', b'abandon', False, _(b'abandon revisions')),
1951 (b'', b'reclaim', False, _(b'reclaim revisions')),
1985 (b'', b'reclaim', False, _(b'reclaim revisions')),
1952 (b'm', b'comment', b'', _(b'comment on the last revision')),
1986 (b'm', b'comment', b'', _(b'comment on the last revision')),
1953 ],
1987 ],
1954 _(b'DREVSPEC... [OPTIONS]'),
1988 _(b'DREVSPEC... [OPTIONS]'),
1955 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1989 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1956 optionalrepo=True,
1990 optionalrepo=True,
1957 )
1991 )
1958 def phabupdate(ui, repo, *specs, **opts):
1992 def phabupdate(ui, repo, *specs, **opts):
1959 """update Differential Revision in batch
1993 """update Differential Revision in batch
1960
1994
1961 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1995 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1962 """
1996 """
1963 opts = pycompat.byteskwargs(opts)
1997 opts = pycompat.byteskwargs(opts)
1964 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1998 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1965 if len(flags) > 1:
1999 if len(flags) > 1:
1966 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2000 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1967
2001
1968 actions = []
2002 actions = []
1969 for f in flags:
2003 for f in flags:
1970 actions.append({b'type': f, b'value': True})
2004 actions.append({b'type': f, b'value': True})
1971
2005
1972 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2006 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1973 for i, drev in enumerate(drevs):
2007 for i, drev in enumerate(drevs):
1974 if i + 1 == len(drevs) and opts.get(b'comment'):
2008 if i + 1 == len(drevs) and opts.get(b'comment'):
1975 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2009 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1976 if actions:
2010 if actions:
1977 params = {
2011 params = {
1978 b'objectIdentifier': drev[b'phid'],
2012 b'objectIdentifier': drev[b'phid'],
1979 b'transactions': actions,
2013 b'transactions': actions,
1980 }
2014 }
1981 callconduit(ui, b'differential.revision.edit', params)
2015 callconduit(ui, b'differential.revision.edit', params)
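As an illustration with hypothetical values, these are the conduit parameters built for `hg phabupdate --accept -m 'LGTM' D7913` when D7913 is the last revision in the set:

params = {
    b'objectIdentifier': b'PHID-DREV-xxxx',  # drev[b'phid'] from differential.query
    b'transactions': [
        {b'type': b'accept', b'value': True},
        {b'type': b'comment', b'value': b'LGTM'},
    ],
}
# passed to callconduit(ui, b'differential.revision.edit', params)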
1982
2016
1983
2017
1984 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2018 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1985 def template_review(context, mapping):
2019 def template_review(context, mapping):
1986 """:phabreview: Object describing the review for this changeset.
2020 """:phabreview: Object describing the review for this changeset.
1987 Has attributes `url` and `id`.
2021 Has attributes `url` and `id`.
1988 """
2022 """
1989 ctx = context.resource(mapping, b'ctx')
2023 ctx = context.resource(mapping, b'ctx')
1990 m = _differentialrevisiondescre.search(ctx.description())
2024 m = _differentialrevisiondescre.search(ctx.description())
1991 if m:
2025 if m:
1992 return templateutil.hybriddict(
2026 return templateutil.hybriddict(
1993 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2027 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1994 )
2028 )
1995 else:
2029 else:
1996 tags = ctx.repo().nodetags(ctx.node())
2030 tags = ctx.repo().nodetags(ctx.node())
1997 for t in tags:
2031 for t in tags:
1998 if _differentialrevisiontagre.match(t):
2032 if _differentialrevisiontagre.match(t):
1999 url = ctx.repo().ui.config(b'phabricator', b'url')
2033 url = ctx.repo().ui.config(b'phabricator', b'url')
2000 if not url.endswith(b'/'):
2034 if not url.endswith(b'/'):
2001 url += b'/'
2035 url += b'/'
2002 url += t
2036 url += t
2003
2037
2004 return templateutil.hybriddict({b'url': url, b'id': t,})
2038 return templateutil.hybriddict({b'url': url, b'id': t,})
2005 return None
2039 return None
2006
2040
2007
2041
2008 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2042 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2009 def template_status(context, mapping):
2043 def template_status(context, mapping):
2010 """:phabstatus: String. Status of Phabricator differential.
2044 """:phabstatus: String. Status of Phabricator differential.
2011 """
2045 """
2012 ctx = context.resource(mapping, b'ctx')
2046 ctx = context.resource(mapping, b'ctx')
2013 repo = context.resource(mapping, b'repo')
2047 repo = context.resource(mapping, b'repo')
2014 ui = context.resource(mapping, b'ui')
2048 ui = context.resource(mapping, b'ui')
2015
2049
2016 rev = ctx.rev()
2050 rev = ctx.rev()
2017 try:
2051 try:
2018 drevid = getdrevmap(repo, [rev])[rev]
2052 drevid = getdrevmap(repo, [rev])[rev]
2019 except KeyError:
2053 except KeyError:
2020 return None
2054 return None
2021 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2055 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2022 for drev in drevs:
2056 for drev in drevs:
2023 if int(drev[b'id']) == drevid:
2057 if int(drev[b'id']) == drevid:
2024 return templateutil.hybriddict(
2058 return templateutil.hybriddict(
2025 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2059 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2026 )
2060 )
2027 return None
2061 return None
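A hypothetical template invocation of this keyword (it requires conduit access, so the transcript below does not exercise it); the attribute names follow the hybriddict returned above:

  $ hg log -r . -T '{phabstatus.status} {phabstatus.url}\n'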
2028
2062
2029
2063
2030 @show.showview(b'phabstatus', csettopic=b'work')
2064 @show.showview(b'phabstatus', csettopic=b'work')
2031 def phabstatusshowview(ui, repo, displayer):
2065 def phabstatusshowview(ui, repo, displayer):
2032 """Phabricator differential status"""
2066 """Phabricator differential status"""
2033 revs = repo.revs('sort(_underway(), topo)')
2067 revs = repo.revs('sort(_underway(), topo)')
2034 drevmap = getdrevmap(repo, revs)
2068 drevmap = getdrevmap(repo, revs)
2035 unknownrevs, drevids, revsbydrevid = [], set(), {}
2069 unknownrevs, drevids, revsbydrevid = [], set(), {}
2036 for rev, drevid in pycompat.iteritems(drevmap):
2070 for rev, drevid in pycompat.iteritems(drevmap):
2037 if drevid is not None:
2071 if drevid is not None:
2038 drevids.add(drevid)
2072 drevids.add(drevid)
2039 revsbydrevid.setdefault(drevid, set()).add(rev)
2073 revsbydrevid.setdefault(drevid, set()).add(rev)
2040 else:
2074 else:
2041 unknownrevs.append(rev)
2075 unknownrevs.append(rev)
2042
2076
2043 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2077 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2044 drevsbyrev = {}
2078 drevsbyrev = {}
2045 for drev in drevs:
2079 for drev in drevs:
2046 for rev in revsbydrevid[int(drev[b'id'])]:
2080 for rev in revsbydrevid[int(drev[b'id'])]:
2047 drevsbyrev[rev] = drev
2081 drevsbyrev[rev] = drev
2048
2082
2049 def phabstatus(ctx):
2083 def phabstatus(ctx):
2050 drev = drevsbyrev[ctx.rev()]
2084 drev = drevsbyrev[ctx.rev()]
2051 status = ui.label(
2085 status = ui.label(
2052 b'%(statusName)s' % drev,
2086 b'%(statusName)s' % drev,
2053 b'phabricator.status.%s' % _getstatusname(drev),
2087 b'phabricator.status.%s' % _getstatusname(drev),
2054 )
2088 )
2055 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2089 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2056
2090
2057 revs -= smartset.baseset(unknownrevs)
2091 revs -= smartset.baseset(unknownrevs)
2058 revdag = graphmod.dagwalker(repo, revs)
2092 revdag = graphmod.dagwalker(repo, revs)
2059
2093
2060 ui.setconfig(b'experimental', b'graphshorten', True)
2094 ui.setconfig(b'experimental', b'graphshorten', True)
2061 displayer._exthook = phabstatus
2095 displayer._exthook = phabstatus
2062 nodelen = show.longestshortest(repo, revs)
2096 nodelen = show.longestshortest(repo, revs)
2063 logcmdutil.displaygraph(
2097 logcmdutil.displaygraph(
2064 ui,
2098 ui,
2065 repo,
2099 repo,
2066 revdag,
2100 revdag,
2067 displayer,
2101 displayer,
2068 graphmod.asciiedges,
2102 graphmod.asciiedges,
2069 props={b'nodelen': nodelen},
2103 props={b'nodelen': nodelen},
2070 )
2104 )
@@ -1,398 +1,419
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 >
6 > [phabricator]
7 > debug = True
5 > EOF
8 > EOF
6 $ hg init repo
9 $ hg init repo
7 $ cd repo
10 $ cd repo
8 $ cat >> .hg/hgrc <<EOF
11 $ cat >> .hg/hgrc <<EOF
9 > [phabricator]
12 > [phabricator]
10 > url = https://phab.mercurial-scm.org/
13 > url = https://phab.mercurial-scm.org/
11 > callsign = HG
14 > callsign = HG
12 >
15 >
13 > [auth]
16 > [auth]
14 > hgphab.schemes = https
17 > hgphab.schemes = https
15 > hgphab.prefix = phab.mercurial-scm.org
18 > hgphab.prefix = phab.mercurial-scm.org
16 > # When working on the extension and making phabricator interaction
19 > # When working on the extension and making phabricator interaction
17 > # changes, edit this to be a real phabricator token. When done, edit
20 > # changes, edit this to be a real phabricator token. When done, edit
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
21 > # it back. The VCR transcripts will be auto-sanitised to replace your real
19 > # token with this value.
22 > # token with this value.
20 > hgphab.phabtoken = cli-hahayouwish
23 > hgphab.phabtoken = cli-hahayouwish
21 > EOF
24 > EOF
22 $ VCR="$TESTDIR/phabricator"
25 $ VCR="$TESTDIR/phabricator"
23
26
24 Error is handled reasonably. We override the phabtoken here so that
27 Error is handled reasonably. We override the phabtoken here so that
25 when you're developing changes to phabricator.py you can edit the
28 when you're developing changes to phabricator.py you can edit the
26 above config and have a real token in the test but not have to edit
29 above config and have a real token in the test but not have to edit
27 this test.
30 this test.
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
31
34
32 Missing arguments don't crash, and may print the command help
35 Missing arguments don't crash, and may print the command help
33
36
34 $ hg debugcallconduit
37 $ hg debugcallconduit
35 hg debugcallconduit: invalid arguments
38 hg debugcallconduit: invalid arguments
36 hg debugcallconduit METHOD
39 hg debugcallconduit METHOD
37
40
38 call Conduit API
41 call Conduit API
39
42
40 options:
43 options:
41
44
42 (use 'hg debugcallconduit -h' to show more help)
45 (use 'hg debugcallconduit -h' to show more help)
43 [255]
46 [255]
44 $ hg phabread
47 $ hg phabread
45 abort: empty DREVSPEC set
48 abort: empty DREVSPEC set
46 [255]
49 [255]
47
50
48 Basic phabread:
51 Basic phabread:
49 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
50 # HG changeset patch
53 # HG changeset patch
51 # Date 1536771503 0
54 # Date 1536771503 0
52 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
53 exchangev2: start to implement pull with wire protocol v2
56 exchangev2: start to implement pull with wire protocol v2
54
57
55 Wire protocol version 2 will take a substantially different
58 Wire protocol version 2 will take a substantially different
56 approach to exchange than version 1 (at least as far as pulling
59 approach to exchange than version 1 (at least as far as pulling
57 is concerned).
60 is concerned).
58
61
59 This commit establishes a new exchangev2 module for holding
62 This commit establishes a new exchangev2 module for holding
60
63
61 Phabread with multiple DREVSPEC
64 Phabread with multiple DREVSPEC
62
65
63 TODO: attempt to order related revisions like --stack?
66 TODO: attempt to order related revisions like --stack?
64 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
65 > | grep '^Differential Revision'
68 > | grep '^Differential Revision'
66 Differential Revision: https://phab.mercurial-scm.org/D8205
69 Differential Revision: https://phab.mercurial-scm.org/D8205
67 Differential Revision: https://phab.mercurial-scm.org/D8206
70 Differential Revision: https://phab.mercurial-scm.org/D8206
68 Differential Revision: https://phab.mercurial-scm.org/D8207
71 Differential Revision: https://phab.mercurial-scm.org/D8207
69
72
70 Empty DREVSPECs don't crash
73 Empty DREVSPECs don't crash
71
74
72 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
73 abort: empty DREVSPEC set
76 abort: empty DREVSPEC set
74 [255]
77 [255]
75
78
76
79
77 phabupdate with an accept:
80 phabupdate with an accept:
78 $ hg phabupdate --accept D4564 \
81 $ hg phabupdate --accept D4564 \
79 > -m 'I think I like where this is headed. Will read rest of series later.'\
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
80 > --test-vcr "$VCR/accept-4564.json"
83 > --test-vcr "$VCR/accept-4564.json"
81 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
82 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
83 [255]
86 [255]
84 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
85
88
86 Create a differential diff:
89 Create a differential diff:
87 $ HGENCODING=utf-8; export HGENCODING
90 $ HGENCODING=utf-8; export HGENCODING
88 $ echo alpha > alpha
91 $ echo alpha > alpha
89 $ hg ci --addremove -m 'create alpha for phabricator test €'
92 $ hg ci --addremove -m 'create alpha for phabricator test €'
90 adding alpha
93 adding alpha
91 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
94 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
92 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
95 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
96 new commits: ['347bf67801e5']
93 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
97 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
94 $ echo more >> alpha
98 $ echo more >> alpha
95 $ HGEDITOR=true hg ci --amend
99 $ HGEDITOR=true hg ci --amend
96 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
100 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
97 $ echo beta > beta
101 $ echo beta > beta
98 $ hg ci --addremove -m 'create beta for phabricator test'
102 $ hg ci --addremove -m 'create beta for phabricator test'
99 adding beta
103 adding beta
100 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
104 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
105 c44b38f24a45 mapped to old nodes []
101 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
106 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
102 D7916 - created - 9e6901f21d5b: create beta for phabricator test
107 D7916 - created - 9e6901f21d5b: create beta for phabricator test
108 new commits: ['a692622e6937']
103 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
104 $ unset HGENCODING
110 $ unset HGENCODING
105
111
106 The amend won't explode after posting a public commit. The local tag is left
112 The amend won't explode after posting a public commit. The local tag is left
107 behind to identify it.
113 behind to identify it.
108
114
109 $ echo 'public change' > beta
115 $ echo 'public change' > beta
110 $ hg ci -m 'create public change for phabricator testing'
116 $ hg ci -m 'create public change for phabricator testing'
111 $ hg phase --public .
117 $ hg phase --public .
112 $ echo 'draft change' > alpha
118 $ echo 'draft change' > alpha
113 $ hg ci -m 'create draft change for phabricator testing'
119 $ hg ci -m 'create draft change for phabricator testing'
114 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
120 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
115 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
121 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
116 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
122 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
117 warning: not updating public commit 2:7b4185ab5d16
123 warning: not updating public commit 2:7b4185ab5d16
124 new commits: ['3244dc4a3334']
118 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
125 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
119 $ hg tags -v
126 $ hg tags -v
120 tip 3:3244dc4a3334
127 tip 3:3244dc4a3334
121 D7917 2:7b4185ab5d16 local
128 D7917 2:7b4185ab5d16 local
122
129
123 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
130 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
124 > {
131 > {
125 > "constraints": {
132 > "constraints": {
126 > "isBot": true
133 > "isBot": true
127 > }
134 > }
128 > }
135 > }
129 > EOF
136 > EOF
130 {
137 {
131 "cursor": {
138 "cursor": {
132 "after": null,
139 "after": null,
133 "before": null,
140 "before": null,
134 "limit": 100,
141 "limit": 100,
135 "order": null
142 "order": null
136 },
143 },
137 "data": [],
144 "data": [],
138 "maps": {},
145 "maps": {},
139 "query": {
146 "query": {
140 "queryKey": null
147 "queryKey": null
141 }
148 }
142 }
149 }
143
150
144 Template keywords
151 Template keywords
145 $ hg log -T'{rev} {phabreview|json}\n'
152 $ hg log -T'{rev} {phabreview|json}\n'
146 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
153 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
147 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
154 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
148 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
155 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
149 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
156 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
150
157
151 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
158 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
152 3 https://phab.mercurial-scm.org/D7918 D7918
159 3 https://phab.mercurial-scm.org/D7918 D7918
153 2 https://phab.mercurial-scm.org/D7917 D7917
160 2 https://phab.mercurial-scm.org/D7917 D7917
154 1 https://phab.mercurial-scm.org/D7916 D7916
161 1 https://phab.mercurial-scm.org/D7916 D7916
155 0 https://phab.mercurial-scm.org/D7915 D7915
162 0 https://phab.mercurial-scm.org/D7915 D7915
156
163
157 Commenting when phabsending:
164 Commenting when phabsending:
158 $ echo comment > comment
165 $ echo comment > comment
159 $ hg ci --addremove -m "create comment for phabricator test"
166 $ hg ci --addremove -m "create comment for phabricator test"
160 adding comment
167 adding comment
161 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
168 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
162 D7919 - created - d5dddca9023d: create comment for phabricator test
169 D7919 - created - d5dddca9023d: create comment for phabricator test
170 new commits: ['f7db812bbe1d']
163 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
171 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
164 $ echo comment2 >> comment
172 $ echo comment2 >> comment
165 $ hg ci --amend
173 $ hg ci --amend
166 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
174 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
167 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
175 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
176 1849d7828727 mapped to old nodes []
168 D7919 - updated - 1849d7828727: create comment for phabricator test
177 D7919 - updated - 1849d7828727: create comment for phabricator test
169
178
170 Phabsending a skipped commit:
179 Phabsending a skipped commit:
171 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
180 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
181 1849d7828727 mapped to old nodes ['1849d7828727']
172 D7919 - skipped - 1849d7828727: create comment for phabricator test
182 D7919 - skipped - 1849d7828727: create comment for phabricator test
173
183
174 Phabsending a new binary, a modified binary, and a removed binary
184 Phabsending a new binary, a modified binary, and a removed binary
175
185
176 >>> open('bin', 'wb').write(b'\0a') and None
186 >>> open('bin', 'wb').write(b'\0a') and None
177 $ hg ci -Am 'add binary'
187 $ hg ci -Am 'add binary'
178 adding bin
188 adding bin
179 >>> open('bin', 'wb').write(b'\0b') and None
189 >>> open('bin', 'wb').write(b'\0b') and None
180 $ hg ci -m 'modify binary'
190 $ hg ci -m 'modify binary'
181 $ hg rm bin
191 $ hg rm bin
182 $ hg ci -m 'remove binary'
192 $ hg ci -m 'remove binary'
183 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
193 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
184 uploading bin@aa24a81f55de
194 uploading bin@aa24a81f55de
185 D8007 - created - aa24a81f55de: add binary
195 D8007 - created - aa24a81f55de: add binary
186 uploading bin@d8d62a881b54
196 uploading bin@d8d62a881b54
187 D8008 - created - d8d62a881b54: modify binary
197 D8008 - created - d8d62a881b54: modify binary
188 D8009 - created - af55645b2e29: remove binary
198 D8009 - created - af55645b2e29: remove binary
199 new commits: ['b8139fbb4a57']
200 new commits: ['c88ce4c2d2ad']
201 new commits: ['75dbbc901145']
189 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
202 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
190
203
191 Phabsend a renamed binary and a copied binary, with and without content changes
204 Phabsend a renamed binary and a copied binary, with and without content changes
192 to src and dest
205 to src and dest
193
206
194 >>> open('bin2', 'wb').write(b'\0c') and None
207 >>> open('bin2', 'wb').write(b'\0c') and None
195 $ hg ci -Am 'add another binary'
208 $ hg ci -Am 'add another binary'
196 adding bin2
209 adding bin2
197
210
198 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
211 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
199 looks much different than when viewing "bin2_moved". No idea if this is a phab
212 looks much different than when viewing "bin2_moved". No idea if this is a phab
200 bug, or phabsend bug. The patch (as printed by phabread) looks reasonable
213 bug, or phabsend bug. The patch (as printed by phabread) looks reasonable
201 though.
214 though.
202
215
203 $ hg mv bin2 bin2_moved
216 $ hg mv bin2 bin2_moved
204 $ hg ci -m "moved binary"
217 $ hg ci -m "moved binary"
205
218
206 Note: "bin2_moved" is also not viewable in phabricator with this review
219 Note: "bin2_moved" is also not viewable in phabricator with this review
207
220
208 $ hg cp bin2_moved bin2_copied
221 $ hg cp bin2_moved bin2_copied
209 $ hg ci -m "copied binary"
222 $ hg ci -m "copied binary"
210
223
211 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
224 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
212 are viewable in their proper state. "bin2_copied" is not viewable, and not
225 are viewable in their proper state. "bin2_copied" is not viewable, and not
213 listed as binary in phabricator.
226 listed as binary in phabricator.
214
227
215 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
228 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
216 $ hg mv bin2_copied bin2_moved_again
229 $ hg mv bin2_copied bin2_moved_again
217 $ hg ci -m "move+mod copied binary"
230 $ hg ci -m "move+mod copied binary"
218
231
219 Note: "bin2_moved" and "bin2_moved_copied" are both marked binary, and both
232 Note: "bin2_moved" and "bin2_moved_copied" are both marked binary, and both
220 viewable on each side.
233 viewable on each side.
221
234
222 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
235 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
223 $ hg cp bin2_moved bin2_moved_copied
236 $ hg cp bin2_moved bin2_moved_copied
224 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
237 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
225 $ hg ci -m "copy+mod moved binary"
238 $ hg ci -m "copy+mod moved binary"
226
239
227 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
240 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
228 uploading bin2@f42f9195e00c
241 uploading bin2@f42f9195e00c
229 D8128 - created - f42f9195e00c: add another binary
242 D8128 - created - f42f9195e00c: add another binary
230 D8129 - created - 834ab31d80ae: moved binary
243 D8129 - created - 834ab31d80ae: moved binary
231 D8130 - created - 494b750e5194: copied binary
244 D8130 - created - 494b750e5194: copied binary
232 uploading bin2_moved_again@25f766b50cc2
245 uploading bin2_moved_again@25f766b50cc2
233 D8131 - created - 25f766b50cc2: move+mod copied binary
246 D8131 - created - 25f766b50cc2: move+mod copied binary
234 uploading bin2_moved_copied@1b87b363a5e4
247 uploading bin2_moved_copied@1b87b363a5e4
235 uploading bin2_moved@1b87b363a5e4
248 uploading bin2_moved@1b87b363a5e4
236 D8132 - created - 1b87b363a5e4: copy+mod moved binary
249 D8132 - created - 1b87b363a5e4: copy+mod moved binary
250 new commits: ['90437c20312a']
251 new commits: ['f391f4da4c61']
252 new commits: ['da86a9f3268c']
253 new commits: ['003ffc16ba66']
254 new commits: ['13bd750c36fa']
237 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
255 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
238
256
239 Phabreading a DREV with a local:commits time as a string:
257 Phabreading a DREV with a local:commits time as a string:
240 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
258 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
241 # HG changeset patch
259 # HG changeset patch
242 # User Pulkit Goyal <7895pulkit@gmail.com>
260 # User Pulkit Goyal <7895pulkit@gmail.com>
243 # Date 1509404054 -19800
261 # Date 1509404054 -19800
244 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
262 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
245 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
263 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
246 repoview: add a new attribute _visibilityexceptions and related API
264 repoview: add a new attribute _visibilityexceptions and related API
247
265
248 Currently we don't have a defined way in core to make some hidden revisions
266 Currently we don't have a defined way in core to make some hidden revisions
249 visible in filtered repo. Extensions to achieve the purpose of unhiding some
267 visible in filtered repo. Extensions to achieve the purpose of unhiding some
250 hidden commits, wrap repoview.pinnedrevs() function.
268 hidden commits, wrap repoview.pinnedrevs() function.
251
269
252 To make the above task simple and have well defined API, this patch adds a new
270 To make the above task simple and have well defined API, this patch adds a new
253 attribute '_visibilityexceptions' to repoview class which will contains
271 attribute '_visibilityexceptions' to repoview class which will contains
254 the hidden revs which should be exception.
272 the hidden revs which should be exception.
255 This will allow to set different exceptions for different repoview objects
273 This will allow to set different exceptions for different repoview objects
256 backed by the same unfiltered repo.
274 backed by the same unfiltered repo.
257
275
258 This patch also adds API to add revs to the attribute set and get them.
276 This patch also adds API to add revs to the attribute set and get them.
259
277
260 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
278 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
261
279
262 Differential Revision: https://phab.mercurial-scm.org/D1285
280 Differential Revision: https://phab.mercurial-scm.org/D1285
263 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
281 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
264 --- a/mercurial/repoview.py
282 --- a/mercurial/repoview.py
265 +++ b/mercurial/repoview.py
283 +++ b/mercurial/repoview.py
266 @@ * @@ (glob)
284 @@ * @@ (glob)
267 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
285 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
268 """
286 """
269
287
270 + # hidden revs which should be visible
288 + # hidden revs which should be visible
271 + _visibilityexceptions = set()
289 + _visibilityexceptions = set()
272 +
290 +
273 def __init__(self, repo, filtername):
291 def __init__(self, repo, filtername):
274 object.__setattr__(self, r'_unfilteredrepo', repo)
292 object.__setattr__(self, r'_unfilteredrepo', repo)
275 object.__setattr__(self, r'filtername', filtername)
293 object.__setattr__(self, r'filtername', filtername)
276 @@ -231,6 +234,14 @@
294 @@ -231,6 +234,14 @@
277 return self
295 return self
278 return self.unfiltered().filtered(name)
296 return self.unfiltered().filtered(name)
279
297
280 + def addvisibilityexceptions(self, revs):
298 + def addvisibilityexceptions(self, revs):
281 + """adds hidden revs which should be visible to set of exceptions"""
299 + """adds hidden revs which should be visible to set of exceptions"""
282 + self._visibilityexceptions.update(revs)
300 + self._visibilityexceptions.update(revs)
283 +
301 +
284 + def getvisibilityexceptions(self):
302 + def getvisibilityexceptions(self):
285 + """returns the set of hidden revs which should be visible"""
303 + """returns the set of hidden revs which should be visible"""
286 + return self._visibilityexceptions
304 + return self._visibilityexceptions
287 +
305 +
288 # everything access are forwarded to the proxied repo
306 # everything access are forwarded to the proxied repo
289 def __getattr__(self, attr):
307 def __getattr__(self, attr):
290 return getattr(self._unfilteredrepo, attr)
308 return getattr(self._unfilteredrepo, attr)
291 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
309 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
292 --- a/mercurial/localrepo.py
310 --- a/mercurial/localrepo.py
293 +++ b/mercurial/localrepo.py
311 +++ b/mercurial/localrepo.py
294 @@ -570,6 +570,14 @@
312 @@ -570,6 +570,14 @@
295 def close(self):
313 def close(self):
296 self._writecaches()
314 self._writecaches()
297
315
298 + def addvisibilityexceptions(self, exceptions):
316 + def addvisibilityexceptions(self, exceptions):
299 + # should be called on a filtered repository
317 + # should be called on a filtered repository
300 + pass
318 + pass
301 +
319 +
302 + def getvisibilityexceptions(self):
320 + def getvisibilityexceptions(self):
303 + # should be called on a filtered repository
321 + # should be called on a filtered repository
304 + return set()
322 + return set()
305 +
323 +
306 def _loadextensions(self):
324 def _loadextensions(self):
307 extensions.loadall(self.ui)
325 extensions.loadall(self.ui)
308
326
309
327
310 A bad .arcconfig doesn't error out
328 A bad .arcconfig doesn't error out
311 $ echo 'garbage' > .arcconfig
329 $ echo 'garbage' > .arcconfig
312 $ hg config phabricator --debug
330 $ hg config phabricator --debug
313 invalid JSON in $TESTTMP/repo/.arcconfig
331 invalid JSON in $TESTTMP/repo/.arcconfig
314 read config from: */.hgrc (glob)
332 read config from: */.hgrc (glob)
333 */.hgrc:*: phabricator.debug=True (glob)
315 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
334 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
316 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
335 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
317
336
318 The .arcconfig content overrides global config
337 The .arcconfig content overrides global config
319 $ cat >> $HGRCPATH << EOF
338 $ cat >> $HGRCPATH << EOF
320 > [phabricator]
339 > [phabricator]
321 > url = global
340 > url = global
322 > callsign = global
341 > callsign = global
323 > EOF
342 > EOF
324 $ cp $TESTDIR/../.arcconfig .
343 $ cp $TESTDIR/../.arcconfig .
325 $ mv .hg/hgrc .hg/hgrc.bak
344 $ mv .hg/hgrc .hg/hgrc.bak
326 $ hg config phabricator --debug
345 $ hg config phabricator --debug
327 read config from: */.hgrc (glob)
346 read config from: */.hgrc (glob)
347 */.hgrc:*: phabricator.debug=True (glob)
328 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
348 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
329 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
349 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
330
350
331 But it doesn't override local config
351 But it doesn't override local config
332 $ cat >> .hg/hgrc << EOF
352 $ cat >> .hg/hgrc << EOF
333 > [phabricator]
353 > [phabricator]
334 > url = local
354 > url = local
335 > callsign = local
355 > callsign = local
336 > EOF
356 > EOF
337 $ hg config phabricator --debug
357 $ hg config phabricator --debug
338 read config from: */.hgrc (glob)
358 read config from: */.hgrc (glob)
359 */.hgrc:*: phabricator.debug=True (glob)
339 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
360 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
340 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
361 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
341 $ mv .hg/hgrc.bak .hg/hgrc
362 $ mv .hg/hgrc.bak .hg/hgrc
342
363
343 Phabimport works with a stack
364 Phabimport works with a stack
344
365
345 $ cd ..
366 $ cd ..
346 $ hg clone repo repo2 -qr 1
367 $ hg clone repo repo2 -qr 1
347 $ cp repo/.hg/hgrc repo2/.hg/
368 $ cp repo/.hg/hgrc repo2/.hg/
348 $ cd repo2
369 $ cd repo2
349 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
370 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
350 applying patch from D7917
371 applying patch from D7917
351 applying patch from D7918
372 applying patch from D7918
352 $ hg log -r .: -G -Tcompact
373 $ hg log -r .: -G -Tcompact
353 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
374 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
354 | create draft change for phabricator testing
375 | create draft change for phabricator testing
355 |
376 |
356 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
377 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
357 | create public change for phabricator testing
378 | create public change for phabricator testing
358 |
379 |
359 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
380 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
360 | create beta for phabricator test
381 | create beta for phabricator test
361 ~
382 ~
362 Phabimport can create secret commits
383 Phabimport can create secret commits
363
384
364 $ hg rollback --config ui.rollback=True
385 $ hg rollback --config ui.rollback=True
365 repository tip rolled back to revision 1 (undo phabimport)
386 repository tip rolled back to revision 1 (undo phabimport)
366 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
387 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
367 > --config phabimport.secret=True
388 > --config phabimport.secret=True
368 applying patch from D7917
389 applying patch from D7917
369 applying patch from D7918
390 applying patch from D7918
370 $ hg log -r 'reverse(.:)' -T phases
391 $ hg log -r 'reverse(.:)' -T phases
371 changeset: 3:aaef04066140
392 changeset: 3:aaef04066140
372 tag: tip
393 tag: tip
373 phase: secret
394 phase: secret
374 user: test
395 user: test
375 date: Thu Jan 01 00:00:00 1970 +0000
396 date: Thu Jan 01 00:00:00 1970 +0000
376 summary: create draft change for phabricator testing
397 summary: create draft change for phabricator testing
377
398
378 changeset: 2:8de3712202d1
399 changeset: 2:8de3712202d1
379 phase: secret
400 phase: secret
380 user: test
401 user: test
381 date: Thu Jan 01 00:00:00 1970 +0000
402 date: Thu Jan 01 00:00:00 1970 +0000
382 summary: create public change for phabricator testing
403 summary: create public change for phabricator testing
383
404
384 changeset: 1:a692622e6937
405 changeset: 1:a692622e6937
385 phase: public
406 phase: public
386 user: test
407 user: test
387 date: Thu Jan 01 00:00:00 1970 +0000
408 date: Thu Jan 01 00:00:00 1970 +0000
388 summary: create beta for phabricator test
409 summary: create beta for phabricator test
389
410
390 Phabimport accepts multiple DREVSPECs
411 Phabimport accepts multiple DREVSPECs
391
412
392 $ hg rollback --config ui.rollback=True
413 $ hg rollback --config ui.rollback=True
393 repository tip rolled back to revision 1 (undo phabimport)
414 repository tip rolled back to revision 1 (undo phabimport)
394 $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
415 $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
395 applying patch from D7917
416 applying patch from D7917
396 applying patch from D7918
417 applying patch from D7918
397
418
398 $ cd ..
419 $ cd ..