##// END OF EJS Templates
phabricator: prevent posting obsolete commits...
Matt Harbison -
r45213:c482e2fe default
parent child Browse files
Show More
@@ -1,2223 +1,2226 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid, short
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
97 # leave the attribute unspecified.
98 testedwith = b'ships-with-hg-core'
98 testedwith = b'ships-with-hg-core'
99
99
100 eh = exthelper.exthelper()
100 eh = exthelper.exthelper()
101
101
102 cmdtable = eh.cmdtable
102 cmdtable = eh.cmdtable
103 command = eh.command
103 command = eh.command
104 configtable = eh.configtable
104 configtable = eh.configtable
105 templatekeyword = eh.templatekeyword
105 templatekeyword = eh.templatekeyword
106 uisetup = eh.finaluisetup
106 uisetup = eh.finaluisetup
107
107
108 # developer config: phabricator.batchsize
108 # developer config: phabricator.batchsize
109 eh.configitem(
109 eh.configitem(
110 b'phabricator', b'batchsize', default=12,
110 b'phabricator', b'batchsize', default=12,
111 )
111 )
112 eh.configitem(
112 eh.configitem(
113 b'phabricator', b'callsign', default=None,
113 b'phabricator', b'callsign', default=None,
114 )
114 )
115 eh.configitem(
115 eh.configitem(
116 b'phabricator', b'curlcmd', default=None,
116 b'phabricator', b'curlcmd', default=None,
117 )
117 )
118 # developer config: phabricator.debug
118 # developer config: phabricator.debug
119 eh.configitem(
119 eh.configitem(
120 b'phabricator', b'debug', default=False,
120 b'phabricator', b'debug', default=False,
121 )
121 )
122 # developer config: phabricator.repophid
122 # developer config: phabricator.repophid
123 eh.configitem(
123 eh.configitem(
124 b'phabricator', b'repophid', default=None,
124 b'phabricator', b'repophid', default=None,
125 )
125 )
126 eh.configitem(
126 eh.configitem(
127 b'phabricator', b'url', default=None,
127 b'phabricator', b'url', default=None,
128 )
128 )
129 eh.configitem(
129 eh.configitem(
130 b'phabsend', b'confirm', default=False,
130 b'phabsend', b'confirm', default=False,
131 )
131 )
132 eh.configitem(
132 eh.configitem(
133 b'phabimport', b'secret', default=False,
133 b'phabimport', b'secret', default=False,
134 )
134 )
135 eh.configitem(
135 eh.configitem(
136 b'phabimport', b'obsolete', default=False,
136 b'phabimport', b'obsolete', default=False,
137 )
137 )
138
138
139 colortable = {
139 colortable = {
140 b'phabricator.action.created': b'green',
140 b'phabricator.action.created': b'green',
141 b'phabricator.action.skipped': b'magenta',
141 b'phabricator.action.skipped': b'magenta',
142 b'phabricator.action.updated': b'magenta',
142 b'phabricator.action.updated': b'magenta',
143 b'phabricator.desc': b'',
143 b'phabricator.desc': b'',
144 b'phabricator.drev': b'bold',
144 b'phabricator.drev': b'bold',
145 b'phabricator.node': b'',
145 b'phabricator.node': b'',
146 b'phabricator.status.abandoned': b'magenta dim',
146 b'phabricator.status.abandoned': b'magenta dim',
147 b'phabricator.status.accepted': b'green bold',
147 b'phabricator.status.accepted': b'green bold',
148 b'phabricator.status.closed': b'green',
148 b'phabricator.status.closed': b'green',
149 b'phabricator.status.needsreview': b'yellow',
149 b'phabricator.status.needsreview': b'yellow',
150 b'phabricator.status.needsrevision': b'red',
150 b'phabricator.status.needsrevision': b'red',
151 b'phabricator.status.changesplanned': b'red',
151 b'phabricator.status.changesplanned': b'red',
152 }
152 }
153
153
154 _VCR_FLAGS = [
154 _VCR_FLAGS = [
155 (
155 (
156 b'',
156 b'',
157 b'test-vcr',
157 b'test-vcr',
158 b'',
158 b'',
159 _(
159 _(
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
161 b', otherwise will mock all http requests using the specified vcr file.'
161 b', otherwise will mock all http requests using the specified vcr file.'
162 b' (ADVANCED)'
162 b' (ADVANCED)'
163 ),
163 ),
164 ),
164 ),
165 ]
165 ]
166
166
167
167
168 @eh.wrapfunction(localrepo, "loadhgrc")
168 @eh.wrapfunction(localrepo, "loadhgrc")
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
170 """Load ``.arcconfig`` content into a ui instance on repository open.
170 """Load ``.arcconfig`` content into a ui instance on repository open.
171 """
171 """
172 result = False
172 result = False
173 arcconfig = {}
173 arcconfig = {}
174
174
175 try:
175 try:
176 # json.loads only accepts bytes from 3.6+
176 # json.loads only accepts bytes from 3.6+
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
178 # json.loads only returns unicode strings
178 # json.loads only returns unicode strings
179 arcconfig = pycompat.rapply(
179 arcconfig = pycompat.rapply(
180 lambda x: encoding.unitolocal(x)
180 lambda x: encoding.unitolocal(x)
181 if isinstance(x, pycompat.unicode)
181 if isinstance(x, pycompat.unicode)
182 else x,
182 else x,
183 pycompat.json_loads(rawparams),
183 pycompat.json_loads(rawparams),
184 )
184 )
185
185
186 result = True
186 result = True
187 except ValueError:
187 except ValueError:
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
189 except IOError:
189 except IOError:
190 pass
190 pass
191
191
192 cfg = util.sortdict()
192 cfg = util.sortdict()
193
193
194 if b"repository.callsign" in arcconfig:
194 if b"repository.callsign" in arcconfig:
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
196
196
197 if b"phabricator.uri" in arcconfig:
197 if b"phabricator.uri" in arcconfig:
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
199
199
200 if cfg:
200 if cfg:
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
202
202
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
204
204
205
205
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
207 fullflags = flags + _VCR_FLAGS
207 fullflags = flags + _VCR_FLAGS
208
208
209 def hgmatcher(r1, r2):
209 def hgmatcher(r1, r2):
210 if r1.uri != r2.uri or r1.method != r2.method:
210 if r1.uri != r2.uri or r1.method != r2.method:
211 return False
211 return False
212 r1params = util.urlreq.parseqs(r1.body)
212 r1params = util.urlreq.parseqs(r1.body)
213 r2params = util.urlreq.parseqs(r2.body)
213 r2params = util.urlreq.parseqs(r2.body)
214 for key in r1params:
214 for key in r1params:
215 if key not in r2params:
215 if key not in r2params:
216 return False
216 return False
217 value = r1params[key][0]
217 value = r1params[key][0]
218 # we want to compare json payloads without worrying about ordering
218 # we want to compare json payloads without worrying about ordering
219 if value.startswith(b'{') and value.endswith(b'}'):
219 if value.startswith(b'{') and value.endswith(b'}'):
220 r1json = pycompat.json_loads(value)
220 r1json = pycompat.json_loads(value)
221 r2json = pycompat.json_loads(r2params[key][0])
221 r2json = pycompat.json_loads(r2params[key][0])
222 if r1json != r2json:
222 if r1json != r2json:
223 return False
223 return False
224 elif r2params[key][0] != value:
224 elif r2params[key][0] != value:
225 return False
225 return False
226 return True
226 return True
227
227
228 def sanitiserequest(request):
228 def sanitiserequest(request):
229 request.body = re.sub(
229 request.body = re.sub(
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
231 )
231 )
232 return request
232 return request
233
233
234 def sanitiseresponse(response):
234 def sanitiseresponse(response):
235 if 'set-cookie' in response['headers']:
235 if 'set-cookie' in response['headers']:
236 del response['headers']['set-cookie']
236 del response['headers']['set-cookie']
237 return response
237 return response
238
238
239 def decorate(fn):
239 def decorate(fn):
240 def inner(*args, **kwargs):
240 def inner(*args, **kwargs):
241 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
241 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
242 if cassette:
242 if cassette:
243 import hgdemandimport
243 import hgdemandimport
244
244
245 with hgdemandimport.deactivated():
245 with hgdemandimport.deactivated():
246 import vcr as vcrmod
246 import vcr as vcrmod
247 import vcr.stubs as stubs
247 import vcr.stubs as stubs
248
248
249 vcr = vcrmod.VCR(
249 vcr = vcrmod.VCR(
250 serializer='json',
250 serializer='json',
251 before_record_request=sanitiserequest,
251 before_record_request=sanitiserequest,
252 before_record_response=sanitiseresponse,
252 before_record_response=sanitiseresponse,
253 custom_patches=[
253 custom_patches=[
254 (
254 (
255 urlmod,
255 urlmod,
256 'httpconnection',
256 'httpconnection',
257 stubs.VCRHTTPConnection,
257 stubs.VCRHTTPConnection,
258 ),
258 ),
259 (
259 (
260 urlmod,
260 urlmod,
261 'httpsconnection',
261 'httpsconnection',
262 stubs.VCRHTTPSConnection,
262 stubs.VCRHTTPSConnection,
263 ),
263 ),
264 ],
264 ],
265 )
265 )
266 vcr.register_matcher('hgmatcher', hgmatcher)
266 vcr.register_matcher('hgmatcher', hgmatcher)
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
268 return fn(*args, **kwargs)
268 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
270
270
271 cmd = util.checksignature(inner, depth=2)
271 cmd = util.checksignature(inner, depth=2)
272 cmd.__name__ = fn.__name__
272 cmd.__name__ = fn.__name__
273 cmd.__doc__ = fn.__doc__
273 cmd.__doc__ = fn.__doc__
274
274
275 return command(
275 return command(
276 name,
276 name,
277 fullflags,
277 fullflags,
278 spec,
278 spec,
279 helpcategory=helpcategory,
279 helpcategory=helpcategory,
280 optionalrepo=optionalrepo,
280 optionalrepo=optionalrepo,
281 )(cmd)
281 )(cmd)
282
282
283 return decorate
283 return decorate
284
284
285
285
286 def _debug(ui, *msg, **opts):
286 def _debug(ui, *msg, **opts):
287 """write debug output for Phabricator if ``phabricator.debug`` is set
287 """write debug output for Phabricator if ``phabricator.debug`` is set
288
288
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
290 printed with the --debug argument.
290 printed with the --debug argument.
291 """
291 """
292 if ui.configbool(b"phabricator", b"debug"):
292 if ui.configbool(b"phabricator", b"debug"):
293 flag = ui.debugflag
293 flag = ui.debugflag
294 try:
294 try:
295 ui.debugflag = True
295 ui.debugflag = True
296 ui.write(*msg, **opts)
296 ui.write(*msg, **opts)
297 finally:
297 finally:
298 ui.debugflag = flag
298 ui.debugflag = flag
299
299
300
300
301 def urlencodenested(params):
301 def urlencodenested(params):
302 """like urlencode, but works with nested parameters.
302 """like urlencode, but works with nested parameters.
303
303
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
307 """
307 """
308 flatparams = util.sortdict()
308 flatparams = util.sortdict()
309
309
310 def process(prefix, obj):
310 def process(prefix, obj):
311 if isinstance(obj, bool):
311 if isinstance(obj, bool):
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
315 if items is None:
315 if items is None:
316 flatparams[prefix] = obj
316 flatparams[prefix] = obj
317 else:
317 else:
318 for k, v in items(obj):
318 for k, v in items(obj):
319 if prefix:
319 if prefix:
320 process(b'%s[%s]' % (prefix, k), v)
320 process(b'%s[%s]' % (prefix, k), v)
321 else:
321 else:
322 process(k, v)
322 process(k, v)
323
323
324 process(b'', params)
324 process(b'', params)
325 return util.urlreq.urlencode(flatparams)
325 return util.urlreq.urlencode(flatparams)
326
326
327
327
328 def readurltoken(ui):
328 def readurltoken(ui):
329 """return conduit url, token and make sure they exist
329 """return conduit url, token and make sure they exist
330
330
331 Currently read from [auth] config section. In the future, it might
331 Currently read from [auth] config section. In the future, it might
332 make sense to read from .arcconfig and .arcrc as well.
332 make sense to read from .arcconfig and .arcrc as well.
333 """
333 """
334 url = ui.config(b'phabricator', b'url')
334 url = ui.config(b'phabricator', b'url')
335 if not url:
335 if not url:
336 raise error.Abort(
336 raise error.Abort(
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
338 )
338 )
339
339
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
341 token = None
341 token = None
342
342
343 if res:
343 if res:
344 group, auth = res
344 group, auth = res
345
345
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
347
347
348 token = auth.get(b'phabtoken')
348 token = auth.get(b'phabtoken')
349
349
350 if not token:
350 if not token:
351 raise error.Abort(
351 raise error.Abort(
352 _(b'Can\'t find conduit token associated to %s') % (url,)
352 _(b'Can\'t find conduit token associated to %s') % (url,)
353 )
353 )
354
354
355 return url, token
355 return url, token
356
356
357
357
358 def callconduit(ui, name, params):
358 def callconduit(ui, name, params):
359 """call Conduit API, params is a dict. return json.loads result, or None"""
359 """call Conduit API, params is a dict. return json.loads result, or None"""
360 host, token = readurltoken(ui)
360 host, token = readurltoken(ui)
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
363 params = params.copy()
363 params = params.copy()
364 params[b'__conduit__'] = {
364 params[b'__conduit__'] = {
365 b'token': token,
365 b'token': token,
366 }
366 }
367 rawdata = {
367 rawdata = {
368 b'params': templatefilters.json(params),
368 b'params': templatefilters.json(params),
369 b'output': b'json',
369 b'output': b'json',
370 b'__conduit__': 1,
370 b'__conduit__': 1,
371 }
371 }
372 data = urlencodenested(rawdata)
372 data = urlencodenested(rawdata)
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
374 if curlcmd:
374 if curlcmd:
375 sin, sout = procutil.popen2(
375 sin, sout = procutil.popen2(
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
377 )
377 )
378 sin.write(data)
378 sin.write(data)
379 sin.close()
379 sin.close()
380 body = sout.read()
380 body = sout.read()
381 else:
381 else:
382 urlopener = urlmod.opener(ui, authinfo)
382 urlopener = urlmod.opener(ui, authinfo)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
384 with contextlib.closing(urlopener.open(request)) as rsp:
384 with contextlib.closing(urlopener.open(request)) as rsp:
385 body = rsp.read()
385 body = rsp.read()
386 ui.debug(b'Conduit Response: %s\n' % body)
386 ui.debug(b'Conduit Response: %s\n' % body)
387 parsed = pycompat.rapply(
387 parsed = pycompat.rapply(
388 lambda x: encoding.unitolocal(x)
388 lambda x: encoding.unitolocal(x)
389 if isinstance(x, pycompat.unicode)
389 if isinstance(x, pycompat.unicode)
390 else x,
390 else x,
391 # json.loads only accepts bytes from py3.6+
391 # json.loads only accepts bytes from py3.6+
392 pycompat.json_loads(encoding.unifromlocal(body)),
392 pycompat.json_loads(encoding.unifromlocal(body)),
393 )
393 )
394 if parsed.get(b'error_code'):
394 if parsed.get(b'error_code'):
395 msg = _(b'Conduit Error (%s): %s') % (
395 msg = _(b'Conduit Error (%s): %s') % (
396 parsed[b'error_code'],
396 parsed[b'error_code'],
397 parsed[b'error_info'],
397 parsed[b'error_info'],
398 )
398 )
399 raise error.Abort(msg)
399 raise error.Abort(msg)
400 return parsed[b'result']
400 return parsed[b'result']
401
401
402
402
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
404 def debugcallconduit(ui, repo, name):
404 def debugcallconduit(ui, repo, name):
405 """call Conduit API
405 """call Conduit API
406
406
407 Call parameters are read from stdin as a JSON blob. Result will be written
407 Call parameters are read from stdin as a JSON blob. Result will be written
408 to stdout as a JSON blob.
408 to stdout as a JSON blob.
409 """
409 """
410 # json.loads only accepts bytes from 3.6+
410 # json.loads only accepts bytes from 3.6+
411 rawparams = encoding.unifromlocal(ui.fin.read())
411 rawparams = encoding.unifromlocal(ui.fin.read())
412 # json.loads only returns unicode strings
412 # json.loads only returns unicode strings
413 params = pycompat.rapply(
413 params = pycompat.rapply(
414 lambda x: encoding.unitolocal(x)
414 lambda x: encoding.unitolocal(x)
415 if isinstance(x, pycompat.unicode)
415 if isinstance(x, pycompat.unicode)
416 else x,
416 else x,
417 pycompat.json_loads(rawparams),
417 pycompat.json_loads(rawparams),
418 )
418 )
419 # json.dumps only accepts unicode strings
419 # json.dumps only accepts unicode strings
420 result = pycompat.rapply(
420 result = pycompat.rapply(
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
422 callconduit(ui, name, params),
422 callconduit(ui, name, params),
423 )
423 )
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
426
426
427
427
428 def getrepophid(repo):
428 def getrepophid(repo):
429 """given callsign, return repository PHID or None"""
429 """given callsign, return repository PHID or None"""
430 # developer config: phabricator.repophid
430 # developer config: phabricator.repophid
431 repophid = repo.ui.config(b'phabricator', b'repophid')
431 repophid = repo.ui.config(b'phabricator', b'repophid')
432 if repophid:
432 if repophid:
433 return repophid
433 return repophid
434 callsign = repo.ui.config(b'phabricator', b'callsign')
434 callsign = repo.ui.config(b'phabricator', b'callsign')
435 if not callsign:
435 if not callsign:
436 return None
436 return None
437 query = callconduit(
437 query = callconduit(
438 repo.ui,
438 repo.ui,
439 b'diffusion.repository.search',
439 b'diffusion.repository.search',
440 {b'constraints': {b'callsigns': [callsign]}},
440 {b'constraints': {b'callsigns': [callsign]}},
441 )
441 )
442 if len(query[b'data']) == 0:
442 if len(query[b'data']) == 0:
443 return None
443 return None
444 repophid = query[b'data'][0][b'phid']
444 repophid = query[b'data'][0][b'phid']
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
446 return repophid
446 return repophid
447
447
448
448
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
450 _differentialrevisiondescre = re.compile(
450 _differentialrevisiondescre = re.compile(
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
452 )
452 )
453
453
454
454
455 def getoldnodedrevmap(repo, nodelist):
455 def getoldnodedrevmap(repo, nodelist):
456 """find previous nodes that has been sent to Phabricator
456 """find previous nodes that has been sent to Phabricator
457
457
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
459 for node in nodelist with known previous sent versions, or associated
459 for node in nodelist with known previous sent versions, or associated
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
461 be ``None``.
461 be ``None``.
462
462
463 Examines commit messages like "Differential Revision:" to get the
463 Examines commit messages like "Differential Revision:" to get the
464 association information.
464 association information.
465
465
466 If such commit message line is not found, examines all precursors and their
466 If such commit message line is not found, examines all precursors and their
467 tags. Tags with format like "D1234" are considered a match and the node
467 tags. Tags with format like "D1234" are considered a match and the node
468 with that tag, and the number after "D" (ex. 1234) will be returned.
468 with that tag, and the number after "D" (ex. 1234) will be returned.
469
469
470 The ``old node``, if not None, is guaranteed to be the last diff of
470 The ``old node``, if not None, is guaranteed to be the last diff of
471 corresponding Differential Revision, and exist in the repo.
471 corresponding Differential Revision, and exist in the repo.
472 """
472 """
473 unfi = repo.unfiltered()
473 unfi = repo.unfiltered()
474 has_node = unfi.changelog.index.has_node
474 has_node = unfi.changelog.index.has_node
475
475
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
477 # ordered for test stability when printing new -> old mapping below
477 # ordered for test stability when printing new -> old mapping below
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
479 for node in nodelist:
479 for node in nodelist:
480 ctx = unfi[node]
480 ctx = unfi[node]
481 # For tags like "D123", put them into "toconfirm" to verify later
481 # For tags like "D123", put them into "toconfirm" to verify later
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
483 for n in precnodes:
483 for n in precnodes:
484 if has_node(n):
484 if has_node(n):
485 for tag in unfi.nodetags(n):
485 for tag in unfi.nodetags(n):
486 m = _differentialrevisiontagre.match(tag)
486 m = _differentialrevisiontagre.match(tag)
487 if m:
487 if m:
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
489 break
489 break
490 else:
490 else:
491 continue # move to next predecessor
491 continue # move to next predecessor
492 break # found a tag, stop
492 break # found a tag, stop
493 else:
493 else:
494 # Check commit message
494 # Check commit message
495 m = _differentialrevisiondescre.search(ctx.description())
495 m = _differentialrevisiondescre.search(ctx.description())
496 if m:
496 if m:
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
498
498
499 # Double check if tags are genuine by collecting all old nodes from
499 # Double check if tags are genuine by collecting all old nodes from
500 # Phabricator, and expect precursors overlap with it.
500 # Phabricator, and expect precursors overlap with it.
501 if toconfirm:
501 if toconfirm:
502 drevs = [drev for force, precs, drev in toconfirm.values()]
502 drevs = [drev for force, precs, drev in toconfirm.values()]
503 alldiffs = callconduit(
503 alldiffs = callconduit(
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
505 )
505 )
506
506
507 def getnodes(d, precset):
507 def getnodes(d, precset):
508 # Ignore other nodes that were combined into the Differential
508 # Ignore other nodes that were combined into the Differential
509 # that aren't predecessors of the current local node.
509 # that aren't predecessors of the current local node.
510 return [n for n in getlocalcommits(d) if n in precset]
510 return [n for n in getlocalcommits(d) if n in precset]
511
511
512 for newnode, (force, precset, drev) in toconfirm.items():
512 for newnode, (force, precset, drev) in toconfirm.items():
513 diffs = [
513 diffs = [
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
515 ]
515 ]
516
516
517 # local predecessors known by Phabricator
517 # local predecessors known by Phabricator
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
519
519
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
521 # and force is not set (when commit message says nothing)
521 # and force is not set (when commit message says nothing)
522 if not force and not phprecset:
522 if not force and not phprecset:
523 tagname = b'D%d' % drev
523 tagname = b'D%d' % drev
524 tags.tag(
524 tags.tag(
525 repo,
525 repo,
526 tagname,
526 tagname,
527 nullid,
527 nullid,
528 message=None,
528 message=None,
529 user=None,
529 user=None,
530 date=None,
530 date=None,
531 local=True,
531 local=True,
532 )
532 )
533 unfi.ui.warn(
533 unfi.ui.warn(
534 _(
534 _(
535 b'D%d: local tag removed - does not match '
535 b'D%d: local tag removed - does not match '
536 b'Differential history\n'
536 b'Differential history\n'
537 )
537 )
538 % drev
538 % drev
539 )
539 )
540 continue
540 continue
541
541
542 # Find the last node using Phabricator metadata, and make sure it
542 # Find the last node using Phabricator metadata, and make sure it
543 # exists in the repo
543 # exists in the repo
544 oldnode = lastdiff = None
544 oldnode = lastdiff = None
545 if diffs:
545 if diffs:
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
547 oldnodes = getnodes(lastdiff, precset)
547 oldnodes = getnodes(lastdiff, precset)
548
548
549 _debug(
549 _debug(
550 unfi.ui,
550 unfi.ui,
551 b"%s mapped to old nodes %s\n"
551 b"%s mapped to old nodes %s\n"
552 % (
552 % (
553 short(newnode),
553 short(newnode),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
555 ),
555 ),
556 )
556 )
557
557
558 # If this commit was the result of `hg fold` after submission,
558 # If this commit was the result of `hg fold` after submission,
559 # and now resubmitted with --fold, the easiest thing to do is
559 # and now resubmitted with --fold, the easiest thing to do is
560 # to leave the node clear. This only results in creating a new
560 # to leave the node clear. This only results in creating a new
561 # diff for the _same_ Differential Revision if this commit is
561 # diff for the _same_ Differential Revision if this commit is
562 # the first or last in the selected range. If we picked a node
562 # the first or last in the selected range. If we picked a node
563 # from the list instead, it would have to be the lowest if at
563 # from the list instead, it would have to be the lowest if at
564 # the beginning of the --fold range, or the highest at the end.
564 # the beginning of the --fold range, or the highest at the end.
565 # Otherwise, one or more of the nodes wouldn't be considered in
565 # Otherwise, one or more of the nodes wouldn't be considered in
566 # the diff, and the Differential wouldn't be properly updated.
566 # the diff, and the Differential wouldn't be properly updated.
567 # If this commit is the result of `hg split` in the same
567 # If this commit is the result of `hg split` in the same
568 # scenario, there is a single oldnode here (and multiple
568 # scenario, there is a single oldnode here (and multiple
569 # newnodes mapped to it). That makes it the same as the normal
569 # newnodes mapped to it). That makes it the same as the normal
570 # case, as the edges of the newnode range cleanly maps to one
570 # case, as the edges of the newnode range cleanly maps to one
571 # oldnode each.
571 # oldnode each.
572 if len(oldnodes) == 1:
572 if len(oldnodes) == 1:
573 oldnode = oldnodes[0]
573 oldnode = oldnodes[0]
574 if oldnode and not has_node(oldnode):
574 if oldnode and not has_node(oldnode):
575 oldnode = None
575 oldnode = None
576
576
577 result[newnode] = (oldnode, lastdiff, drev)
577 result[newnode] = (oldnode, lastdiff, drev)
578
578
579 return result
579 return result
580
580
581
581
582 def getdrevmap(repo, revs):
582 def getdrevmap(repo, revs):
583 """Return a dict mapping each rev in `revs` to their Differential Revision
583 """Return a dict mapping each rev in `revs` to their Differential Revision
584 ID or None.
584 ID or None.
585 """
585 """
586 result = {}
586 result = {}
587 for rev in revs:
587 for rev in revs:
588 result[rev] = None
588 result[rev] = None
589 ctx = repo[rev]
589 ctx = repo[rev]
590 # Check commit message
590 # Check commit message
591 m = _differentialrevisiondescre.search(ctx.description())
591 m = _differentialrevisiondescre.search(ctx.description())
592 if m:
592 if m:
593 result[rev] = int(m.group('id'))
593 result[rev] = int(m.group('id'))
594 continue
594 continue
595 # Check tags
595 # Check tags
596 for tag in repo.nodetags(ctx.node()):
596 for tag in repo.nodetags(ctx.node()):
597 m = _differentialrevisiontagre.match(tag)
597 m = _differentialrevisiontagre.match(tag)
598 if m:
598 if m:
599 result[rev] = int(m.group(1))
599 result[rev] = int(m.group(1))
600 break
600 break
601
601
602 return result
602 return result
603
603
604
604
605 def getdiff(basectx, ctx, diffopts):
605 def getdiff(basectx, ctx, diffopts):
606 """plain-text diff without header (user, commit message, etc)"""
606 """plain-text diff without header (user, commit message, etc)"""
607 output = util.stringio()
607 output = util.stringio()
608 for chunk, _label in patch.diffui(
608 for chunk, _label in patch.diffui(
609 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
609 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
610 ):
610 ):
611 output.write(chunk)
611 output.write(chunk)
612 return output.getvalue()
612 return output.getvalue()
613
613
614
614
615 class DiffChangeType(object):
615 class DiffChangeType(object):
616 ADD = 1
616 ADD = 1
617 CHANGE = 2
617 CHANGE = 2
618 DELETE = 3
618 DELETE = 3
619 MOVE_AWAY = 4
619 MOVE_AWAY = 4
620 COPY_AWAY = 5
620 COPY_AWAY = 5
621 MOVE_HERE = 6
621 MOVE_HERE = 6
622 COPY_HERE = 7
622 COPY_HERE = 7
623 MULTICOPY = 8
623 MULTICOPY = 8
624
624
625
625
626 class DiffFileType(object):
626 class DiffFileType(object):
627 TEXT = 1
627 TEXT = 1
628 IMAGE = 2
628 IMAGE = 2
629 BINARY = 3
629 BINARY = 3
630
630
631
631
632 @attr.s
632 @attr.s
633 class phabhunk(dict):
633 class phabhunk(dict):
634 """Represents a Differential hunk, which is owned by a Differential change
634 """Represents a Differential hunk, which is owned by a Differential change
635 """
635 """
636
636
637 oldOffset = attr.ib(default=0) # camelcase-required
637 oldOffset = attr.ib(default=0) # camelcase-required
638 oldLength = attr.ib(default=0) # camelcase-required
638 oldLength = attr.ib(default=0) # camelcase-required
639 newOffset = attr.ib(default=0) # camelcase-required
639 newOffset = attr.ib(default=0) # camelcase-required
640 newLength = attr.ib(default=0) # camelcase-required
640 newLength = attr.ib(default=0) # camelcase-required
641 corpus = attr.ib(default='')
641 corpus = attr.ib(default='')
642 # These get added to the phabchange's equivalents
642 # These get added to the phabchange's equivalents
643 addLines = attr.ib(default=0) # camelcase-required
643 addLines = attr.ib(default=0) # camelcase-required
644 delLines = attr.ib(default=0) # camelcase-required
644 delLines = attr.ib(default=0) # camelcase-required
645
645
646
646
647 @attr.s
647 @attr.s
648 class phabchange(object):
648 class phabchange(object):
649 """Represents a Differential change, owns Differential hunks and owned by a
649 """Represents a Differential change, owns Differential hunks and owned by a
650 Differential diff. Each one represents one file in a diff.
650 Differential diff. Each one represents one file in a diff.
651 """
651 """
652
652
653 currentPath = attr.ib(default=None) # camelcase-required
653 currentPath = attr.ib(default=None) # camelcase-required
654 oldPath = attr.ib(default=None) # camelcase-required
654 oldPath = attr.ib(default=None) # camelcase-required
655 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
655 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
656 metadata = attr.ib(default=attr.Factory(dict))
656 metadata = attr.ib(default=attr.Factory(dict))
657 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
657 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
658 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
658 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
659 type = attr.ib(default=DiffChangeType.CHANGE)
659 type = attr.ib(default=DiffChangeType.CHANGE)
660 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
660 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
661 commitHash = attr.ib(default=None) # camelcase-required
661 commitHash = attr.ib(default=None) # camelcase-required
662 addLines = attr.ib(default=0) # camelcase-required
662 addLines = attr.ib(default=0) # camelcase-required
663 delLines = attr.ib(default=0) # camelcase-required
663 delLines = attr.ib(default=0) # camelcase-required
664 hunks = attr.ib(default=attr.Factory(list))
664 hunks = attr.ib(default=attr.Factory(list))
665
665
666 def copynewmetadatatoold(self):
666 def copynewmetadatatoold(self):
667 for key in list(self.metadata.keys()):
667 for key in list(self.metadata.keys()):
668 newkey = key.replace(b'new:', b'old:')
668 newkey = key.replace(b'new:', b'old:')
669 self.metadata[newkey] = self.metadata[key]
669 self.metadata[newkey] = self.metadata[key]
670
670
671 def addoldmode(self, value):
671 def addoldmode(self, value):
672 self.oldProperties[b'unix:filemode'] = value
672 self.oldProperties[b'unix:filemode'] = value
673
673
674 def addnewmode(self, value):
674 def addnewmode(self, value):
675 self.newProperties[b'unix:filemode'] = value
675 self.newProperties[b'unix:filemode'] = value
676
676
677 def addhunk(self, hunk):
677 def addhunk(self, hunk):
678 if not isinstance(hunk, phabhunk):
678 if not isinstance(hunk, phabhunk):
679 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
679 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
680 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
680 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
681 # It's useful to include these stats since the Phab web UI shows them,
681 # It's useful to include these stats since the Phab web UI shows them,
682 # and uses them to estimate how large a change a Revision is. Also used
682 # and uses them to estimate how large a change a Revision is. Also used
683 # in email subjects for the [+++--] bit.
683 # in email subjects for the [+++--] bit.
684 self.addLines += hunk.addLines
684 self.addLines += hunk.addLines
685 self.delLines += hunk.delLines
685 self.delLines += hunk.delLines
686
686
687
687
688 @attr.s
688 @attr.s
689 class phabdiff(object):
689 class phabdiff(object):
690 """Represents a Differential diff, owns Differential changes. Corresponds
690 """Represents a Differential diff, owns Differential changes. Corresponds
691 to a commit.
691 to a commit.
692 """
692 """
693
693
694 # Doesn't seem to be any reason to send this (output of uname -n)
694 # Doesn't seem to be any reason to send this (output of uname -n)
695 sourceMachine = attr.ib(default=b'') # camelcase-required
695 sourceMachine = attr.ib(default=b'') # camelcase-required
696 sourcePath = attr.ib(default=b'/') # camelcase-required
696 sourcePath = attr.ib(default=b'/') # camelcase-required
697 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
697 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
698 sourceControlPath = attr.ib(default=b'/') # camelcase-required
698 sourceControlPath = attr.ib(default=b'/') # camelcase-required
699 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
699 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
700 branch = attr.ib(default=b'default')
700 branch = attr.ib(default=b'default')
701 bookmark = attr.ib(default=None)
701 bookmark = attr.ib(default=None)
702 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
702 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
703 lintStatus = attr.ib(default=b'none') # camelcase-required
703 lintStatus = attr.ib(default=b'none') # camelcase-required
704 unitStatus = attr.ib(default=b'none') # camelcase-required
704 unitStatus = attr.ib(default=b'none') # camelcase-required
705 changes = attr.ib(default=attr.Factory(dict))
705 changes = attr.ib(default=attr.Factory(dict))
706 repositoryPHID = attr.ib(default=None) # camelcase-required
706 repositoryPHID = attr.ib(default=None) # camelcase-required
707
707
708 def addchange(self, change):
708 def addchange(self, change):
709 if not isinstance(change, phabchange):
709 if not isinstance(change, phabchange):
710 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
710 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
711 self.changes[change.currentPath] = pycompat.byteskwargs(
711 self.changes[change.currentPath] = pycompat.byteskwargs(
712 attr.asdict(change)
712 attr.asdict(change)
713 )
713 )
714
714
715
715
716 def maketext(pchange, basectx, ctx, fname):
716 def maketext(pchange, basectx, ctx, fname):
717 """populate the phabchange for a text file"""
717 """populate the phabchange for a text file"""
718 repo = ctx.repo()
718 repo = ctx.repo()
719 fmatcher = match.exact([fname])
719 fmatcher = match.exact([fname])
720 diffopts = mdiff.diffopts(git=True, context=32767)
720 diffopts = mdiff.diffopts(git=True, context=32767)
721 _pfctx, _fctx, header, fhunks = next(
721 _pfctx, _fctx, header, fhunks = next(
722 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
722 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
723 )
723 )
724
724
725 for fhunk in fhunks:
725 for fhunk in fhunks:
726 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
726 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
727 corpus = b''.join(lines[1:])
727 corpus = b''.join(lines[1:])
728 shunk = list(header)
728 shunk = list(header)
729 shunk.extend(lines)
729 shunk.extend(lines)
730 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
730 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
731 patch.diffstatdata(util.iterlines(shunk))
731 patch.diffstatdata(util.iterlines(shunk))
732 )
732 )
733 pchange.addhunk(
733 pchange.addhunk(
734 phabhunk(
734 phabhunk(
735 oldOffset,
735 oldOffset,
736 oldLength,
736 oldLength,
737 newOffset,
737 newOffset,
738 newLength,
738 newLength,
739 corpus,
739 corpus,
740 addLines,
740 addLines,
741 delLines,
741 delLines,
742 )
742 )
743 )
743 )
744
744
745
745
746 def uploadchunks(fctx, fphid):
746 def uploadchunks(fctx, fphid):
747 """upload large binary files as separate chunks.
747 """upload large binary files as separate chunks.
748 Phab requests chunking over 8MiB, and splits into 4MiB chunks
748 Phab requests chunking over 8MiB, and splits into 4MiB chunks
749 """
749 """
750 ui = fctx.repo().ui
750 ui = fctx.repo().ui
751 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
751 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
752 with ui.makeprogress(
752 with ui.makeprogress(
753 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
753 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
754 ) as progress:
754 ) as progress:
755 for chunk in chunks:
755 for chunk in chunks:
756 progress.increment()
756 progress.increment()
757 if chunk[b'complete']:
757 if chunk[b'complete']:
758 continue
758 continue
759 bstart = int(chunk[b'byteStart'])
759 bstart = int(chunk[b'byteStart'])
760 bend = int(chunk[b'byteEnd'])
760 bend = int(chunk[b'byteEnd'])
761 callconduit(
761 callconduit(
762 ui,
762 ui,
763 b'file.uploadchunk',
763 b'file.uploadchunk',
764 {
764 {
765 b'filePHID': fphid,
765 b'filePHID': fphid,
766 b'byteStart': bstart,
766 b'byteStart': bstart,
767 b'data': base64.b64encode(fctx.data()[bstart:bend]),
767 b'data': base64.b64encode(fctx.data()[bstart:bend]),
768 b'dataEncoding': b'base64',
768 b'dataEncoding': b'base64',
769 },
769 },
770 )
770 )
771
771
772
772
773 def uploadfile(fctx):
773 def uploadfile(fctx):
774 """upload binary files to Phabricator"""
774 """upload binary files to Phabricator"""
775 repo = fctx.repo()
775 repo = fctx.repo()
776 ui = repo.ui
776 ui = repo.ui
777 fname = fctx.path()
777 fname = fctx.path()
778 size = fctx.size()
778 size = fctx.size()
779 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
779 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
780
780
781 # an allocate call is required first to see if an upload is even required
781 # an allocate call is required first to see if an upload is even required
782 # (Phab might already have it) and to determine if chunking is needed
782 # (Phab might already have it) and to determine if chunking is needed
783 allocateparams = {
783 allocateparams = {
784 b'name': fname,
784 b'name': fname,
785 b'contentLength': size,
785 b'contentLength': size,
786 b'contentHash': fhash,
786 b'contentHash': fhash,
787 }
787 }
788 filealloc = callconduit(ui, b'file.allocate', allocateparams)
788 filealloc = callconduit(ui, b'file.allocate', allocateparams)
789 fphid = filealloc[b'filePHID']
789 fphid = filealloc[b'filePHID']
790
790
791 if filealloc[b'upload']:
791 if filealloc[b'upload']:
792 ui.write(_(b'uploading %s\n') % bytes(fctx))
792 ui.write(_(b'uploading %s\n') % bytes(fctx))
793 if not fphid:
793 if not fphid:
794 uploadparams = {
794 uploadparams = {
795 b'name': fname,
795 b'name': fname,
796 b'data_base64': base64.b64encode(fctx.data()),
796 b'data_base64': base64.b64encode(fctx.data()),
797 }
797 }
798 fphid = callconduit(ui, b'file.upload', uploadparams)
798 fphid = callconduit(ui, b'file.upload', uploadparams)
799 else:
799 else:
800 uploadchunks(fctx, fphid)
800 uploadchunks(fctx, fphid)
801 else:
801 else:
802 ui.debug(b'server already has %s\n' % bytes(fctx))
802 ui.debug(b'server already has %s\n' % bytes(fctx))
803
803
804 if not fphid:
804 if not fphid:
805 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
805 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
806
806
807 return fphid
807 return fphid
808
808
809
809
810 def addoldbinary(pchange, oldfctx, fctx):
810 def addoldbinary(pchange, oldfctx, fctx):
811 """add the metadata for the previous version of a binary file to the
811 """add the metadata for the previous version of a binary file to the
812 phabchange for the new version
812 phabchange for the new version
813
813
814 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
814 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
815 version of the file, or None if the file is being removed.
815 version of the file, or None if the file is being removed.
816 """
816 """
817 if not fctx or fctx.cmp(oldfctx):
817 if not fctx or fctx.cmp(oldfctx):
818 # Files differ, add the old one
818 # Files differ, add the old one
819 pchange.metadata[b'old:file:size'] = oldfctx.size()
819 pchange.metadata[b'old:file:size'] = oldfctx.size()
820 mimeguess, _enc = mimetypes.guess_type(
820 mimeguess, _enc = mimetypes.guess_type(
821 encoding.unifromlocal(oldfctx.path())
821 encoding.unifromlocal(oldfctx.path())
822 )
822 )
823 if mimeguess:
823 if mimeguess:
824 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
824 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
825 mimeguess
825 mimeguess
826 )
826 )
827 fphid = uploadfile(oldfctx)
827 fphid = uploadfile(oldfctx)
828 pchange.metadata[b'old:binary-phid'] = fphid
828 pchange.metadata[b'old:binary-phid'] = fphid
829 else:
829 else:
830 # If it's left as IMAGE/BINARY web UI might try to display it
830 # If it's left as IMAGE/BINARY web UI might try to display it
831 pchange.fileType = DiffFileType.TEXT
831 pchange.fileType = DiffFileType.TEXT
832 pchange.copynewmetadatatoold()
832 pchange.copynewmetadatatoold()
833
833
834
834
835 def makebinary(pchange, fctx):
835 def makebinary(pchange, fctx):
836 """populate the phabchange for a binary file"""
836 """populate the phabchange for a binary file"""
837 pchange.fileType = DiffFileType.BINARY
837 pchange.fileType = DiffFileType.BINARY
838 fphid = uploadfile(fctx)
838 fphid = uploadfile(fctx)
839 pchange.metadata[b'new:binary-phid'] = fphid
839 pchange.metadata[b'new:binary-phid'] = fphid
840 pchange.metadata[b'new:file:size'] = fctx.size()
840 pchange.metadata[b'new:file:size'] = fctx.size()
841 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
841 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
842 if mimeguess:
842 if mimeguess:
843 mimeguess = pycompat.bytestr(mimeguess)
843 mimeguess = pycompat.bytestr(mimeguess)
844 pchange.metadata[b'new:file:mime-type'] = mimeguess
844 pchange.metadata[b'new:file:mime-type'] = mimeguess
845 if mimeguess.startswith(b'image/'):
845 if mimeguess.startswith(b'image/'):
846 pchange.fileType = DiffFileType.IMAGE
846 pchange.fileType = DiffFileType.IMAGE
847
847
848
848
849 # Copied from mercurial/patch.py
849 # Copied from mercurial/patch.py
850 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
850 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
851
851
852
852
853 def notutf8(fctx):
853 def notutf8(fctx):
854 """detect non-UTF-8 text files since Phabricator requires them to be marked
854 """detect non-UTF-8 text files since Phabricator requires them to be marked
855 as binary
855 as binary
856 """
856 """
857 try:
857 try:
858 fctx.data().decode('utf-8')
858 fctx.data().decode('utf-8')
859 return False
859 return False
860 except UnicodeDecodeError:
860 except UnicodeDecodeError:
861 fctx.repo().ui.write(
861 fctx.repo().ui.write(
862 _(b'file %s detected as non-UTF-8, marked as binary\n')
862 _(b'file %s detected as non-UTF-8, marked as binary\n')
863 % fctx.path()
863 % fctx.path()
864 )
864 )
865 return True
865 return True
866
866
867
867
868 def addremoved(pdiff, basectx, ctx, removed):
868 def addremoved(pdiff, basectx, ctx, removed):
869 """add removed files to the phabdiff. Shouldn't include moves"""
869 """add removed files to the phabdiff. Shouldn't include moves"""
870 for fname in removed:
870 for fname in removed:
871 pchange = phabchange(
871 pchange = phabchange(
872 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
872 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
873 )
873 )
874 oldfctx = basectx.p1()[fname]
874 oldfctx = basectx.p1()[fname]
875 pchange.addoldmode(gitmode[oldfctx.flags()])
875 pchange.addoldmode(gitmode[oldfctx.flags()])
876 if not (oldfctx.isbinary() or notutf8(oldfctx)):
876 if not (oldfctx.isbinary() or notutf8(oldfctx)):
877 maketext(pchange, basectx, ctx, fname)
877 maketext(pchange, basectx, ctx, fname)
878
878
879 pdiff.addchange(pchange)
879 pdiff.addchange(pchange)
880
880
881
881
882 def addmodified(pdiff, basectx, ctx, modified):
882 def addmodified(pdiff, basectx, ctx, modified):
883 """add modified files to the phabdiff"""
883 """add modified files to the phabdiff"""
884 for fname in modified:
884 for fname in modified:
885 fctx = ctx[fname]
885 fctx = ctx[fname]
886 oldfctx = basectx.p1()[fname]
886 oldfctx = basectx.p1()[fname]
887 pchange = phabchange(currentPath=fname, oldPath=fname)
887 pchange = phabchange(currentPath=fname, oldPath=fname)
888 filemode = gitmode[fctx.flags()]
888 filemode = gitmode[fctx.flags()]
889 originalmode = gitmode[oldfctx.flags()]
889 originalmode = gitmode[oldfctx.flags()]
890 if filemode != originalmode:
890 if filemode != originalmode:
891 pchange.addoldmode(originalmode)
891 pchange.addoldmode(originalmode)
892 pchange.addnewmode(filemode)
892 pchange.addnewmode(filemode)
893
893
894 if (
894 if (
895 fctx.isbinary()
895 fctx.isbinary()
896 or notutf8(fctx)
896 or notutf8(fctx)
897 or oldfctx.isbinary()
897 or oldfctx.isbinary()
898 or notutf8(oldfctx)
898 or notutf8(oldfctx)
899 ):
899 ):
900 makebinary(pchange, fctx)
900 makebinary(pchange, fctx)
901 addoldbinary(pchange, oldfctx, fctx)
901 addoldbinary(pchange, oldfctx, fctx)
902 else:
902 else:
903 maketext(pchange, basectx, ctx, fname)
903 maketext(pchange, basectx, ctx, fname)
904
904
905 pdiff.addchange(pchange)
905 pdiff.addchange(pchange)
906
906
907
907
908 def addadded(pdiff, basectx, ctx, added, removed):
908 def addadded(pdiff, basectx, ctx, added, removed):
909 """add file adds to the phabdiff, both new files and copies/moves"""
909 """add file adds to the phabdiff, both new files and copies/moves"""
910 # Keep track of files that've been recorded as moved/copied, so if there are
910 # Keep track of files that've been recorded as moved/copied, so if there are
911 # additional copies we can mark them (moves get removed from removed)
911 # additional copies we can mark them (moves get removed from removed)
912 copiedchanges = {}
912 copiedchanges = {}
913 movedchanges = {}
913 movedchanges = {}
914
914
915 copy = {}
915 copy = {}
916 if basectx != ctx:
916 if basectx != ctx:
917 copy = copies.pathcopies(basectx.p1(), ctx)
917 copy = copies.pathcopies(basectx.p1(), ctx)
918
918
919 for fname in added:
919 for fname in added:
920 fctx = ctx[fname]
920 fctx = ctx[fname]
921 oldfctx = None
921 oldfctx = None
922 pchange = phabchange(currentPath=fname)
922 pchange = phabchange(currentPath=fname)
923
923
924 filemode = gitmode[fctx.flags()]
924 filemode = gitmode[fctx.flags()]
925
925
926 if copy:
926 if copy:
927 originalfname = copy.get(fname, fname)
927 originalfname = copy.get(fname, fname)
928 else:
928 else:
929 originalfname = fname
929 originalfname = fname
930 if fctx.renamed():
930 if fctx.renamed():
931 originalfname = fctx.renamed()[0]
931 originalfname = fctx.renamed()[0]
932
932
933 renamed = fname != originalfname
933 renamed = fname != originalfname
934
934
935 if renamed:
935 if renamed:
936 oldfctx = basectx.p1()[originalfname]
936 oldfctx = basectx.p1()[originalfname]
937 originalmode = gitmode[oldfctx.flags()]
937 originalmode = gitmode[oldfctx.flags()]
938 pchange.oldPath = originalfname
938 pchange.oldPath = originalfname
939
939
940 if originalfname in removed:
940 if originalfname in removed:
941 origpchange = phabchange(
941 origpchange = phabchange(
942 currentPath=originalfname,
942 currentPath=originalfname,
943 oldPath=originalfname,
943 oldPath=originalfname,
944 type=DiffChangeType.MOVE_AWAY,
944 type=DiffChangeType.MOVE_AWAY,
945 awayPaths=[fname],
945 awayPaths=[fname],
946 )
946 )
947 movedchanges[originalfname] = origpchange
947 movedchanges[originalfname] = origpchange
948 removed.remove(originalfname)
948 removed.remove(originalfname)
949 pchange.type = DiffChangeType.MOVE_HERE
949 pchange.type = DiffChangeType.MOVE_HERE
950 elif originalfname in movedchanges:
950 elif originalfname in movedchanges:
951 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
951 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
952 movedchanges[originalfname].awayPaths.append(fname)
952 movedchanges[originalfname].awayPaths.append(fname)
953 pchange.type = DiffChangeType.COPY_HERE
953 pchange.type = DiffChangeType.COPY_HERE
954 else: # pure copy
954 else: # pure copy
955 if originalfname not in copiedchanges:
955 if originalfname not in copiedchanges:
956 origpchange = phabchange(
956 origpchange = phabchange(
957 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
957 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
958 )
958 )
959 copiedchanges[originalfname] = origpchange
959 copiedchanges[originalfname] = origpchange
960 else:
960 else:
961 origpchange = copiedchanges[originalfname]
961 origpchange = copiedchanges[originalfname]
962 origpchange.awayPaths.append(fname)
962 origpchange.awayPaths.append(fname)
963 pchange.type = DiffChangeType.COPY_HERE
963 pchange.type = DiffChangeType.COPY_HERE
964
964
965 if filemode != originalmode:
965 if filemode != originalmode:
966 pchange.addoldmode(originalmode)
966 pchange.addoldmode(originalmode)
967 pchange.addnewmode(filemode)
967 pchange.addnewmode(filemode)
968 else: # Brand-new file
968 else: # Brand-new file
969 pchange.addnewmode(gitmode[fctx.flags()])
969 pchange.addnewmode(gitmode[fctx.flags()])
970 pchange.type = DiffChangeType.ADD
970 pchange.type = DiffChangeType.ADD
971
971
972 if (
972 if (
973 fctx.isbinary()
973 fctx.isbinary()
974 or notutf8(fctx)
974 or notutf8(fctx)
975 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
975 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
976 ):
976 ):
977 makebinary(pchange, fctx)
977 makebinary(pchange, fctx)
978 if renamed:
978 if renamed:
979 addoldbinary(pchange, oldfctx, fctx)
979 addoldbinary(pchange, oldfctx, fctx)
980 else:
980 else:
981 maketext(pchange, basectx, ctx, fname)
981 maketext(pchange, basectx, ctx, fname)
982
982
983 pdiff.addchange(pchange)
983 pdiff.addchange(pchange)
984
984
985 for _path, copiedchange in copiedchanges.items():
985 for _path, copiedchange in copiedchanges.items():
986 pdiff.addchange(copiedchange)
986 pdiff.addchange(copiedchange)
987 for _path, movedchange in movedchanges.items():
987 for _path, movedchange in movedchanges.items():
988 pdiff.addchange(movedchange)
988 pdiff.addchange(movedchange)
989
989
990
990
991 def creatediff(basectx, ctx):
991 def creatediff(basectx, ctx):
992 """create a Differential Diff"""
992 """create a Differential Diff"""
993 repo = ctx.repo()
993 repo = ctx.repo()
994 repophid = getrepophid(repo)
994 repophid = getrepophid(repo)
995 # Create a "Differential Diff" via "differential.creatediff" API
995 # Create a "Differential Diff" via "differential.creatediff" API
996 pdiff = phabdiff(
996 pdiff = phabdiff(
997 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
997 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
998 branch=b'%s' % ctx.branch(),
998 branch=b'%s' % ctx.branch(),
999 )
999 )
1000 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
1000 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
1001 # addadded will remove moved files from removed, so addremoved won't get
1001 # addadded will remove moved files from removed, so addremoved won't get
1002 # them
1002 # them
1003 addadded(pdiff, basectx, ctx, added, removed)
1003 addadded(pdiff, basectx, ctx, added, removed)
1004 addmodified(pdiff, basectx, ctx, modified)
1004 addmodified(pdiff, basectx, ctx, modified)
1005 addremoved(pdiff, basectx, ctx, removed)
1005 addremoved(pdiff, basectx, ctx, removed)
1006 if repophid:
1006 if repophid:
1007 pdiff.repositoryPHID = repophid
1007 pdiff.repositoryPHID = repophid
1008 diff = callconduit(
1008 diff = callconduit(
1009 repo.ui,
1009 repo.ui,
1010 b'differential.creatediff',
1010 b'differential.creatediff',
1011 pycompat.byteskwargs(attr.asdict(pdiff)),
1011 pycompat.byteskwargs(attr.asdict(pdiff)),
1012 )
1012 )
1013 if not diff:
1013 if not diff:
1014 if basectx != ctx:
1014 if basectx != ctx:
1015 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
1015 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
1016 else:
1016 else:
1017 msg = _(b'cannot create diff for %s') % ctx
1017 msg = _(b'cannot create diff for %s') % ctx
1018 raise error.Abort(msg)
1018 raise error.Abort(msg)
1019 return diff
1019 return diff
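# A hedged sketch of the result (values are made up): the conduit response
# returned here carries at least b'phid' and b'diffid', roughly
#
#   {b'diffid': 42, b'phid': b'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx', ...}
#
# The callers below use b'phid' for the b'update' transaction and b'diffid'
# (via writediffproperties) to attach hg metadata.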
1020
1020
1021
1021
1022 def writediffproperties(ctxs, diff):
1022 def writediffproperties(ctxs, diff):
1023 """write metadata to diff so patches could be applied losslessly
1023 """write metadata to diff so patches could be applied losslessly
1024
1024
1025 ``ctxs`` is the list of commits that created the diff, in ascending order.
1025 ``ctxs`` is the list of commits that created the diff, in ascending order.
1026 The list is generally a single commit, but may be several when using
1026 The list is generally a single commit, but may be several when using
1027 ``phabsend --fold``.
1027 ``phabsend --fold``.
1028 """
1028 """
1029 # creatediff returns with a diffid but query returns with an id
1029 # creatediff returns with a diffid but query returns with an id
1030 diffid = diff.get(b'diffid', diff.get(b'id'))
1030 diffid = diff.get(b'diffid', diff.get(b'id'))
1031 basectx = ctxs[0]
1031 basectx = ctxs[0]
1032 tipctx = ctxs[-1]
1032 tipctx = ctxs[-1]
1033
1033
1034 params = {
1034 params = {
1035 b'diff_id': diffid,
1035 b'diff_id': diffid,
1036 b'name': b'hg:meta',
1036 b'name': b'hg:meta',
1037 b'data': templatefilters.json(
1037 b'data': templatefilters.json(
1038 {
1038 {
1039 b'user': tipctx.user(),
1039 b'user': tipctx.user(),
1040 b'date': b'%d %d' % tipctx.date(),
1040 b'date': b'%d %d' % tipctx.date(),
1041 b'branch': tipctx.branch(),
1041 b'branch': tipctx.branch(),
1042 b'node': tipctx.hex(),
1042 b'node': tipctx.hex(),
1043 b'parent': basectx.p1().hex(),
1043 b'parent': basectx.p1().hex(),
1044 }
1044 }
1045 ),
1045 ),
1046 }
1046 }
1047 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1047 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1048
1048
1049 commits = {}
1049 commits = {}
1050 for ctx in ctxs:
1050 for ctx in ctxs:
1051 commits[ctx.hex()] = {
1051 commits[ctx.hex()] = {
1052 b'author': stringutil.person(ctx.user()),
1052 b'author': stringutil.person(ctx.user()),
1053 b'authorEmail': stringutil.email(ctx.user()),
1053 b'authorEmail': stringutil.email(ctx.user()),
1054 b'time': int(ctx.date()[0]),
1054 b'time': int(ctx.date()[0]),
1055 b'commit': ctx.hex(),
1055 b'commit': ctx.hex(),
1056 b'parents': [ctx.p1().hex()],
1056 b'parents': [ctx.p1().hex()],
1057 b'branch': ctx.branch(),
1057 b'branch': ctx.branch(),
1058 }
1058 }
1059 params = {
1059 params = {
1060 b'diff_id': diffid,
1060 b'diff_id': diffid,
1061 b'name': b'local:commits',
1061 b'name': b'local:commits',
1062 b'data': templatefilters.json(commits),
1062 b'data': templatefilters.json(commits),
1063 }
1063 }
1064 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1064 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
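# A hedged sketch of the two properties attached above (hashes shortened and
# made up): for a single-commit diff they end up looking roughly like
#
#   hg:meta        {"branch": "default", "date": "1590000000 0",
#                   "node": "ffffffffffff...", "parent": "aaaaaaaaaaaa...",
#                   "user": "Alice <alice@example.com>"}
#   local:commits  {"ffffffffffff...": {"author": "Alice",
#                   "authorEmail": "alice@example.com", "branch": "default",
#                   "commit": "ffffffffffff...", "parents": ["aaaaaaaaaaaa..."],
#                   "time": 1590000000}}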
1065
1065
1066
1066
1067 def createdifferentialrevision(
1067 def createdifferentialrevision(
1068 ctxs,
1068 ctxs,
1069 revid=None,
1069 revid=None,
1070 parentrevphid=None,
1070 parentrevphid=None,
1071 oldbasenode=None,
1071 oldbasenode=None,
1072 oldnode=None,
1072 oldnode=None,
1073 olddiff=None,
1073 olddiff=None,
1074 actions=None,
1074 actions=None,
1075 comment=None,
1075 comment=None,
1076 ):
1076 ):
1077 """create or update a Differential Revision
1077 """create or update a Differential Revision
1078
1078
1079 If revid is None, create a new Differential Revision, otherwise update
1079 If revid is None, create a new Differential Revision, otherwise update
1080 revid. If parentrevphid is not None, set it as a dependency.
1080 revid. If parentrevphid is not None, set it as a dependency.
1081
1081
1082 If there is a single commit for the new Differential Revision, ``ctxs`` will
1082 If there is a single commit for the new Differential Revision, ``ctxs`` will
1083 be a list of that single context. Otherwise, it is a list that covers the
1083 be a list of that single context. Otherwise, it is a list that covers the
1084 range of changes for the differential, where ``ctxs[0]`` is the first change
1084 range of changes for the differential, where ``ctxs[0]`` is the first change
1085 to include and ``ctxs[-1]`` is the last.
1085 to include and ``ctxs[-1]`` is the last.
1086
1086
1087 If oldnode is not None, check if the patch content (without commit message
1087 If oldnode is not None, check if the patch content (without commit message
1088 and metadata) has changed before creating another diff. For a Revision with
1088 and metadata) has changed before creating another diff. For a Revision with
1089 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1089 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1090 Revision covering multiple commits, ``oldbasenode`` corresponds to
1090 Revision covering multiple commits, ``oldbasenode`` corresponds to
1091 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1091 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1092 corresponds to ``ctxs[-1]``.
1092 corresponds to ``ctxs[-1]``.
1093
1093
1094 If actions is not None, they will be appended to the transaction list.
1094 If actions is not None, they will be appended to the transaction list.
1095 """
1095 """
1096 ctx = ctxs[-1]
1096 ctx = ctxs[-1]
1097 basectx = ctxs[0]
1097 basectx = ctxs[0]
1098
1098
1099 repo = ctx.repo()
1099 repo = ctx.repo()
1100 if oldnode:
1100 if oldnode:
1101 diffopts = mdiff.diffopts(git=True, context=32767)
1101 diffopts = mdiff.diffopts(git=True, context=32767)
1102 unfi = repo.unfiltered()
1102 unfi = repo.unfiltered()
1103 oldctx = unfi[oldnode]
1103 oldctx = unfi[oldnode]
1104 oldbasectx = unfi[oldbasenode]
1104 oldbasectx = unfi[oldbasenode]
1105 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1105 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1106 oldbasectx, oldctx, diffopts
1106 oldbasectx, oldctx, diffopts
1107 )
1107 )
1108 else:
1108 else:
1109 neednewdiff = True
1109 neednewdiff = True
1110
1110
1111 transactions = []
1111 transactions = []
1112 if neednewdiff:
1112 if neednewdiff:
1113 diff = creatediff(basectx, ctx)
1113 diff = creatediff(basectx, ctx)
1114 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1114 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1115 if comment:
1115 if comment:
1116 transactions.append({b'type': b'comment', b'value': comment})
1116 transactions.append({b'type': b'comment', b'value': comment})
1117 else:
1117 else:
1118 # Even if we don't need to upload a new diff because the patch content
1118 # Even if we don't need to upload a new diff because the patch content
1119 # does not change, we might still need to update its metadata so
1119 # does not change, we might still need to update its metadata so
1120 # pushers can know the correct node metadata.
1120 # pushers can know the correct node metadata.
1121 assert olddiff
1121 assert olddiff
1122 diff = olddiff
1122 diff = olddiff
1123 writediffproperties(ctxs, diff)
1123 writediffproperties(ctxs, diff)
1124
1124
1125 # Set the parent Revision every time, so commit re-ordering is picked up
1125 # Set the parent Revision every time, so commit re-ordering is picked up
1126 if parentrevphid:
1126 if parentrevphid:
1127 transactions.append(
1127 transactions.append(
1128 {b'type': b'parents.set', b'value': [parentrevphid]}
1128 {b'type': b'parents.set', b'value': [parentrevphid]}
1129 )
1129 )
1130
1130
1131 if actions:
1131 if actions:
1132 transactions += actions
1132 transactions += actions
1133
1133
1134 # When folding multiple local commits into a single review, arcanist will
1134 # When folding multiple local commits into a single review, arcanist will
1135 # take the summary line of the first commit as the title, and then
1135 # take the summary line of the first commit as the title, and then
1136 # concatenate the rest of the remaining messages (including each of their
1136 # concatenate the rest of the remaining messages (including each of their
1137 # first lines) to the rest of the first commit message (each separated by
1137 # first lines) to the rest of the first commit message (each separated by
1138 # an empty line), and use that as the summary field. Do the same here.
1138 # an empty line), and use that as the summary field. Do the same here.
1139 # For commits with only a one line message, there is no summary field, as
1139 # For commits with only a one line message, there is no summary field, as
1140 # this gets assigned to the title.
1140 # this gets assigned to the title.
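# A hedged example of that folding, assuming differential.parsecommitmessage
# splits each message into a title (first line) and a summary (the rest):
#
#   commit 1: "commit A title\n\nA's summary"
#   commit 2: "commit B title"
#
#   title   -> "commit A title"
#   summary -> "A's summary\n\ncommit B title"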
1141 fields = util.sortdict() # sorted for stable wire protocol in tests
1141 fields = util.sortdict() # sorted for stable wire protocol in tests
1142
1142
1143 for i, _ctx in enumerate(ctxs):
1143 for i, _ctx in enumerate(ctxs):
1144 # Parse commit message and update related fields.
1144 # Parse commit message and update related fields.
1145 desc = _ctx.description()
1145 desc = _ctx.description()
1146 info = callconduit(
1146 info = callconduit(
1147 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1147 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1148 )
1148 )
1149
1149
1150 for k in [b'title', b'summary', b'testPlan']:
1150 for k in [b'title', b'summary', b'testPlan']:
1151 v = info[b'fields'].get(k)
1151 v = info[b'fields'].get(k)
1152 if not v:
1152 if not v:
1153 continue
1153 continue
1154
1154
1155 if i == 0:
1155 if i == 0:
1156 # Title, summary and test plan (if present) are taken verbatim
1156 # Title, summary and test plan (if present) are taken verbatim
1157 # for the first commit.
1157 # for the first commit.
1158 fields[k] = v.rstrip()
1158 fields[k] = v.rstrip()
1159 continue
1159 continue
1160 elif k == b'title':
1160 elif k == b'title':
1161 # Add subsequent titles (i.e. the first line of the commit
1161 # Add subsequent titles (i.e. the first line of the commit
1162 # message) back to the summary.
1162 # message) back to the summary.
1163 k = b'summary'
1163 k = b'summary'
1164
1164
1165 # Append any current field to the existing composite field
1165 # Append any current field to the existing composite field
1166 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1166 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1167
1167
1168 for k, v in fields.items():
1168 for k, v in fields.items():
1169 transactions.append({b'type': k, b'value': v})
1169 transactions.append({b'type': k, b'value': v})
1170
1170
1171 params = {b'transactions': transactions}
1171 params = {b'transactions': transactions}
1172 if revid is not None:
1172 if revid is not None:
1173 # Update an existing Differential Revision
1173 # Update an existing Differential Revision
1174 params[b'objectIdentifier'] = revid
1174 params[b'objectIdentifier'] = revid
1175
1175
1176 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1176 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1177 if not revision:
1177 if not revision:
1178 if len(ctxs) == 1:
1178 if len(ctxs) == 1:
1179 msg = _(b'cannot create revision for %s') % ctx
1179 msg = _(b'cannot create revision for %s') % ctx
1180 else:
1180 else:
1181 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1181 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1182 raise error.Abort(msg)
1182 raise error.Abort(msg)
1183
1183
1184 return revision, diff
1184 return revision, diff
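# A hedged sketch of a typical transaction list sent to
# differential.revision.edit when a new diff was uploaded (PHIDs are made up):
#
#   [{b'type': b'update', b'value': b'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx'},
#    {b'type': b'parents.set', b'value': [b'PHID-DREV-yyyyyyyyyyyyyyyyyyyy']},
#    {b'type': b'reviewers.add', b'value': [b'PHID-USER-zzzzzzzzzzzzzzzzzzzz']},
#    {b'type': b'title', b'value': b'phabricator: avoid a crash'},
#    {b'type': b'summary', b'value': b'Longer explanation.'}]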
1185
1185
1186
1186
1187 def userphids(ui, names):
1187 def userphids(ui, names):
1188 """convert user names to PHIDs"""
1188 """convert user names to PHIDs"""
1189 names = [name.lower() for name in names]
1189 names = [name.lower() for name in names]
1190 query = {b'constraints': {b'usernames': names}}
1190 query = {b'constraints': {b'usernames': names}}
1191 result = callconduit(ui, b'user.search', query)
1191 result = callconduit(ui, b'user.search', query)
1192 # An unknown username is not an error in the API, so check whether we
1192 # An unknown username is not an error in the API, so check whether we
1193 # missed any names here.
1193 # missed any names here.
1194 data = result[b'data']
1194 data = result[b'data']
1195 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1195 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1196 unresolved = set(names) - resolved
1196 unresolved = set(names) - resolved
1197 if unresolved:
1197 if unresolved:
1198 raise error.Abort(
1198 raise error.Abort(
1199 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1199 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1200 )
1200 )
1201 return [entry[b'phid'] for entry in data]
1201 return [entry[b'phid'] for entry in data]
1202
1202
1203
1203
1204 def _print_phabsend_action(ui, ctx, newrevid, action):
1204 def _print_phabsend_action(ui, ctx, newrevid, action):
1205 """print the ``action`` that occurred when posting ``ctx`` for review
1205 """print the ``action`` that occurred when posting ``ctx`` for review
1206
1206
1207 This is a utility function for the sending phase of ``phabsend``, which
1207 This is a utility function for the sending phase of ``phabsend``, which
1208 makes it easier to show a status for all local commits with ``--fold``.
1208 makes it easier to show a status for all local commits with ``--fold``.
1209 """
1209 """
1210 actiondesc = ui.label(
1210 actiondesc = ui.label(
1211 {
1211 {
1212 b'created': _(b'created'),
1212 b'created': _(b'created'),
1213 b'skipped': _(b'skipped'),
1213 b'skipped': _(b'skipped'),
1214 b'updated': _(b'updated'),
1214 b'updated': _(b'updated'),
1215 }[action],
1215 }[action],
1216 b'phabricator.action.%s' % action,
1216 b'phabricator.action.%s' % action,
1217 )
1217 )
1218 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1218 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1219 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1219 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1220 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1220 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1221 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1221 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
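# A hedged sample of the resulting output line (ID and hash are made up):
#
#   D123 - created - 1a2b3c4d5e6f: phabricator: avoid a crash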
1222
1222
1223
1223
1224 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1224 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1225 """update the local commit list for the ``diff`` associated with ``drevid``
1225 """update the local commit list for the ``diff`` associated with ``drevid``
1226
1226
1227 This is a utility function for the amend phase of ``phabsend``, which
1227 This is a utility function for the amend phase of ``phabsend``, which
1228 converts failures to warning messages.
1228 converts failures to warning messages.
1229 """
1229 """
1230 _debug(
1230 _debug(
1231 unfi.ui,
1231 unfi.ui,
1232 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1232 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1233 )
1233 )
1234
1234
1235 try:
1235 try:
1236 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1236 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1237 except util.urlerr.urlerror:
1237 except util.urlerr.urlerror:
1238 # If it fails just warn and keep going, otherwise the DREV
1238 # If it fails just warn and keep going, otherwise the DREV
1239 # associations will be lost
1239 # associations will be lost
1240 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1240 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1241
1241
1242
1242
1243 @vcrcommand(
1243 @vcrcommand(
1244 b'phabsend',
1244 b'phabsend',
1245 [
1245 [
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1250 (
1250 (
1251 b'm',
1251 b'm',
1252 b'comment',
1252 b'comment',
1253 b'',
1253 b'',
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1255 ),
1255 ),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1258 ],
1258 ],
1259 _(b'REV [OPTIONS]'),
1259 _(b'REV [OPTIONS]'),
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1261 )
1261 )
1262 def phabsend(ui, repo, *revs, **opts):
1262 def phabsend(ui, repo, *revs, **opts):
1263 """upload changesets to Phabricator
1263 """upload changesets to Phabricator
1264
1264
1265 If there are multiple revisions specified, they will be sent as a stack
1265 If there are multiple revisions specified, they will be sent as a stack
1266 with a linear dependency relationship using the order specified by the
1266 with a linear dependency relationship using the order specified by the
1267 revset.
1267 revset.
1268
1268
1269 When changesets are uploaded for the first time, local tags will be created
1269 When changesets are uploaded for the first time, local tags will be created
1270 to maintain the association. After the first time, phabsend will check
1270 to maintain the association. After the first time, phabsend will check
1271 obsstore and tags information so it can figure out whether to update an
1271 obsstore and tags information so it can figure out whether to update an
1272 existing Differential Revision, or create a new one.
1272 existing Differential Revision, or create a new one.
1273
1273
1274 If --amend is set, update commit messages so they have the
1274 If --amend is set, update commit messages so they have the
1275 ``Differential Revision`` URL, and remove related tags. This is similar to
1275 ``Differential Revision`` URL, and remove related tags. This is similar to
1276 what arcanist does, and is more desirable in author-push workflows. Otherwise,
1276 what arcanist does, and is more desirable in author-push workflows. Otherwise,
1277 use local tags to record the ``Differential Revision`` association.
1277 use local tags to record the ``Differential Revision`` association.
1278
1278
1279 The --confirm option lets you confirm changesets before sending them. You
1279 The --confirm option lets you confirm changesets before sending them. You
1280 can also add the following to your configuration file to make it the default
1280 can also add the following to your configuration file to make it the default
1281 behaviour::
1281 behaviour::
1282
1282
1283 [phabsend]
1283 [phabsend]
1284 confirm = true
1284 confirm = true
1285
1285
1286 By default, a separate review will be created for each commit that is
1286 By default, a separate review will be created for each commit that is
1287 selected, and the reviews will have the same parent/child relationships in Phabricator.
1287 selected, and the reviews will have the same parent/child relationships in Phabricator.
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1289 as if diffed from the parent of the first revision to the last. The commit
1289 as if diffed from the parent of the first revision to the last. The commit
1290 messages are concatenated in the summary field on Phabricator.
1290 messages are concatenated in the summary field on Phabricator.
1291
1291
1292 phabsend will check obsstore and the above association to decide whether to
1292 phabsend will check obsstore and the above association to decide whether to
1293 update an existing Differential Revision, or create a new one.
1293 update an existing Differential Revision, or create a new one.
1294 """
1294 """
1295 opts = pycompat.byteskwargs(opts)
1295 opts = pycompat.byteskwargs(opts)
1296 revs = list(revs) + opts.get(b'rev', [])
1296 revs = list(revs) + opts.get(b'rev', [])
1297 revs = scmutil.revrange(repo, revs)
1297 revs = scmutil.revrange(repo, revs)
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1299
1299
1300 if not revs:
1300 if not revs:
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1302 if opts.get(b'amend'):
1302 if opts.get(b'amend'):
1303 cmdutil.checkunfinished(repo)
1303 cmdutil.checkunfinished(repo)
1304
1304
1305 ctxs = [repo[rev] for rev in revs]
1305 ctxs = [repo[rev] for rev in revs]
1306
1306
1307 if any(c for c in ctxs if c.obsolete()):
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1309
1307 fold = opts.get(b'fold')
1310 fold = opts.get(b'fold')
1308 if fold:
1311 if fold:
1309 if len(revs) == 1:
1312 if len(revs) == 1:
1310 # TODO: just switch to --no-fold instead?
1313 # TODO: just switch to --no-fold instead?
1311 raise error.Abort(_(b"cannot fold a single revision"))
1314 raise error.Abort(_(b"cannot fold a single revision"))
1312
1315
1313 # There's no clear way to manage multiple commits with a Dxxx tag, so
1316 # There's no clear way to manage multiple commits with a Dxxx tag, so
1314 # require the amend option. (We could append "_nnn", but then it
1317 # require the amend option. (We could append "_nnn", but then it
1315 # becomes jumbled if earlier commits are added to an update.) It should
1318 # becomes jumbled if earlier commits are added to an update.) It should
1316 # lock the repo and ensure that the range is editable, but that would
1319 # lock the repo and ensure that the range is editable, but that would
1317 # make the code pretty convoluted. The default behavior of `arc` is to
1320 # make the code pretty convoluted. The default behavior of `arc` is to
1318 # create a new review anyway.
1321 # create a new review anyway.
1319 if not opts.get(b"amend"):
1322 if not opts.get(b"amend"):
1320 raise error.Abort(_(b"cannot fold with --no-amend"))
1323 raise error.Abort(_(b"cannot fold with --no-amend"))
1321
1324
1322 # Ensure the local commits are an unbroken range
1325 # Ensure the local commits are an unbroken range
1323 revrange = repo.revs(b'(first(%ld)::last(%ld))', revs, revs)
1326 revrange = repo.revs(b'(first(%ld)::last(%ld))', revs, revs)
1324 if any(r for r in revs if r not in revrange) or any(
1327 if any(r for r in revs if r not in revrange) or any(
1325 r for r in revrange if r not in revs
1328 r for r in revrange if r not in revs
1326 ):
1329 ):
1327 raise error.Abort(_(b"cannot fold non-linear revisions"))
1330 raise error.Abort(_(b"cannot fold non-linear revisions"))
1328
1331
1329 # It might be possible to bucketize the revisions by the DREV value, and
1332 # It might be possible to bucketize the revisions by the DREV value, and
1330 # iterate over those groups when posting, and then again when amending.
1333 # iterate over those groups when posting, and then again when amending.
1331 # But for simplicity, require all selected revisions to be for the same
1334 # But for simplicity, require all selected revisions to be for the same
1332 # DREV (if present). Adding local revisions to an existing DREV is
1335 # DREV (if present). Adding local revisions to an existing DREV is
1333 # acceptable.
1336 # acceptable.
1334 drevmatchers = [
1337 drevmatchers = [
1335 _differentialrevisiondescre.search(ctx.description())
1338 _differentialrevisiondescre.search(ctx.description())
1336 for ctx in ctxs
1339 for ctx in ctxs
1337 ]
1340 ]
1338 if len({m.group('url') for m in drevmatchers if m}) > 1:
1341 if len({m.group('url') for m in drevmatchers if m}) > 1:
1339 raise error.Abort(
1342 raise error.Abort(
1340 _(b"cannot fold revisions with different DREV values")
1343 _(b"cannot fold revisions with different DREV values")
1341 )
1344 )
1342
1345
1343 # {newnode: (oldnode, olddiff, olddrev)}
1346 # {newnode: (oldnode, olddiff, olddrev)}
1344 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1347 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1345
1348
1346 confirm = ui.configbool(b'phabsend', b'confirm')
1349 confirm = ui.configbool(b'phabsend', b'confirm')
1347 confirm |= bool(opts.get(b'confirm'))
1350 confirm |= bool(opts.get(b'confirm'))
1348 if confirm:
1351 if confirm:
1349 confirmed = _confirmbeforesend(repo, revs, oldmap)
1352 confirmed = _confirmbeforesend(repo, revs, oldmap)
1350 if not confirmed:
1353 if not confirmed:
1351 raise error.Abort(_(b'phabsend cancelled'))
1354 raise error.Abort(_(b'phabsend cancelled'))
1352
1355
1353 actions = []
1356 actions = []
1354 reviewers = opts.get(b'reviewer', [])
1357 reviewers = opts.get(b'reviewer', [])
1355 blockers = opts.get(b'blocker', [])
1358 blockers = opts.get(b'blocker', [])
1356 phids = []
1359 phids = []
1357 if reviewers:
1360 if reviewers:
1358 phids.extend(userphids(repo.ui, reviewers))
1361 phids.extend(userphids(repo.ui, reviewers))
1359 if blockers:
1362 if blockers:
1360 phids.extend(
1363 phids.extend(
1361 map(
1364 map(
1362 lambda phid: b'blocking(%s)' % phid,
1365 lambda phid: b'blocking(%s)' % phid,
1363 userphids(repo.ui, blockers),
1366 userphids(repo.ui, blockers),
1364 )
1367 )
1365 )
1368 )
1366 if phids:
1369 if phids:
1367 actions.append({b'type': b'reviewers.add', b'value': phids})
1370 actions.append({b'type': b'reviewers.add', b'value': phids})
1368
1371
1369 drevids = [] # [int]
1372 drevids = [] # [int]
1370 diffmap = {} # {newnode: diff}
1373 diffmap = {} # {newnode: diff}
1371
1374
1372 # Send patches one by one so we know their Differential Revision PHIDs and
1375 # Send patches one by one so we know their Differential Revision PHIDs and
1373 # can provide dependency relationship
1376 # can provide dependency relationship
1374 lastrevphid = None
1377 lastrevphid = None
1375 for ctx in ctxs:
1378 for ctx in ctxs:
1376 if fold:
1379 if fold:
1377 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1380 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1378 else:
1381 else:
1379 ui.debug(b'sending rev %d\n' % ctx.rev())
1382 ui.debug(b'sending rev %d\n' % ctx.rev())
1380
1383
1381 # Get Differential Revision ID
1384 # Get Differential Revision ID
1382 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1385 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1383 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1386 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1384
1387
1385 if fold:
1388 if fold:
1386 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1389 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1387 ctxs[-1].node(), (None, None, None)
1390 ctxs[-1].node(), (None, None, None)
1388 )
1391 )
1389
1392
1390 if oldnode != ctx.node() or opts.get(b'amend'):
1393 if oldnode != ctx.node() or opts.get(b'amend'):
1391 # Create or update Differential Revision
1394 # Create or update Differential Revision
1392 revision, diff = createdifferentialrevision(
1395 revision, diff = createdifferentialrevision(
1393 ctxs if fold else [ctx],
1396 ctxs if fold else [ctx],
1394 revid,
1397 revid,
1395 lastrevphid,
1398 lastrevphid,
1396 oldbasenode,
1399 oldbasenode,
1397 oldnode,
1400 oldnode,
1398 olddiff,
1401 olddiff,
1399 actions,
1402 actions,
1400 opts.get(b'comment'),
1403 opts.get(b'comment'),
1401 )
1404 )
1402
1405
1403 if fold:
1406 if fold:
1404 for ctx in ctxs:
1407 for ctx in ctxs:
1405 diffmap[ctx.node()] = diff
1408 diffmap[ctx.node()] = diff
1406 else:
1409 else:
1407 diffmap[ctx.node()] = diff
1410 diffmap[ctx.node()] = diff
1408
1411
1409 newrevid = int(revision[b'object'][b'id'])
1412 newrevid = int(revision[b'object'][b'id'])
1410 newrevphid = revision[b'object'][b'phid']
1413 newrevphid = revision[b'object'][b'phid']
1411 if revid:
1414 if revid:
1412 action = b'updated'
1415 action = b'updated'
1413 else:
1416 else:
1414 action = b'created'
1417 action = b'created'
1415
1418
1416 # Create a local tag to note the association, if the commit message
1419 # Create a local tag to note the association, if the commit message
1417 # does not have it already
1420 # does not have it already
1418 if not fold:
1421 if not fold:
1419 m = _differentialrevisiondescre.search(ctx.description())
1422 m = _differentialrevisiondescre.search(ctx.description())
1420 if not m or int(m.group('id')) != newrevid:
1423 if not m or int(m.group('id')) != newrevid:
1421 tagname = b'D%d' % newrevid
1424 tagname = b'D%d' % newrevid
1422 tags.tag(
1425 tags.tag(
1423 repo,
1426 repo,
1424 tagname,
1427 tagname,
1425 ctx.node(),
1428 ctx.node(),
1426 message=None,
1429 message=None,
1427 user=None,
1430 user=None,
1428 date=None,
1431 date=None,
1429 local=True,
1432 local=True,
1430 )
1433 )
1431 else:
1434 else:
1432 # Nothing changed. But still set "newrevphid" so the next revision
1435 # Nothing changed. But still set "newrevphid" so the next revision
1433 # could depend on this one and "newrevid" for the summary line.
1436 # could depend on this one and "newrevid" for the summary line.
1434 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1437 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1435 newrevid = revid
1438 newrevid = revid
1436 action = b'skipped'
1439 action = b'skipped'
1437
1440
1438 drevids.append(newrevid)
1441 drevids.append(newrevid)
1439 lastrevphid = newrevphid
1442 lastrevphid = newrevphid
1440
1443
1441 if fold:
1444 if fold:
1442 for c in ctxs:
1445 for c in ctxs:
1443 if oldmap.get(c.node(), (None, None, None))[2]:
1446 if oldmap.get(c.node(), (None, None, None))[2]:
1444 action = b'updated'
1447 action = b'updated'
1445 else:
1448 else:
1446 action = b'created'
1449 action = b'created'
1447 _print_phabsend_action(ui, c, newrevid, action)
1450 _print_phabsend_action(ui, c, newrevid, action)
1448 break
1451 break
1449
1452
1450 _print_phabsend_action(ui, ctx, newrevid, action)
1453 _print_phabsend_action(ui, ctx, newrevid, action)
1451
1454
1452 # Update commit messages and remove tags
1455 # Update commit messages and remove tags
1453 if opts.get(b'amend'):
1456 if opts.get(b'amend'):
1454 unfi = repo.unfiltered()
1457 unfi = repo.unfiltered()
1455 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1458 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1456 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1459 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1457 wnode = unfi[b'.'].node()
1460 wnode = unfi[b'.'].node()
1458 mapping = {} # {oldnode: [newnode]}
1461 mapping = {} # {oldnode: [newnode]}
1459 newnodes = []
1462 newnodes = []
1460
1463
1461 drevid = drevids[0]
1464 drevid = drevids[0]
1462
1465
1463 for i, rev in enumerate(revs):
1466 for i, rev in enumerate(revs):
1464 old = unfi[rev]
1467 old = unfi[rev]
1465 if not fold:
1468 if not fold:
1466 drevid = drevids[i]
1469 drevid = drevids[i]
1467 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1470 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1468
1471
1469 newdesc = get_amended_desc(drev, old, fold)
1472 newdesc = get_amended_desc(drev, old, fold)
1470 # Make sure the commit message contains "Differential Revision"
1473 # Make sure the commit message contains "Differential Revision"
1471 if (
1474 if (
1472 old.description() != newdesc
1475 old.description() != newdesc
1473 or old.p1().node() in mapping
1476 or old.p1().node() in mapping
1474 or old.p2().node() in mapping
1477 or old.p2().node() in mapping
1475 ):
1478 ):
1476 if old.phase() == phases.public:
1479 if old.phase() == phases.public:
1477 ui.warn(
1480 ui.warn(
1478 _(b"warning: not updating public commit %s\n")
1481 _(b"warning: not updating public commit %s\n")
1479 % scmutil.formatchangeid(old)
1482 % scmutil.formatchangeid(old)
1480 )
1483 )
1481 continue
1484 continue
1482 parents = [
1485 parents = [
1483 mapping.get(old.p1().node(), (old.p1(),))[0],
1486 mapping.get(old.p1().node(), (old.p1(),))[0],
1484 mapping.get(old.p2().node(), (old.p2(),))[0],
1487 mapping.get(old.p2().node(), (old.p2(),))[0],
1485 ]
1488 ]
1486 new = context.metadataonlyctx(
1489 new = context.metadataonlyctx(
1487 repo,
1490 repo,
1488 old,
1491 old,
1489 parents=parents,
1492 parents=parents,
1490 text=newdesc,
1493 text=newdesc,
1491 user=old.user(),
1494 user=old.user(),
1492 date=old.date(),
1495 date=old.date(),
1493 extra=old.extra(),
1496 extra=old.extra(),
1494 )
1497 )
1495
1498
1496 newnode = new.commit()
1499 newnode = new.commit()
1497
1500
1498 mapping[old.node()] = [newnode]
1501 mapping[old.node()] = [newnode]
1499
1502
1500 if fold:
1503 if fold:
1501 # Defer updating the (single) Diff until all nodes are
1504 # Defer updating the (single) Diff until all nodes are
1502 # collected. No tags were created, so none need to be
1505 # collected. No tags were created, so none need to be
1503 # removed.
1506 # removed.
1504 newnodes.append(newnode)
1507 newnodes.append(newnode)
1505 continue
1508 continue
1506
1509
1507 _amend_diff_properties(
1510 _amend_diff_properties(
1508 unfi, drevid, [newnode], diffmap[old.node()]
1511 unfi, drevid, [newnode], diffmap[old.node()]
1509 )
1512 )
1510
1513
1511 # Remove the local tag since it's no longer necessary
1514 # Remove the local tag since it's no longer necessary
1512 tagname = b'D%d' % drevid
1515 tagname = b'D%d' % drevid
1513 if tagname in repo.tags():
1516 if tagname in repo.tags():
1514 tags.tag(
1517 tags.tag(
1515 repo,
1518 repo,
1516 tagname,
1519 tagname,
1517 nullid,
1520 nullid,
1518 message=None,
1521 message=None,
1519 user=None,
1522 user=None,
1520 date=None,
1523 date=None,
1521 local=True,
1524 local=True,
1522 )
1525 )
1523 elif fold:
1526 elif fold:
1524 # When folding multiple commits into one review with
1527 # When folding multiple commits into one review with
1525 # --fold, track even the commits that weren't amended, so
1528 # --fold, track even the commits that weren't amended, so
1526 # that their association isn't lost if the properties are
1529 # that their association isn't lost if the properties are
1527 # rewritten below.
1530 # rewritten below.
1528 newnodes.append(old.node())
1531 newnodes.append(old.node())
1529
1532
1530 # If the submitted commits are public, no amend takes place so
1533 # If the submitted commits are public, no amend takes place so
1531 # there are no newnodes and therefore no diff update to do.
1534 # there are no newnodes and therefore no diff update to do.
1532 if fold and newnodes:
1535 if fold and newnodes:
1533 diff = diffmap[old.node()]
1536 diff = diffmap[old.node()]
1534
1537
1535 # The diff object in diffmap doesn't have the local commits
1538 # The diff object in diffmap doesn't have the local commits
1536 # because that could be returned from differential.creatediff,
1539 # because that could be returned from differential.creatediff,
1537 # not differential.querydiffs. So use the queried diff (if
1540 # not differential.querydiffs. So use the queried diff (if
1538 # present), or force the amend (a new revision is being posted.)
1541 # present), or force the amend (a new revision is being posted.)
1539 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1542 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1540 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1543 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1541 _amend_diff_properties(unfi, drevid, newnodes, diff)
1544 _amend_diff_properties(unfi, drevid, newnodes, diff)
1542 else:
1545 else:
1543 _debug(
1546 _debug(
1544 ui,
1547 ui,
1545 b"local commit list for D%d is already up-to-date\n"
1548 b"local commit list for D%d is already up-to-date\n"
1546 % drevid,
1549 % drevid,
1547 )
1550 )
1548 elif fold:
1551 elif fold:
1549 _debug(ui, b"no newnodes to update\n")
1552 _debug(ui, b"no newnodes to update\n")
1550
1553
1551 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1554 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1552 if wnode in mapping:
1555 if wnode in mapping:
1553 unfi.setparents(mapping[wnode][0])
1556 unfi.setparents(mapping[wnode][0])
1554
1557
1555
1558
1556 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1559 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1557 # consistent with "hg export" output.
1560 # consistent with "hg export" output.
1558 _metanamemap = util.sortdict(
1561 _metanamemap = util.sortdict(
1559 [
1562 [
1560 (b'user', b'User'),
1563 (b'user', b'User'),
1561 (b'date', b'Date'),
1564 (b'date', b'Date'),
1562 (b'branch', b'Branch'),
1565 (b'branch', b'Branch'),
1563 (b'node', b'Node ID'),
1566 (b'node', b'Node ID'),
1564 (b'parent', b'Parent '),
1567 (b'parent', b'Parent '),
1565 ]
1568 ]
1566 )
1569 )
1567
1570
1568
1571
1569 def _confirmbeforesend(repo, revs, oldmap):
1572 def _confirmbeforesend(repo, revs, oldmap):
1570 url, token = readurltoken(repo.ui)
1573 url, token = readurltoken(repo.ui)
1571 ui = repo.ui
1574 ui = repo.ui
1572 for rev in revs:
1575 for rev in revs:
1573 ctx = repo[rev]
1576 ctx = repo[rev]
1574 desc = ctx.description().splitlines()[0]
1577 desc = ctx.description().splitlines()[0]
1575 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1578 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1576 if drevid:
1579 if drevid:
1577 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1580 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1578 else:
1581 else:
1579 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1582 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1580
1583
1581 ui.write(
1584 ui.write(
1582 _(b'%s - %s: %s\n')
1585 _(b'%s - %s: %s\n')
1583 % (
1586 % (
1584 drevdesc,
1587 drevdesc,
1585 ui.label(bytes(ctx), b'phabricator.node'),
1588 ui.label(bytes(ctx), b'phabricator.node'),
1586 ui.label(desc, b'phabricator.desc'),
1589 ui.label(desc, b'phabricator.desc'),
1587 )
1590 )
1588 )
1591 )
1589
1592
1590 if ui.promptchoice(
1593 if ui.promptchoice(
1591 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1594 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1592 ):
1595 ):
1593 return False
1596 return False
1594
1597
1595 return True
1598 return True
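# A hedged sample of the confirmation dialog (IDs and hashes are made up):
#
#   D123 - 1a2b3c4d5e6f: phabricator: avoid a crash
#   NEW - 9f8e7d6c5b4a: phabricator: another change
#   Send the above changes to https://phab.example.com/ (yn)? y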
1596
1599
1597
1600
1598 _knownstatusnames = {
1601 _knownstatusnames = {
1599 b'accepted',
1602 b'accepted',
1600 b'needsreview',
1603 b'needsreview',
1601 b'needsrevision',
1604 b'needsrevision',
1602 b'closed',
1605 b'closed',
1603 b'abandoned',
1606 b'abandoned',
1604 b'changesplanned',
1607 b'changesplanned',
1605 }
1608 }
1606
1609
1607
1610
1608 def _getstatusname(drev):
1611 def _getstatusname(drev):
1609 """get normalized status name from a Differential Revision"""
1612 """get normalized status name from a Differential Revision"""
1610 return drev[b'statusName'].replace(b' ', b'').lower()
1613 return drev[b'statusName'].replace(b' ', b'').lower()
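# A hedged, runnable illustration (doctest-style, assuming the mercurial
# package is importable so ``from hgext.phabricator import _getstatusname``
# works):
#
#   >>> _getstatusname({b'statusName': b'Needs Review'})
#   b'needsreview'
#   >>> _getstatusname({b'statusName': b'Changes Planned'})
#   b'changesplanned'
#
# These normalized names may also be used as bare symbols in a drev spec,
# e.g. b':D7 & needsreview' keeps only the revisions in D7's stack that
# still need review.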
1611
1614
1612
1615
1613 # Small language to specify differential revisions. Support symbols: (), :X,
1616 # Small language to specify differential revisions. Support symbols: (), :X,
1614 # +, and -.
1617 # +, and -.
1615
1618
1616 _elements = {
1619 _elements = {
1617 # token-type: binding-strength, primary, prefix, infix, suffix
1620 # token-type: binding-strength, primary, prefix, infix, suffix
1618 b'(': (12, None, (b'group', 1, b')'), None, None),
1621 b'(': (12, None, (b'group', 1, b')'), None, None),
1619 b':': (8, None, (b'ancestors', 8), None, None),
1622 b':': (8, None, (b'ancestors', 8), None, None),
1620 b'&': (5, None, None, (b'and_', 5), None),
1623 b'&': (5, None, None, (b'and_', 5), None),
1621 b'+': (4, None, None, (b'add', 4), None),
1624 b'+': (4, None, None, (b'add', 4), None),
1622 b'-': (4, None, None, (b'sub', 4), None),
1625 b'-': (4, None, None, (b'sub', 4), None),
1623 b')': (0, None, None, None, None),
1626 b')': (0, None, None, None, None),
1624 b'symbol': (0, b'symbol', None, None, None),
1627 b'symbol': (0, b'symbol', None, None, None),
1625 b'end': (0, None, None, None, None),
1628 b'end': (0, None, None, None, None),
1626 }
1629 }
1627
1630
1628
1631
1629 def _tokenize(text):
1632 def _tokenize(text):
1630 view = memoryview(text) # zero-copy slice
1633 view = memoryview(text) # zero-copy slice
1631 special = b'():+-& '
1634 special = b'():+-& '
1632 pos = 0
1635 pos = 0
1633 length = len(text)
1636 length = len(text)
1634 while pos < length:
1637 while pos < length:
1635 symbol = b''.join(
1638 symbol = b''.join(
1636 itertools.takewhile(
1639 itertools.takewhile(
1637 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1640 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1638 )
1641 )
1639 )
1642 )
1640 if symbol:
1643 if symbol:
1641 yield (b'symbol', symbol, pos)
1644 yield (b'symbol', symbol, pos)
1642 pos += len(symbol)
1645 pos += len(symbol)
1643 else: # special char, ignore space
1646 else: # special char, ignore space
1644 if text[pos : pos + 1] != b' ':
1647 if text[pos : pos + 1] != b' ':
1645 yield (text[pos : pos + 1], None, pos)
1648 yield (text[pos : pos + 1], None, pos)
1646 pos += 1
1649 pos += 1
1647 yield (b'end', None, pos)
1650 yield (b'end', None, pos)
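# A hedged, runnable illustration of the token stream (doctest-style; output
# wrapped here for readability):
#
#   >>> list(_tokenize(b':D4 + D7'))
#   [(b':', None, 0), (b'symbol', b'D4', 1), (b'+', None, 4),
#    (b'symbol', b'D7', 6), (b'end', None, 8)]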
1648
1651
1649
1652
1650 def _parse(text):
1653 def _parse(text):
1651 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1654 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1652 if pos != len(text):
1655 if pos != len(text):
1653 raise error.ParseError(b'invalid token', pos)
1656 raise error.ParseError(b'invalid token', pos)
1654 return tree
1657 return tree
1655
1658
1656
1659
1657 def _parsedrev(symbol):
1660 def _parsedrev(symbol):
1658 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1661 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1659 if symbol.startswith(b'D') and symbol[1:].isdigit():
1662 if symbol.startswith(b'D') and symbol[1:].isdigit():
1660 return int(symbol[1:])
1663 return int(symbol[1:])
1661 if symbol.isdigit():
1664 if symbol.isdigit():
1662 return int(symbol)
1665 return int(symbol)
1663
1666
1664
1667
1665 def _prefetchdrevs(tree):
1668 def _prefetchdrevs(tree):
1666 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1669 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1667 drevs = set()
1670 drevs = set()
1668 ancestordrevs = set()
1671 ancestordrevs = set()
1669 op = tree[0]
1672 op = tree[0]
1670 if op == b'symbol':
1673 if op == b'symbol':
1671 r = _parsedrev(tree[1])
1674 r = _parsedrev(tree[1])
1672 if r:
1675 if r:
1673 drevs.add(r)
1676 drevs.add(r)
1674 elif op == b'ancestors':
1677 elif op == b'ancestors':
1675 r, a = _prefetchdrevs(tree[1])
1678 r, a = _prefetchdrevs(tree[1])
1676 drevs.update(r)
1679 drevs.update(r)
1677 ancestordrevs.update(r)
1680 ancestordrevs.update(r)
1678 ancestordrevs.update(a)
1681 ancestordrevs.update(a)
1679 else:
1682 else:
1680 for t in tree[1:]:
1683 for t in tree[1:]:
1681 r, a = _prefetchdrevs(t)
1684 r, a = _prefetchdrevs(t)
1682 drevs.update(r)
1685 drevs.update(r)
1683 ancestordrevs.update(a)
1686 ancestordrevs.update(a)
1684 return drevs, ancestordrevs
1687 return drevs, ancestordrevs
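# A hedged illustration of parsing and prefetch planning (tree shown roughly):
#
#   >>> _parse(b':D4 + D7')
#   (b'add', (b'ancestors', (b'symbol', b'D4')), (b'symbol', b'D7'))
#   >>> _prefetchdrevs(_parse(b':D4 + D7'))
#   ({4, 7}, {4})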
1685
1688
1686
1689
1687 def querydrev(ui, spec):
1690 def querydrev(ui, spec):
1688 """return a list of "Differential Revision" dicts
1691 """return a list of "Differential Revision" dicts
1689
1692
1690 spec is a string using a simple query language, see docstring in phabread
1693 spec is a string using a simple query language, see docstring in phabread
1691 for details.
1694 for details.
1692
1695
1693 A "Differential Revision dict" looks like:
1696 A "Differential Revision dict" looks like:
1694
1697
1695 {
1698 {
1696 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1699 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1697 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1700 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1698 "auxiliary": {
1701 "auxiliary": {
1699 "phabricator:depends-on": [
1702 "phabricator:depends-on": [
1700 "PHID-DREV-gbapp366kutjebt7agcd"
1703 "PHID-DREV-gbapp366kutjebt7agcd"
1701 ]
1704 ]
1702 "phabricator:projects": [],
1705 "phabricator:projects": [],
1703 },
1706 },
1704 "branch": "default",
1707 "branch": "default",
1705 "ccs": [],
1708 "ccs": [],
1706 "commits": [],
1709 "commits": [],
1707 "dateCreated": "1499181406",
1710 "dateCreated": "1499181406",
1708 "dateModified": "1499182103",
1711 "dateModified": "1499182103",
1709 "diffs": [
1712 "diffs": [
1710 "3",
1713 "3",
1711 "4",
1714 "4",
1712 ],
1715 ],
1713 "hashes": [],
1716 "hashes": [],
1714 "id": "2",
1717 "id": "2",
1715 "lineCount": "2",
1718 "lineCount": "2",
1716 "phid": "PHID-DREV-672qvysjcczopag46qty",
1719 "phid": "PHID-DREV-672qvysjcczopag46qty",
1717 "properties": {},
1720 "properties": {},
1718 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1721 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1719 "reviewers": [],
1722 "reviewers": [],
1720 "sourcePath": null
1723 "sourcePath": null
1721 "status": "0",
1724 "status": "0",
1722 "statusName": "Needs Review",
1725 "statusName": "Needs Review",
1723 "summary": "",
1726 "summary": "",
1724 "testPlan": "",
1727 "testPlan": "",
1725 "title": "example",
1728 "title": "example",
1726 "uri": "https://phab.example.com/D2",
1729 "uri": "https://phab.example.com/D2",
1727 }
1730 }
1728 """
1731 """
1729 # TODO: replace differential.query and differential.querydiffs with
1732 # TODO: replace differential.query and differential.querydiffs with
1730 # differential.diff.search because the former (and their output) are
1733 # differential.diff.search because the former (and their output) are
1731 # frozen, and planned to be deprecated and removed.
1734 # frozen, and planned to be deprecated and removed.
1732
1735
1733 def fetch(params):
1736 def fetch(params):
1734 """params -> single drev or None"""
1737 """params -> single drev or None"""
1735 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1738 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1736 if key in prefetched:
1739 if key in prefetched:
1737 return prefetched[key]
1740 return prefetched[key]
1738 drevs = callconduit(ui, b'differential.query', params)
1741 drevs = callconduit(ui, b'differential.query', params)
1739 # Fill prefetched with the result
1742 # Fill prefetched with the result
1740 for drev in drevs:
1743 for drev in drevs:
1741 prefetched[drev[b'phid']] = drev
1744 prefetched[drev[b'phid']] = drev
1742 prefetched[int(drev[b'id'])] = drev
1745 prefetched[int(drev[b'id'])] = drev
1743 if key not in prefetched:
1746 if key not in prefetched:
1744 raise error.Abort(
1747 raise error.Abort(
1745 _(b'cannot get Differential Revision %r') % params
1748 _(b'cannot get Differential Revision %r') % params
1746 )
1749 )
1747 return prefetched[key]
1750 return prefetched[key]
1748
1751
1749 def getstack(topdrevids):
1752 def getstack(topdrevids):
1750 """given a top, get a stack from the bottom, [id] -> [id]"""
1753 """given a top, get a stack from the bottom, [id] -> [id]"""
1751 visited = set()
1754 visited = set()
1752 result = []
1755 result = []
1753 queue = [{b'ids': [i]} for i in topdrevids]
1756 queue = [{b'ids': [i]} for i in topdrevids]
1754 while queue:
1757 while queue:
1755 params = queue.pop()
1758 params = queue.pop()
1756 drev = fetch(params)
1759 drev = fetch(params)
1757 if drev[b'id'] in visited:
1760 if drev[b'id'] in visited:
1758 continue
1761 continue
1759 visited.add(drev[b'id'])
1762 visited.add(drev[b'id'])
1760 result.append(int(drev[b'id']))
1763 result.append(int(drev[b'id']))
1761 auxiliary = drev.get(b'auxiliary', {})
1764 auxiliary = drev.get(b'auxiliary', {})
1762 depends = auxiliary.get(b'phabricator:depends-on', [])
1765 depends = auxiliary.get(b'phabricator:depends-on', [])
1763 for phid in depends:
1766 for phid in depends:
1764 queue.append({b'phids': [phid]})
1767 queue.append({b'phids': [phid]})
1765 result.reverse()
1768 result.reverse()
1766 return smartset.baseset(result)
1769 return smartset.baseset(result)
1767
1770
1768 # Initialize prefetch cache
1771 # Initialize prefetch cache
1769 prefetched = {} # {id or phid: drev}
1772 prefetched = {} # {id or phid: drev}
1770
1773
1771 tree = _parse(spec)
1774 tree = _parse(spec)
1772 drevs, ancestordrevs = _prefetchdrevs(tree)
1775 drevs, ancestordrevs = _prefetchdrevs(tree)
1773
1776
1774 # developer config: phabricator.batchsize
1777 # developer config: phabricator.batchsize
1775 batchsize = ui.configint(b'phabricator', b'batchsize')
1778 batchsize = ui.configint(b'phabricator', b'batchsize')
1776
1779
1777 # Prefetch Differential Revisions in batch
1780 # Prefetch Differential Revisions in batch
1778 tofetch = set(drevs)
1781 tofetch = set(drevs)
1779 for r in ancestordrevs:
1782 for r in ancestordrevs:
1780 tofetch.update(range(max(1, r - batchsize), r + 1))
1783 tofetch.update(range(max(1, r - batchsize), r + 1))
1781 if drevs:
1784 if drevs:
1782 fetch({b'ids': list(tofetch)})
1785 fetch({b'ids': list(tofetch)})
1783 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1786 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1784
1787
1785 # Walk through the tree, return smartsets
1788 # Walk through the tree, return smartsets
1786 def walk(tree):
1789 def walk(tree):
1787 op = tree[0]
1790 op = tree[0]
1788 if op == b'symbol':
1791 if op == b'symbol':
1789 drev = _parsedrev(tree[1])
1792 drev = _parsedrev(tree[1])
1790 if drev:
1793 if drev:
1791 return smartset.baseset([drev])
1794 return smartset.baseset([drev])
1792 elif tree[1] in _knownstatusnames:
1795 elif tree[1] in _knownstatusnames:
1793 drevs = [
1796 drevs = [
1794 r
1797 r
1795 for r in validids
1798 for r in validids
1796 if _getstatusname(prefetched[r]) == tree[1]
1799 if _getstatusname(prefetched[r]) == tree[1]
1797 ]
1800 ]
1798 return smartset.baseset(drevs)
1801 return smartset.baseset(drevs)
1799 else:
1802 else:
1800 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1803 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1801 elif op in {b'and_', b'add', b'sub'}:
1804 elif op in {b'and_', b'add', b'sub'}:
1802 assert len(tree) == 3
1805 assert len(tree) == 3
1803 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1806 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1804 elif op == b'group':
1807 elif op == b'group':
1805 return walk(tree[1])
1808 return walk(tree[1])
1806 elif op == b'ancestors':
1809 elif op == b'ancestors':
1807 return getstack(walk(tree[1]))
1810 return getstack(walk(tree[1]))
1808 else:
1811 else:
1809 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1812 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1810
1813
1811 return [prefetched[r] for r in walk(tree)]
1814 return [prefetched[r] for r in walk(tree)]
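# A hedged usage sketch (revision IDs are hypothetical):
#
#   querydrev(ui, b'D2')          # just D2
#   querydrev(ui, b':D4')         # D4 plus its dependencies, bottom first
#   querydrev(ui, b'(:D4) - D2')  # the stack of D4, minus D2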
1812
1815
1813
1816
1814 def getdescfromdrev(drev):
1817 def getdescfromdrev(drev):
1815 """get description (commit message) from "Differential Revision"
1818 """get description (commit message) from "Differential Revision"
1816
1819
1817 This is similar to the differential.getcommitmessage API, but we only care
1820 This is similar to the differential.getcommitmessage API, but we only care
1818 about a limited set of fields: title, summary, test plan, and URL.
1821 about a limited set of fields: title, summary, test plan, and URL.
1819 """
1822 """
1820 title = drev[b'title']
1823 title = drev[b'title']
1821 summary = drev[b'summary'].rstrip()
1824 summary = drev[b'summary'].rstrip()
1822 testplan = drev[b'testPlan'].rstrip()
1825 testplan = drev[b'testPlan'].rstrip()
1823 if testplan:
1826 if testplan:
1824 testplan = b'Test Plan:\n%s' % testplan
1827 testplan = b'Test Plan:\n%s' % testplan
1825 uri = b'Differential Revision: %s' % drev[b'uri']
1828 uri = b'Differential Revision: %s' % drev[b'uri']
1826 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1829 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
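# Illustrative example (values made up): a drev shaped like the dict below
# is assembled by getdescfromdrev() into four paragraphs joined by blank
# lines; empty fields are dropped by the filter(None, ...) call above.
_example_drev = {
    b'title': b'phab: fix frobnication',
    b'summary': b'Rewrite the frobnicator.',
    b'testPlan': b'ran the test suite',
    b'uri': b'https://phab.example.com/D123',
}
# getdescfromdrev(_example_drev) ==
#     b'phab: fix frobnication\n\n'
#     b'Rewrite the frobnicator.\n\n'
#     b'Test Plan:\nran the test suite\n\n'
#     b'Differential Revision: https://phab.example.com/D123'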
1827
1830
1828
1831
1829 def get_amended_desc(drev, ctx, folded):
1832 def get_amended_desc(drev, ctx, folded):
1830 """similar to ``getdescfromdrev``, but supports a folded series of commits
1833 """similar to ``getdescfromdrev``, but supports a folded series of commits
1831
1834
1832 This is used when determining if an individual commit needs to have its
1835 This is used when determining if an individual commit needs to have its
1833 message amended after posting it for review. The determination is made for
1836 message amended after posting it for review. The determination is made for
1834 each individual commit, even when they were folded into one review.
1837 each individual commit, even when they were folded into one review.
1835 """
1838 """
1836 if not folded:
1839 if not folded:
1837 return getdescfromdrev(drev)
1840 return getdescfromdrev(drev)
1838
1841
1839 uri = b'Differential Revision: %s' % drev[b'uri']
1842 uri = b'Differential Revision: %s' % drev[b'uri']
1840
1843
1841 # Since the commit messages were combined when posting multiple commits
1844 # Since the commit messages were combined when posting multiple commits
1842 # with --fold, the fields can't be read from Phabricator here, or *all*
1845 # with --fold, the fields can't be read from Phabricator here, or *all*
1843 # affected local revisions will end up with the same commit message after
1846 # affected local revisions will end up with the same commit message after
1844 # the URI is amended in. Append the DREV line, or update it if it
1847 # the URI is amended in. Append the DREV line, or update it if it
1845 # exists. At worst, this means commit message or test plan updates on
1848 # exists. At worst, this means commit message or test plan updates on
1846 # Phabricator aren't propagated back to the repository, but that seems
1849 # Phabricator aren't propagated back to the repository, but that seems
1847 # reasonable for the case where local commits are effectively combined
1850 # reasonable for the case where local commits are effectively combined
1848 # in Phabricator.
1851 # in Phabricator.
1849 m = _differentialrevisiondescre.search(ctx.description())
1852 m = _differentialrevisiondescre.search(ctx.description())
1850 if not m:
1853 if not m:
1851 return b'\n\n'.join([ctx.description(), uri])
1854 return b'\n\n'.join([ctx.description(), uri])
1852
1855
1853 return _differentialrevisiondescre.sub(uri, ctx.description())
1856 return _differentialrevisiondescre.sub(uri, ctx.description())
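# Rough sketch (illustration only) of the append-or-update behaviour above.
# The stand-in pattern below only approximates _differentialrevisiondescre,
# which is defined earlier in this module; the helper name is made up.
import re

_example_drev_line = re.compile(br'Differential Revision:[^\n]*')

def _example_amend_desc(desc, uri_line):
    """Replace an existing 'Differential Revision:' line or append one."""
    if _example_drev_line.search(desc):
        return _example_drev_line.sub(uri_line, desc)
    return b'\n\n'.join([desc, uri_line])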
1854
1857
1855
1858
1856 def getlocalcommits(diff):
1859 def getlocalcommits(diff):
1857 """get the set of local commits from a diff object
1860 """get the set of local commits from a diff object
1858
1861
1859 See ``getdiffmeta()`` for an example diff object.
1862 See ``getdiffmeta()`` for an example diff object.
1860 """
1863 """
1861 props = diff.get(b'properties') or {}
1864 props = diff.get(b'properties') or {}
1862 commits = props.get(b'local:commits') or {}
1865 commits = props.get(b'local:commits') or {}
1863 if len(commits) > 1:
1866 if len(commits) > 1:
1864 return {bin(c) for c in commits.keys()}
1867 return {bin(c) for c in commits.keys()}
1865
1868
1866 # Storing the diff metadata predates storing `local:commits`, so continue
1869 # Storing the diff metadata predates storing `local:commits`, so continue
1867 # to use that in the --no-fold case.
1870 # to use that in the --no-fold case.
1868 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1871 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
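# For illustration: a diff whose properties carry more than one entry in
# local:commits yields all of those nodes as a set of binary hashes; with
# one or zero entries the node comes from getdiffmeta() below, and a diff
# carrying no node information at all yields {None}, which callers treat as
# "no known local commit".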
1869
1872
1870
1873
1871 def getdiffmeta(diff):
1874 def getdiffmeta(diff):
1872 """get commit metadata (date, node, user, p1) from a diff object
1875 """get commit metadata (date, node, user, p1) from a diff object
1873
1876
1874 The metadata could be "hg:meta", sent by phabsend, like:
1877 The metadata could be "hg:meta", sent by phabsend, like:
1875
1878
1876 "properties": {
1879 "properties": {
1877 "hg:meta": {
1880 "hg:meta": {
1878 "branch": "default",
1881 "branch": "default",
1879 "date": "1499571514 25200",
1882 "date": "1499571514 25200",
1880 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1883 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1881 "user": "Foo Bar <foo@example.com>",
1884 "user": "Foo Bar <foo@example.com>",
1882 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1885 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1883 }
1886 }
1884 }
1887 }
1885
1888
1886 Or converted from "local:commits", sent by "arc", like:
1889 Or converted from "local:commits", sent by "arc", like:
1887
1890
1888 "properties": {
1891 "properties": {
1889 "local:commits": {
1892 "local:commits": {
1890 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1893 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1891 "author": "Foo Bar",
1894 "author": "Foo Bar",
1892 "authorEmail": "foo@example.com"
1895 "authorEmail": "foo@example.com"
1893 "branch": "default",
1896 "branch": "default",
1894 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1897 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1895 "local": "1000",
1898 "local": "1000",
1896 "message": "...",
1899 "message": "...",
1897 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1900 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1898 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1901 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1899 "summary": "...",
1902 "summary": "...",
1900 "tag": "",
1903 "tag": "",
1901 "time": 1499546314,
1904 "time": 1499546314,
1902 }
1905 }
1903 }
1906 }
1904 }
1907 }
1905
1908
1906 Note: metadata extracted from "local:commits" will lose time zone
1909 Note: metadata extracted from "local:commits" will lose time zone
1907 information.
1910 information.
1908 """
1911 """
1909 props = diff.get(b'properties') or {}
1912 props = diff.get(b'properties') or {}
1910 meta = props.get(b'hg:meta')
1913 meta = props.get(b'hg:meta')
1911 if not meta:
1914 if not meta:
1912 if props.get(b'local:commits'):
1915 if props.get(b'local:commits'):
1913 commit = sorted(props[b'local:commits'].values())[0]
1916 commit = sorted(props[b'local:commits'].values())[0]
1914 meta = {}
1917 meta = {}
1915 if b'author' in commit and b'authorEmail' in commit:
1918 if b'author' in commit and b'authorEmail' in commit:
1916 meta[b'user'] = b'%s <%s>' % (
1919 meta[b'user'] = b'%s <%s>' % (
1917 commit[b'author'],
1920 commit[b'author'],
1918 commit[b'authorEmail'],
1921 commit[b'authorEmail'],
1919 )
1922 )
1920 if b'time' in commit:
1923 if b'time' in commit:
1921 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1924 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1922 if b'branch' in commit:
1925 if b'branch' in commit:
1923 meta[b'branch'] = commit[b'branch']
1926 meta[b'branch'] = commit[b'branch']
1924 node = commit.get(b'commit', commit.get(b'rev'))
1927 node = commit.get(b'commit', commit.get(b'rev'))
1925 if node:
1928 if node:
1926 meta[b'node'] = node
1929 meta[b'node'] = node
1927 if len(commit.get(b'parents', ())) >= 1:
1930 if len(commit.get(b'parents', ())) >= 1:
1928 meta[b'parent'] = commit[b'parents'][0]
1931 meta[b'parent'] = commit[b'parents'][0]
1929 else:
1932 else:
1930 meta = {}
1933 meta = {}
1931 if b'date' not in meta and b'dateCreated' in diff:
1934 if b'date' not in meta and b'dateCreated' in diff:
1932 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1935 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1933 if b'branch' not in meta and diff.get(b'branch'):
1936 if b'branch' not in meta and diff.get(b'branch'):
1934 meta[b'branch'] = diff[b'branch']
1937 meta[b'branch'] = diff[b'branch']
1935 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1938 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1936 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1939 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1937 return meta
1940 return meta
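# Illustrative example, reusing the made-up values from the docstring above:
# an arc-style diff object is converted by getdiffmeta() like so.
_example_arc_diff = {
    b'properties': {
        b'local:commits': {
            b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                b'author': b'Foo Bar',
                b'authorEmail': b'foo@example.com',
                b'branch': b'default',
                b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                b'time': 1499546314,
            },
        },
    },
}
# getdiffmeta(_example_arc_diff) == {
#     b'user': b'Foo Bar <foo@example.com>',
#     b'date': b'1499546314 0',
#     b'branch': b'default',
#     b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#     b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
# }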
1938
1941
1939
1942
1940 def _getdrevs(ui, stack, specs):
1943 def _getdrevs(ui, stack, specs):
1941 """convert user supplied DREVSPECs into "Differential Revision" dicts
1944 """convert user supplied DREVSPECs into "Differential Revision" dicts
1942
1945
1943 See ``hg help phabread`` for how to specify each DREVSPEC.
1946 See ``hg help phabread`` for how to specify each DREVSPEC.
1944 """
1947 """
1945 if len(specs) > 0:
1948 if len(specs) > 0:
1946
1949
1947 def _formatspec(s):
1950 def _formatspec(s):
1948 if stack:
1951 if stack:
1949 s = b':(%s)' % s
1952 s = b':(%s)' % s
1950 return b'(%s)' % s
1953 return b'(%s)' % s
1951
1954
1952 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1955 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1953
1956
1954 drevs = querydrev(ui, spec)
1957 drevs = querydrev(ui, spec)
1955 if drevs:
1958 if drevs:
1956 return drevs
1959 return drevs
1957
1960
1958 raise error.Abort(_(b"empty DREVSPEC set"))
1961 raise error.Abort(_(b"empty DREVSPEC set"))
1959
1962
1960
1963
1961 def readpatch(ui, drevs, write):
1964 def readpatch(ui, drevs, write):
1962 """generate plain-text patch readable by 'hg import'
1965 """generate plain-text patch readable by 'hg import'
1963
1966
1964 write takes a list of (DREV, bytes), where DREV is the differential number
1967 write takes a list of (DREV, bytes), where DREV is the differential number
1965 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1968 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1966 to be imported. drevs is what "querydrev" returns, results of
1969 to be imported. drevs is what "querydrev" returns, results of
1967 "differential.query".
1970 "differential.query".
1968 """
1971 """
1969 # Prefetch hg:meta property for all diffs
1972 # Prefetch hg:meta property for all diffs
1970 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1973 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1971 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1974 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1972
1975
1973 patches = []
1976 patches = []
1974
1977
1975 # Generate patch for each drev
1978 # Generate patch for each drev
1976 for drev in drevs:
1979 for drev in drevs:
1977 ui.note(_(b'reading D%s\n') % drev[b'id'])
1980 ui.note(_(b'reading D%s\n') % drev[b'id'])
1978
1981
1979 diffid = max(int(v) for v in drev[b'diffs'])
1982 diffid = max(int(v) for v in drev[b'diffs'])
1980 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1983 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1981 desc = getdescfromdrev(drev)
1984 desc = getdescfromdrev(drev)
1982 header = b'# HG changeset patch\n'
1985 header = b'# HG changeset patch\n'
1983
1986
1984 # Try to preserve metadata from hg:meta property. Write hg patch
1987 # Try to preserve metadata from hg:meta property. Write hg patch
1985 # headers that can be read by the "import" command. See patchheadermap
1988 # headers that can be read by the "import" command. See patchheadermap
1986 # and extract in mercurial/patch.py for supported headers.
1989 # and extract in mercurial/patch.py for supported headers.
1987 meta = getdiffmeta(diffs[b'%d' % diffid])
1990 meta = getdiffmeta(diffs[b'%d' % diffid])
1988 for k in _metanamemap.keys():
1991 for k in _metanamemap.keys():
1989 if k in meta:
1992 if k in meta:
1990 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1993 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1991
1994
1992 content = b'%s%s\n%s' % (header, desc, body)
1995 content = b'%s%s\n%s' % (header, desc, body)
1993 patches.append((drev[b'id'], content))
1996 patches.append((drev[b'id'], content))
1994
1997
1995 # Write patches to the supplied callback
1998 # Write patches to the supplied callback
1996 write(patches)
1999 write(patches)
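# For illustration, a minimal write callback (hypothetical, not part of the
# extension) that saves each patch to a file named after its Differential
# Revision number. phabread below passes a callback that writes to the ui
# instead, and phabimport applies each patch directly.
def _example_write_to_files(patches):
    """Write each (DREV, bytes) pair to a D<number>.patch file."""
    for drev, content in patches:
        # DREV is bytes without the "D" prefix, per the readpatch docstring
        with open(b'D%s.patch' % drev, 'wb') as fp:
            fp.write(content)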
1997
2000
1998
2001
1999 @vcrcommand(
2002 @vcrcommand(
2000 b'phabread',
2003 b'phabread',
2001 [(b'', b'stack', False, _(b'read dependencies'))],
2004 [(b'', b'stack', False, _(b'read dependencies'))],
2002 _(b'DREVSPEC... [OPTIONS]'),
2005 _(b'DREVSPEC... [OPTIONS]'),
2003 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2006 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2004 optionalrepo=True,
2007 optionalrepo=True,
2005 )
2008 )
2006 def phabread(ui, repo, *specs, **opts):
2009 def phabread(ui, repo, *specs, **opts):
2007 """print patches from Phabricator suitable for importing
2010 """print patches from Phabricator suitable for importing
2008
2011
2009 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
2012 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
2010 the number ``123``. It could also have common operators like ``+``, ``-``,
2013 the number ``123``. It could also have common operators like ``+``, ``-``,
2011 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2014 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2012 select a stack. If multiple DREVSPEC values are given, the result is the
2015 select a stack. If multiple DREVSPEC values are given, the result is the
2013 union of each individually evaluated value. No attempt is currently made
2016 union of each individually evaluated value. No attempt is currently made
2014 to reorder the values to run from parent to child.
2017 to reorder the values to run from parent to child.
2015
2018
2016 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2019 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2017 could be used to filter patches by status. For performance reasons, they
2020 could be used to filter patches by status. For performance reasons, they
2018 only filter within the set selected by non-status symbols and cannot be used alone.
2021 only filter within the set selected by non-status symbols and cannot be used alone.
2019
2022
2020 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
2023 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
2021 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2024 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2022 stack up to D9.
2025 stack up to D9.
2023
2026
2024 If --stack is given, follow dependency information and read all patches.
2027 If --stack is given, follow dependency information and read all patches.
2025 It is equivalent to the ``:`` operator.
2028 It is equivalent to the ``:`` operator.
2026 """
2029 """
2027 opts = pycompat.byteskwargs(opts)
2030 opts = pycompat.byteskwargs(opts)
2028 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2031 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2029
2032
2030 def _write(patches):
2033 def _write(patches):
2031 for drev, content in patches:
2034 for drev, content in patches:
2032 ui.write(content)
2035 ui.write(content)
2033
2036
2034 readpatch(ui, drevs, _write)
2037 readpatch(ui, drevs, _write)
2035
2038
2036
2039
2037 @vcrcommand(
2040 @vcrcommand(
2038 b'phabimport',
2041 b'phabimport',
2039 [(b'', b'stack', False, _(b'import dependencies as well'))],
2042 [(b'', b'stack', False, _(b'import dependencies as well'))],
2040 _(b'DREVSPEC... [OPTIONS]'),
2043 _(b'DREVSPEC... [OPTIONS]'),
2041 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2044 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2042 )
2045 )
2043 def phabimport(ui, repo, *specs, **opts):
2046 def phabimport(ui, repo, *specs, **opts):
2044 """import patches from Phabricator for the specified Differential Revisions
2047 """import patches from Phabricator for the specified Differential Revisions
2045
2048
2046 The patches are read and applied starting at the parent of the working
2049 The patches are read and applied starting at the parent of the working
2047 directory.
2050 directory.
2048
2051
2049 See ``hg help phabread`` for how to specify DREVSPEC.
2052 See ``hg help phabread`` for how to specify DREVSPEC.
2050 """
2053 """
2051 opts = pycompat.byteskwargs(opts)
2054 opts = pycompat.byteskwargs(opts)
2052
2055
2053 # --bypass avoids losing exec and symlink bits when importing on Windows,
2056 # --bypass avoids losing exec and symlink bits when importing on Windows,
2054 # and allows importing with a dirty wdir. It also aborts instead of leaving
2057 # and allows importing with a dirty wdir. It also aborts instead of leaving
2055 # rejects.
2058 # rejects.
2056 opts[b'bypass'] = True
2059 opts[b'bypass'] = True
2057
2060
2058 # Mandatory default values, synced with commands.import
2061 # Mandatory default values, synced with commands.import
2059 opts[b'strip'] = 1
2062 opts[b'strip'] = 1
2060 opts[b'prefix'] = b''
2063 opts[b'prefix'] = b''
2061 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2064 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2062 opts[b'obsolete'] = False
2065 opts[b'obsolete'] = False
2063
2066
2064 if ui.configbool(b'phabimport', b'secret'):
2067 if ui.configbool(b'phabimport', b'secret'):
2065 opts[b'secret'] = True
2068 opts[b'secret'] = True
2066 if ui.configbool(b'phabimport', b'obsolete'):
2069 if ui.configbool(b'phabimport', b'obsolete'):
2067 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2070 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2068
2071
2069 def _write(patches):
2072 def _write(patches):
2070 parents = repo[None].parents()
2073 parents = repo[None].parents()
2071
2074
2072 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2075 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2073 for drev, contents in patches:
2076 for drev, contents in patches:
2074 ui.status(_(b'applying patch from D%s\n') % drev)
2077 ui.status(_(b'applying patch from D%s\n') % drev)
2075
2078
2076 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2079 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2077 msg, node, rej = cmdutil.tryimportone(
2080 msg, node, rej = cmdutil.tryimportone(
2078 ui,
2081 ui,
2079 repo,
2082 repo,
2080 patchdata,
2083 patchdata,
2081 parents,
2084 parents,
2082 opts,
2085 opts,
2083 [],
2086 [],
2084 None, # Never update wdir to another revision
2087 None, # Never update wdir to another revision
2085 )
2088 )
2086
2089
2087 if not node:
2090 if not node:
2088 raise error.Abort(_(b'D%s: no diffs found') % drev)
2091 raise error.Abort(_(b'D%s: no diffs found') % drev)
2089
2092
2090 ui.note(msg + b'\n')
2093 ui.note(msg + b'\n')
2091 parents = [repo[node]]
2094 parents = [repo[node]]
2092
2095
2093 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2096 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2094
2097
2095 readpatch(repo.ui, drevs, _write)
2098 readpatch(repo.ui, drevs, _write)
2096
2099
2097
2100
2098 @vcrcommand(
2101 @vcrcommand(
2099 b'phabupdate',
2102 b'phabupdate',
2100 [
2103 [
2101 (b'', b'accept', False, _(b'accept revisions')),
2104 (b'', b'accept', False, _(b'accept revisions')),
2102 (b'', b'reject', False, _(b'reject revisions')),
2105 (b'', b'reject', False, _(b'reject revisions')),
2103 (b'', b'abandon', False, _(b'abandon revisions')),
2106 (b'', b'abandon', False, _(b'abandon revisions')),
2104 (b'', b'reclaim', False, _(b'reclaim revisions')),
2107 (b'', b'reclaim', False, _(b'reclaim revisions')),
2105 (b'm', b'comment', b'', _(b'comment on the last revision')),
2108 (b'm', b'comment', b'', _(b'comment on the last revision')),
2106 ],
2109 ],
2107 _(b'DREVSPEC... [OPTIONS]'),
2110 _(b'DREVSPEC... [OPTIONS]'),
2108 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2111 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2109 optionalrepo=True,
2112 optionalrepo=True,
2110 )
2113 )
2111 def phabupdate(ui, repo, *specs, **opts):
2114 def phabupdate(ui, repo, *specs, **opts):
2112 """update Differential Revisions in batch
2115 """update Differential Revisions in batch
2113
2116
2114 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2117 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2115 """
2118 """
2116 opts = pycompat.byteskwargs(opts)
2119 opts = pycompat.byteskwargs(opts)
2117 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
2120 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
2118 if len(flags) > 1:
2121 if len(flags) > 1:
2119 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2122 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2120
2123
2121 actions = []
2124 actions = []
2122 for f in flags:
2125 for f in flags:
2123 actions.append({b'type': f, b'value': True})
2126 actions.append({b'type': f, b'value': True})
2124
2127
2125 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2128 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2126 for i, drev in enumerate(drevs):
2129 for i, drev in enumerate(drevs):
2127 if i + 1 == len(drevs) and opts.get(b'comment'):
2130 if i + 1 == len(drevs) and opts.get(b'comment'):
2128 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2131 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2129 if actions:
2132 if actions:
2130 params = {
2133 params = {
2131 b'objectIdentifier': drev[b'phid'],
2134 b'objectIdentifier': drev[b'phid'],
2132 b'transactions': actions,
2135 b'transactions': actions,
2133 }
2136 }
2134 callconduit(ui, b'differential.revision.edit', params)
2137 callconduit(ui, b'differential.revision.edit', params)
2135
2138
2136
2139
2137 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2140 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2138 def template_review(context, mapping):
2141 def template_review(context, mapping):
2139 """:phabreview: Object describing the review for this changeset.
2142 """:phabreview: Object describing the review for this changeset.
2140 Has attributes `url` and `id`.
2143 Has attributes `url` and `id`.
2141 """
2144 """
2142 ctx = context.resource(mapping, b'ctx')
2145 ctx = context.resource(mapping, b'ctx')
2143 m = _differentialrevisiondescre.search(ctx.description())
2146 m = _differentialrevisiondescre.search(ctx.description())
2144 if m:
2147 if m:
2145 return templateutil.hybriddict(
2148 return templateutil.hybriddict(
2146 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2149 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2147 )
2150 )
2148 else:
2151 else:
2149 tags = ctx.repo().nodetags(ctx.node())
2152 tags = ctx.repo().nodetags(ctx.node())
2150 for t in tags:
2153 for t in tags:
2151 if _differentialrevisiontagre.match(t):
2154 if _differentialrevisiontagre.match(t):
2152 url = ctx.repo().ui.config(b'phabricator', b'url')
2155 url = ctx.repo().ui.config(b'phabricator', b'url')
2153 if not url.endswith(b'/'):
2156 if not url.endswith(b'/'):
2154 url += b'/'
2157 url += b'/'
2155 url += t
2158 url += t
2156
2159
2157 return templateutil.hybriddict({b'url': url, b'id': t,})
2160 return templateutil.hybriddict({b'url': url, b'id': t,})
2158 return None
2161 return None
2159
2162
2160
2163
2161 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2164 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2162 def template_status(context, mapping):
2165 def template_status(context, mapping):
2163 """:phabstatus: String. Status of Phabricator differential.
2166 """:phabstatus: String. Status of Phabricator differential.
2164 """
2167 """
2165 ctx = context.resource(mapping, b'ctx')
2168 ctx = context.resource(mapping, b'ctx')
2166 repo = context.resource(mapping, b'repo')
2169 repo = context.resource(mapping, b'repo')
2167 ui = context.resource(mapping, b'ui')
2170 ui = context.resource(mapping, b'ui')
2168
2171
2169 rev = ctx.rev()
2172 rev = ctx.rev()
2170 try:
2173 try:
2171 drevid = getdrevmap(repo, [rev])[rev]
2174 drevid = getdrevmap(repo, [rev])[rev]
2172 except KeyError:
2175 except KeyError:
2173 return None
2176 return None
2174 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2177 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2175 for drev in drevs:
2178 for drev in drevs:
2176 if int(drev[b'id']) == drevid:
2179 if int(drev[b'id']) == drevid:
2177 return templateutil.hybriddict(
2180 return templateutil.hybriddict(
2178 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2181 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2179 )
2182 )
2180 return None
2183 return None
2181
2184
2182
2185
2183 @show.showview(b'phabstatus', csettopic=b'work')
2186 @show.showview(b'phabstatus', csettopic=b'work')
2184 def phabstatusshowview(ui, repo, displayer):
2187 def phabstatusshowview(ui, repo, displayer):
2185 """Phabricator differential status"""
2188 """Phabricator differential status"""
2186 revs = repo.revs('sort(_underway(), topo)')
2189 revs = repo.revs('sort(_underway(), topo)')
2187 drevmap = getdrevmap(repo, revs)
2190 drevmap = getdrevmap(repo, revs)
2188 unknownrevs, drevids, revsbydrevid = [], set(), {}
2191 unknownrevs, drevids, revsbydrevid = [], set(), {}
2189 for rev, drevid in pycompat.iteritems(drevmap):
2192 for rev, drevid in pycompat.iteritems(drevmap):
2190 if drevid is not None:
2193 if drevid is not None:
2191 drevids.add(drevid)
2194 drevids.add(drevid)
2192 revsbydrevid.setdefault(drevid, set()).add(rev)
2195 revsbydrevid.setdefault(drevid, set()).add(rev)
2193 else:
2196 else:
2194 unknownrevs.append(rev)
2197 unknownrevs.append(rev)
2195
2198
2196 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2199 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2197 drevsbyrev = {}
2200 drevsbyrev = {}
2198 for drev in drevs:
2201 for drev in drevs:
2199 for rev in revsbydrevid[int(drev[b'id'])]:
2202 for rev in revsbydrevid[int(drev[b'id'])]:
2200 drevsbyrev[rev] = drev
2203 drevsbyrev[rev] = drev
2201
2204
2202 def phabstatus(ctx):
2205 def phabstatus(ctx):
2203 drev = drevsbyrev[ctx.rev()]
2206 drev = drevsbyrev[ctx.rev()]
2204 status = ui.label(
2207 status = ui.label(
2205 b'%(statusName)s' % drev,
2208 b'%(statusName)s' % drev,
2206 b'phabricator.status.%s' % _getstatusname(drev),
2209 b'phabricator.status.%s' % _getstatusname(drev),
2207 )
2210 )
2208 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2211 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2209
2212
2210 revs -= smartset.baseset(unknownrevs)
2213 revs -= smartset.baseset(unknownrevs)
2211 revdag = graphmod.dagwalker(repo, revs)
2214 revdag = graphmod.dagwalker(repo, revs)
2212
2215
2213 ui.setconfig(b'experimental', b'graphshorten', True)
2216 ui.setconfig(b'experimental', b'graphshorten', True)
2214 displayer._exthook = phabstatus
2217 displayer._exthook = phabstatus
2215 nodelen = show.longestshortest(repo, revs)
2218 nodelen = show.longestshortest(repo, revs)
2216 logcmdutil.displaygraph(
2219 logcmdutil.displaygraph(
2217 ui,
2220 ui,
2218 repo,
2221 repo,
2219 revdag,
2222 revdag,
2220 displayer,
2223 displayer,
2221 graphmod.asciiedges,
2224 graphmod.asciiedges,
2222 props={b'nodelen': nodelen},
2225 props={b'nodelen': nodelen},
2223 )
2226 )
@@ -1,838 +1,890 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 >
5 >
6 > [auth]
6 > [auth]
7 > hgphab.schemes = https
7 > hgphab.schemes = https
8 > hgphab.prefix = phab.mercurial-scm.org
8 > hgphab.prefix = phab.mercurial-scm.org
9 > # When working on the extension and making phabricator interaction
9 > # When working on the extension and making phabricator interaction
10 > # changes, edit this to be a real phabricator token. When done, edit
10 > # changes, edit this to be a real phabricator token. When done, edit
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
12 > # token with this value.
12 > # token with this value.
13 > hgphab.phabtoken = cli-hahayouwish
13 > hgphab.phabtoken = cli-hahayouwish
14 >
14 >
15 > [phabricator]
15 > [phabricator]
16 > debug = True
16 > debug = True
17 > EOF
17 > EOF
18 $ hg init repo
18 $ hg init repo
19 $ cd repo
19 $ cd repo
20 $ cat >> .hg/hgrc <<EOF
20 $ cat >> .hg/hgrc <<EOF
21 > [phabricator]
21 > [phabricator]
22 > url = https://phab.mercurial-scm.org/
22 > url = https://phab.mercurial-scm.org/
23 > callsign = HG
23 > callsign = HG
24 > EOF
24 > EOF
25 $ VCR="$TESTDIR/phabricator"
25 $ VCR="$TESTDIR/phabricator"
26
26
27 The error is handled reasonably. We override the phabtoken here so that
27 The error is handled reasonably. We override the phabtoken here so that
28 when you're developing changes to phabricator.py you can edit the
28 when you're developing changes to phabricator.py you can edit the
29 above config and have a real token in the test but not have to edit
29 above config and have a real token in the test but not have to edit
30 this test.
30 this test.
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
34
34
35 Missing arguments don't crash, and may print the command help
35 Missing arguments don't crash, and may print the command help
36
36
37 $ hg debugcallconduit
37 $ hg debugcallconduit
38 hg debugcallconduit: invalid arguments
38 hg debugcallconduit: invalid arguments
39 hg debugcallconduit METHOD
39 hg debugcallconduit METHOD
40
40
41 call Conduit API
41 call Conduit API
42
42
43 options:
43 options:
44
44
45 (use 'hg debugcallconduit -h' to show more help)
45 (use 'hg debugcallconduit -h' to show more help)
46 [255]
46 [255]
47 $ hg phabread
47 $ hg phabread
48 abort: empty DREVSPEC set
48 abort: empty DREVSPEC set
49 [255]
49 [255]
50
50
51 Basic phabread:
51 Basic phabread:
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
53 # HG changeset patch
53 # HG changeset patch
54 # Date 1536771503 0
54 # Date 1536771503 0
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
56 exchangev2: start to implement pull with wire protocol v2
56 exchangev2: start to implement pull with wire protocol v2
57
57
58 Wire protocol version 2 will take a substantially different
58 Wire protocol version 2 will take a substantially different
59 approach to exchange than version 1 (at least as far as pulling
59 approach to exchange than version 1 (at least as far as pulling
60 is concerned).
60 is concerned).
61
61
62 This commit establishes a new exchangev2 module for holding
62 This commit establishes a new exchangev2 module for holding
63
63
64 Phabread with multiple DREVSPEC
64 Phabread with multiple DREVSPEC
65
65
66 TODO: attempt to order related revisions like --stack?
66 TODO: attempt to order related revisions like --stack?
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
68 > | grep '^Differential Revision'
68 > | grep '^Differential Revision'
69 Differential Revision: https://phab.mercurial-scm.org/D8205
69 Differential Revision: https://phab.mercurial-scm.org/D8205
70 Differential Revision: https://phab.mercurial-scm.org/D8206
70 Differential Revision: https://phab.mercurial-scm.org/D8206
71 Differential Revision: https://phab.mercurial-scm.org/D8207
71 Differential Revision: https://phab.mercurial-scm.org/D8207
72
72
73 Empty DREVSPECs don't crash
73 Empty DREVSPECs don't crash
74
74
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
76 abort: empty DREVSPEC set
76 abort: empty DREVSPEC set
77 [255]
77 [255]
78
78
79
79
80 phabupdate with an accept:
80 phabupdate with an accept:
81 $ hg phabupdate --accept D4564 \
81 $ hg phabupdate --accept D4564 \
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
83 > --test-vcr "$VCR/accept-4564.json"
83 > --test-vcr "$VCR/accept-4564.json"
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
86 [255]
86 [255]
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
88
88
89 Create a differential diff:
89 Create a differential diff:
90 $ HGENCODING=utf-8; export HGENCODING
90 $ HGENCODING=utf-8; export HGENCODING
91 $ echo alpha > alpha
91 $ echo alpha > alpha
92 $ hg ci --addremove -m 'create alpha for phabricator test €'
92 $ hg ci --addremove -m 'create alpha for phabricator test €'
93 adding alpha
93 adding alpha
94 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
94 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
95 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
95 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
96 new commits: ['347bf67801e5']
96 new commits: ['347bf67801e5']
97 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
97 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
98 $ echo more >> alpha
98 $ echo more >> alpha
99 $ HGEDITOR=true hg ci --amend
99 $ HGEDITOR=true hg ci --amend
100 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
100 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
101 $ echo beta > beta
101 $ echo beta > beta
102 $ hg ci --addremove -m 'create beta for phabricator test'
102 $ hg ci --addremove -m 'create beta for phabricator test'
103 adding beta
103 adding beta
104 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
104 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
105 c44b38f24a45 mapped to old nodes []
105 c44b38f24a45 mapped to old nodes []
106 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
106 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
107 D7916 - created - 9e6901f21d5b: create beta for phabricator test
107 D7916 - created - 9e6901f21d5b: create beta for phabricator test
108 new commits: ['a692622e6937']
108 new commits: ['a692622e6937']
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
110 $ unset HGENCODING
110 $ unset HGENCODING
111
111
112 The amend won't explode after posting a public commit. The local tag is left
112 The amend won't explode after posting a public commit. The local tag is left
113 behind to identify it.
113 behind to identify it.
114
114
115 $ echo 'public change' > beta
115 $ echo 'public change' > beta
116 $ hg ci -m 'create public change for phabricator testing'
116 $ hg ci -m 'create public change for phabricator testing'
117 $ hg phase --public .
117 $ hg phase --public .
118 $ echo 'draft change' > alpha
118 $ echo 'draft change' > alpha
119 $ hg ci -m 'create draft change for phabricator testing'
119 $ hg ci -m 'create draft change for phabricator testing'
120 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
120 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
121 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
121 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
122 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
122 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
123 warning: not updating public commit 2:7b4185ab5d16
123 warning: not updating public commit 2:7b4185ab5d16
124 new commits: ['3244dc4a3334']
124 new commits: ['3244dc4a3334']
125 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
125 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
126 $ hg tags -v
126 $ hg tags -v
127 tip 3:3244dc4a3334
127 tip 3:3244dc4a3334
128 D7917 2:7b4185ab5d16 local
128 D7917 2:7b4185ab5d16 local
129
129
130 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
130 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
131 > {
131 > {
132 > "constraints": {
132 > "constraints": {
133 > "isBot": true
133 > "isBot": true
134 > }
134 > }
135 > }
135 > }
136 > EOF
136 > EOF
137 {
137 {
138 "cursor": {
138 "cursor": {
139 "after": null,
139 "after": null,
140 "before": null,
140 "before": null,
141 "limit": 100,
141 "limit": 100,
142 "order": null
142 "order": null
143 },
143 },
144 "data": [],
144 "data": [],
145 "maps": {},
145 "maps": {},
146 "query": {
146 "query": {
147 "queryKey": null
147 "queryKey": null
148 }
148 }
149 }
149 }
150
150
151 Template keywords
151 Template keywords
152 $ hg log -T'{rev} {phabreview|json}\n'
152 $ hg log -T'{rev} {phabreview|json}\n'
153 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
153 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
154 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
154 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
155 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
155 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
156 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
156 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
157
157
158 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
158 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
159 3 https://phab.mercurial-scm.org/D7918 D7918
159 3 https://phab.mercurial-scm.org/D7918 D7918
160 2 https://phab.mercurial-scm.org/D7917 D7917
160 2 https://phab.mercurial-scm.org/D7917 D7917
161 1 https://phab.mercurial-scm.org/D7916 D7916
161 1 https://phab.mercurial-scm.org/D7916 D7916
162 0 https://phab.mercurial-scm.org/D7915 D7915
162 0 https://phab.mercurial-scm.org/D7915 D7915
163
163
164 Commenting when phabsending:
164 Commenting when phabsending:
165 $ echo comment > comment
165 $ echo comment > comment
166 $ hg ci --addremove -m "create comment for phabricator test"
166 $ hg ci --addremove -m "create comment for phabricator test"
167 adding comment
167 adding comment
168 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
168 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
169 D7919 - created - d5dddca9023d: create comment for phabricator test
169 D7919 - created - d5dddca9023d: create comment for phabricator test
170 new commits: ['f7db812bbe1d']
170 new commits: ['f7db812bbe1d']
171 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
171 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
172 $ echo comment2 >> comment
172 $ echo comment2 >> comment
173 $ hg ci --amend
173 $ hg ci --amend
174 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
174 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
175 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
175 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
176 1849d7828727 mapped to old nodes []
176 1849d7828727 mapped to old nodes []
177 D7919 - updated - 1849d7828727: create comment for phabricator test
177 D7919 - updated - 1849d7828727: create comment for phabricator test
178
178
179 Phabsending a skipped commit:
179 Phabsending a skipped commit:
180 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
180 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
181 1849d7828727 mapped to old nodes ['1849d7828727']
181 1849d7828727 mapped to old nodes ['1849d7828727']
182 D7919 - skipped - 1849d7828727: create comment for phabricator test
182 D7919 - skipped - 1849d7828727: create comment for phabricator test
183
183
184 Phabsend doesn't create an instability when rebasing existing revisions on top
184 Phabsend doesn't create an instability when rebasing existing revisions on top
185 of new revisions.
185 of new revisions.
186
186
187 $ hg init reorder
187 $ hg init reorder
188 $ cd reorder
188 $ cd reorder
189 $ cat >> .hg/hgrc <<EOF
189 $ cat >> .hg/hgrc <<EOF
190 > [phabricator]
190 > [phabricator]
191 > url = https://phab.mercurial-scm.org/
191 > url = https://phab.mercurial-scm.org/
192 > callsign = HG
192 > callsign = HG
193 > [experimental]
193 > [experimental]
194 > evolution = all
194 > evolution = all
195 > EOF
195 > EOF
196
196
197 $ echo "add" > file1.txt
197 $ echo "add" > file1.txt
198 $ hg ci -Aqm 'added'
198 $ hg ci -Aqm 'added'
199 $ echo "mod1" > file1.txt
199 $ echo "mod1" > file1.txt
200 $ hg ci -m 'modified 1'
200 $ hg ci -m 'modified 1'
201 $ echo "mod2" > file1.txt
201 $ echo "mod2" > file1.txt
202 $ hg ci -m 'modified 2'
202 $ hg ci -m 'modified 2'
203 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
203 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
204 D8433 - created - 5d3959e20d1d: modified 2
204 D8433 - created - 5d3959e20d1d: modified 2
205 new commits: ['2b4aa8a88d61']
205 new commits: ['2b4aa8a88d61']
206 $ hg log -G -T compact
206 $ hg log -G -T compact
207 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
207 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
208 | modified 2
208 | modified 2
209 |
209 |
210 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
210 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
211 | modified 1
211 | modified 1
212 |
212 |
213 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
213 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
214 added
214 added
215
215
216 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
216 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
217 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
217 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
218 D8434 - created - d549263bcb2d: modified 1
218 D8434 - created - d549263bcb2d: modified 1
219 D8433 - updated - 2b4aa8a88d61: modified 2
219 D8433 - updated - 2b4aa8a88d61: modified 2
220 new commits: ['876a60d024de']
220 new commits: ['876a60d024de']
221 new commits: ['0c6523cb1d0f']
221 new commits: ['0c6523cb1d0f']
222 $ hg log -G -T compact
222 $ hg log -G -T compact
223 @ 5[tip] 1dff6b051abf 1970-01-01 00:00 +0000 test
223 @ 5[tip] 1dff6b051abf 1970-01-01 00:00 +0000 test
224 | modified 2
224 | modified 2
225 |
225 |
226 o 4:0 eb3752621d45 1970-01-01 00:00 +0000 test
226 o 4:0 eb3752621d45 1970-01-01 00:00 +0000 test
227 | modified 1
227 | modified 1
228 |
228 |
229 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
229 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
230 added
230 added
231
231
232 Posting obsolete commits is disallowed
233
234 $ echo "mod3" > file1.txt
235 $ hg ci -m 'modified A'
236 $ echo "mod4" > file1.txt
237 $ hg ci -m 'modified B'
238
239 $ hg up '.^'
240 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
241 $ echo 'obsolete' > file1.txt
242 $ hg amend --config extensions.amend=
243 1 new orphan changesets
244 $ hg log -G
245 @ changeset: 8:8d83edb3cbac
246 | tag: tip
247 | parent: 5:1dff6b051abf
248 | user: test
249 | date: Thu Jan 01 00:00:00 1970 +0000
250 | summary: modified A
251 |
252 | * changeset: 7:d4ea1b2e3511
253 | | user: test
254 | | date: Thu Jan 01 00:00:00 1970 +0000
255 | | instability: orphan
256 | | summary: modified B
257 | |
258 | x changeset: 6:4635d7f0d1ff
259 |/ user: test
260 | date: Thu Jan 01 00:00:00 1970 +0000
261 | obsolete: rewritten using amend as 8:8d83edb3cbac
262 | summary: modified A
263 |
264 o changeset: 5:1dff6b051abf
265 | user: test
266 | date: Thu Jan 01 00:00:00 1970 +0000
267 | summary: modified 2
268 |
269 o changeset: 4:eb3752621d45
270 | parent: 0:5cbade24e0fa
271 | user: test
272 | date: Thu Jan 01 00:00:00 1970 +0000
273 | summary: modified 1
274 |
275 o changeset: 0:5cbade24e0fa
276 user: test
277 date: Thu Jan 01 00:00:00 1970 +0000
278 summary: added
279
280 $ hg phabsend -r 5::
281 abort: obsolete commits cannot be posted for review
282 [255]
283
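The guard behind the abort above amounts to refusing any selected revision
that is already obsolete. A minimal sketch of such a check, assuming
Mercurial's changectx API (the function name and exception are illustrative,
not the extension's actual code):

    def ensure_not_obsolete(repo, revs):
        """Abort if any revision selected for posting is obsolete."""
        for rev in revs:
            if repo[rev].obsolete():
                raise RuntimeError(
                    'obsolete commits cannot be posted for review')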
232 $ cd ..
284 $ cd ..
233
285
234 Phabsending a new binary, a modified binary, and a removed binary
286 Phabsending a new binary, a modified binary, and a removed binary
235
287
236 >>> open('bin', 'wb').write(b'\0a') and None
288 >>> open('bin', 'wb').write(b'\0a') and None
237 $ hg ci -Am 'add binary'
289 $ hg ci -Am 'add binary'
238 adding bin
290 adding bin
239 >>> open('bin', 'wb').write(b'\0b') and None
291 >>> open('bin', 'wb').write(b'\0b') and None
240 $ hg ci -m 'modify binary'
292 $ hg ci -m 'modify binary'
241 $ hg rm bin
293 $ hg rm bin
242 $ hg ci -m 'remove binary'
294 $ hg ci -m 'remove binary'
243 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
295 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
244 uploading bin@aa24a81f55de
296 uploading bin@aa24a81f55de
245 D8007 - created - aa24a81f55de: add binary
297 D8007 - created - aa24a81f55de: add binary
246 uploading bin@d8d62a881b54
298 uploading bin@d8d62a881b54
247 D8008 - created - d8d62a881b54: modify binary
299 D8008 - created - d8d62a881b54: modify binary
248 D8009 - created - af55645b2e29: remove binary
300 D8009 - created - af55645b2e29: remove binary
249 new commits: ['b8139fbb4a57']
301 new commits: ['b8139fbb4a57']
250 new commits: ['c88ce4c2d2ad']
302 new commits: ['c88ce4c2d2ad']
251 new commits: ['75dbbc901145']
303 new commits: ['75dbbc901145']
252 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
304 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
253
305
254 Phabsend a renamed binary and a copied binary, with and without content changes
306 Phabsend a renamed binary and a copied binary, with and without content changes
255 to src and dest
307 to src and dest
256
308
257 >>> open('bin2', 'wb').write(b'\0c') and None
309 >>> open('bin2', 'wb').write(b'\0c') and None
258 $ hg ci -Am 'add another binary'
310 $ hg ci -Am 'add another binary'
259 adding bin2
311 adding bin2
260
312
261 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
313 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
262 looks much different from when viewing "bin2_moved". No idea if this is a phab
314 looks much different from when viewing "bin2_moved". No idea if this is a phab
263 bug or a phabsend bug. The patch (as printed by phabread) looks reasonable
315 bug or a phabsend bug. The patch (as printed by phabread) looks reasonable
264 though.
316 though.
265
317
266 $ hg mv bin2 bin2_moved
318 $ hg mv bin2 bin2_moved
267 $ hg ci -m "moved binary"
319 $ hg ci -m "moved binary"
268
320
269 Note: "bin2_moved" is also not viewable in phabricator with this review
321 Note: "bin2_moved" is also not viewable in phabricator with this review
270
322
271 $ hg cp bin2_moved bin2_copied
323 $ hg cp bin2_moved bin2_copied
272 $ hg ci -m "copied binary"
324 $ hg ci -m "copied binary"
273
325
274 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
326 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
275 are viewable in their proper state. "bin2_copied" is not viewable, and not
327 are viewable in their proper state. "bin2_copied" is not viewable, and not
276 listed as binary in phabricator.
328 listed as binary in phabricator.
277
329
278 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
330 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
279 $ hg mv bin2_copied bin2_moved_again
331 $ hg mv bin2_copied bin2_moved_again
280 $ hg ci -m "move+mod copied binary"
332 $ hg ci -m "move+mod copied binary"
281
333
282 Note: "bin2_moved" and "bin2_moved_copied" are both marked binary, and both
334 Note: "bin2_moved" and "bin2_moved_copied" are both marked binary, and both
283 viewable on each side.
335 viewable on each side.
284
336
285 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
337 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
286 $ hg cp bin2_moved bin2_moved_copied
338 $ hg cp bin2_moved bin2_moved_copied
287 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
339 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
288 $ hg ci -m "copy+mod moved binary"
340 $ hg ci -m "copy+mod moved binary"
289
341
290 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
342 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
291 uploading bin2@f42f9195e00c
343 uploading bin2@f42f9195e00c
292 D8128 - created - f42f9195e00c: add another binary
344 D8128 - created - f42f9195e00c: add another binary
293 D8129 - created - 834ab31d80ae: moved binary
345 D8129 - created - 834ab31d80ae: moved binary
294 D8130 - created - 494b750e5194: copied binary
346 D8130 - created - 494b750e5194: copied binary
295 uploading bin2_moved_again@25f766b50cc2
347 uploading bin2_moved_again@25f766b50cc2
296 D8131 - created - 25f766b50cc2: move+mod copied binary
348 D8131 - created - 25f766b50cc2: move+mod copied binary
297 uploading bin2_moved_copied@1b87b363a5e4
349 uploading bin2_moved_copied@1b87b363a5e4
298 uploading bin2_moved@1b87b363a5e4
350 uploading bin2_moved@1b87b363a5e4
299 D8132 - created - 1b87b363a5e4: copy+mod moved binary
351 D8132 - created - 1b87b363a5e4: copy+mod moved binary
300 new commits: ['90437c20312a']
352 new commits: ['90437c20312a']
301 new commits: ['f391f4da4c61']
353 new commits: ['f391f4da4c61']
302 new commits: ['da86a9f3268c']
354 new commits: ['da86a9f3268c']
303 new commits: ['003ffc16ba66']
355 new commits: ['003ffc16ba66']
304 new commits: ['13bd750c36fa']
356 new commits: ['13bd750c36fa']
305 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
357 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
306
358
307 Phabreading a DREV with a local:commits time as a string:
359 Phabreading a DREV with a local:commits time as a string:
308 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
360 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
309 # HG changeset patch
361 # HG changeset patch
310 # User Pulkit Goyal <7895pulkit@gmail.com>
362 # User Pulkit Goyal <7895pulkit@gmail.com>
311 # Date 1509404054 -19800
363 # Date 1509404054 -19800
312 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
364 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
313 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
365 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
314 repoview: add a new attribute _visibilityexceptions and related API
366 repoview: add a new attribute _visibilityexceptions and related API
315
367
316 Currently we don't have a defined way in core to make some hidden revisions
368 Currently we don't have a defined way in core to make some hidden revisions
317 visible in filtered repo. Extensions to achieve the purpose of unhiding some
369 visible in filtered repo. Extensions to achieve the purpose of unhiding some
hidden commits, wrap repoview.pinnedrevs() function.

To make the above task simple and have well defined API, this patch adds a new
attribute '_visibilityexceptions' to repoview class which will contains
the hidden revs which should be exception.
This will allow to set different exceptions for different repoview objects
backed by the same unfiltered repo.

This patch also adds API to add revs to the attribute set and get them.

Thanks to Jun for suggesting the use of repoview class instead of localrepo.

Differential Revision: https://phab.mercurial-scm.org/D1285
diff --git a/mercurial/repoview.py b/mercurial/repoview.py
--- a/mercurial/repoview.py
+++ b/mercurial/repoview.py
@@ * @@ (glob)
subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
"""

+ # hidden revs which should be visible
+ _visibilityexceptions = set()
+
def __init__(self, repo, filtername):
object.__setattr__(self, r'_unfilteredrepo', repo)
object.__setattr__(self, r'filtername', filtername)
@@ -231,6 +234,14 @@
return self
return self.unfiltered().filtered(name)

+ def addvisibilityexceptions(self, revs):
+ """adds hidden revs which should be visible to set of exceptions"""
+ self._visibilityexceptions.update(revs)
+
+ def getvisibilityexceptions(self):
+ """returns the set of hidden revs which should be visible"""
+ return self._visibilityexceptions
+
# everything access are forwarded to the proxied repo
def __getattr__(self, attr):
return getattr(self._unfilteredrepo, attr)
diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
--- a/mercurial/localrepo.py
+++ b/mercurial/localrepo.py
@@ -570,6 +570,14 @@
def close(self):
self._writecaches()

+ def addvisibilityexceptions(self, exceptions):
+ # should be called on a filtered repository
+ pass
+
+ def getvisibilityexceptions(self):
+ # should be called on a filtered repository
+ return set()
+
def _loadextensions(self):
extensions.loadall(self.ui)


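The hunks above (expected phabread output for D1285) add a class-level
`_visibilityexceptions` set to repoview, add/get accessors, and matching
no-ops on the unfiltered localrepo. As an illustrative sketch only, not part
of the recorded test output, a caller wrapping repoview.pinnedrevs() as the
commit message describes might look roughly like this; `pin_hidden_revs`,
`pinnedrevs_wrapper`, `orig`, and `hiddenrevs` are placeholder names:

    # Sketch built on the API shown above; not mercurial's actual code.
    def pin_hidden_revs(repo, hiddenrevs):
        # record hidden revs that this repoview should keep visible
        repo.addvisibilityexceptions(hiddenrevs)

    def pinnedrevs_wrapper(orig, repo):
        pinned = orig(repo)                             # the usual pinned revs
        pinned.update(repo.getvisibilityexceptions())   # plus the exceptions
        return pinned
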
A bad .arcconfig doesn't error out
$ echo 'garbage' > .arcconfig
$ hg config phabricator --debug
invalid JSON in $TESTTMP/repo/.arcconfig
read config from: */.hgrc (glob)
*/.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)

The .arcconfig content overrides global config
$ cat >> $HGRCPATH << EOF
> [phabricator]
> url = global
> callsign = global
> EOF
$ cp $TESTDIR/../.arcconfig .
$ mv .hg/hgrc .hg/hgrc.bak
$ hg config phabricator --debug
read config from: */.hgrc (glob)
*/.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.arcconfig: phabricator.callsign=HG
$TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/

But it doesn't override local config
$ cat >> .hg/hgrc << EOF
> [phabricator]
> url = local
> callsign = local
> EOF
$ hg config phabricator --debug
read config from: */.hgrc (glob)
*/.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
$ mv .hg/hgrc.bak .hg/hgrc

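The three `hg config phabricator --debug` runs above establish the precedence:
the repo-local .hg/hgrc wins over .arcconfig, which in turn wins over the
global hgrc. A minimal standalone sketch of that lookup order (plain dicts
stand in for the parsed sources; this is not the extension's actual code):

    # Illustrative only: the first source that defines the key wins.
    def effective_value(key, local_hgrc, arcconfig, global_hgrc):
        for source in (local_hgrc, arcconfig, global_hgrc):
            if key in source:
                return source[key]
        return None

    # effective_value('callsign', {'callsign': 'local'},
    #                 {'callsign': 'HG'}, {'callsign': 'global'}) == 'local'
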
Phabimport works with a stack

$ cd ..
$ hg clone repo repo2 -qr 1
$ cp repo/.hg/hgrc repo2/.hg/
$ cd repo2
$ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
applying patch from D7917
applying patch from D7918
$ hg log -r .: -G -Tcompact
o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
| create draft change for phabricator testing
|
o 2 8de3712202d1 1970-01-01 00:00 +0000 test
| create public change for phabricator testing
|
@ 1 a692622e6937 1970-01-01 00:00 +0000 test
| create beta for phabricator test
~
Phabimport can create secret commits

$ hg rollback --config ui.rollback=True
repository tip rolled back to revision 1 (undo phabimport)
$ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
> --config phabimport.secret=True
applying patch from D7917
applying patch from D7918
$ hg log -r 'reverse(.:)' -T phases
changeset: 3:aaef04066140
tag: tip
phase: secret
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: create draft change for phabricator testing

changeset: 2:8de3712202d1
phase: secret
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: create public change for phabricator testing

changeset: 1:a692622e6937
phase: public
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: create beta for phabricator test

Phabimport accepts multiple DREVSPECs

$ hg rollback --config ui.rollback=True
repository tip rolled back to revision 1 (undo phabimport)
$ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
applying patch from D7917
applying patch from D7918

Validate arguments with --fold

$ hg phabsend --fold -r 1
abort: cannot fold a single revision
[255]
$ hg phabsend --fold --no-amend -r 1::
abort: cannot fold with --no-amend
[255]
$ hg phabsend --fold -r 0+3
abort: cannot fold non-linear revisions
[255]
$ hg phabsend --fold -r 1::
abort: cannot fold revisions with different DREV values
[255]

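Each abort above corresponds to one up-front check on the --fold arguments.
A simplified stand-in for those checks (not the extension's real code; the
argument names are placeholders):

    # Illustrative only: the four preconditions exercised above.
    def check_fold_args(revs, amend, drevs_per_rev, is_linear):
        if len(revs) <= 1:
            raise ValueError('cannot fold a single revision')
        if not amend:
            raise ValueError('cannot fold with --no-amend')
        if not is_linear:
            raise ValueError('cannot fold non-linear revisions')
        drevs = {d for d in drevs_per_rev if d is not None}
        if len(drevs) > 1:
            raise ValueError('cannot fold revisions with different DREV values')
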
Set up a series of commits to be folded, and include the Test Plan field multiple
times to test the concatenation logic. There is no Test Plan field in the last
one, to ensure missing fields are skipped.

$ hg init ../folded
$ cd ../folded
$ cat >> .hg/hgrc <<EOF
> [phabricator]
> url = https://phab.mercurial-scm.org/
> callsign = HG
> EOF

$ echo 'added' > file.txt
$ hg ci -Aqm 'added file'

$ cat > log.txt <<EOF
> one: first commit to review
>
> This file was modified with 'mod1' as its contents.
>
> Test Plan:
> LOL! What testing?!
> EOF
$ echo mod1 > file.txt
$ hg ci -l log.txt

$ cat > log.txt <<EOF
> two: second commit to review
>
> This file was modified with 'mod2' as its contents.
>
> Test Plan:
> Haha! yeah, right.
>
> EOF
$ echo mod2 > file.txt
$ hg ci -l log.txt

$ echo mod3 > file.txt
$ hg ci -m '3: a commit with no detailed message'

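When this series is folded into a single Differential, the per-commit
descriptions are combined: summaries are joined, the two Test Plan sections
are concatenated, and the commit without one contributes nothing. A simplified
stand-in for that combination step (illustrative only; field handling in the
real extension is more involved):

    # Illustrative only: combine per-commit messages into one set of fields.
    def fold_fields(commit_messages):
        summaries, plans = [], []
        for msg in commit_messages:
            body, _, plan = msg.partition('Test Plan:')
            summaries.append(body.strip())
            if plan.strip():
                plans.append(plan.strip())    # missing Test Plans are skipped
        return {'summary': '\n\n'.join(summaries),
                'test plan': '\n\n'.join(plans)}
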
The folding of immutable commits works...

$ hg phase -r tip --public
$ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
D8386 - created - a959a3f69d8d: one: first commit to review
D8386 - created - 24a4438154ba: two: second commit to review
D8386 - created - d235829e802c: 3: a commit with no detailed message
warning: not updating public commit 1:a959a3f69d8d
warning: not updating public commit 2:24a4438154ba
warning: not updating public commit 3:d235829e802c
no newnodes to update

$ hg phase -r 0 --draft --force

... as does the initial mutable fold...

$ echo y | hg phabsend --fold --confirm -r 1:: \
> --test-vcr "$VCR/phabsend-fold-initial.json"
NEW - a959a3f69d8d: one: first commit to review
NEW - 24a4438154ba: two: second commit to review
NEW - d235829e802c: 3: a commit with no detailed message
Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
D8387 - created - a959a3f69d8d: one: first commit to review
D8387 - created - 24a4438154ba: two: second commit to review
D8387 - created - d235829e802c: 3: a commit with no detailed message
updating local commit list for D8387
new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg

... and doesn't mangle the local commits.

$ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
3:921f8265efbd
3: a commit with no detailed message

Differential Revision: https://phab.mercurial-scm.org/D8387
2:832553266fe8
two: second commit to review

This file was modified with 'mod2' as its contents.

Test Plan:
Haha! yeah, right.

Differential Revision: https://phab.mercurial-scm.org/D8387
1:602c4e738243
one: first commit to review

This file was modified with 'mod1' as its contents.

Test Plan:
LOL! What testing?!

Differential Revision: https://phab.mercurial-scm.org/D8387
0:98d480e0d494
added file

Set up some obsmarkers by adding a file to the middle commit. This stress-tests
getoldnodedrevmap() in later phabsends.

$ hg up '.^'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo 'modified' > file2.txt
$ hg add file2.txt
$ hg amend --config experimental.evolution=all --config extensions.amend=
1 new orphan changesets
$ hg up 3
obsolete feature not enabled but 1 markers found!
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg rebase --config experimental.evolution=all --config extensions.rebase=
note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
rebasing 3:921f8265efbd "3: a commit with no detailed message"

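The amend and rebase above leave obsolescence markers behind, so the next
phabsend has to map each current node back to the node that was previously
associated with D8387 (the "mapped to old nodes" lines below). A simplified
stand-in for that predecessor walk (not getoldnodedrevmap() itself; the
`predecessors` and `drevs` mappings are placeholder inputs):

    # Illustrative only: follow predecessors until a previously-sent node is
    # found. `predecessors` maps node -> list of predecessor nodes and
    # `drevs` maps previously-sent node -> Differential Revision number.
    def old_nodes_for(node, predecessors, drevs):
        seen, stack, found = set(), [node], []
        while stack:
            n = stack.pop()
            if n in seen:
                continue
            seen.add(n)
            if n in drevs:
                found.append(n)     # unchanged nodes map to themselves
            else:
                stack.extend(predecessors.get(n, []))
        return found
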
When commits have changed locally, the local commit list on Phabricator is
updated.

$ echo y | hg phabsend --fold --confirm -r 1:: \
> --test-vcr "$VCR/phabsend-fold-updated.json"
obsolete feature not enabled but 2 markers found!
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['832553266fe8']
e4edb1fe3565 mapped to old nodes ['921f8265efbd']
D8387 - 602c4e738243: one: first commit to review
D8387 - 0124e5474c88: two: second commit to review
D8387 - e4edb1fe3565: 3: a commit with no detailed message
Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
D8387 - updated - 602c4e738243: one: first commit to review
D8387 - updated - 0124e5474c88: two: second commit to review
D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
obsolete feature not enabled but 2 markers found! (?)
updating local commit list for D8387
new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
$ hg log -Tcompact
obsolete feature not enabled but 2 markers found!
5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
3: a commit with no detailed message

4:1 0124e5474c88 1970-01-01 00:00 +0000 test
two: second commit to review

1 602c4e738243 1970-01-01 00:00 +0000 test
one: first commit to review

0 98d480e0d494 1970-01-01 00:00 +0000 test
added file

When nothing has changed locally since the last phabsend, the commit list isn't
updated, and nothing is changed locally afterward.

$ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json"
obsolete feature not enabled but 2 markers found!
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['0124e5474c88']
e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
D8387 - updated - 602c4e738243: one: first commit to review
D8387 - updated - 0124e5474c88: two: second commit to review
D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
obsolete feature not enabled but 2 markers found! (?)
local commit list for D8387 is already up-to-date
$ hg log -Tcompact
obsolete feature not enabled but 2 markers found!
5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
3: a commit with no detailed message

4:1 0124e5474c88 1970-01-01 00:00 +0000 test
two: second commit to review

1 602c4e738243 1970-01-01 00:00 +0000 test
one: first commit to review

0 98d480e0d494 1970-01-01 00:00 +0000 test
added file

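The second run above compares the commit hashes already recorded for D8387
with the hashes it is about to send; since they match, it only reports that
the local commit list is already up-to-date and rewrites nothing. A minimal
sketch of that decision (illustrative only; `stored_nodes` and
`current_nodes` are placeholder names):

    # Illustrative only: update the recorded commit list only when it changed.
    def commit_list_needs_update(stored_nodes, current_nodes):
        return list(stored_nodes) != list(current_nodes)

    # commit_list_needs_update(['602c4e738243', '0124e5474c88', 'e4edb1fe3565'],
    #                          ['602c4e738243', '0124e5474c88', 'e4edb1fe3565'])
    # is False, matching the "already up-to-date" message above.
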
Fold will accept new revisions at the end...

$ echo 'another mod' > file2.txt
$ hg ci -m 'four: extend the fold range'
obsolete feature not enabled but 2 markers found!
$ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \
> --config experimental.evolution=all
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['0124e5474c88']
e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
D8387 - updated - 602c4e738243: one: first commit to review
D8387 - updated - 0124e5474c88: two: second commit to review
D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
D8387 - created - 94aaae213b23: four: extend the fold range
updating local commit list for D8387
new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
$ hg log -r . -T '{desc}\n'
four: extend the fold range

Differential Revision: https://phab.mercurial-scm.org/D8387
$ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1::
obsolete feature not enabled but 3 markers found!
1 https://phab.mercurial-scm.org/D8387 D8387
4 https://phab.mercurial-scm.org/D8387 D8387
5 https://phab.mercurial-scm.org/D8387 D8387
7 https://phab.mercurial-scm.org/D8387 D8387

... and also accepts new revisions at the beginning of the range

It's a bit unfortunate that not having a Differential URL on the first commit
causes a new Differential Revision to be created, though it isn't *entirely*
unreasonable. At least this updates the subsequent commits.

TODO: See if it can reuse the existing Differential.

$ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \
> --config experimental.evolution=all
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['0124e5474c88']
e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
51a04fea8707 mapped to old nodes ['51a04fea8707']
D8388 - created - 98d480e0d494: added file
D8388 - updated - 602c4e738243: one: first commit to review
D8388 - updated - 0124e5474c88: two: second commit to review
D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
D8388 - updated - 51a04fea8707: four: extend the fold range
updating local commit list for D8388
new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']

$ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
obsolete feature not enabled but 8 markers found!
12:ac7db67f0991
four: extend the fold range

Differential Revision: https://phab.mercurial-scm.org/D8388
11:30682b960804
3: a commit with no detailed message

Differential Revision: https://phab.mercurial-scm.org/D8388
10:3ee132d41dbc
two: second commit to review

This file was modified with 'mod2' as its contents.

Test Plan:
Haha! yeah, right.

Differential Revision: https://phab.mercurial-scm.org/D8388
9:6320b7d714cf
one: first commit to review

This file was modified with 'mod1' as its contents.

Test Plan:
LOL! What testing?!

Differential Revision: https://phab.mercurial-scm.org/D8388
8:15e9b14b4b4c
added file

Differential Revision: https://phab.mercurial-scm.org/D8388

Test phabsend --fold with an `hg split` at the end of the range

$ echo foo > file3.txt
$ hg add file3.txt

$ hg log -r . -T '{desc}' > log.txt
$ echo 'amended mod' > file2.txt
$ hg ci --amend -l log.txt --config experimental.evolution=all

$ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \
> --config experimental.evolution=all split -r .
> n
> y
> y
> y
> y
> EOF
diff --git a/file2.txt b/file2.txt
1 hunks, 1 lines changed
examine changes to 'file2.txt'?
(enter ? for help) [Ynesfdaq?] n

diff --git a/file3.txt b/file3.txt
new file mode 100644
examine changes to 'file3.txt'?
(enter ? for help) [Ynesfdaq?] y

@@ -0,0 +1,1 @@
+foo
record change 2/2 to 'file3.txt'?
(enter ? for help) [Ynesfdaq?] y

created new head
diff --git a/file2.txt b/file2.txt
1 hunks, 1 lines changed
examine changes to 'file2.txt'?
(enter ? for help) [Ynesfdaq?] y

@@ -1,1 +1,1 @@
-modified
+amended mod
record this change to 'file2.txt'?
(enter ? for help) [Ynesfdaq?] y

$ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \
> --config experimental.evolution=all
15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
6320b7d714cf mapped to old nodes ['6320b7d714cf']
3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
30682b960804 mapped to old nodes ['30682b960804']
6bc15dc99efd mapped to old nodes ['ac7db67f0991']
b50946d5e490 mapped to old nodes ['ac7db67f0991']
D8388 - updated - 15e9b14b4b4c: added file
D8388 - updated - 6320b7d714cf: one: first commit to review
D8388 - updated - 3ee132d41dbc: two: second commit to review
D8388 - updated - 30682b960804: 3: a commit with no detailed message
D8388 - updated - 6bc15dc99efd: four: extend the fold range
D8388 - updated - b50946d5e490: four: extend the fold range
updating local commit list for D8388
new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']

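Note that after the split, both successors (6bc15dc99efd and b50946d5e490)
map back to the same old node, so both stay attached to D8388. A tiny sketch
of grouping new nodes by the old node they replace (illustrative only;
`new_to_old` is a placeholder mapping):

    # Illustrative only: invert a new-node -> old-nodes mapping.
    def group_by_old_node(new_to_old):
        byold = {}
        for new, olds in new_to_old.items():
            for old in olds:
                byold.setdefault(old, []).append(new)
        return byold

    # group_by_old_node({'6bc15dc99efd': ['ac7db67f0991'],
    #                    'b50946d5e490': ['ac7db67f0991']})
    # == {'ac7db67f0991': ['6bc15dc99efd', 'b50946d5e490']}
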
Test phabsend --fold with an `hg fold` at the end of the range

$ hg --config experimental.evolution=all --config extensions.rebase= \
> rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
rebasing 14:6bc15dc99efd "four: extend the fold range"
rebasing 15:b50946d5e490 "four: extend the fold range" (tip)

$ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
> --config experimental.evolution=all
15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
6320b7d714cf mapped to old nodes ['6320b7d714cf']
3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
30682b960804 mapped to old nodes ['30682b960804']
e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
D8388 - updated - 15e9b14b4b4c: added file
D8388 - updated - 6320b7d714cf: one: first commit to review
D8388 - updated - 3ee132d41dbc: two: second commit to review
D8388 - updated - 30682b960804: 3: a commit with no detailed message
D8388 - updated - e919cdf3d4fe: four: extend the fold range
updating local commit list for D8388
new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']

$ hg log -r tip -v
obsolete feature not enabled but 12 markers found!
changeset: 16:e919cdf3d4fe
tag: tip
parent: 11:30682b960804
user: test
date: Thu Jan 01 00:00:00 1970 +0000
files: file2.txt file3.txt
description:
four: extend the fold range

Differential Revision: https://phab.mercurial-scm.org/D8388



$ cd ..