##// END OF EJS Templates
phabricator: add a config knob to import in the secret phase...
Matt Harbison -
r45040:9bae1d1a default
parent child Browse files
Show More
@@ -1,1889 +1,1895 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # be specifying the version(s) of Mercurial they are tested with, or
95 # be specifying the version(s) of Mercurial they are tested with, or
96 # leave the attribute unspecified.
96 # leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# All commands and config items below are registered through a single
# exthelper instance; Mercurial consumes the aggregated tables at uisetup.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
# When set, changesets created by phabimport are marked secret.
eh.configitem(
    b'phabimport', b'secret', default=False,
)

# Color labels used when rendering phabricator output (color extension).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered via vcrcommand(); the
# test suite uses it to record/replay conduit HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155
158
156
159
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads Arcanist's JSON ``.arcconfig`` from the working directory and
    maps ``repository.callsign``/``phabricator.uri`` onto the
    ``phabricator.callsign``/``phabricator.url`` config keys.  Returns
    True if either ``.arcconfig`` or the wrapped hgrc loader supplied
    configuration.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # Missing/unreadable .arcconfig is normal; fall through silently.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    # .hg/hgrc is loaded last so it can override .arcconfig values.
    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
193
196
194
197
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Decorator registering a command with optional HTTP record/replay.

    Behaves like ``command()`` but appends the ``--test-vcr`` flag.  When
    that flag is supplied, every conduit HTTP request made by the command
    is recorded to (or replayed from) the named cassette file via the
    ``vcr`` package.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Requests match when URI, method and decoded body parameters
        # agree.  JSON-valued parameters are compared structurally so that
        # key-ordering differences do not break cassette replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the real conduit API token before it lands in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are session specific; never persist them in transcripts.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr performs dynamic imports that conflict with
                # Mercurial's demand importer.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch urlmod's connection classes so that the
                        # builtin HTTP path goes through vcr's stubs.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command directly.
            return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
273
276
274
277
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        if isinstance(value, bool):
            # PHP's http_build_query spells booleans as literal strings.
            value = b'true' if value else b'false'
        # Exact-type dispatch (not isinstance) so that subclasses of
        # list/dict are stored verbatim, matching the original behavior.
        kind = type(value)
        if kind is list:
            children = [(b'%d' % index, item) for index, item in enumerate(value)]
        elif kind is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in children:
            _flatten(b'%s[%s]' % (prefix, key) if prefix else key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flat)
300
303
301
304
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        groupname, groupauth = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = groupauth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
330
333
331
334
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    The API token read from the [auth] config is injected into ``params``.
    Transport is either the builtin urllib opener or, when
    ``phabricator.curlcmd`` is set, an external curl process.  Raises
    ``error.Abort`` if the server response carries an ``error_code``.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before injecting the token so the caller's dict stays untouched.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # "-d @-" makes curl read the request body from its stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert the unicode-keyed JSON result back to local (bytes) strings.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
375
378
376
379
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Pretty-print with stable key order so output is diff/test friendly.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
400
403
401
404
def getrepophid(repo):
    """given callsign, return repository PHID or None

    The looked-up PHID is cached back into the ``phabricator.repophid``
    config so the conduit round trip happens at most once per process.
    Returns None when no callsign is configured or the callsign is
    unknown to the server.
    """
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    # An unknown callsign yields an empty result set.
    if not query[b'data']:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
421
424
422
425
# Local tags of the form "D123" that tie a node to a Differential Revision.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# The "Differential Revision: <url>D123" line added to commit messages;
# captures the full URL and the numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
427
430
428
431
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the association still needs confirming
                        # against Phabricator's own precursor records below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: an explicit commit-message association is trusted.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the hg node recorded in a diff's metadata, if any.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag (tagging nullid deletes it).
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
522
525
523
526
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevfor(ctx):
        # Prefer the "Differential Revision:" line in the commit message.
        found = _differentialrevisiondescre.search(ctx.description())
        if found:
            return int(found.group('id'))
        # Otherwise fall back to local tags shaped like "D123".
        for tag in repo.nodetags(ctx.node()):
            found = _differentialrevisiontagre.match(tag)
            if found:
                return int(found.group(1))
        return None

    return {rev: _drevfor(repo[rev]) for rev in revs}
545
548
546
549
547 def getdiff(ctx, diffopts):
550 def getdiff(ctx, diffopts):
548 """plain-text diff without header (user, commit message, etc)"""
551 """plain-text diff without header (user, commit message, etc)"""
549 output = util.stringio()
552 output = util.stringio()
550 for chunk, _label in patch.diffui(
553 for chunk, _label in patch.diffui(
551 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
554 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
552 ):
555 ):
553 output.write(chunk)
556 output.write(chunk)
554 return output.getvalue()
557 return output.getvalue()
555
558
556
559
557 class DiffChangeType(object):
560 class DiffChangeType(object):
558 ADD = 1
561 ADD = 1
559 CHANGE = 2
562 CHANGE = 2
560 DELETE = 3
563 DELETE = 3
561 MOVE_AWAY = 4
564 MOVE_AWAY = 4
562 COPY_AWAY = 5
565 COPY_AWAY = 5
563 MOVE_HERE = 6
566 MOVE_HERE = 6
564 COPY_HERE = 7
567 COPY_HERE = 7
565 MULTICOPY = 8
568 MULTICOPY = 8
566
569
567
570
568 class DiffFileType(object):
571 class DiffFileType(object):
569 TEXT = 1
572 TEXT = 1
570 IMAGE = 2
573 IMAGE = 2
571 BINARY = 3
574 BINARY = 3
572
575
573
576
574 @attr.s
577 @attr.s
575 class phabhunk(dict):
578 class phabhunk(dict):
576 """Represents a Differential hunk, which is owned by a Differential change
579 """Represents a Differential hunk, which is owned by a Differential change
577 """
580 """
578
581
579 oldOffset = attr.ib(default=0) # camelcase-required
582 oldOffset = attr.ib(default=0) # camelcase-required
580 oldLength = attr.ib(default=0) # camelcase-required
583 oldLength = attr.ib(default=0) # camelcase-required
581 newOffset = attr.ib(default=0) # camelcase-required
584 newOffset = attr.ib(default=0) # camelcase-required
582 newLength = attr.ib(default=0) # camelcase-required
585 newLength = attr.ib(default=0) # camelcase-required
583 corpus = attr.ib(default='')
586 corpus = attr.ib(default='')
584 # These get added to the phabchange's equivalents
587 # These get added to the phabchange's equivalents
585 addLines = attr.ib(default=0) # camelcase-required
588 addLines = attr.ib(default=0) # camelcase-required
586 delLines = attr.ib(default=0) # camelcase-required
589 delLines = attr.ib(default=0) # camelcase-required
587
590
588
591
589 @attr.s
592 @attr.s
590 class phabchange(object):
593 class phabchange(object):
591 """Represents a Differential change, owns Differential hunks and owned by a
594 """Represents a Differential change, owns Differential hunks and owned by a
592 Differential diff. Each one represents one file in a diff.
595 Differential diff. Each one represents one file in a diff.
593 """
596 """
594
597
595 currentPath = attr.ib(default=None) # camelcase-required
598 currentPath = attr.ib(default=None) # camelcase-required
596 oldPath = attr.ib(default=None) # camelcase-required
599 oldPath = attr.ib(default=None) # camelcase-required
597 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
600 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
598 metadata = attr.ib(default=attr.Factory(dict))
601 metadata = attr.ib(default=attr.Factory(dict))
599 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
602 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
600 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
603 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
601 type = attr.ib(default=DiffChangeType.CHANGE)
604 type = attr.ib(default=DiffChangeType.CHANGE)
602 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
605 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
603 commitHash = attr.ib(default=None) # camelcase-required
606 commitHash = attr.ib(default=None) # camelcase-required
604 addLines = attr.ib(default=0) # camelcase-required
607 addLines = attr.ib(default=0) # camelcase-required
605 delLines = attr.ib(default=0) # camelcase-required
608 delLines = attr.ib(default=0) # camelcase-required
606 hunks = attr.ib(default=attr.Factory(list))
609 hunks = attr.ib(default=attr.Factory(list))
607
610
608 def copynewmetadatatoold(self):
611 def copynewmetadatatoold(self):
609 for key in list(self.metadata.keys()):
612 for key in list(self.metadata.keys()):
610 newkey = key.replace(b'new:', b'old:')
613 newkey = key.replace(b'new:', b'old:')
611 self.metadata[newkey] = self.metadata[key]
614 self.metadata[newkey] = self.metadata[key]
612
615
613 def addoldmode(self, value):
616 def addoldmode(self, value):
614 self.oldProperties[b'unix:filemode'] = value
617 self.oldProperties[b'unix:filemode'] = value
615
618
616 def addnewmode(self, value):
619 def addnewmode(self, value):
617 self.newProperties[b'unix:filemode'] = value
620 self.newProperties[b'unix:filemode'] = value
618
621
619 def addhunk(self, hunk):
622 def addhunk(self, hunk):
620 if not isinstance(hunk, phabhunk):
623 if not isinstance(hunk, phabhunk):
621 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
624 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
622 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
625 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
623 # It's useful to include these stats since the Phab web UI shows them,
626 # It's useful to include these stats since the Phab web UI shows them,
624 # and uses them to estimate how large a change a Revision is. Also used
627 # and uses them to estimate how large a change a Revision is. Also used
625 # in email subjects for the [+++--] bit.
628 # in email subjects for the [+++--] bit.
626 self.addLines += hunk.addLines
629 self.addLines += hunk.addLines
627 self.delLines += hunk.delLines
630 self.delLines += hunk.delLines
628
631
629
632
630 @attr.s
633 @attr.s
631 class phabdiff(object):
634 class phabdiff(object):
632 """Represents a Differential diff, owns Differential changes. Corresponds
635 """Represents a Differential diff, owns Differential changes. Corresponds
633 to a commit.
636 to a commit.
634 """
637 """
635
638
636 # Doesn't seem to be any reason to send this (output of uname -n)
639 # Doesn't seem to be any reason to send this (output of uname -n)
637 sourceMachine = attr.ib(default=b'') # camelcase-required
640 sourceMachine = attr.ib(default=b'') # camelcase-required
638 sourcePath = attr.ib(default=b'/') # camelcase-required
641 sourcePath = attr.ib(default=b'/') # camelcase-required
639 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
642 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
640 sourceControlPath = attr.ib(default=b'/') # camelcase-required
643 sourceControlPath = attr.ib(default=b'/') # camelcase-required
641 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
644 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
642 branch = attr.ib(default=b'default')
645 branch = attr.ib(default=b'default')
643 bookmark = attr.ib(default=None)
646 bookmark = attr.ib(default=None)
644 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
647 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
645 lintStatus = attr.ib(default=b'none') # camelcase-required
648 lintStatus = attr.ib(default=b'none') # camelcase-required
646 unitStatus = attr.ib(default=b'none') # camelcase-required
649 unitStatus = attr.ib(default=b'none') # camelcase-required
647 changes = attr.ib(default=attr.Factory(dict))
650 changes = attr.ib(default=attr.Factory(dict))
648 repositoryPHID = attr.ib(default=None) # camelcase-required
651 repositoryPHID = attr.ib(default=None) # camelcase-required
649
652
650 def addchange(self, change):
653 def addchange(self, change):
651 if not isinstance(change, phabchange):
654 if not isinstance(change, phabchange):
652 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
655 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
653 self.changes[change.currentPath] = pycompat.byteskwargs(
656 self.changes[change.currentPath] = pycompat.byteskwargs(
654 attr.asdict(change)
657 attr.asdict(change)
655 )
658 )
656
659
657
660
658 def maketext(pchange, ctx, fname):
661 def maketext(pchange, ctx, fname):
659 """populate the phabchange for a text file"""
662 """populate the phabchange for a text file"""
660 repo = ctx.repo()
663 repo = ctx.repo()
661 fmatcher = match.exact([fname])
664 fmatcher = match.exact([fname])
662 diffopts = mdiff.diffopts(git=True, context=32767)
665 diffopts = mdiff.diffopts(git=True, context=32767)
663 _pfctx, _fctx, header, fhunks = next(
666 _pfctx, _fctx, header, fhunks = next(
664 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
667 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
665 )
668 )
666
669
667 for fhunk in fhunks:
670 for fhunk in fhunks:
668 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
671 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
669 corpus = b''.join(lines[1:])
672 corpus = b''.join(lines[1:])
670 shunk = list(header)
673 shunk = list(header)
671 shunk.extend(lines)
674 shunk.extend(lines)
672 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
675 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
673 patch.diffstatdata(util.iterlines(shunk))
676 patch.diffstatdata(util.iterlines(shunk))
674 )
677 )
675 pchange.addhunk(
678 pchange.addhunk(
676 phabhunk(
679 phabhunk(
677 oldOffset,
680 oldOffset,
678 oldLength,
681 oldLength,
679 newOffset,
682 newOffset,
680 newLength,
683 newLength,
681 corpus,
684 corpus,
682 addLines,
685 addLines,
683 delLines,
686 delLines,
684 )
687 )
685 )
688 )
686
689
687
690
688 def uploadchunks(fctx, fphid):
691 def uploadchunks(fctx, fphid):
689 """upload large binary files as separate chunks.
692 """upload large binary files as separate chunks.
690 Phab requests chunking over 8MiB, and splits into 4MiB chunks
693 Phab requests chunking over 8MiB, and splits into 4MiB chunks
691 """
694 """
692 ui = fctx.repo().ui
695 ui = fctx.repo().ui
693 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
696 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
694 with ui.makeprogress(
697 with ui.makeprogress(
695 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
698 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
696 ) as progress:
699 ) as progress:
697 for chunk in chunks:
700 for chunk in chunks:
698 progress.increment()
701 progress.increment()
699 if chunk[b'complete']:
702 if chunk[b'complete']:
700 continue
703 continue
701 bstart = int(chunk[b'byteStart'])
704 bstart = int(chunk[b'byteStart'])
702 bend = int(chunk[b'byteEnd'])
705 bend = int(chunk[b'byteEnd'])
703 callconduit(
706 callconduit(
704 ui,
707 ui,
705 b'file.uploadchunk',
708 b'file.uploadchunk',
706 {
709 {
707 b'filePHID': fphid,
710 b'filePHID': fphid,
708 b'byteStart': bstart,
711 b'byteStart': bstart,
709 b'data': base64.b64encode(fctx.data()[bstart:bend]),
712 b'data': base64.b64encode(fctx.data()[bstart:bend]),
710 b'dataEncoding': b'base64',
713 b'dataEncoding': b'base64',
711 },
714 },
712 )
715 )
713
716
714
717
715 def uploadfile(fctx):
718 def uploadfile(fctx):
716 """upload binary files to Phabricator"""
719 """upload binary files to Phabricator"""
717 repo = fctx.repo()
720 repo = fctx.repo()
718 ui = repo.ui
721 ui = repo.ui
719 fname = fctx.path()
722 fname = fctx.path()
720 size = fctx.size()
723 size = fctx.size()
721 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
724 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
722
725
723 # an allocate call is required first to see if an upload is even required
726 # an allocate call is required first to see if an upload is even required
724 # (Phab might already have it) and to determine if chunking is needed
727 # (Phab might already have it) and to determine if chunking is needed
725 allocateparams = {
728 allocateparams = {
726 b'name': fname,
729 b'name': fname,
727 b'contentLength': size,
730 b'contentLength': size,
728 b'contentHash': fhash,
731 b'contentHash': fhash,
729 }
732 }
730 filealloc = callconduit(ui, b'file.allocate', allocateparams)
733 filealloc = callconduit(ui, b'file.allocate', allocateparams)
731 fphid = filealloc[b'filePHID']
734 fphid = filealloc[b'filePHID']
732
735
733 if filealloc[b'upload']:
736 if filealloc[b'upload']:
734 ui.write(_(b'uploading %s\n') % bytes(fctx))
737 ui.write(_(b'uploading %s\n') % bytes(fctx))
735 if not fphid:
738 if not fphid:
736 uploadparams = {
739 uploadparams = {
737 b'name': fname,
740 b'name': fname,
738 b'data_base64': base64.b64encode(fctx.data()),
741 b'data_base64': base64.b64encode(fctx.data()),
739 }
742 }
740 fphid = callconduit(ui, b'file.upload', uploadparams)
743 fphid = callconduit(ui, b'file.upload', uploadparams)
741 else:
744 else:
742 uploadchunks(fctx, fphid)
745 uploadchunks(fctx, fphid)
743 else:
746 else:
744 ui.debug(b'server already has %s\n' % bytes(fctx))
747 ui.debug(b'server already has %s\n' % bytes(fctx))
745
748
746 if not fphid:
749 if not fphid:
747 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
750 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
748
751
749 return fphid
752 return fphid
750
753
751
754
752 def addoldbinary(pchange, oldfctx, fctx):
755 def addoldbinary(pchange, oldfctx, fctx):
753 """add the metadata for the previous version of a binary file to the
756 """add the metadata for the previous version of a binary file to the
754 phabchange for the new version
757 phabchange for the new version
755
758
756 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
759 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
757 version of the file, or None if the file is being removed.
760 version of the file, or None if the file is being removed.
758 """
761 """
759 if not fctx or fctx.cmp(oldfctx):
762 if not fctx or fctx.cmp(oldfctx):
760 # Files differ, add the old one
763 # Files differ, add the old one
761 pchange.metadata[b'old:file:size'] = oldfctx.size()
764 pchange.metadata[b'old:file:size'] = oldfctx.size()
762 mimeguess, _enc = mimetypes.guess_type(
765 mimeguess, _enc = mimetypes.guess_type(
763 encoding.unifromlocal(oldfctx.path())
766 encoding.unifromlocal(oldfctx.path())
764 )
767 )
765 if mimeguess:
768 if mimeguess:
766 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
769 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
767 mimeguess
770 mimeguess
768 )
771 )
769 fphid = uploadfile(oldfctx)
772 fphid = uploadfile(oldfctx)
770 pchange.metadata[b'old:binary-phid'] = fphid
773 pchange.metadata[b'old:binary-phid'] = fphid
771 else:
774 else:
772 # If it's left as IMAGE/BINARY web UI might try to display it
775 # If it's left as IMAGE/BINARY web UI might try to display it
773 pchange.fileType = DiffFileType.TEXT
776 pchange.fileType = DiffFileType.TEXT
774 pchange.copynewmetadatatoold()
777 pchange.copynewmetadatatoold()
775
778
776
779
777 def makebinary(pchange, fctx):
780 def makebinary(pchange, fctx):
778 """populate the phabchange for a binary file"""
781 """populate the phabchange for a binary file"""
779 pchange.fileType = DiffFileType.BINARY
782 pchange.fileType = DiffFileType.BINARY
780 fphid = uploadfile(fctx)
783 fphid = uploadfile(fctx)
781 pchange.metadata[b'new:binary-phid'] = fphid
784 pchange.metadata[b'new:binary-phid'] = fphid
782 pchange.metadata[b'new:file:size'] = fctx.size()
785 pchange.metadata[b'new:file:size'] = fctx.size()
783 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
786 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
784 if mimeguess:
787 if mimeguess:
785 mimeguess = pycompat.bytestr(mimeguess)
788 mimeguess = pycompat.bytestr(mimeguess)
786 pchange.metadata[b'new:file:mime-type'] = mimeguess
789 pchange.metadata[b'new:file:mime-type'] = mimeguess
787 if mimeguess.startswith(b'image/'):
790 if mimeguess.startswith(b'image/'):
788 pchange.fileType = DiffFileType.IMAGE
791 pchange.fileType = DiffFileType.IMAGE
789
792
790
793
791 # Copied from mercurial/patch.py
794 # Copied from mercurial/patch.py
792 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
795 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
793
796
794
797
795 def notutf8(fctx):
798 def notutf8(fctx):
796 """detect non-UTF-8 text files since Phabricator requires them to be marked
799 """detect non-UTF-8 text files since Phabricator requires them to be marked
797 as binary
800 as binary
798 """
801 """
799 try:
802 try:
800 fctx.data().decode('utf-8')
803 fctx.data().decode('utf-8')
801 return False
804 return False
802 except UnicodeDecodeError:
805 except UnicodeDecodeError:
803 fctx.repo().ui.write(
806 fctx.repo().ui.write(
804 _(b'file %s detected as non-UTF-8, marked as binary\n')
807 _(b'file %s detected as non-UTF-8, marked as binary\n')
805 % fctx.path()
808 % fctx.path()
806 )
809 )
807 return True
810 return True
808
811
809
812
810 def addremoved(pdiff, ctx, removed):
813 def addremoved(pdiff, ctx, removed):
811 """add removed files to the phabdiff. Shouldn't include moves"""
814 """add removed files to the phabdiff. Shouldn't include moves"""
812 for fname in removed:
815 for fname in removed:
813 pchange = phabchange(
816 pchange = phabchange(
814 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
817 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
815 )
818 )
816 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
819 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
817 oldfctx = ctx.p1()[fname]
820 oldfctx = ctx.p1()[fname]
818 if not (oldfctx.isbinary() or notutf8(oldfctx)):
821 if not (oldfctx.isbinary() or notutf8(oldfctx)):
819 maketext(pchange, ctx, fname)
822 maketext(pchange, ctx, fname)
820
823
821 pdiff.addchange(pchange)
824 pdiff.addchange(pchange)
822
825
823
826
824 def addmodified(pdiff, ctx, modified):
827 def addmodified(pdiff, ctx, modified):
825 """add modified files to the phabdiff"""
828 """add modified files to the phabdiff"""
826 for fname in modified:
829 for fname in modified:
827 fctx = ctx[fname]
830 fctx = ctx[fname]
828 oldfctx = fctx.p1()
831 oldfctx = fctx.p1()
829 pchange = phabchange(currentPath=fname, oldPath=fname)
832 pchange = phabchange(currentPath=fname, oldPath=fname)
830 filemode = gitmode[ctx[fname].flags()]
833 filemode = gitmode[ctx[fname].flags()]
831 originalmode = gitmode[ctx.p1()[fname].flags()]
834 originalmode = gitmode[ctx.p1()[fname].flags()]
832 if filemode != originalmode:
835 if filemode != originalmode:
833 pchange.addoldmode(originalmode)
836 pchange.addoldmode(originalmode)
834 pchange.addnewmode(filemode)
837 pchange.addnewmode(filemode)
835
838
836 if (
839 if (
837 fctx.isbinary()
840 fctx.isbinary()
838 or notutf8(fctx)
841 or notutf8(fctx)
839 or oldfctx.isbinary()
842 or oldfctx.isbinary()
840 or notutf8(oldfctx)
843 or notutf8(oldfctx)
841 ):
844 ):
842 makebinary(pchange, fctx)
845 makebinary(pchange, fctx)
843 addoldbinary(pchange, fctx.p1(), fctx)
846 addoldbinary(pchange, fctx.p1(), fctx)
844 else:
847 else:
845 maketext(pchange, ctx, fname)
848 maketext(pchange, ctx, fname)
846
849
847 pdiff.addchange(pchange)
850 pdiff.addchange(pchange)
848
851
849
852
850 def addadded(pdiff, ctx, added, removed):
853 def addadded(pdiff, ctx, added, removed):
851 """add file adds to the phabdiff, both new files and copies/moves"""
854 """add file adds to the phabdiff, both new files and copies/moves"""
852 # Keep track of files that've been recorded as moved/copied, so if there are
855 # Keep track of files that've been recorded as moved/copied, so if there are
853 # additional copies we can mark them (moves get removed from removed)
856 # additional copies we can mark them (moves get removed from removed)
854 copiedchanges = {}
857 copiedchanges = {}
855 movedchanges = {}
858 movedchanges = {}
856 for fname in added:
859 for fname in added:
857 fctx = ctx[fname]
860 fctx = ctx[fname]
858 oldfctx = None
861 oldfctx = None
859 pchange = phabchange(currentPath=fname)
862 pchange = phabchange(currentPath=fname)
860
863
861 filemode = gitmode[ctx[fname].flags()]
864 filemode = gitmode[ctx[fname].flags()]
862 renamed = fctx.renamed()
865 renamed = fctx.renamed()
863
866
864 if renamed:
867 if renamed:
865 originalfname = renamed[0]
868 originalfname = renamed[0]
866 oldfctx = ctx.p1()[originalfname]
869 oldfctx = ctx.p1()[originalfname]
867 originalmode = gitmode[oldfctx.flags()]
870 originalmode = gitmode[oldfctx.flags()]
868 pchange.oldPath = originalfname
871 pchange.oldPath = originalfname
869
872
870 if originalfname in removed:
873 if originalfname in removed:
871 origpchange = phabchange(
874 origpchange = phabchange(
872 currentPath=originalfname,
875 currentPath=originalfname,
873 oldPath=originalfname,
876 oldPath=originalfname,
874 type=DiffChangeType.MOVE_AWAY,
877 type=DiffChangeType.MOVE_AWAY,
875 awayPaths=[fname],
878 awayPaths=[fname],
876 )
879 )
877 movedchanges[originalfname] = origpchange
880 movedchanges[originalfname] = origpchange
878 removed.remove(originalfname)
881 removed.remove(originalfname)
879 pchange.type = DiffChangeType.MOVE_HERE
882 pchange.type = DiffChangeType.MOVE_HERE
880 elif originalfname in movedchanges:
883 elif originalfname in movedchanges:
881 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
884 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
882 movedchanges[originalfname].awayPaths.append(fname)
885 movedchanges[originalfname].awayPaths.append(fname)
883 pchange.type = DiffChangeType.COPY_HERE
886 pchange.type = DiffChangeType.COPY_HERE
884 else: # pure copy
887 else: # pure copy
885 if originalfname not in copiedchanges:
888 if originalfname not in copiedchanges:
886 origpchange = phabchange(
889 origpchange = phabchange(
887 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
890 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
888 )
891 )
889 copiedchanges[originalfname] = origpchange
892 copiedchanges[originalfname] = origpchange
890 else:
893 else:
891 origpchange = copiedchanges[originalfname]
894 origpchange = copiedchanges[originalfname]
892 origpchange.awayPaths.append(fname)
895 origpchange.awayPaths.append(fname)
893 pchange.type = DiffChangeType.COPY_HERE
896 pchange.type = DiffChangeType.COPY_HERE
894
897
895 if filemode != originalmode:
898 if filemode != originalmode:
896 pchange.addoldmode(originalmode)
899 pchange.addoldmode(originalmode)
897 pchange.addnewmode(filemode)
900 pchange.addnewmode(filemode)
898 else: # Brand-new file
901 else: # Brand-new file
899 pchange.addnewmode(gitmode[fctx.flags()])
902 pchange.addnewmode(gitmode[fctx.flags()])
900 pchange.type = DiffChangeType.ADD
903 pchange.type = DiffChangeType.ADD
901
904
902 if (
905 if (
903 fctx.isbinary()
906 fctx.isbinary()
904 or notutf8(fctx)
907 or notutf8(fctx)
905 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
908 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
906 ):
909 ):
907 makebinary(pchange, fctx)
910 makebinary(pchange, fctx)
908 if renamed:
911 if renamed:
909 addoldbinary(pchange, oldfctx, fctx)
912 addoldbinary(pchange, oldfctx, fctx)
910 else:
913 else:
911 maketext(pchange, ctx, fname)
914 maketext(pchange, ctx, fname)
912
915
913 pdiff.addchange(pchange)
916 pdiff.addchange(pchange)
914
917
915 for _path, copiedchange in copiedchanges.items():
918 for _path, copiedchange in copiedchanges.items():
916 pdiff.addchange(copiedchange)
919 pdiff.addchange(copiedchange)
917 for _path, movedchange in movedchanges.items():
920 for _path, movedchange in movedchanges.items():
918 pdiff.addchange(movedchange)
921 pdiff.addchange(movedchange)
919
922
920
923
921 def creatediff(ctx):
924 def creatediff(ctx):
922 """create a Differential Diff"""
925 """create a Differential Diff"""
923 repo = ctx.repo()
926 repo = ctx.repo()
924 repophid = getrepophid(repo)
927 repophid = getrepophid(repo)
925 # Create a "Differential Diff" via "differential.creatediff" API
928 # Create a "Differential Diff" via "differential.creatediff" API
926 pdiff = phabdiff(
929 pdiff = phabdiff(
927 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
930 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
928 branch=b'%s' % ctx.branch(),
931 branch=b'%s' % ctx.branch(),
929 )
932 )
930 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
933 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
931 # addadded will remove moved files from removed, so addremoved won't get
934 # addadded will remove moved files from removed, so addremoved won't get
932 # them
935 # them
933 addadded(pdiff, ctx, added, removed)
936 addadded(pdiff, ctx, added, removed)
934 addmodified(pdiff, ctx, modified)
937 addmodified(pdiff, ctx, modified)
935 addremoved(pdiff, ctx, removed)
938 addremoved(pdiff, ctx, removed)
936 if repophid:
939 if repophid:
937 pdiff.repositoryPHID = repophid
940 pdiff.repositoryPHID = repophid
938 diff = callconduit(
941 diff = callconduit(
939 repo.ui,
942 repo.ui,
940 b'differential.creatediff',
943 b'differential.creatediff',
941 pycompat.byteskwargs(attr.asdict(pdiff)),
944 pycompat.byteskwargs(attr.asdict(pdiff)),
942 )
945 )
943 if not diff:
946 if not diff:
944 raise error.Abort(_(b'cannot create diff for %s') % ctx)
947 raise error.Abort(_(b'cannot create diff for %s') % ctx)
945 return diff
948 return diff
946
949
947
950
948 def writediffproperties(ctx, diff):
951 def writediffproperties(ctx, diff):
949 """write metadata to diff so patches could be applied losslessly"""
952 """write metadata to diff so patches could be applied losslessly"""
950 # creatediff returns with a diffid but query returns with an id
953 # creatediff returns with a diffid but query returns with an id
951 diffid = diff.get(b'diffid', diff.get(b'id'))
954 diffid = diff.get(b'diffid', diff.get(b'id'))
952 params = {
955 params = {
953 b'diff_id': diffid,
956 b'diff_id': diffid,
954 b'name': b'hg:meta',
957 b'name': b'hg:meta',
955 b'data': templatefilters.json(
958 b'data': templatefilters.json(
956 {
959 {
957 b'user': ctx.user(),
960 b'user': ctx.user(),
958 b'date': b'%d %d' % ctx.date(),
961 b'date': b'%d %d' % ctx.date(),
959 b'branch': ctx.branch(),
962 b'branch': ctx.branch(),
960 b'node': ctx.hex(),
963 b'node': ctx.hex(),
961 b'parent': ctx.p1().hex(),
964 b'parent': ctx.p1().hex(),
962 }
965 }
963 ),
966 ),
964 }
967 }
965 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
968 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
966
969
967 params = {
970 params = {
968 b'diff_id': diffid,
971 b'diff_id': diffid,
969 b'name': b'local:commits',
972 b'name': b'local:commits',
970 b'data': templatefilters.json(
973 b'data': templatefilters.json(
971 {
974 {
972 ctx.hex(): {
975 ctx.hex(): {
973 b'author': stringutil.person(ctx.user()),
976 b'author': stringutil.person(ctx.user()),
974 b'authorEmail': stringutil.email(ctx.user()),
977 b'authorEmail': stringutil.email(ctx.user()),
975 b'time': int(ctx.date()[0]),
978 b'time': int(ctx.date()[0]),
976 b'commit': ctx.hex(),
979 b'commit': ctx.hex(),
977 b'parents': [ctx.p1().hex()],
980 b'parents': [ctx.p1().hex()],
978 b'branch': ctx.branch(),
981 b'branch': ctx.branch(),
979 },
982 },
980 }
983 }
981 ),
984 ),
982 }
985 }
983 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
986 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
984
987
985
988
986 def createdifferentialrevision(
989 def createdifferentialrevision(
987 ctx,
990 ctx,
988 revid=None,
991 revid=None,
989 parentrevphid=None,
992 parentrevphid=None,
990 oldnode=None,
993 oldnode=None,
991 olddiff=None,
994 olddiff=None,
992 actions=None,
995 actions=None,
993 comment=None,
996 comment=None,
994 ):
997 ):
995 """create or update a Differential Revision
998 """create or update a Differential Revision
996
999
997 If revid is None, create a new Differential Revision, otherwise update
1000 If revid is None, create a new Differential Revision, otherwise update
998 revid. If parentrevphid is not None, set it as a dependency.
1001 revid. If parentrevphid is not None, set it as a dependency.
999
1002
1000 If oldnode is not None, check if the patch content (without commit message
1003 If oldnode is not None, check if the patch content (without commit message
1001 and metadata) has changed before creating another diff.
1004 and metadata) has changed before creating another diff.
1002
1005
1003 If actions is not None, they will be appended to the transaction.
1006 If actions is not None, they will be appended to the transaction.
1004 """
1007 """
1005 repo = ctx.repo()
1008 repo = ctx.repo()
1006 if oldnode:
1009 if oldnode:
1007 diffopts = mdiff.diffopts(git=True, context=32767)
1010 diffopts = mdiff.diffopts(git=True, context=32767)
1008 oldctx = repo.unfiltered()[oldnode]
1011 oldctx = repo.unfiltered()[oldnode]
1009 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
1012 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
1010 else:
1013 else:
1011 neednewdiff = True
1014 neednewdiff = True
1012
1015
1013 transactions = []
1016 transactions = []
1014 if neednewdiff:
1017 if neednewdiff:
1015 diff = creatediff(ctx)
1018 diff = creatediff(ctx)
1016 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1019 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1017 if comment:
1020 if comment:
1018 transactions.append({b'type': b'comment', b'value': comment})
1021 transactions.append({b'type': b'comment', b'value': comment})
1019 else:
1022 else:
1020 # Even if we don't need to upload a new diff because the patch content
1023 # Even if we don't need to upload a new diff because the patch content
1021 # does not change. We might still need to update its metadata so
1024 # does not change. We might still need to update its metadata so
1022 # pushers could know the correct node metadata.
1025 # pushers could know the correct node metadata.
1023 assert olddiff
1026 assert olddiff
1024 diff = olddiff
1027 diff = olddiff
1025 writediffproperties(ctx, diff)
1028 writediffproperties(ctx, diff)
1026
1029
1027 # Set the parent Revision every time, so commit re-ordering is picked-up
1030 # Set the parent Revision every time, so commit re-ordering is picked-up
1028 if parentrevphid:
1031 if parentrevphid:
1029 transactions.append(
1032 transactions.append(
1030 {b'type': b'parents.set', b'value': [parentrevphid]}
1033 {b'type': b'parents.set', b'value': [parentrevphid]}
1031 )
1034 )
1032
1035
1033 if actions:
1036 if actions:
1034 transactions += actions
1037 transactions += actions
1035
1038
1036 # Parse commit message and update related fields.
1039 # Parse commit message and update related fields.
1037 desc = ctx.description()
1040 desc = ctx.description()
1038 info = callconduit(
1041 info = callconduit(
1039 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1042 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1040 )
1043 )
1041 for k, v in info[b'fields'].items():
1044 for k, v in info[b'fields'].items():
1042 if k in [b'title', b'summary', b'testPlan']:
1045 if k in [b'title', b'summary', b'testPlan']:
1043 transactions.append({b'type': k, b'value': v})
1046 transactions.append({b'type': k, b'value': v})
1044
1047
1045 params = {b'transactions': transactions}
1048 params = {b'transactions': transactions}
1046 if revid is not None:
1049 if revid is not None:
1047 # Update an existing Differential Revision
1050 # Update an existing Differential Revision
1048 params[b'objectIdentifier'] = revid
1051 params[b'objectIdentifier'] = revid
1049
1052
1050 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1053 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1051 if not revision:
1054 if not revision:
1052 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1055 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1053
1056
1054 return revision, diff
1057 return revision, diff
1055
1058
1056
1059
1057 def userphids(ui, names):
1060 def userphids(ui, names):
1058 """convert user names to PHIDs"""
1061 """convert user names to PHIDs"""
1059 names = [name.lower() for name in names]
1062 names = [name.lower() for name in names]
1060 query = {b'constraints': {b'usernames': names}}
1063 query = {b'constraints': {b'usernames': names}}
1061 result = callconduit(ui, b'user.search', query)
1064 result = callconduit(ui, b'user.search', query)
1062 # username not found is not an error of the API. So check if we have missed
1065 # username not found is not an error of the API. So check if we have missed
1063 # some names here.
1066 # some names here.
1064 data = result[b'data']
1067 data = result[b'data']
1065 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1068 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1066 unresolved = set(names) - resolved
1069 unresolved = set(names) - resolved
1067 if unresolved:
1070 if unresolved:
1068 raise error.Abort(
1071 raise error.Abort(
1069 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1072 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1070 )
1073 )
1071 return [entry[b'phid'] for entry in data]
1074 return [entry[b'phid'] for entry in data]
1072
1075
1073
1076
1074 @vcrcommand(
1077 @vcrcommand(
1075 b'phabsend',
1078 b'phabsend',
1076 [
1079 [
1077 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1080 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1078 (b'', b'amend', True, _(b'update commit messages')),
1081 (b'', b'amend', True, _(b'update commit messages')),
1079 (b'', b'reviewer', [], _(b'specify reviewers')),
1082 (b'', b'reviewer', [], _(b'specify reviewers')),
1080 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1083 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1081 (
1084 (
1082 b'm',
1085 b'm',
1083 b'comment',
1086 b'comment',
1084 b'',
1087 b'',
1085 _(b'add a comment to Revisions with new/updated Diffs'),
1088 _(b'add a comment to Revisions with new/updated Diffs'),
1086 ),
1089 ),
1087 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1090 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1088 ],
1091 ],
1089 _(b'REV [OPTIONS]'),
1092 _(b'REV [OPTIONS]'),
1090 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1093 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1091 )
1094 )
1092 def phabsend(ui, repo, *revs, **opts):
1095 def phabsend(ui, repo, *revs, **opts):
1093 """upload changesets to Phabricator
1096 """upload changesets to Phabricator
1094
1097
1095 If there are multiple revisions specified, they will be send as a stack
1098 If there are multiple revisions specified, they will be send as a stack
1096 with a linear dependencies relationship using the order specified by the
1099 with a linear dependencies relationship using the order specified by the
1097 revset.
1100 revset.
1098
1101
1099 For the first time uploading changesets, local tags will be created to
1102 For the first time uploading changesets, local tags will be created to
1100 maintain the association. After the first time, phabsend will check
1103 maintain the association. After the first time, phabsend will check
1101 obsstore and tags information so it can figure out whether to update an
1104 obsstore and tags information so it can figure out whether to update an
1102 existing Differential Revision, or create a new one.
1105 existing Differential Revision, or create a new one.
1103
1106
1104 If --amend is set, update commit messages so they have the
1107 If --amend is set, update commit messages so they have the
1105 ``Differential Revision`` URL, remove related tags. This is similar to what
1108 ``Differential Revision`` URL, remove related tags. This is similar to what
1106 arcanist will do, and is more desired in author-push workflows. Otherwise,
1109 arcanist will do, and is more desired in author-push workflows. Otherwise,
1107 use local tags to record the ``Differential Revision`` association.
1110 use local tags to record the ``Differential Revision`` association.
1108
1111
1109 The --confirm option lets you confirm changesets before sending them. You
1112 The --confirm option lets you confirm changesets before sending them. You
1110 can also add following to your configuration file to make it default
1113 can also add following to your configuration file to make it default
1111 behaviour::
1114 behaviour::
1112
1115
1113 [phabsend]
1116 [phabsend]
1114 confirm = true
1117 confirm = true
1115
1118
1116 phabsend will check obsstore and the above association to decide whether to
1119 phabsend will check obsstore and the above association to decide whether to
1117 update an existing Differential Revision, or create a new one.
1120 update an existing Differential Revision, or create a new one.
1118 """
1121 """
1119 opts = pycompat.byteskwargs(opts)
1122 opts = pycompat.byteskwargs(opts)
1120 revs = list(revs) + opts.get(b'rev', [])
1123 revs = list(revs) + opts.get(b'rev', [])
1121 revs = scmutil.revrange(repo, revs)
1124 revs = scmutil.revrange(repo, revs)
1122 revs.sort() # ascending order to preserve topological parent/child in phab
1125 revs.sort() # ascending order to preserve topological parent/child in phab
1123
1126
1124 if not revs:
1127 if not revs:
1125 raise error.Abort(_(b'phabsend requires at least one changeset'))
1128 raise error.Abort(_(b'phabsend requires at least one changeset'))
1126 if opts.get(b'amend'):
1129 if opts.get(b'amend'):
1127 cmdutil.checkunfinished(repo)
1130 cmdutil.checkunfinished(repo)
1128
1131
1129 # {newnode: (oldnode, olddiff, olddrev}
1132 # {newnode: (oldnode, olddiff, olddrev}
1130 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1133 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1131
1134
1132 confirm = ui.configbool(b'phabsend', b'confirm')
1135 confirm = ui.configbool(b'phabsend', b'confirm')
1133 confirm |= bool(opts.get(b'confirm'))
1136 confirm |= bool(opts.get(b'confirm'))
1134 if confirm:
1137 if confirm:
1135 confirmed = _confirmbeforesend(repo, revs, oldmap)
1138 confirmed = _confirmbeforesend(repo, revs, oldmap)
1136 if not confirmed:
1139 if not confirmed:
1137 raise error.Abort(_(b'phabsend cancelled'))
1140 raise error.Abort(_(b'phabsend cancelled'))
1138
1141
1139 actions = []
1142 actions = []
1140 reviewers = opts.get(b'reviewer', [])
1143 reviewers = opts.get(b'reviewer', [])
1141 blockers = opts.get(b'blocker', [])
1144 blockers = opts.get(b'blocker', [])
1142 phids = []
1145 phids = []
1143 if reviewers:
1146 if reviewers:
1144 phids.extend(userphids(repo.ui, reviewers))
1147 phids.extend(userphids(repo.ui, reviewers))
1145 if blockers:
1148 if blockers:
1146 phids.extend(
1149 phids.extend(
1147 map(
1150 map(
1148 lambda phid: b'blocking(%s)' % phid,
1151 lambda phid: b'blocking(%s)' % phid,
1149 userphids(repo.ui, blockers),
1152 userphids(repo.ui, blockers),
1150 )
1153 )
1151 )
1154 )
1152 if phids:
1155 if phids:
1153 actions.append({b'type': b'reviewers.add', b'value': phids})
1156 actions.append({b'type': b'reviewers.add', b'value': phids})
1154
1157
1155 drevids = [] # [int]
1158 drevids = [] # [int]
1156 diffmap = {} # {newnode: diff}
1159 diffmap = {} # {newnode: diff}
1157
1160
1158 # Send patches one by one so we know their Differential Revision PHIDs and
1161 # Send patches one by one so we know their Differential Revision PHIDs and
1159 # can provide dependency relationship
1162 # can provide dependency relationship
1160 lastrevphid = None
1163 lastrevphid = None
1161 for rev in revs:
1164 for rev in revs:
1162 ui.debug(b'sending rev %d\n' % rev)
1165 ui.debug(b'sending rev %d\n' % rev)
1163 ctx = repo[rev]
1166 ctx = repo[rev]
1164
1167
1165 # Get Differential Revision ID
1168 # Get Differential Revision ID
1166 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1169 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1167 if oldnode != ctx.node() or opts.get(b'amend'):
1170 if oldnode != ctx.node() or opts.get(b'amend'):
1168 # Create or update Differential Revision
1171 # Create or update Differential Revision
1169 revision, diff = createdifferentialrevision(
1172 revision, diff = createdifferentialrevision(
1170 ctx,
1173 ctx,
1171 revid,
1174 revid,
1172 lastrevphid,
1175 lastrevphid,
1173 oldnode,
1176 oldnode,
1174 olddiff,
1177 olddiff,
1175 actions,
1178 actions,
1176 opts.get(b'comment'),
1179 opts.get(b'comment'),
1177 )
1180 )
1178 diffmap[ctx.node()] = diff
1181 diffmap[ctx.node()] = diff
1179 newrevid = int(revision[b'object'][b'id'])
1182 newrevid = int(revision[b'object'][b'id'])
1180 newrevphid = revision[b'object'][b'phid']
1183 newrevphid = revision[b'object'][b'phid']
1181 if revid:
1184 if revid:
1182 action = b'updated'
1185 action = b'updated'
1183 else:
1186 else:
1184 action = b'created'
1187 action = b'created'
1185
1188
1186 # Create a local tag to note the association, if commit message
1189 # Create a local tag to note the association, if commit message
1187 # does not have it already
1190 # does not have it already
1188 m = _differentialrevisiondescre.search(ctx.description())
1191 m = _differentialrevisiondescre.search(ctx.description())
1189 if not m or int(m.group('id')) != newrevid:
1192 if not m or int(m.group('id')) != newrevid:
1190 tagname = b'D%d' % newrevid
1193 tagname = b'D%d' % newrevid
1191 tags.tag(
1194 tags.tag(
1192 repo,
1195 repo,
1193 tagname,
1196 tagname,
1194 ctx.node(),
1197 ctx.node(),
1195 message=None,
1198 message=None,
1196 user=None,
1199 user=None,
1197 date=None,
1200 date=None,
1198 local=True,
1201 local=True,
1199 )
1202 )
1200 else:
1203 else:
1201 # Nothing changed. But still set "newrevphid" so the next revision
1204 # Nothing changed. But still set "newrevphid" so the next revision
1202 # could depend on this one and "newrevid" for the summary line.
1205 # could depend on this one and "newrevid" for the summary line.
1203 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1206 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1204 newrevid = revid
1207 newrevid = revid
1205 action = b'skipped'
1208 action = b'skipped'
1206
1209
1207 actiondesc = ui.label(
1210 actiondesc = ui.label(
1208 {
1211 {
1209 b'created': _(b'created'),
1212 b'created': _(b'created'),
1210 b'skipped': _(b'skipped'),
1213 b'skipped': _(b'skipped'),
1211 b'updated': _(b'updated'),
1214 b'updated': _(b'updated'),
1212 }[action],
1215 }[action],
1213 b'phabricator.action.%s' % action,
1216 b'phabricator.action.%s' % action,
1214 )
1217 )
1215 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1218 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1216 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1219 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1217 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1220 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1218 ui.write(
1221 ui.write(
1219 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1222 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1220 )
1223 )
1221 drevids.append(newrevid)
1224 drevids.append(newrevid)
1222 lastrevphid = newrevphid
1225 lastrevphid = newrevphid
1223
1226
1224 # Update commit messages and remove tags
1227 # Update commit messages and remove tags
1225 if opts.get(b'amend'):
1228 if opts.get(b'amend'):
1226 unfi = repo.unfiltered()
1229 unfi = repo.unfiltered()
1227 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1230 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1228 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1231 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1229 wnode = unfi[b'.'].node()
1232 wnode = unfi[b'.'].node()
1230 mapping = {} # {oldnode: [newnode]}
1233 mapping = {} # {oldnode: [newnode]}
1231 for i, rev in enumerate(revs):
1234 for i, rev in enumerate(revs):
1232 old = unfi[rev]
1235 old = unfi[rev]
1233 drevid = drevids[i]
1236 drevid = drevids[i]
1234 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1237 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1235 newdesc = getdescfromdrev(drev)
1238 newdesc = getdescfromdrev(drev)
1236 # Make sure commit message contain "Differential Revision"
1239 # Make sure commit message contain "Differential Revision"
1237 if old.description() != newdesc:
1240 if old.description() != newdesc:
1238 if old.phase() == phases.public:
1241 if old.phase() == phases.public:
1239 ui.warn(
1242 ui.warn(
1240 _(b"warning: not updating public commit %s\n")
1243 _(b"warning: not updating public commit %s\n")
1241 % scmutil.formatchangeid(old)
1244 % scmutil.formatchangeid(old)
1242 )
1245 )
1243 continue
1246 continue
1244 parents = [
1247 parents = [
1245 mapping.get(old.p1().node(), (old.p1(),))[0],
1248 mapping.get(old.p1().node(), (old.p1(),))[0],
1246 mapping.get(old.p2().node(), (old.p2(),))[0],
1249 mapping.get(old.p2().node(), (old.p2(),))[0],
1247 ]
1250 ]
1248 new = context.metadataonlyctx(
1251 new = context.metadataonlyctx(
1249 repo,
1252 repo,
1250 old,
1253 old,
1251 parents=parents,
1254 parents=parents,
1252 text=newdesc,
1255 text=newdesc,
1253 user=old.user(),
1256 user=old.user(),
1254 date=old.date(),
1257 date=old.date(),
1255 extra=old.extra(),
1258 extra=old.extra(),
1256 )
1259 )
1257
1260
1258 newnode = new.commit()
1261 newnode = new.commit()
1259
1262
1260 mapping[old.node()] = [newnode]
1263 mapping[old.node()] = [newnode]
1261 # Update diff property
1264 # Update diff property
1262 # If it fails just warn and keep going, otherwise the DREV
1265 # If it fails just warn and keep going, otherwise the DREV
1263 # associations will be lost
1266 # associations will be lost
1264 try:
1267 try:
1265 writediffproperties(unfi[newnode], diffmap[old.node()])
1268 writediffproperties(unfi[newnode], diffmap[old.node()])
1266 except util.urlerr.urlerror:
1269 except util.urlerr.urlerror:
1267 ui.warnnoi18n(
1270 ui.warnnoi18n(
1268 b'Failed to update metadata for D%d\n' % drevid
1271 b'Failed to update metadata for D%d\n' % drevid
1269 )
1272 )
1270 # Remove local tags since it's no longer necessary
1273 # Remove local tags since it's no longer necessary
1271 tagname = b'D%d' % drevid
1274 tagname = b'D%d' % drevid
1272 if tagname in repo.tags():
1275 if tagname in repo.tags():
1273 tags.tag(
1276 tags.tag(
1274 repo,
1277 repo,
1275 tagname,
1278 tagname,
1276 nullid,
1279 nullid,
1277 message=None,
1280 message=None,
1278 user=None,
1281 user=None,
1279 date=None,
1282 date=None,
1280 local=True,
1283 local=True,
1281 )
1284 )
1282 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1285 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1283 if wnode in mapping:
1286 if wnode in mapping:
1284 unfi.setparents(mapping[wnode][0])
1287 unfi.setparents(mapping[wnode][0])
1285
1288
1286
1289
1287 # Map from "hg:meta" keys to header understood by "hg import". The order is
1290 # Map from "hg:meta" keys to header understood by "hg import". The order is
1288 # consistent with "hg export" output.
1291 # consistent with "hg export" output.
1289 _metanamemap = util.sortdict(
1292 _metanamemap = util.sortdict(
1290 [
1293 [
1291 (b'user', b'User'),
1294 (b'user', b'User'),
1292 (b'date', b'Date'),
1295 (b'date', b'Date'),
1293 (b'branch', b'Branch'),
1296 (b'branch', b'Branch'),
1294 (b'node', b'Node ID'),
1297 (b'node', b'Node ID'),
1295 (b'parent', b'Parent '),
1298 (b'parent', b'Parent '),
1296 ]
1299 ]
1297 )
1300 )
1298
1301
1299
1302
1300 def _confirmbeforesend(repo, revs, oldmap):
1303 def _confirmbeforesend(repo, revs, oldmap):
1301 url, token = readurltoken(repo.ui)
1304 url, token = readurltoken(repo.ui)
1302 ui = repo.ui
1305 ui = repo.ui
1303 for rev in revs:
1306 for rev in revs:
1304 ctx = repo[rev]
1307 ctx = repo[rev]
1305 desc = ctx.description().splitlines()[0]
1308 desc = ctx.description().splitlines()[0]
1306 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1309 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1307 if drevid:
1310 if drevid:
1308 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1311 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1309 else:
1312 else:
1310 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1313 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1311
1314
1312 ui.write(
1315 ui.write(
1313 _(b'%s - %s: %s\n')
1316 _(b'%s - %s: %s\n')
1314 % (
1317 % (
1315 drevdesc,
1318 drevdesc,
1316 ui.label(bytes(ctx), b'phabricator.node'),
1319 ui.label(bytes(ctx), b'phabricator.node'),
1317 ui.label(desc, b'phabricator.desc'),
1320 ui.label(desc, b'phabricator.desc'),
1318 )
1321 )
1319 )
1322 )
1320
1323
1321 if ui.promptchoice(
1324 if ui.promptchoice(
1322 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1325 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1323 ):
1326 ):
1324 return False
1327 return False
1325
1328
1326 return True
1329 return True
1327
1330
1328
1331
1329 _knownstatusnames = {
1332 _knownstatusnames = {
1330 b'accepted',
1333 b'accepted',
1331 b'needsreview',
1334 b'needsreview',
1332 b'needsrevision',
1335 b'needsrevision',
1333 b'closed',
1336 b'closed',
1334 b'abandoned',
1337 b'abandoned',
1335 b'changesplanned',
1338 b'changesplanned',
1336 }
1339 }
1337
1340
1338
1341
1339 def _getstatusname(drev):
1342 def _getstatusname(drev):
1340 """get normalized status name from a Differential Revision"""
1343 """get normalized status name from a Differential Revision"""
1341 return drev[b'statusName'].replace(b' ', b'').lower()
1344 return drev[b'statusName'].replace(b' ', b'').lower()
1342
1345
1343
1346
1344 # Small language to specify differential revisions. Support symbols: (), :X,
1347 # Small language to specify differential revisions. Support symbols: (), :X,
1345 # +, and -.
1348 # +, and -.
1346
1349
1347 _elements = {
1350 _elements = {
1348 # token-type: binding-strength, primary, prefix, infix, suffix
1351 # token-type: binding-strength, primary, prefix, infix, suffix
1349 b'(': (12, None, (b'group', 1, b')'), None, None),
1352 b'(': (12, None, (b'group', 1, b')'), None, None),
1350 b':': (8, None, (b'ancestors', 8), None, None),
1353 b':': (8, None, (b'ancestors', 8), None, None),
1351 b'&': (5, None, None, (b'and_', 5), None),
1354 b'&': (5, None, None, (b'and_', 5), None),
1352 b'+': (4, None, None, (b'add', 4), None),
1355 b'+': (4, None, None, (b'add', 4), None),
1353 b'-': (4, None, None, (b'sub', 4), None),
1356 b'-': (4, None, None, (b'sub', 4), None),
1354 b')': (0, None, None, None, None),
1357 b')': (0, None, None, None, None),
1355 b'symbol': (0, b'symbol', None, None, None),
1358 b'symbol': (0, b'symbol', None, None, None),
1356 b'end': (0, None, None, None, None),
1359 b'end': (0, None, None, None, None),
1357 }
1360 }
1358
1361
1359
1362
1360 def _tokenize(text):
1363 def _tokenize(text):
1361 view = memoryview(text) # zero-copy slice
1364 view = memoryview(text) # zero-copy slice
1362 special = b'():+-& '
1365 special = b'():+-& '
1363 pos = 0
1366 pos = 0
1364 length = len(text)
1367 length = len(text)
1365 while pos < length:
1368 while pos < length:
1366 symbol = b''.join(
1369 symbol = b''.join(
1367 itertools.takewhile(
1370 itertools.takewhile(
1368 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1371 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1369 )
1372 )
1370 )
1373 )
1371 if symbol:
1374 if symbol:
1372 yield (b'symbol', symbol, pos)
1375 yield (b'symbol', symbol, pos)
1373 pos += len(symbol)
1376 pos += len(symbol)
1374 else: # special char, ignore space
1377 else: # special char, ignore space
1375 if text[pos : pos + 1] != b' ':
1378 if text[pos : pos + 1] != b' ':
1376 yield (text[pos : pos + 1], None, pos)
1379 yield (text[pos : pos + 1], None, pos)
1377 pos += 1
1380 pos += 1
1378 yield (b'end', None, pos)
1381 yield (b'end', None, pos)
1379
1382
1380
1383
1381 def _parse(text):
1384 def _parse(text):
1382 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1385 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1383 if pos != len(text):
1386 if pos != len(text):
1384 raise error.ParseError(b'invalid token', pos)
1387 raise error.ParseError(b'invalid token', pos)
1385 return tree
1388 return tree
1386
1389
1387
1390
1388 def _parsedrev(symbol):
1391 def _parsedrev(symbol):
1389 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1392 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1390 if symbol.startswith(b'D') and symbol[1:].isdigit():
1393 if symbol.startswith(b'D') and symbol[1:].isdigit():
1391 return int(symbol[1:])
1394 return int(symbol[1:])
1392 if symbol.isdigit():
1395 if symbol.isdigit():
1393 return int(symbol)
1396 return int(symbol)
1394
1397
1395
1398
1396 def _prefetchdrevs(tree):
1399 def _prefetchdrevs(tree):
1397 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1400 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1398 drevs = set()
1401 drevs = set()
1399 ancestordrevs = set()
1402 ancestordrevs = set()
1400 op = tree[0]
1403 op = tree[0]
1401 if op == b'symbol':
1404 if op == b'symbol':
1402 r = _parsedrev(tree[1])
1405 r = _parsedrev(tree[1])
1403 if r:
1406 if r:
1404 drevs.add(r)
1407 drevs.add(r)
1405 elif op == b'ancestors':
1408 elif op == b'ancestors':
1406 r, a = _prefetchdrevs(tree[1])
1409 r, a = _prefetchdrevs(tree[1])
1407 drevs.update(r)
1410 drevs.update(r)
1408 ancestordrevs.update(r)
1411 ancestordrevs.update(r)
1409 ancestordrevs.update(a)
1412 ancestordrevs.update(a)
1410 else:
1413 else:
1411 for t in tree[1:]:
1414 for t in tree[1:]:
1412 r, a = _prefetchdrevs(t)
1415 r, a = _prefetchdrevs(t)
1413 drevs.update(r)
1416 drevs.update(r)
1414 ancestordrevs.update(a)
1417 ancestordrevs.update(a)
1415 return drevs, ancestordrevs
1418 return drevs, ancestordrevs
1416
1419
1417
1420
1418 def querydrev(ui, spec):
1421 def querydrev(ui, spec):
1419 """return a list of "Differential Revision" dicts
1422 """return a list of "Differential Revision" dicts
1420
1423
1421 spec is a string using a simple query language, see docstring in phabread
1424 spec is a string using a simple query language, see docstring in phabread
1422 for details.
1425 for details.
1423
1426
1424 A "Differential Revision dict" looks like:
1427 A "Differential Revision dict" looks like:
1425
1428
1426 {
1429 {
1427 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1430 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1428 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1431 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1429 "auxiliary": {
1432 "auxiliary": {
1430 "phabricator:depends-on": [
1433 "phabricator:depends-on": [
1431 "PHID-DREV-gbapp366kutjebt7agcd"
1434 "PHID-DREV-gbapp366kutjebt7agcd"
1432 ]
1435 ]
1433 "phabricator:projects": [],
1436 "phabricator:projects": [],
1434 },
1437 },
1435 "branch": "default",
1438 "branch": "default",
1436 "ccs": [],
1439 "ccs": [],
1437 "commits": [],
1440 "commits": [],
1438 "dateCreated": "1499181406",
1441 "dateCreated": "1499181406",
1439 "dateModified": "1499182103",
1442 "dateModified": "1499182103",
1440 "diffs": [
1443 "diffs": [
1441 "3",
1444 "3",
1442 "4",
1445 "4",
1443 ],
1446 ],
1444 "hashes": [],
1447 "hashes": [],
1445 "id": "2",
1448 "id": "2",
1446 "lineCount": "2",
1449 "lineCount": "2",
1447 "phid": "PHID-DREV-672qvysjcczopag46qty",
1450 "phid": "PHID-DREV-672qvysjcczopag46qty",
1448 "properties": {},
1451 "properties": {},
1449 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1452 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1450 "reviewers": [],
1453 "reviewers": [],
1451 "sourcePath": null
1454 "sourcePath": null
1452 "status": "0",
1455 "status": "0",
1453 "statusName": "Needs Review",
1456 "statusName": "Needs Review",
1454 "summary": "",
1457 "summary": "",
1455 "testPlan": "",
1458 "testPlan": "",
1456 "title": "example",
1459 "title": "example",
1457 "uri": "https://phab.example.com/D2",
1460 "uri": "https://phab.example.com/D2",
1458 }
1461 }
1459 """
1462 """
1460 # TODO: replace differential.query and differential.querydiffs with
1463 # TODO: replace differential.query and differential.querydiffs with
1461 # differential.diff.search because the former (and their output) are
1464 # differential.diff.search because the former (and their output) are
1462 # frozen, and planned to be deprecated and removed.
1465 # frozen, and planned to be deprecated and removed.
1463
1466
1464 def fetch(params):
1467 def fetch(params):
1465 """params -> single drev or None"""
1468 """params -> single drev or None"""
1466 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1469 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1467 if key in prefetched:
1470 if key in prefetched:
1468 return prefetched[key]
1471 return prefetched[key]
1469 drevs = callconduit(ui, b'differential.query', params)
1472 drevs = callconduit(ui, b'differential.query', params)
1470 # Fill prefetched with the result
1473 # Fill prefetched with the result
1471 for drev in drevs:
1474 for drev in drevs:
1472 prefetched[drev[b'phid']] = drev
1475 prefetched[drev[b'phid']] = drev
1473 prefetched[int(drev[b'id'])] = drev
1476 prefetched[int(drev[b'id'])] = drev
1474 if key not in prefetched:
1477 if key not in prefetched:
1475 raise error.Abort(
1478 raise error.Abort(
1476 _(b'cannot get Differential Revision %r') % params
1479 _(b'cannot get Differential Revision %r') % params
1477 )
1480 )
1478 return prefetched[key]
1481 return prefetched[key]
1479
1482
1480 def getstack(topdrevids):
1483 def getstack(topdrevids):
1481 """given a top, get a stack from the bottom, [id] -> [id]"""
1484 """given a top, get a stack from the bottom, [id] -> [id]"""
1482 visited = set()
1485 visited = set()
1483 result = []
1486 result = []
1484 queue = [{b'ids': [i]} for i in topdrevids]
1487 queue = [{b'ids': [i]} for i in topdrevids]
1485 while queue:
1488 while queue:
1486 params = queue.pop()
1489 params = queue.pop()
1487 drev = fetch(params)
1490 drev = fetch(params)
1488 if drev[b'id'] in visited:
1491 if drev[b'id'] in visited:
1489 continue
1492 continue
1490 visited.add(drev[b'id'])
1493 visited.add(drev[b'id'])
1491 result.append(int(drev[b'id']))
1494 result.append(int(drev[b'id']))
1492 auxiliary = drev.get(b'auxiliary', {})
1495 auxiliary = drev.get(b'auxiliary', {})
1493 depends = auxiliary.get(b'phabricator:depends-on', [])
1496 depends = auxiliary.get(b'phabricator:depends-on', [])
1494 for phid in depends:
1497 for phid in depends:
1495 queue.append({b'phids': [phid]})
1498 queue.append({b'phids': [phid]})
1496 result.reverse()
1499 result.reverse()
1497 return smartset.baseset(result)
1500 return smartset.baseset(result)
1498
1501
1499 # Initialize prefetch cache
1502 # Initialize prefetch cache
1500 prefetched = {} # {id or phid: drev}
1503 prefetched = {} # {id or phid: drev}
1501
1504
1502 tree = _parse(spec)
1505 tree = _parse(spec)
1503 drevs, ancestordrevs = _prefetchdrevs(tree)
1506 drevs, ancestordrevs = _prefetchdrevs(tree)
1504
1507
1505 # developer config: phabricator.batchsize
1508 # developer config: phabricator.batchsize
1506 batchsize = ui.configint(b'phabricator', b'batchsize')
1509 batchsize = ui.configint(b'phabricator', b'batchsize')
1507
1510
1508 # Prefetch Differential Revisions in batch
1511 # Prefetch Differential Revisions in batch
1509 tofetch = set(drevs)
1512 tofetch = set(drevs)
1510 for r in ancestordrevs:
1513 for r in ancestordrevs:
1511 tofetch.update(range(max(1, r - batchsize), r + 1))
1514 tofetch.update(range(max(1, r - batchsize), r + 1))
1512 if drevs:
1515 if drevs:
1513 fetch({b'ids': list(tofetch)})
1516 fetch({b'ids': list(tofetch)})
1514 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1517 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1515
1518
1516 # Walk through the tree, return smartsets
1519 # Walk through the tree, return smartsets
1517 def walk(tree):
1520 def walk(tree):
1518 op = tree[0]
1521 op = tree[0]
1519 if op == b'symbol':
1522 if op == b'symbol':
1520 drev = _parsedrev(tree[1])
1523 drev = _parsedrev(tree[1])
1521 if drev:
1524 if drev:
1522 return smartset.baseset([drev])
1525 return smartset.baseset([drev])
1523 elif tree[1] in _knownstatusnames:
1526 elif tree[1] in _knownstatusnames:
1524 drevs = [
1527 drevs = [
1525 r
1528 r
1526 for r in validids
1529 for r in validids
1527 if _getstatusname(prefetched[r]) == tree[1]
1530 if _getstatusname(prefetched[r]) == tree[1]
1528 ]
1531 ]
1529 return smartset.baseset(drevs)
1532 return smartset.baseset(drevs)
1530 else:
1533 else:
1531 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1534 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1532 elif op in {b'and_', b'add', b'sub'}:
1535 elif op in {b'and_', b'add', b'sub'}:
1533 assert len(tree) == 3
1536 assert len(tree) == 3
1534 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1537 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1535 elif op == b'group':
1538 elif op == b'group':
1536 return walk(tree[1])
1539 return walk(tree[1])
1537 elif op == b'ancestors':
1540 elif op == b'ancestors':
1538 return getstack(walk(tree[1]))
1541 return getstack(walk(tree[1]))
1539 else:
1542 else:
1540 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1543 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1541
1544
1542 return [prefetched[r] for r in walk(tree)]
1545 return [prefetched[r] for r in walk(tree)]
1543
1546
1544
1547
1545 def getdescfromdrev(drev):
1548 def getdescfromdrev(drev):
1546 """get description (commit message) from "Differential Revision"
1549 """get description (commit message) from "Differential Revision"
1547
1550
1548 This is similar to differential.getcommitmessage API. But we only care
1551 This is similar to differential.getcommitmessage API. But we only care
1549 about limited fields: title, summary, test plan, and URL.
1552 about limited fields: title, summary, test plan, and URL.
1550 """
1553 """
1551 title = drev[b'title']
1554 title = drev[b'title']
1552 summary = drev[b'summary'].rstrip()
1555 summary = drev[b'summary'].rstrip()
1553 testplan = drev[b'testPlan'].rstrip()
1556 testplan = drev[b'testPlan'].rstrip()
1554 if testplan:
1557 if testplan:
1555 testplan = b'Test Plan:\n%s' % testplan
1558 testplan = b'Test Plan:\n%s' % testplan
1556 uri = b'Differential Revision: %s' % drev[b'uri']
1559 uri = b'Differential Revision: %s' % drev[b'uri']
1557 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1560 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1558
1561
1559
1562
1560 def getdiffmeta(diff):
1563 def getdiffmeta(diff):
1561 """get commit metadata (date, node, user, p1) from a diff object
1564 """get commit metadata (date, node, user, p1) from a diff object
1562
1565
1563 The metadata could be "hg:meta", sent by phabsend, like:
1566 The metadata could be "hg:meta", sent by phabsend, like:
1564
1567
1565 "properties": {
1568 "properties": {
1566 "hg:meta": {
1569 "hg:meta": {
1567 "branch": "default",
1570 "branch": "default",
1568 "date": "1499571514 25200",
1571 "date": "1499571514 25200",
1569 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1572 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1570 "user": "Foo Bar <foo@example.com>",
1573 "user": "Foo Bar <foo@example.com>",
1571 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1574 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1572 }
1575 }
1573 }
1576 }
1574
1577
1575 Or converted from "local:commits", sent by "arc", like:
1578 Or converted from "local:commits", sent by "arc", like:
1576
1579
1577 "properties": {
1580 "properties": {
1578 "local:commits": {
1581 "local:commits": {
1579 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1582 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1580 "author": "Foo Bar",
1583 "author": "Foo Bar",
1581 "authorEmail": "foo@example.com"
1584 "authorEmail": "foo@example.com"
1582 "branch": "default",
1585 "branch": "default",
1583 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1586 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1584 "local": "1000",
1587 "local": "1000",
1585 "message": "...",
1588 "message": "...",
1586 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1589 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1587 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1590 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1588 "summary": "...",
1591 "summary": "...",
1589 "tag": "",
1592 "tag": "",
1590 "time": 1499546314,
1593 "time": 1499546314,
1591 }
1594 }
1592 }
1595 }
1593 }
1596 }
1594
1597
1595 Note: metadata extracted from "local:commits" will lose time zone
1598 Note: metadata extracted from "local:commits" will lose time zone
1596 information.
1599 information.
1597 """
1600 """
1598 props = diff.get(b'properties') or {}
1601 props = diff.get(b'properties') or {}
1599 meta = props.get(b'hg:meta')
1602 meta = props.get(b'hg:meta')
1600 if not meta:
1603 if not meta:
1601 if props.get(b'local:commits'):
1604 if props.get(b'local:commits'):
1602 commit = sorted(props[b'local:commits'].values())[0]
1605 commit = sorted(props[b'local:commits'].values())[0]
1603 meta = {}
1606 meta = {}
1604 if b'author' in commit and b'authorEmail' in commit:
1607 if b'author' in commit and b'authorEmail' in commit:
1605 meta[b'user'] = b'%s <%s>' % (
1608 meta[b'user'] = b'%s <%s>' % (
1606 commit[b'author'],
1609 commit[b'author'],
1607 commit[b'authorEmail'],
1610 commit[b'authorEmail'],
1608 )
1611 )
1609 if b'time' in commit:
1612 if b'time' in commit:
1610 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1613 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1611 if b'branch' in commit:
1614 if b'branch' in commit:
1612 meta[b'branch'] = commit[b'branch']
1615 meta[b'branch'] = commit[b'branch']
1613 node = commit.get(b'commit', commit.get(b'rev'))
1616 node = commit.get(b'commit', commit.get(b'rev'))
1614 if node:
1617 if node:
1615 meta[b'node'] = node
1618 meta[b'node'] = node
1616 if len(commit.get(b'parents', ())) >= 1:
1619 if len(commit.get(b'parents', ())) >= 1:
1617 meta[b'parent'] = commit[b'parents'][0]
1620 meta[b'parent'] = commit[b'parents'][0]
1618 else:
1621 else:
1619 meta = {}
1622 meta = {}
1620 if b'date' not in meta and b'dateCreated' in diff:
1623 if b'date' not in meta and b'dateCreated' in diff:
1621 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1624 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1622 if b'branch' not in meta and diff.get(b'branch'):
1625 if b'branch' not in meta and diff.get(b'branch'):
1623 meta[b'branch'] = diff[b'branch']
1626 meta[b'branch'] = diff[b'branch']
1624 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1627 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1625 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1628 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1626 return meta
1629 return meta
1627
1630
1628
1631
1629 def readpatch(ui, drevs, write):
1632 def readpatch(ui, drevs, write):
1630 """generate plain-text patch readable by 'hg import'
1633 """generate plain-text patch readable by 'hg import'
1631
1634
1632 write takes a list of (DREV, bytes), where DREV is the differential number
1635 write takes a list of (DREV, bytes), where DREV is the differential number
1633 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1636 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1634 to be imported. drevs is what "querydrev" returns, results of
1637 to be imported. drevs is what "querydrev" returns, results of
1635 "differential.query".
1638 "differential.query".
1636 """
1639 """
1637 # Prefetch hg:meta property for all diffs
1640 # Prefetch hg:meta property for all diffs
1638 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1641 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1639 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1642 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1640
1643
1641 patches = []
1644 patches = []
1642
1645
1643 # Generate patch for each drev
1646 # Generate patch for each drev
1644 for drev in drevs:
1647 for drev in drevs:
1645 ui.note(_(b'reading D%s\n') % drev[b'id'])
1648 ui.note(_(b'reading D%s\n') % drev[b'id'])
1646
1649
1647 diffid = max(int(v) for v in drev[b'diffs'])
1650 diffid = max(int(v) for v in drev[b'diffs'])
1648 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1651 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1649 desc = getdescfromdrev(drev)
1652 desc = getdescfromdrev(drev)
1650 header = b'# HG changeset patch\n'
1653 header = b'# HG changeset patch\n'
1651
1654
1652 # Try to preserve metadata from hg:meta property. Write hg patch
1655 # Try to preserve metadata from hg:meta property. Write hg patch
1653 # headers that can be read by the "import" command. See patchheadermap
1656 # headers that can be read by the "import" command. See patchheadermap
1654 # and extract in mercurial/patch.py for supported headers.
1657 # and extract in mercurial/patch.py for supported headers.
1655 meta = getdiffmeta(diffs[b'%d' % diffid])
1658 meta = getdiffmeta(diffs[b'%d' % diffid])
1656 for k in _metanamemap.keys():
1659 for k in _metanamemap.keys():
1657 if k in meta:
1660 if k in meta:
1658 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1661 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1659
1662
1660 content = b'%s%s\n%s' % (header, desc, body)
1663 content = b'%s%s\n%s' % (header, desc, body)
1661 patches.append((drev[b'id'], content))
1664 patches.append((drev[b'id'], content))
1662
1665
1663 # Write patches to the supplied callback
1666 # Write patches to the supplied callback
1664 write(patches)
1667 write(patches)
1665
1668
1666
1669
1667 @vcrcommand(
1670 @vcrcommand(
1668 b'phabread',
1671 b'phabread',
1669 [(b'', b'stack', False, _(b'read dependencies'))],
1672 [(b'', b'stack', False, _(b'read dependencies'))],
1670 _(b'DREVSPEC [OPTIONS]'),
1673 _(b'DREVSPEC [OPTIONS]'),
1671 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1674 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1672 optionalrepo=True,
1675 optionalrepo=True,
1673 )
1676 )
1674 def phabread(ui, repo, spec, **opts):
1677 def phabread(ui, repo, spec, **opts):
1675 """print patches from Phabricator suitable for importing
1678 """print patches from Phabricator suitable for importing
1676
1679
1677 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1680 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1678 the number ``123``. It could also have common operators like ``+``, ``-``,
1681 the number ``123``. It could also have common operators like ``+``, ``-``,
1679 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1682 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1680 select a stack.
1683 select a stack.
1681
1684
1682 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1685 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1683 could be used to filter patches by status. For performance reason, they
1686 could be used to filter patches by status. For performance reason, they
1684 only represent a subset of non-status selections and cannot be used alone.
1687 only represent a subset of non-status selections and cannot be used alone.
1685
1688
1686 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1689 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1687 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1690 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1688 stack up to D9.
1691 stack up to D9.
1689
1692
1690 If --stack is given, follow dependencies information and read all patches.
1693 If --stack is given, follow dependencies information and read all patches.
1691 It is equivalent to the ``:`` operator.
1694 It is equivalent to the ``:`` operator.
1692 """
1695 """
1693 opts = pycompat.byteskwargs(opts)
1696 opts = pycompat.byteskwargs(opts)
1694 if opts.get(b'stack'):
1697 if opts.get(b'stack'):
1695 spec = b':(%s)' % spec
1698 spec = b':(%s)' % spec
1696 drevs = querydrev(ui, spec)
1699 drevs = querydrev(ui, spec)
1697
1700
1698 def _write(patches):
1701 def _write(patches):
1699 for drev, content in patches:
1702 for drev, content in patches:
1700 ui.write(content)
1703 ui.write(content)
1701
1704
1702 readpatch(ui, drevs, _write)
1705 readpatch(ui, drevs, _write)
1703
1706
1704
1707
1705 @vcrcommand(
1708 @vcrcommand(
1706 b'phabimport',
1709 b'phabimport',
1707 [(b'', b'stack', False, _(b'import dependencies as well'))],
1710 [(b'', b'stack', False, _(b'import dependencies as well'))],
1708 _(b'DREVSPEC [OPTIONS]'),
1711 _(b'DREVSPEC [OPTIONS]'),
1709 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1712 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1710 )
1713 )
1711 def phabimport(ui, repo, spec, **opts):
1714 def phabimport(ui, repo, spec, **opts):
1712 """import patches from Phabricator for the specified Differential Revisions
1715 """import patches from Phabricator for the specified Differential Revisions
1713
1716
1714 The patches are read and applied starting at the parent of the working
1717 The patches are read and applied starting at the parent of the working
1715 directory.
1718 directory.
1716
1719
1717 See ``hg help phabread`` for how to specify DREVSPEC.
1720 See ``hg help phabread`` for how to specify DREVSPEC.
1718 """
1721 """
1719 opts = pycompat.byteskwargs(opts)
1722 opts = pycompat.byteskwargs(opts)
1720
1723
1721 # --bypass avoids losing exec and symlink bits when importing on Windows,
1724 # --bypass avoids losing exec and symlink bits when importing on Windows,
1722 # and allows importing with a dirty wdir. It also aborts instead of leaving
1725 # and allows importing with a dirty wdir. It also aborts instead of leaving
1723 # rejects.
1726 # rejects.
1724 opts[b'bypass'] = True
1727 opts[b'bypass'] = True
1725
1728
1726 # Mandatory default values, synced with commands.import
1729 # Mandatory default values, synced with commands.import
1727 opts[b'strip'] = 1
1730 opts[b'strip'] = 1
1728 opts[b'prefix'] = b''
1731 opts[b'prefix'] = b''
1729 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1732 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1730 opts[b'obsolete'] = False
1733 opts[b'obsolete'] = False
1731
1734
1735 if ui.configbool(b'phabimport', b'secret'):
1736 opts[b'secret'] = True
1737
1732 def _write(patches):
1738 def _write(patches):
1733 parents = repo[None].parents()
1739 parents = repo[None].parents()
1734
1740
1735 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1741 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1736 for drev, contents in patches:
1742 for drev, contents in patches:
1737 ui.status(_(b'applying patch from D%s\n') % drev)
1743 ui.status(_(b'applying patch from D%s\n') % drev)
1738
1744
1739 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1745 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1740 msg, node, rej = cmdutil.tryimportone(
1746 msg, node, rej = cmdutil.tryimportone(
1741 ui,
1747 ui,
1742 repo,
1748 repo,
1743 patchdata,
1749 patchdata,
1744 parents,
1750 parents,
1745 opts,
1751 opts,
1746 [],
1752 [],
1747 None, # Never update wdir to another revision
1753 None, # Never update wdir to another revision
1748 )
1754 )
1749
1755
1750 if not node:
1756 if not node:
1751 raise error.Abort(_(b'D%s: no diffs found') % drev)
1757 raise error.Abort(_(b'D%s: no diffs found') % drev)
1752
1758
1753 ui.note(msg + b'\n')
1759 ui.note(msg + b'\n')
1754 parents = [repo[node]]
1760 parents = [repo[node]]
1755
1761
1756 opts = pycompat.byteskwargs(opts)
1762 opts = pycompat.byteskwargs(opts)
1757 if opts.get(b'stack'):
1763 if opts.get(b'stack'):
1758 spec = b':(%s)' % spec
1764 spec = b':(%s)' % spec
1759 drevs = querydrev(repo.ui, spec)
1765 drevs = querydrev(repo.ui, spec)
1760
1766
1761 readpatch(repo.ui, drevs, _write)
1767 readpatch(repo.ui, drevs, _write)
1762
1768
1763
1769
1764 @vcrcommand(
1770 @vcrcommand(
1765 b'phabupdate',
1771 b'phabupdate',
1766 [
1772 [
1767 (b'', b'accept', False, _(b'accept revisions')),
1773 (b'', b'accept', False, _(b'accept revisions')),
1768 (b'', b'reject', False, _(b'reject revisions')),
1774 (b'', b'reject', False, _(b'reject revisions')),
1769 (b'', b'abandon', False, _(b'abandon revisions')),
1775 (b'', b'abandon', False, _(b'abandon revisions')),
1770 (b'', b'reclaim', False, _(b'reclaim revisions')),
1776 (b'', b'reclaim', False, _(b'reclaim revisions')),
1771 (b'm', b'comment', b'', _(b'comment on the last revision')),
1777 (b'm', b'comment', b'', _(b'comment on the last revision')),
1772 ],
1778 ],
1773 _(b'DREVSPEC [OPTIONS]'),
1779 _(b'DREVSPEC [OPTIONS]'),
1774 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1780 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1775 optionalrepo=True,
1781 optionalrepo=True,
1776 )
1782 )
1777 def phabupdate(ui, repo, spec, **opts):
1783 def phabupdate(ui, repo, spec, **opts):
1778 """update Differential Revision in batch
1784 """update Differential Revision in batch
1779
1785
1780 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1786 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1781 """
1787 """
1782 opts = pycompat.byteskwargs(opts)
1788 opts = pycompat.byteskwargs(opts)
1783 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1789 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1784 if len(flags) > 1:
1790 if len(flags) > 1:
1785 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1791 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1786
1792
1787 actions = []
1793 actions = []
1788 for f in flags:
1794 for f in flags:
1789 actions.append({b'type': f, b'value': True})
1795 actions.append({b'type': f, b'value': True})
1790
1796
1791 drevs = querydrev(ui, spec)
1797 drevs = querydrev(ui, spec)
1792 for i, drev in enumerate(drevs):
1798 for i, drev in enumerate(drevs):
1793 if i + 1 == len(drevs) and opts.get(b'comment'):
1799 if i + 1 == len(drevs) and opts.get(b'comment'):
1794 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1800 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1795 if actions:
1801 if actions:
1796 params = {
1802 params = {
1797 b'objectIdentifier': drev[b'phid'],
1803 b'objectIdentifier': drev[b'phid'],
1798 b'transactions': actions,
1804 b'transactions': actions,
1799 }
1805 }
1800 callconduit(ui, b'differential.revision.edit', params)
1806 callconduit(ui, b'differential.revision.edit', params)
1801
1807
1802
1808
1803 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1809 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1804 def template_review(context, mapping):
1810 def template_review(context, mapping):
1805 """:phabreview: Object describing the review for this changeset.
1811 """:phabreview: Object describing the review for this changeset.
1806 Has attributes `url` and `id`.
1812 Has attributes `url` and `id`.
1807 """
1813 """
1808 ctx = context.resource(mapping, b'ctx')
1814 ctx = context.resource(mapping, b'ctx')
1809 m = _differentialrevisiondescre.search(ctx.description())
1815 m = _differentialrevisiondescre.search(ctx.description())
1810 if m:
1816 if m:
1811 return templateutil.hybriddict(
1817 return templateutil.hybriddict(
1812 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1818 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1813 )
1819 )
1814 else:
1820 else:
1815 tags = ctx.repo().nodetags(ctx.node())
1821 tags = ctx.repo().nodetags(ctx.node())
1816 for t in tags:
1822 for t in tags:
1817 if _differentialrevisiontagre.match(t):
1823 if _differentialrevisiontagre.match(t):
1818 url = ctx.repo().ui.config(b'phabricator', b'url')
1824 url = ctx.repo().ui.config(b'phabricator', b'url')
1819 if not url.endswith(b'/'):
1825 if not url.endswith(b'/'):
1820 url += b'/'
1826 url += b'/'
1821 url += t
1827 url += t
1822
1828
1823 return templateutil.hybriddict({b'url': url, b'id': t,})
1829 return templateutil.hybriddict({b'url': url, b'id': t,})
1824 return None
1830 return None
1825
1831
1826
1832
1827 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1833 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1828 def template_status(context, mapping):
1834 def template_status(context, mapping):
1829 """:phabstatus: String. Status of Phabricator differential.
1835 """:phabstatus: String. Status of Phabricator differential.
1830 """
1836 """
1831 ctx = context.resource(mapping, b'ctx')
1837 ctx = context.resource(mapping, b'ctx')
1832 repo = context.resource(mapping, b'repo')
1838 repo = context.resource(mapping, b'repo')
1833 ui = context.resource(mapping, b'ui')
1839 ui = context.resource(mapping, b'ui')
1834
1840
1835 rev = ctx.rev()
1841 rev = ctx.rev()
1836 try:
1842 try:
1837 drevid = getdrevmap(repo, [rev])[rev]
1843 drevid = getdrevmap(repo, [rev])[rev]
1838 except KeyError:
1844 except KeyError:
1839 return None
1845 return None
1840 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1846 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1841 for drev in drevs:
1847 for drev in drevs:
1842 if int(drev[b'id']) == drevid:
1848 if int(drev[b'id']) == drevid:
1843 return templateutil.hybriddict(
1849 return templateutil.hybriddict(
1844 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1850 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1845 )
1851 )
1846 return None
1852 return None
1847
1853
1848
1854
1849 @show.showview(b'phabstatus', csettopic=b'work')
1855 @show.showview(b'phabstatus', csettopic=b'work')
1850 def phabstatusshowview(ui, repo, displayer):
1856 def phabstatusshowview(ui, repo, displayer):
1851 """Phabricator differiential status"""
1857 """Phabricator differiential status"""
1852 revs = repo.revs('sort(_underway(), topo)')
1858 revs = repo.revs('sort(_underway(), topo)')
1853 drevmap = getdrevmap(repo, revs)
1859 drevmap = getdrevmap(repo, revs)
1854 unknownrevs, drevids, revsbydrevid = [], set(), {}
1860 unknownrevs, drevids, revsbydrevid = [], set(), {}
1855 for rev, drevid in pycompat.iteritems(drevmap):
1861 for rev, drevid in pycompat.iteritems(drevmap):
1856 if drevid is not None:
1862 if drevid is not None:
1857 drevids.add(drevid)
1863 drevids.add(drevid)
1858 revsbydrevid.setdefault(drevid, set()).add(rev)
1864 revsbydrevid.setdefault(drevid, set()).add(rev)
1859 else:
1865 else:
1860 unknownrevs.append(rev)
1866 unknownrevs.append(rev)
1861
1867
1862 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1868 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1863 drevsbyrev = {}
1869 drevsbyrev = {}
1864 for drev in drevs:
1870 for drev in drevs:
1865 for rev in revsbydrevid[int(drev[b'id'])]:
1871 for rev in revsbydrevid[int(drev[b'id'])]:
1866 drevsbyrev[rev] = drev
1872 drevsbyrev[rev] = drev
1867
1873
1868 def phabstatus(ctx):
1874 def phabstatus(ctx):
1869 drev = drevsbyrev[ctx.rev()]
1875 drev = drevsbyrev[ctx.rev()]
1870 status = ui.label(
1876 status = ui.label(
1871 b'%(statusName)s' % drev,
1877 b'%(statusName)s' % drev,
1872 b'phabricator.status.%s' % _getstatusname(drev),
1878 b'phabricator.status.%s' % _getstatusname(drev),
1873 )
1879 )
1874 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1880 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1875
1881
1876 revs -= smartset.baseset(unknownrevs)
1882 revs -= smartset.baseset(unknownrevs)
1877 revdag = graphmod.dagwalker(repo, revs)
1883 revdag = graphmod.dagwalker(repo, revs)
1878
1884
1879 ui.setconfig(b'experimental', b'graphshorten', True)
1885 ui.setconfig(b'experimental', b'graphshorten', True)
1880 displayer._exthook = phabstatus
1886 displayer._exthook = phabstatus
1881 nodelen = show.longestshortest(repo, revs)
1887 nodelen = show.longestshortest(repo, revs)
1882 logcmdutil.displaygraph(
1888 logcmdutil.displaygraph(
1883 ui,
1889 ui,
1884 repo,
1890 repo,
1885 revdag,
1891 revdag,
1886 displayer,
1892 displayer,
1887 graphmod.asciiedges,
1893 graphmod.asciiedges,
1888 props={b'nodelen': nodelen},
1894 props={b'nodelen': nodelen},
1889 )
1895 )
@@ -1,349 +1,383 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 > EOF
5 > EOF
6 $ hg init repo
6 $ hg init repo
7 $ cd repo
7 $ cd repo
8 $ cat >> .hg/hgrc <<EOF
8 $ cat >> .hg/hgrc <<EOF
9 > [phabricator]
9 > [phabricator]
10 > url = https://phab.mercurial-scm.org/
10 > url = https://phab.mercurial-scm.org/
11 > callsign = HG
11 > callsign = HG
12 >
12 >
13 > [auth]
13 > [auth]
14 > hgphab.schemes = https
14 > hgphab.schemes = https
15 > hgphab.prefix = phab.mercurial-scm.org
15 > hgphab.prefix = phab.mercurial-scm.org
16 > # When working on the extension and making phabricator interaction
16 > # When working on the extension and making phabricator interaction
17 > # changes, edit this to be a real phabricator token. When done, edit
17 > # changes, edit this to be a real phabricator token. When done, edit
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
19 > # token with this value.
19 > # token with this value.
20 > hgphab.phabtoken = cli-hahayouwish
20 > hgphab.phabtoken = cli-hahayouwish
21 > EOF
21 > EOF
22 $ VCR="$TESTDIR/phabricator"
22 $ VCR="$TESTDIR/phabricator"
23
23
24 Error is handled reasonably. We override the phabtoken here so that
24 Error is handled reasonably. We override the phabtoken here so that
25 when you're developing changes to phabricator.py you can edit the
25 when you're developing changes to phabricator.py you can edit the
26 above config and have a real token in the test but not have to edit
26 above config and have a real token in the test but not have to edit
27 this test.
27 this test.
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
31
31
32 Missing arguments print the command help
32 Missing arguments print the command help
33
33
34 $ hg phabread
34 $ hg phabread
35 hg phabread: invalid arguments
35 hg phabread: invalid arguments
36 hg phabread DREVSPEC [OPTIONS]
36 hg phabread DREVSPEC [OPTIONS]
37
37
38 print patches from Phabricator suitable for importing
38 print patches from Phabricator suitable for importing
39
39
40 options:
40 options:
41
41
42 --stack read dependencies
42 --stack read dependencies
43
43
44 (use 'hg phabread -h' to show more help)
44 (use 'hg phabread -h' to show more help)
45 [255]
45 [255]
46
46
47 Basic phabread:
47 Basic phabread:
48 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
48 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
49 # HG changeset patch
49 # HG changeset patch
50 # Date 1536771503 0
50 # Date 1536771503 0
51 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
51 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
52 exchangev2: start to implement pull with wire protocol v2
52 exchangev2: start to implement pull with wire protocol v2
53
53
54 Wire protocol version 2 will take a substantially different
54 Wire protocol version 2 will take a substantially different
55 approach to exchange than version 1 (at least as far as pulling
55 approach to exchange than version 1 (at least as far as pulling
56 is concerned).
56 is concerned).
57
57
58 This commit establishes a new exchangev2 module for holding
58 This commit establishes a new exchangev2 module for holding
59
59
60 phabupdate with an accept:
60 phabupdate with an accept:
61 $ hg phabupdate --accept D4564 \
61 $ hg phabupdate --accept D4564 \
62 > -m 'I think I like where this is headed. Will read rest of series later.'\
62 > -m 'I think I like where this is headed. Will read rest of series later.'\
63 > --test-vcr "$VCR/accept-4564.json"
63 > --test-vcr "$VCR/accept-4564.json"
64 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
64 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
65 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
65 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
66 [255]
66 [255]
67 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
67 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
68
68
69 Create a differential diff:
69 Create a differential diff:
70 $ HGENCODING=utf-8; export HGENCODING
70 $ HGENCODING=utf-8; export HGENCODING
71 $ echo alpha > alpha
71 $ echo alpha > alpha
72 $ hg ci --addremove -m 'create alpha for phabricator test €'
72 $ hg ci --addremove -m 'create alpha for phabricator test €'
73 adding alpha
73 adding alpha
74 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
74 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
75 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
75 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
76 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
76 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
77 $ echo more >> alpha
77 $ echo more >> alpha
78 $ HGEDITOR=true hg ci --amend
78 $ HGEDITOR=true hg ci --amend
79 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
79 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
80 $ echo beta > beta
80 $ echo beta > beta
81 $ hg ci --addremove -m 'create beta for phabricator test'
81 $ hg ci --addremove -m 'create beta for phabricator test'
82 adding beta
82 adding beta
83 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
83 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
84 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
84 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
85 D7916 - created - 9e6901f21d5b: create beta for phabricator test
85 D7916 - created - 9e6901f21d5b: create beta for phabricator test
86 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
86 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
87 $ unset HGENCODING
87 $ unset HGENCODING
88
88
89 The amend won't explode after posting a public commit. The local tag is left
89 The amend won't explode after posting a public commit. The local tag is left
90 behind to identify it.
90 behind to identify it.
91
91
92 $ echo 'public change' > beta
92 $ echo 'public change' > beta
93 $ hg ci -m 'create public change for phabricator testing'
93 $ hg ci -m 'create public change for phabricator testing'
94 $ hg phase --public .
94 $ hg phase --public .
95 $ echo 'draft change' > alpha
95 $ echo 'draft change' > alpha
96 $ hg ci -m 'create draft change for phabricator testing'
96 $ hg ci -m 'create draft change for phabricator testing'
97 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
97 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
98 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
98 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
99 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
99 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
100 warning: not updating public commit 2:7b4185ab5d16
100 warning: not updating public commit 2:7b4185ab5d16
101 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
101 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
102 $ hg tags -v
102 $ hg tags -v
103 tip 3:3244dc4a3334
103 tip 3:3244dc4a3334
104 D7917 2:7b4185ab5d16 local
104 D7917 2:7b4185ab5d16 local
105
105
106 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
106 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
107 > {
107 > {
108 > "constraints": {
108 > "constraints": {
109 > "isBot": true
109 > "isBot": true
110 > }
110 > }
111 > }
111 > }
112 > EOF
112 > EOF
113 {
113 {
114 "cursor": {
114 "cursor": {
115 "after": null,
115 "after": null,
116 "before": null,
116 "before": null,
117 "limit": 100,
117 "limit": 100,
118 "order": null
118 "order": null
119 },
119 },
120 "data": [],
120 "data": [],
121 "maps": {},
121 "maps": {},
122 "query": {
122 "query": {
123 "queryKey": null
123 "queryKey": null
124 }
124 }
125 }
125 }
126
126
127 Template keywords
127 Template keywords
128 $ hg log -T'{rev} {phabreview|json}\n'
128 $ hg log -T'{rev} {phabreview|json}\n'
129 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
129 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
130 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
130 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
131 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
131 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
132 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
132 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
133
133
134 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
134 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
135 3 https://phab.mercurial-scm.org/D7918 D7918
135 3 https://phab.mercurial-scm.org/D7918 D7918
136 2 https://phab.mercurial-scm.org/D7917 D7917
136 2 https://phab.mercurial-scm.org/D7917 D7917
137 1 https://phab.mercurial-scm.org/D7916 D7916
137 1 https://phab.mercurial-scm.org/D7916 D7916
138 0 https://phab.mercurial-scm.org/D7915 D7915
138 0 https://phab.mercurial-scm.org/D7915 D7915
139
139
140 Commenting when phabsending:
140 Commenting when phabsending:
141 $ echo comment > comment
141 $ echo comment > comment
142 $ hg ci --addremove -m "create comment for phabricator test"
142 $ hg ci --addremove -m "create comment for phabricator test"
143 adding comment
143 adding comment
144 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
144 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
145 D7919 - created - d5dddca9023d: create comment for phabricator test
145 D7919 - created - d5dddca9023d: create comment for phabricator test
146 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
146 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
147 $ echo comment2 >> comment
147 $ echo comment2 >> comment
148 $ hg ci --amend
148 $ hg ci --amend
149 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
149 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
150 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
150 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
151 D7919 - updated - 1849d7828727: create comment for phabricator test
151 D7919 - updated - 1849d7828727: create comment for phabricator test
152
152
153 Phabsending a skipped commit:
153 Phabsending a skipped commit:
154 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
154 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
155 D7919 - skipped - 1849d7828727: create comment for phabricator test
155 D7919 - skipped - 1849d7828727: create comment for phabricator test
156
156
157 Phabesending a new binary, a modified binary, and a removed binary
157 Phabesending a new binary, a modified binary, and a removed binary
158
158
159 >>> open('bin', 'wb').write(b'\0a') and None
159 >>> open('bin', 'wb').write(b'\0a') and None
160 $ hg ci -Am 'add binary'
160 $ hg ci -Am 'add binary'
161 adding bin
161 adding bin
162 >>> open('bin', 'wb').write(b'\0b') and None
162 >>> open('bin', 'wb').write(b'\0b') and None
163 $ hg ci -m 'modify binary'
163 $ hg ci -m 'modify binary'
164 $ hg rm bin
164 $ hg rm bin
165 $ hg ci -m 'remove binary'
165 $ hg ci -m 'remove binary'
166 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
166 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
167 uploading bin@aa24a81f55de
167 uploading bin@aa24a81f55de
168 D8007 - created - aa24a81f55de: add binary
168 D8007 - created - aa24a81f55de: add binary
169 uploading bin@d8d62a881b54
169 uploading bin@d8d62a881b54
170 D8008 - created - d8d62a881b54: modify binary
170 D8008 - created - d8d62a881b54: modify binary
171 D8009 - created - af55645b2e29: remove binary
171 D8009 - created - af55645b2e29: remove binary
172 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
172 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
173
173
174 Phabsend a renamed binary and a copied binary, with and without content changes
174 Phabsend a renamed binary and a copied binary, with and without content changes
175 to src and dest
175 to src and dest
176
176
177 >>> open('bin2', 'wb').write(b'\0c') and None
177 >>> open('bin2', 'wb').write(b'\0c') and None
178 $ hg ci -Am 'add another binary'
178 $ hg ci -Am 'add another binary'
179 adding bin2
179 adding bin2
180
180
181 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
181 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
182 looks much different than when viewing "bin2_moved". No idea if this is a phab
182 looks much different than when viewing "bin2_moved". No idea if this is a phab
183 bug, or phabsend bug. The patch (as printed by phabread) look reasonable
183 bug, or phabsend bug. The patch (as printed by phabread) look reasonable
184 though.
184 though.
185
185
186 $ hg mv bin2 bin2_moved
186 $ hg mv bin2 bin2_moved
187 $ hg ci -m "moved binary"
187 $ hg ci -m "moved binary"
188
188
189 Note: "bin2_moved" is also not viewable in phabricator with this review
189 Note: "bin2_moved" is also not viewable in phabricator with this review
190
190
191 $ hg cp bin2_moved bin2_copied
191 $ hg cp bin2_moved bin2_copied
192 $ hg ci -m "copied binary"
192 $ hg ci -m "copied binary"
193
193
194 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
194 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
195 are viewable in their proper state. "bin2_copied" is not viewable, and not
195 are viewable in their proper state. "bin2_copied" is not viewable, and not
196 listed as binary in phabricator.
196 listed as binary in phabricator.
197
197
198 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
198 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
199 $ hg mv bin2_copied bin2_moved_again
199 $ hg mv bin2_copied bin2_moved_again
200 $ hg ci -m "move+mod copied binary"
200 $ hg ci -m "move+mod copied binary"
201
201
202 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
202 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
203 viewable on each side.
203 viewable on each side.
204
204
205 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
205 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
206 $ hg cp bin2_moved bin2_moved_copied
206 $ hg cp bin2_moved bin2_moved_copied
207 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
207 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
208 $ hg ci -m "copy+mod moved binary"
208 $ hg ci -m "copy+mod moved binary"
209
209
210 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
210 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
211 uploading bin2@f42f9195e00c
211 uploading bin2@f42f9195e00c
212 D8128 - created - f42f9195e00c: add another binary
212 D8128 - created - f42f9195e00c: add another binary
213 D8129 - created - 834ab31d80ae: moved binary
213 D8129 - created - 834ab31d80ae: moved binary
214 D8130 - created - 494b750e5194: copied binary
214 D8130 - created - 494b750e5194: copied binary
215 uploading bin2_moved_again@25f766b50cc2
215 uploading bin2_moved_again@25f766b50cc2
216 D8131 - created - 25f766b50cc2: move+mod copied binary
216 D8131 - created - 25f766b50cc2: move+mod copied binary
217 uploading bin2_moved_copied@1b87b363a5e4
217 uploading bin2_moved_copied@1b87b363a5e4
218 uploading bin2_moved@1b87b363a5e4
218 uploading bin2_moved@1b87b363a5e4
219 D8132 - created - 1b87b363a5e4: copy+mod moved binary
219 D8132 - created - 1b87b363a5e4: copy+mod moved binary
220 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
220 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
221
221
222 Phabreading a DREV with a local:commits time as a string:
222 Phabreading a DREV with a local:commits time as a string:
223 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
223 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
224 # HG changeset patch
224 # HG changeset patch
225 # User Pulkit Goyal <7895pulkit@gmail.com>
225 # User Pulkit Goyal <7895pulkit@gmail.com>
226 # Date 1509404054 -19800
226 # Date 1509404054 -19800
227 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
227 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
228 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
228 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
229 repoview: add a new attribute _visibilityexceptions and related API
229 repoview: add a new attribute _visibilityexceptions and related API
230
230
231 Currently we don't have a defined way in core to make some hidden revisions
231 Currently we don't have a defined way in core to make some hidden revisions
232 visible in filtered repo. Extensions to achieve the purpose of unhiding some
232 visible in filtered repo. Extensions to achieve the purpose of unhiding some
233 hidden commits, wrap repoview.pinnedrevs() function.
233 hidden commits, wrap repoview.pinnedrevs() function.
234
234
235 To make the above task simple and have well defined API, this patch adds a new
235 To make the above task simple and have well defined API, this patch adds a new
236 attribute '_visibilityexceptions' to repoview class which will contains
236 attribute '_visibilityexceptions' to repoview class which will contains
237 the hidden revs which should be exception.
237 the hidden revs which should be exception.
238 This will allow to set different exceptions for different repoview objects
238 This will allow to set different exceptions for different repoview objects
239 backed by the same unfiltered repo.
239 backed by the same unfiltered repo.
240
240
241 This patch also adds API to add revs to the attribute set and get them.
241 This patch also adds API to add revs to the attribute set and get them.
242
242
243 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
243 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
244
244
245 Differential Revision: https://phab.mercurial-scm.org/D1285
245 Differential Revision: https://phab.mercurial-scm.org/D1285
246 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
246 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
247 --- a/mercurial/repoview.py
247 --- a/mercurial/repoview.py
248 +++ b/mercurial/repoview.py
248 +++ b/mercurial/repoview.py
249 @@ * @@ (glob)
249 @@ * @@ (glob)
250 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
250 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
251 """
251 """
252
252
253 + # hidden revs which should be visible
253 + # hidden revs which should be visible
254 + _visibilityexceptions = set()
254 + _visibilityexceptions = set()
255 +
255 +
256 def __init__(self, repo, filtername):
256 def __init__(self, repo, filtername):
257 object.__setattr__(self, r'_unfilteredrepo', repo)
257 object.__setattr__(self, r'_unfilteredrepo', repo)
258 object.__setattr__(self, r'filtername', filtername)
258 object.__setattr__(self, r'filtername', filtername)
259 @@ -231,6 +234,14 @@
259 @@ -231,6 +234,14 @@
260 return self
260 return self
261 return self.unfiltered().filtered(name)
261 return self.unfiltered().filtered(name)
262
262
263 + def addvisibilityexceptions(self, revs):
263 + def addvisibilityexceptions(self, revs):
264 + """adds hidden revs which should be visible to set of exceptions"""
264 + """adds hidden revs which should be visible to set of exceptions"""
265 + self._visibilityexceptions.update(revs)
265 + self._visibilityexceptions.update(revs)
266 +
266 +
267 + def getvisibilityexceptions(self):
267 + def getvisibilityexceptions(self):
268 + """returns the set of hidden revs which should be visible"""
268 + """returns the set of hidden revs which should be visible"""
269 + return self._visibilityexceptions
269 + return self._visibilityexceptions
270 +
270 +
271 # everything access are forwarded to the proxied repo
271 # everything access are forwarded to the proxied repo
272 def __getattr__(self, attr):
272 def __getattr__(self, attr):
273 return getattr(self._unfilteredrepo, attr)
273 return getattr(self._unfilteredrepo, attr)
274 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
274 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
275 --- a/mercurial/localrepo.py
275 --- a/mercurial/localrepo.py
276 +++ b/mercurial/localrepo.py
276 +++ b/mercurial/localrepo.py
277 @@ -570,6 +570,14 @@
277 @@ -570,6 +570,14 @@
278 def close(self):
278 def close(self):
279 self._writecaches()
279 self._writecaches()
280
280
281 + def addvisibilityexceptions(self, exceptions):
281 + def addvisibilityexceptions(self, exceptions):
282 + # should be called on a filtered repository
282 + # should be called on a filtered repository
283 + pass
283 + pass
284 +
284 +
285 + def getvisibilityexceptions(self):
285 + def getvisibilityexceptions(self):
286 + # should be called on a filtered repository
286 + # should be called on a filtered repository
287 + return set()
287 + return set()
288 +
288 +
289 def _loadextensions(self):
289 def _loadextensions(self):
290 extensions.loadall(self.ui)
290 extensions.loadall(self.ui)
291
291
292
292
293 A bad .arcconfig doesn't error out
293 A bad .arcconfig doesn't error out
294 $ echo 'garbage' > .arcconfig
294 $ echo 'garbage' > .arcconfig
295 $ hg config phabricator --debug
295 $ hg config phabricator --debug
296 invalid JSON in $TESTTMP/repo/.arcconfig
296 invalid JSON in $TESTTMP/repo/.arcconfig
297 read config from: */.hgrc (glob)
297 read config from: */.hgrc (glob)
298 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
298 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
299 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
299 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
300
300
301 The .arcconfig content overrides global config
301 The .arcconfig content overrides global config
302 $ cat >> $HGRCPATH << EOF
302 $ cat >> $HGRCPATH << EOF
303 > [phabricator]
303 > [phabricator]
304 > url = global
304 > url = global
305 > callsign = global
305 > callsign = global
306 > EOF
306 > EOF
307 $ cp $TESTDIR/../.arcconfig .
307 $ cp $TESTDIR/../.arcconfig .
308 $ mv .hg/hgrc .hg/hgrc.bak
308 $ mv .hg/hgrc .hg/hgrc.bak
309 $ hg config phabricator --debug
309 $ hg config phabricator --debug
310 read config from: */.hgrc (glob)
310 read config from: */.hgrc (glob)
311 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
311 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
312 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
312 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
313
313
314 But it doesn't override local config
314 But it doesn't override local config
315 $ cat >> .hg/hgrc << EOF
315 $ cat >> .hg/hgrc << EOF
316 > [phabricator]
316 > [phabricator]
317 > url = local
317 > url = local
318 > callsign = local
318 > callsign = local
319 > EOF
319 > EOF
320 $ hg config phabricator --debug
320 $ hg config phabricator --debug
321 read config from: */.hgrc (glob)
321 read config from: */.hgrc (glob)
322 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
322 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
323 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
323 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
324 $ mv .hg/hgrc.bak .hg/hgrc
324 $ mv .hg/hgrc.bak .hg/hgrc
325
325
326 Phabimport works with a stack
326 Phabimport works with a stack
327
327
328 $ cd ..
328 $ cd ..
329 $ hg clone repo repo2 -qr 1
329 $ hg clone repo repo2 -qr 1
330 $ cp repo/.hg/hgrc repo2/.hg/
330 $ cp repo/.hg/hgrc repo2/.hg/
331 $ cd repo2
331 $ cd repo2
332 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
332 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
333 applying patch from D7917
333 applying patch from D7917
334 applying patch from D7918
334 applying patch from D7918
335 $ hg log -G -Tcompact
335 $ hg log -G -Tcompact
336 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
336 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
337 | create draft change for phabricator testing
337 | create draft change for phabricator testing
338 |
338 |
339 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
339 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
340 | create public change for phabricator testing
340 | create public change for phabricator testing
341 |
341 |
342 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
342 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
343 | create beta for phabricator test
343 | create beta for phabricator test
344 |
344 |
345 o 0 c44b38f24a45 1970-01-01 00:00 +0000 test
345 o 0 c44b38f24a45 1970-01-01 00:00 +0000 test
346 create alpha for phabricator test \x80 (esc)
346 create alpha for phabricator test \x80 (esc)
347
347
348 Phabimport can create secret commits
349
350 $ hg rollback --config ui.rollback=True
351 repository tip rolled back to revision 1 (undo phabimport)
352 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
353 > --config phabimport.secret=True
354 applying patch from D7917
355 applying patch from D7918
356 $ hg log -T phases
357 changeset: 3:aaef04066140
358 tag: tip
359 phase: secret
360 user: test
361 date: Thu Jan 01 00:00:00 1970 +0000
362 summary: create draft change for phabricator testing
363
364 changeset: 2:8de3712202d1
365 phase: secret
366 user: test
367 date: Thu Jan 01 00:00:00 1970 +0000
368 summary: create public change for phabricator testing
369
370 changeset: 1:a692622e6937
371 phase: public
372 user: test
373 date: Thu Jan 01 00:00:00 1970 +0000
374 summary: create beta for phabricator test
375
376 changeset: 0:c44b38f24a45
377 phase: public
378 user: test
379 date: Thu Jan 01 00:00:00 1970 +0000
380 summary: create alpha for phabricator test \x80 (esc)
381
348
382
349 $ cd ..
383 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now