##// END OF EJS Templates
phabricator: use .arcconfig for `phabricator.url` if not set locally...
Matt Harbison -
r44587:ff396501 default
parent child Browse files
Show More
@@ -1,6 +1,7 b''
1 {
1 {
2 "conduit_uri": "https://phab.mercurial-scm.org/api",
2 "conduit_uri": "https://phab.mercurial-scm.org/api",
3 "phabricator.uri": "https://phab.mercurial-scm.org/",
3 "repository.callsign": "HG",
4 "repository.callsign": "HG",
4 "arc.land.onto.default": "@",
5 "arc.land.onto.default": "@",
5 "base": "hg:.^"
6 "base": "hg:.^"
6 }
7 }
@@ -1,1792 +1,1797 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # be specifying the version(s) of Mercurial they are tested with, or
95 # be specifying the version(s) of Mercurial they are tested with, or
96 # leave the attribute unspecified.
96 # leave the attribute unspecified.
97 testedwith = b'ships-with-hg-core'
97 testedwith = b'ships-with-hg-core'
98
98
99 eh = exthelper.exthelper()
99 eh = exthelper.exthelper()
100
100
101 cmdtable = eh.cmdtable
101 cmdtable = eh.cmdtable
102 command = eh.command
102 command = eh.command
103 configtable = eh.configtable
103 configtable = eh.configtable
104 templatekeyword = eh.templatekeyword
104 templatekeyword = eh.templatekeyword
105 uisetup = eh.finaluisetup
105 uisetup = eh.finaluisetup
106
106
107 # developer config: phabricator.batchsize
107 # developer config: phabricator.batchsize
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'batchsize', default=12,
109 b'phabricator', b'batchsize', default=12,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'callsign', default=None,
112 b'phabricator', b'callsign', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabricator', b'curlcmd', default=None,
115 b'phabricator', b'curlcmd', default=None,
116 )
116 )
117 # developer config: phabricator.repophid
117 # developer config: phabricator.repophid
118 eh.configitem(
118 eh.configitem(
119 b'phabricator', b'repophid', default=None,
119 b'phabricator', b'repophid', default=None,
120 )
120 )
121 eh.configitem(
121 eh.configitem(
122 b'phabricator', b'url', default=None,
122 b'phabricator', b'url', default=None,
123 )
123 )
124 eh.configitem(
124 eh.configitem(
125 b'phabsend', b'confirm', default=False,
125 b'phabsend', b'confirm', default=False,
126 )
126 )
127
127
128 colortable = {
128 colortable = {
129 b'phabricator.action.created': b'green',
129 b'phabricator.action.created': b'green',
130 b'phabricator.action.skipped': b'magenta',
130 b'phabricator.action.skipped': b'magenta',
131 b'phabricator.action.updated': b'magenta',
131 b'phabricator.action.updated': b'magenta',
132 b'phabricator.desc': b'',
132 b'phabricator.desc': b'',
133 b'phabricator.drev': b'bold',
133 b'phabricator.drev': b'bold',
134 b'phabricator.node': b'',
134 b'phabricator.node': b'',
135 b'phabricator.status.abandoned': b'magenta dim',
135 b'phabricator.status.abandoned': b'magenta dim',
136 b'phabricator.status.accepted': b'green bold',
136 b'phabricator.status.accepted': b'green bold',
137 b'phabricator.status.closed': b'green',
137 b'phabricator.status.closed': b'green',
138 b'phabricator.status.needsreview': b'yellow',
138 b'phabricator.status.needsreview': b'yellow',
139 b'phabricator.status.needsrevision': b'red',
139 b'phabricator.status.needsrevision': b'red',
140 b'phabricator.status.changesplanned': b'red',
140 b'phabricator.status.changesplanned': b'red',
141 }
141 }
142
142
143 _VCR_FLAGS = [
143 _VCR_FLAGS = [
144 (
144 (
145 b'',
145 b'',
146 b'test-vcr',
146 b'test-vcr',
147 b'',
147 b'',
148 _(
148 _(
149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 b', otherwise will mock all http requests using the specified vcr file.'
150 b', otherwise will mock all http requests using the specified vcr file.'
151 b' (ADVANCED)'
151 b' (ADVANCED)'
152 ),
152 ),
153 ),
153 ),
154 ]
154 ]
155
155
156
156
157 @eh.wrapfunction(localrepo, "loadhgrc")
157 @eh.wrapfunction(localrepo, "loadhgrc")
158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
159 """Load ``.arcconfig`` content into a ui instance on repository open.
159 """Load ``.arcconfig`` content into a ui instance on repository open.
160 """
160 """
161 result = False
161 result = False
162 arcconfig = {}
162 arcconfig = {}
163
163
164 try:
164 try:
165 # json.loads only accepts bytes from 3.6+
165 # json.loads only accepts bytes from 3.6+
166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
167 # json.loads only returns unicode strings
167 # json.loads only returns unicode strings
168 arcconfig = pycompat.rapply(
168 arcconfig = pycompat.rapply(
169 lambda x: encoding.unitolocal(x)
169 lambda x: encoding.unitolocal(x)
170 if isinstance(x, pycompat.unicode)
170 if isinstance(x, pycompat.unicode)
171 else x,
171 else x,
172 pycompat.json_loads(rawparams),
172 pycompat.json_loads(rawparams),
173 )
173 )
174
174
175 result = True
175 result = True
176 except ValueError:
176 except ValueError:
177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
178 except IOError:
178 except IOError:
179 pass
179 pass
180
180
181 cfg = util.sortdict()
182
181 if b"repository.callsign" in arcconfig:
183 if b"repository.callsign" in arcconfig:
182 ui.applyconfig(
184 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
183 {(b"phabricator", b"callsign"): arcconfig[b"repository.callsign"]},
185
184 source=wdirvfs.join(b".arcconfig"),
186 if b"phabricator.uri" in arcconfig:
185 )
187 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
188
189 if cfg:
190 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
186
191
187 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
192 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
188
193
189
194
190 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
195 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
191 fullflags = flags + _VCR_FLAGS
196 fullflags = flags + _VCR_FLAGS
192
197
193 def hgmatcher(r1, r2):
198 def hgmatcher(r1, r2):
194 if r1.uri != r2.uri or r1.method != r2.method:
199 if r1.uri != r2.uri or r1.method != r2.method:
195 return False
200 return False
196 r1params = util.urlreq.parseqs(r1.body)
201 r1params = util.urlreq.parseqs(r1.body)
197 r2params = util.urlreq.parseqs(r2.body)
202 r2params = util.urlreq.parseqs(r2.body)
198 for key in r1params:
203 for key in r1params:
199 if key not in r2params:
204 if key not in r2params:
200 return False
205 return False
201 value = r1params[key][0]
206 value = r1params[key][0]
202 # we want to compare json payloads without worrying about ordering
207 # we want to compare json payloads without worrying about ordering
203 if value.startswith(b'{') and value.endswith(b'}'):
208 if value.startswith(b'{') and value.endswith(b'}'):
204 r1json = pycompat.json_loads(value)
209 r1json = pycompat.json_loads(value)
205 r2json = pycompat.json_loads(r2params[key][0])
210 r2json = pycompat.json_loads(r2params[key][0])
206 if r1json != r2json:
211 if r1json != r2json:
207 return False
212 return False
208 elif r2params[key][0] != value:
213 elif r2params[key][0] != value:
209 return False
214 return False
210 return True
215 return True
211
216
212 def sanitiserequest(request):
217 def sanitiserequest(request):
213 request.body = re.sub(
218 request.body = re.sub(
214 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
219 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
215 )
220 )
216 return request
221 return request
217
222
218 def sanitiseresponse(response):
223 def sanitiseresponse(response):
219 if 'set-cookie' in response['headers']:
224 if 'set-cookie' in response['headers']:
220 del response['headers']['set-cookie']
225 del response['headers']['set-cookie']
221 return response
226 return response
222
227
223 def decorate(fn):
228 def decorate(fn):
224 def inner(*args, **kwargs):
229 def inner(*args, **kwargs):
225 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
230 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
226 if cassette:
231 if cassette:
227 import hgdemandimport
232 import hgdemandimport
228
233
229 with hgdemandimport.deactivated():
234 with hgdemandimport.deactivated():
230 import vcr as vcrmod
235 import vcr as vcrmod
231 import vcr.stubs as stubs
236 import vcr.stubs as stubs
232
237
233 vcr = vcrmod.VCR(
238 vcr = vcrmod.VCR(
234 serializer='json',
239 serializer='json',
235 before_record_request=sanitiserequest,
240 before_record_request=sanitiserequest,
236 before_record_response=sanitiseresponse,
241 before_record_response=sanitiseresponse,
237 custom_patches=[
242 custom_patches=[
238 (
243 (
239 urlmod,
244 urlmod,
240 'httpconnection',
245 'httpconnection',
241 stubs.VCRHTTPConnection,
246 stubs.VCRHTTPConnection,
242 ),
247 ),
243 (
248 (
244 urlmod,
249 urlmod,
245 'httpsconnection',
250 'httpsconnection',
246 stubs.VCRHTTPSConnection,
251 stubs.VCRHTTPSConnection,
247 ),
252 ),
248 ],
253 ],
249 )
254 )
250 vcr.register_matcher('hgmatcher', hgmatcher)
255 vcr.register_matcher('hgmatcher', hgmatcher)
251 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
256 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
252 return fn(*args, **kwargs)
257 return fn(*args, **kwargs)
253 return fn(*args, **kwargs)
258 return fn(*args, **kwargs)
254
259
255 inner.__name__ = fn.__name__
260 inner.__name__ = fn.__name__
256 inner.__doc__ = fn.__doc__
261 inner.__doc__ = fn.__doc__
257 return command(
262 return command(
258 name,
263 name,
259 fullflags,
264 fullflags,
260 spec,
265 spec,
261 helpcategory=helpcategory,
266 helpcategory=helpcategory,
262 optionalrepo=optionalrepo,
267 optionalrepo=optionalrepo,
263 )(inner)
268 )(inner)
264
269
265 return decorate
270 return decorate
266
271
267
272
268 def urlencodenested(params):
273 def urlencodenested(params):
269 """like urlencode, but works with nested parameters.
274 """like urlencode, but works with nested parameters.
270
275
271 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
276 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
272 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
277 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
273 urlencode. Note: the encoding is consistent with PHP's http_build_query.
278 urlencode. Note: the encoding is consistent with PHP's http_build_query.
274 """
279 """
275 flatparams = util.sortdict()
280 flatparams = util.sortdict()
276
281
277 def process(prefix, obj):
282 def process(prefix, obj):
278 if isinstance(obj, bool):
283 if isinstance(obj, bool):
279 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
284 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
280 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
285 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
281 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
286 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
282 if items is None:
287 if items is None:
283 flatparams[prefix] = obj
288 flatparams[prefix] = obj
284 else:
289 else:
285 for k, v in items(obj):
290 for k, v in items(obj):
286 if prefix:
291 if prefix:
287 process(b'%s[%s]' % (prefix, k), v)
292 process(b'%s[%s]' % (prefix, k), v)
288 else:
293 else:
289 process(k, v)
294 process(k, v)
290
295
291 process(b'', params)
296 process(b'', params)
292 return util.urlreq.urlencode(flatparams)
297 return util.urlreq.urlencode(flatparams)
293
298
294
299
295 def readurltoken(ui):
300 def readurltoken(ui):
296 """return conduit url, token and make sure they exist
301 """return conduit url, token and make sure they exist
297
302
298 Currently read from [auth] config section. In the future, it might
303 Currently read from [auth] config section. In the future, it might
299 make sense to read from .arcconfig and .arcrc as well.
304 make sense to read from .arcconfig and .arcrc as well.
300 """
305 """
301 url = ui.config(b'phabricator', b'url')
306 url = ui.config(b'phabricator', b'url')
302 if not url:
307 if not url:
303 raise error.Abort(
308 raise error.Abort(
304 _(b'config %s.%s is required') % (b'phabricator', b'url')
309 _(b'config %s.%s is required') % (b'phabricator', b'url')
305 )
310 )
306
311
307 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
312 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
308 token = None
313 token = None
309
314
310 if res:
315 if res:
311 group, auth = res
316 group, auth = res
312
317
313 ui.debug(b"using auth.%s.* for authentication\n" % group)
318 ui.debug(b"using auth.%s.* for authentication\n" % group)
314
319
315 token = auth.get(b'phabtoken')
320 token = auth.get(b'phabtoken')
316
321
317 if not token:
322 if not token:
318 raise error.Abort(
323 raise error.Abort(
319 _(b'Can\'t find conduit token associated to %s') % (url,)
324 _(b'Can\'t find conduit token associated to %s') % (url,)
320 )
325 )
321
326
322 return url, token
327 return url, token
323
328
324
329
325 def callconduit(ui, name, params):
330 def callconduit(ui, name, params):
326 """call Conduit API, params is a dict. return json.loads result, or None"""
331 """call Conduit API, params is a dict. return json.loads result, or None"""
327 host, token = readurltoken(ui)
332 host, token = readurltoken(ui)
328 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
333 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
329 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
334 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
330 params = params.copy()
335 params = params.copy()
331 params[b'__conduit__'] = {
336 params[b'__conduit__'] = {
332 b'token': token,
337 b'token': token,
333 }
338 }
334 rawdata = {
339 rawdata = {
335 b'params': templatefilters.json(params),
340 b'params': templatefilters.json(params),
336 b'output': b'json',
341 b'output': b'json',
337 b'__conduit__': 1,
342 b'__conduit__': 1,
338 }
343 }
339 data = urlencodenested(rawdata)
344 data = urlencodenested(rawdata)
340 curlcmd = ui.config(b'phabricator', b'curlcmd')
345 curlcmd = ui.config(b'phabricator', b'curlcmd')
341 if curlcmd:
346 if curlcmd:
342 sin, sout = procutil.popen2(
347 sin, sout = procutil.popen2(
343 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
348 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
344 )
349 )
345 sin.write(data)
350 sin.write(data)
346 sin.close()
351 sin.close()
347 body = sout.read()
352 body = sout.read()
348 else:
353 else:
349 urlopener = urlmod.opener(ui, authinfo)
354 urlopener = urlmod.opener(ui, authinfo)
350 request = util.urlreq.request(pycompat.strurl(url), data=data)
355 request = util.urlreq.request(pycompat.strurl(url), data=data)
351 with contextlib.closing(urlopener.open(request)) as rsp:
356 with contextlib.closing(urlopener.open(request)) as rsp:
352 body = rsp.read()
357 body = rsp.read()
353 ui.debug(b'Conduit Response: %s\n' % body)
358 ui.debug(b'Conduit Response: %s\n' % body)
354 parsed = pycompat.rapply(
359 parsed = pycompat.rapply(
355 lambda x: encoding.unitolocal(x)
360 lambda x: encoding.unitolocal(x)
356 if isinstance(x, pycompat.unicode)
361 if isinstance(x, pycompat.unicode)
357 else x,
362 else x,
358 # json.loads only accepts bytes from py3.6+
363 # json.loads only accepts bytes from py3.6+
359 pycompat.json_loads(encoding.unifromlocal(body)),
364 pycompat.json_loads(encoding.unifromlocal(body)),
360 )
365 )
361 if parsed.get(b'error_code'):
366 if parsed.get(b'error_code'):
362 msg = _(b'Conduit Error (%s): %s') % (
367 msg = _(b'Conduit Error (%s): %s') % (
363 parsed[b'error_code'],
368 parsed[b'error_code'],
364 parsed[b'error_info'],
369 parsed[b'error_info'],
365 )
370 )
366 raise error.Abort(msg)
371 raise error.Abort(msg)
367 return parsed[b'result']
372 return parsed[b'result']
368
373
369
374
370 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
375 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
371 def debugcallconduit(ui, repo, name):
376 def debugcallconduit(ui, repo, name):
372 """call Conduit API
377 """call Conduit API
373
378
374 Call parameters are read from stdin as a JSON blob. Result will be written
379 Call parameters are read from stdin as a JSON blob. Result will be written
375 to stdout as a JSON blob.
380 to stdout as a JSON blob.
376 """
381 """
377 # json.loads only accepts bytes from 3.6+
382 # json.loads only accepts bytes from 3.6+
378 rawparams = encoding.unifromlocal(ui.fin.read())
383 rawparams = encoding.unifromlocal(ui.fin.read())
379 # json.loads only returns unicode strings
384 # json.loads only returns unicode strings
380 params = pycompat.rapply(
385 params = pycompat.rapply(
381 lambda x: encoding.unitolocal(x)
386 lambda x: encoding.unitolocal(x)
382 if isinstance(x, pycompat.unicode)
387 if isinstance(x, pycompat.unicode)
383 else x,
388 else x,
384 pycompat.json_loads(rawparams),
389 pycompat.json_loads(rawparams),
385 )
390 )
386 # json.dumps only accepts unicode strings
391 # json.dumps only accepts unicode strings
387 result = pycompat.rapply(
392 result = pycompat.rapply(
388 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
393 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
389 callconduit(ui, name, params),
394 callconduit(ui, name, params),
390 )
395 )
391 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
396 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
392 ui.write(b'%s\n' % encoding.unitolocal(s))
397 ui.write(b'%s\n' % encoding.unitolocal(s))
393
398
394
399
395 def getrepophid(repo):
400 def getrepophid(repo):
396 """given callsign, return repository PHID or None"""
401 """given callsign, return repository PHID or None"""
397 # developer config: phabricator.repophid
402 # developer config: phabricator.repophid
398 repophid = repo.ui.config(b'phabricator', b'repophid')
403 repophid = repo.ui.config(b'phabricator', b'repophid')
399 if repophid:
404 if repophid:
400 return repophid
405 return repophid
401 callsign = repo.ui.config(b'phabricator', b'callsign')
406 callsign = repo.ui.config(b'phabricator', b'callsign')
402 if not callsign:
407 if not callsign:
403 return None
408 return None
404 query = callconduit(
409 query = callconduit(
405 repo.ui,
410 repo.ui,
406 b'diffusion.repository.search',
411 b'diffusion.repository.search',
407 {b'constraints': {b'callsigns': [callsign]}},
412 {b'constraints': {b'callsigns': [callsign]}},
408 )
413 )
409 if len(query[b'data']) == 0:
414 if len(query[b'data']) == 0:
410 return None
415 return None
411 repophid = query[b'data'][0][b'phid']
416 repophid = query[b'data'][0][b'phid']
412 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
417 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
413 return repophid
418 return repophid
414
419
415
420
416 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
417 _differentialrevisiondescre = re.compile(
422 _differentialrevisiondescre = re.compile(
418 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
419 )
424 )
420
425
421
426
422 def getoldnodedrevmap(repo, nodelist):
427 def getoldnodedrevmap(repo, nodelist):
423 """find previous nodes that has been sent to Phabricator
428 """find previous nodes that has been sent to Phabricator
424
429
425 return {node: (oldnode, Differential diff, Differential Revision ID)}
430 return {node: (oldnode, Differential diff, Differential Revision ID)}
426 for node in nodelist with known previous sent versions, or associated
431 for node in nodelist with known previous sent versions, or associated
427 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
432 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
428 be ``None``.
433 be ``None``.
429
434
430 Examines commit messages like "Differential Revision:" to get the
435 Examines commit messages like "Differential Revision:" to get the
431 association information.
436 association information.
432
437
433 If such commit message line is not found, examines all precursors and their
438 If such commit message line is not found, examines all precursors and their
434 tags. Tags with format like "D1234" are considered a match and the node
439 tags. Tags with format like "D1234" are considered a match and the node
435 with that tag, and the number after "D" (ex. 1234) will be returned.
440 with that tag, and the number after "D" (ex. 1234) will be returned.
436
441
437 The ``old node``, if not None, is guaranteed to be the last diff of
442 The ``old node``, if not None, is guaranteed to be the last diff of
438 corresponding Differential Revision, and exist in the repo.
443 corresponding Differential Revision, and exist in the repo.
439 """
444 """
440 unfi = repo.unfiltered()
445 unfi = repo.unfiltered()
441 has_node = unfi.changelog.index.has_node
446 has_node = unfi.changelog.index.has_node
442
447
443 result = {} # {node: (oldnode?, lastdiff?, drev)}
448 result = {} # {node: (oldnode?, lastdiff?, drev)}
444 toconfirm = {} # {node: (force, {precnode}, drev)}
449 toconfirm = {} # {node: (force, {precnode}, drev)}
445 for node in nodelist:
450 for node in nodelist:
446 ctx = unfi[node]
451 ctx = unfi[node]
447 # For tags like "D123", put them into "toconfirm" to verify later
452 # For tags like "D123", put them into "toconfirm" to verify later
448 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
453 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
449 for n in precnodes:
454 for n in precnodes:
450 if has_node(n):
455 if has_node(n):
451 for tag in unfi.nodetags(n):
456 for tag in unfi.nodetags(n):
452 m = _differentialrevisiontagre.match(tag)
457 m = _differentialrevisiontagre.match(tag)
453 if m:
458 if m:
454 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
459 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
455 break
460 break
456 else:
461 else:
457 continue # move to next predecessor
462 continue # move to next predecessor
458 break # found a tag, stop
463 break # found a tag, stop
459 else:
464 else:
460 # Check commit message
465 # Check commit message
461 m = _differentialrevisiondescre.search(ctx.description())
466 m = _differentialrevisiondescre.search(ctx.description())
462 if m:
467 if m:
463 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
468 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
464
469
465 # Double check if tags are genuine by collecting all old nodes from
470 # Double check if tags are genuine by collecting all old nodes from
466 # Phabricator, and expect precursors overlap with it.
471 # Phabricator, and expect precursors overlap with it.
467 if toconfirm:
472 if toconfirm:
468 drevs = [drev for force, precs, drev in toconfirm.values()]
473 drevs = [drev for force, precs, drev in toconfirm.values()]
469 alldiffs = callconduit(
474 alldiffs = callconduit(
470 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
475 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
471 )
476 )
472 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
477 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
473 for newnode, (force, precset, drev) in toconfirm.items():
478 for newnode, (force, precset, drev) in toconfirm.items():
474 diffs = [
479 diffs = [
475 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
480 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
476 ]
481 ]
477
482
478 # "precursors" as known by Phabricator
483 # "precursors" as known by Phabricator
479 phprecset = set(getnode(d) for d in diffs)
484 phprecset = set(getnode(d) for d in diffs)
480
485
481 # Ignore if precursors (Phabricator and local repo) do not overlap,
486 # Ignore if precursors (Phabricator and local repo) do not overlap,
482 # and force is not set (when commit message says nothing)
487 # and force is not set (when commit message says nothing)
483 if not force and not bool(phprecset & precset):
488 if not force and not bool(phprecset & precset):
484 tagname = b'D%d' % drev
489 tagname = b'D%d' % drev
485 tags.tag(
490 tags.tag(
486 repo,
491 repo,
487 tagname,
492 tagname,
488 nullid,
493 nullid,
489 message=None,
494 message=None,
490 user=None,
495 user=None,
491 date=None,
496 date=None,
492 local=True,
497 local=True,
493 )
498 )
494 unfi.ui.warn(
499 unfi.ui.warn(
495 _(
500 _(
496 b'D%d: local tag removed - does not match '
501 b'D%d: local tag removed - does not match '
497 b'Differential history\n'
502 b'Differential history\n'
498 )
503 )
499 % drev
504 % drev
500 )
505 )
501 continue
506 continue
502
507
503 # Find the last node using Phabricator metadata, and make sure it
508 # Find the last node using Phabricator metadata, and make sure it
504 # exists in the repo
509 # exists in the repo
505 oldnode = lastdiff = None
510 oldnode = lastdiff = None
506 if diffs:
511 if diffs:
507 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
512 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
508 oldnode = getnode(lastdiff)
513 oldnode = getnode(lastdiff)
509 if oldnode and not has_node(oldnode):
514 if oldnode and not has_node(oldnode):
510 oldnode = None
515 oldnode = None
511
516
512 result[newnode] = (oldnode, lastdiff, drev)
517 result[newnode] = (oldnode, lastdiff, drev)
513
518
514 return result
519 return result
515
520
516
521
def getdrevmap(repo, revs):
    """Map each revision in `revs` to its Differential Revision ID (or None).

    The ID is taken from the "Differential Revision:" line of the commit
    message when present; otherwise local D<number> tags are consulted.
    """
    drevids = {}
    for rev in revs:
        drevids[rev] = None
        ctx = repo[rev]
        # Prefer the commit message, which survives exchange.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch is not None:
            drevids[rev] = int(descmatch.group('id'))
            continue
        # Fall back to local tags attached to the node.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch is not None:
                drevids[rev] = int(tagmatch.group(1))
                break

    return drevids
538
543
539
544
def getdiff(ctx, diffopts):
    """Return the plain-text diff of ``ctx`` against its first parent.

    Only the patch body is produced — no user, date, or commit-message
    header.
    """
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # The labels are rendering hints only; keep just the raw chunk bytes.
    return b''.join(chunk for chunk, _label in chunks)
548
553
549
554
class DiffChangeType(object):
    """Change-type codes used by the Phabricator Differential API."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
559
564
560
565
class DiffFileType(object):
    """File-type codes used by the Phabricator Differential API."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
565
570
566
571
@attr.s
class phabhunk(dict):
    """A single Differential hunk, owned by a Differential change.

    The offset/length pairs describe where the hunk applies on each side;
    ``corpus`` carries the hunk body.  ``addLines``/``delLines`` are rolled
    up into the owning phabchange's totals by phabchange.addhunk().
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
580
585
581
586
@attr.s
class phabchange(object):
    """A Differential change: one file in a diff.

    Owns a list of Differential hunks and is itself owned by a phabdiff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata entry under ``old:``."""
        for key, value in list(self.metadata.items()):
            self.metadata[key.replace(b'new:', b'old:')] = value

    def addoldmode(self, value):
        """Record the previous unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append ``hunk`` (a phabhunk) and fold its line counts in."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows
        # them, and uses them to estimate how large a change a Revision is.
        # Also used in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
621
626
622
627
@attr.s
class phabdiff(object):
    """A Differential diff: corresponds to a single commit.

    Owns the per-file phabchange objects, keyed by their current path.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register ``change`` (a phabchange), keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        asdict = pycompat.byteskwargs(attr.asdict(change))
        self.changes[change.currentPath] = asdict
649
654
650
655
def maketext(pchange, ctx, fname):
    """Populate ``pchange`` with hunks from the text diff of ``fname``."""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A huge context turns the diff into an effectively full-file hunk set.
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, hunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, hunklines in hunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # The first line is the "@@ ..." marker; the corpus is the rest.
        corpus = b''.join(hunklines[1:])
        statlines = list(header) + list(hunklines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
679
684
680
685
def uploadchunks(fctx, fphid):
    """Upload a large binary file as separate chunks.

    Phab requests chunking over 8MiB, and splits into 4MiB chunks.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip byte ranges the server already holds.
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            payload = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', payload)
706
711
707
712
def uploadfile(fctx):
    """Upload a binary file to Phabricator and return its PHID.

    Aborts if the server ends up without a PHID for the content.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # An allocate call is required first: it reports whether an upload is
    # needed at all (Phab might already have the content) and whether the
    # upload must be chunked.
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # Large file: the server allocated a PHID for chunked upload.
            uploadchunks(fctx, fphid)
        else:
            # Small enough to go up in a single request.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
743
748
744
749
def addoldbinary(pchange, fctx, originalfname):
    """Add the metadata for the previous version of a binary file to the
    phabchange for the new version.
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents are identical.  If the change were left as IMAGE/BINARY
        # the web UI might try to display it, so mark it TEXT and mirror
        # the new-side metadata onto the old side.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ: upload the old version and record its details.
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
766
771
767
772
def makebinary(pchange, fctx):
    """Populate ``pchange`` for the binary file in ``fctx``."""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Mark images as such so the web UI can render them inline.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
780
785
781
786
# git file modes keyed by hg flag: "l" symlink, "x" executable, "" regular.
# Copied from mercurial/patch.py
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
784
789
785
790
def notutf8(fctx):
    """Detect non-UTF-8 text files, since Phabricator requires them to be
    marked as binary.
    """
    try:
        fctx.data().decode('utf-8')
        # The parent content is inspected too, when there is one, so the
        # old side of the change can also be sent as text.
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
801
806
802
807
def addremoved(pdiff, ctx, removed):
    """Add removed files to the phabdiff.  Shouldn't include moves."""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary/non-UTF-8 deletions carry no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
815
820
816
821
def addmodified(pdiff, ctx, modified):
    """Add modified files to the phabdiff."""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[ctx[fname].flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only report modes when they actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
835
840
836
841
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source is gone: this is a move.  Record the away-side
                # change and drop the source from `removed` so addremoved()
                # will not also report it as a plain deletion.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # The source was already moved once; an additional copy
                # upgrades the away-side change to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                # Copies/moves of binaries also carry the old-side blob.
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the away-side changes only after all adds have been walked, so
    # every extra copy has been accounted for (e.g. MULTICOPY upgrades).
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
900
905
901
906
def creatediff(ctx):
    """Create a Differential Diff for ``ctx`` via the conduit API.

    Returns the "differential.creatediff" response; aborts when the server
    returns nothing.
    """
    repo = ctx.repo()
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    repophid = getrepophid(repo)
    if repophid:
        pdiff.repositoryPHID = repophid
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded() strips moved files out of `removed`, so it has to run
    # before addremoved().
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
927
932
928
933
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    # First property: "hg:meta" carries the fields needed to reconstruct
    # the changeset exactly (user, date, branch, node and parent hashes).
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    # Second property: "local:commits" describes the commit keyed by its
    # node hash, with the author split into name and email parts.
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
965
970
966
971
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of conduit responses.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs to decide whether the patch content
        # itself changed since the previously-sent node.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    # Only the fields phabsend manages are forwarded as transactions.
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1036
1041
1037
1042
def userphids(repo, names):
    """Convert a list of user names to a list of PHIDs.

    Names are matched case-insensitively.  Raises ``error.Abort`` listing
    any names the server could not resolve, because an unknown username is
    not reported as an error by the conduit API itself.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    # Set comprehension instead of set(generator) — same result, idiomatic.
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1053
1058
1054
1059
1055 @vcrcommand(
1060 @vcrcommand(
1056 b'phabsend',
1061 b'phabsend',
1057 [
1062 [
1058 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1063 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1059 (b'', b'amend', True, _(b'update commit messages')),
1064 (b'', b'amend', True, _(b'update commit messages')),
1060 (b'', b'reviewer', [], _(b'specify reviewers')),
1065 (b'', b'reviewer', [], _(b'specify reviewers')),
1061 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1066 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1062 (
1067 (
1063 b'm',
1068 b'm',
1064 b'comment',
1069 b'comment',
1065 b'',
1070 b'',
1066 _(b'add a comment to Revisions with new/updated Diffs'),
1071 _(b'add a comment to Revisions with new/updated Diffs'),
1067 ),
1072 ),
1068 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1073 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1069 ],
1074 ],
1070 _(b'REV [OPTIONS]'),
1075 _(b'REV [OPTIONS]'),
1071 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1076 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1072 )
1077 )
1073 def phabsend(ui, repo, *revs, **opts):
1078 def phabsend(ui, repo, *revs, **opts):
1074 """upload changesets to Phabricator
1079 """upload changesets to Phabricator
1075
1080
1076 If there are multiple revisions specified, they will be send as a stack
1081 If there are multiple revisions specified, they will be send as a stack
1077 with a linear dependencies relationship using the order specified by the
1082 with a linear dependencies relationship using the order specified by the
1078 revset.
1083 revset.
1079
1084
1080 For the first time uploading changesets, local tags will be created to
1085 For the first time uploading changesets, local tags will be created to
1081 maintain the association. After the first time, phabsend will check
1086 maintain the association. After the first time, phabsend will check
1082 obsstore and tags information so it can figure out whether to update an
1087 obsstore and tags information so it can figure out whether to update an
1083 existing Differential Revision, or create a new one.
1088 existing Differential Revision, or create a new one.
1084
1089
1085 If --amend is set, update commit messages so they have the
1090 If --amend is set, update commit messages so they have the
1086 ``Differential Revision`` URL, remove related tags. This is similar to what
1091 ``Differential Revision`` URL, remove related tags. This is similar to what
1087 arcanist will do, and is more desired in author-push workflows. Otherwise,
1092 arcanist will do, and is more desired in author-push workflows. Otherwise,
1088 use local tags to record the ``Differential Revision`` association.
1093 use local tags to record the ``Differential Revision`` association.
1089
1094
1090 The --confirm option lets you confirm changesets before sending them. You
1095 The --confirm option lets you confirm changesets before sending them. You
1091 can also add following to your configuration file to make it default
1096 can also add following to your configuration file to make it default
1092 behaviour::
1097 behaviour::
1093
1098
1094 [phabsend]
1099 [phabsend]
1095 confirm = true
1100 confirm = true
1096
1101
1097 phabsend will check obsstore and the above association to decide whether to
1102 phabsend will check obsstore and the above association to decide whether to
1098 update an existing Differential Revision, or create a new one.
1103 update an existing Differential Revision, or create a new one.
1099 """
1104 """
1100 opts = pycompat.byteskwargs(opts)
1105 opts = pycompat.byteskwargs(opts)
1101 revs = list(revs) + opts.get(b'rev', [])
1106 revs = list(revs) + opts.get(b'rev', [])
1102 revs = scmutil.revrange(repo, revs)
1107 revs = scmutil.revrange(repo, revs)
1103 revs.sort() # ascending order to preserve topological parent/child in phab
1108 revs.sort() # ascending order to preserve topological parent/child in phab
1104
1109
1105 if not revs:
1110 if not revs:
1106 raise error.Abort(_(b'phabsend requires at least one changeset'))
1111 raise error.Abort(_(b'phabsend requires at least one changeset'))
1107 if opts.get(b'amend'):
1112 if opts.get(b'amend'):
1108 cmdutil.checkunfinished(repo)
1113 cmdutil.checkunfinished(repo)
1109
1114
1110 # {newnode: (oldnode, olddiff, olddrev}
1115 # {newnode: (oldnode, olddiff, olddrev}
1111 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1116 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1112
1117
1113 confirm = ui.configbool(b'phabsend', b'confirm')
1118 confirm = ui.configbool(b'phabsend', b'confirm')
1114 confirm |= bool(opts.get(b'confirm'))
1119 confirm |= bool(opts.get(b'confirm'))
1115 if confirm:
1120 if confirm:
1116 confirmed = _confirmbeforesend(repo, revs, oldmap)
1121 confirmed = _confirmbeforesend(repo, revs, oldmap)
1117 if not confirmed:
1122 if not confirmed:
1118 raise error.Abort(_(b'phabsend cancelled'))
1123 raise error.Abort(_(b'phabsend cancelled'))
1119
1124
1120 actions = []
1125 actions = []
1121 reviewers = opts.get(b'reviewer', [])
1126 reviewers = opts.get(b'reviewer', [])
1122 blockers = opts.get(b'blocker', [])
1127 blockers = opts.get(b'blocker', [])
1123 phids = []
1128 phids = []
1124 if reviewers:
1129 if reviewers:
1125 phids.extend(userphids(repo, reviewers))
1130 phids.extend(userphids(repo, reviewers))
1126 if blockers:
1131 if blockers:
1127 phids.extend(
1132 phids.extend(
1128 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1133 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1129 )
1134 )
1130 if phids:
1135 if phids:
1131 actions.append({b'type': b'reviewers.add', b'value': phids})
1136 actions.append({b'type': b'reviewers.add', b'value': phids})
1132
1137
1133 drevids = [] # [int]
1138 drevids = [] # [int]
1134 diffmap = {} # {newnode: diff}
1139 diffmap = {} # {newnode: diff}
1135
1140
1136 # Send patches one by one so we know their Differential Revision PHIDs and
1141 # Send patches one by one so we know their Differential Revision PHIDs and
1137 # can provide dependency relationship
1142 # can provide dependency relationship
1138 lastrevphid = None
1143 lastrevphid = None
1139 for rev in revs:
1144 for rev in revs:
1140 ui.debug(b'sending rev %d\n' % rev)
1145 ui.debug(b'sending rev %d\n' % rev)
1141 ctx = repo[rev]
1146 ctx = repo[rev]
1142
1147
1143 # Get Differential Revision ID
1148 # Get Differential Revision ID
1144 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1149 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1145 if oldnode != ctx.node() or opts.get(b'amend'):
1150 if oldnode != ctx.node() or opts.get(b'amend'):
1146 # Create or update Differential Revision
1151 # Create or update Differential Revision
1147 revision, diff = createdifferentialrevision(
1152 revision, diff = createdifferentialrevision(
1148 ctx,
1153 ctx,
1149 revid,
1154 revid,
1150 lastrevphid,
1155 lastrevphid,
1151 oldnode,
1156 oldnode,
1152 olddiff,
1157 olddiff,
1153 actions,
1158 actions,
1154 opts.get(b'comment'),
1159 opts.get(b'comment'),
1155 )
1160 )
1156 diffmap[ctx.node()] = diff
1161 diffmap[ctx.node()] = diff
1157 newrevid = int(revision[b'object'][b'id'])
1162 newrevid = int(revision[b'object'][b'id'])
1158 newrevphid = revision[b'object'][b'phid']
1163 newrevphid = revision[b'object'][b'phid']
1159 if revid:
1164 if revid:
1160 action = b'updated'
1165 action = b'updated'
1161 else:
1166 else:
1162 action = b'created'
1167 action = b'created'
1163
1168
1164 # Create a local tag to note the association, if commit message
1169 # Create a local tag to note the association, if commit message
1165 # does not have it already
1170 # does not have it already
1166 m = _differentialrevisiondescre.search(ctx.description())
1171 m = _differentialrevisiondescre.search(ctx.description())
1167 if not m or int(m.group('id')) != newrevid:
1172 if not m or int(m.group('id')) != newrevid:
1168 tagname = b'D%d' % newrevid
1173 tagname = b'D%d' % newrevid
1169 tags.tag(
1174 tags.tag(
1170 repo,
1175 repo,
1171 tagname,
1176 tagname,
1172 ctx.node(),
1177 ctx.node(),
1173 message=None,
1178 message=None,
1174 user=None,
1179 user=None,
1175 date=None,
1180 date=None,
1176 local=True,
1181 local=True,
1177 )
1182 )
1178 else:
1183 else:
1179 # Nothing changed. But still set "newrevphid" so the next revision
1184 # Nothing changed. But still set "newrevphid" so the next revision
1180 # could depend on this one and "newrevid" for the summary line.
1185 # could depend on this one and "newrevid" for the summary line.
1181 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1186 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1182 newrevid = revid
1187 newrevid = revid
1183 action = b'skipped'
1188 action = b'skipped'
1184
1189
1185 actiondesc = ui.label(
1190 actiondesc = ui.label(
1186 {
1191 {
1187 b'created': _(b'created'),
1192 b'created': _(b'created'),
1188 b'skipped': _(b'skipped'),
1193 b'skipped': _(b'skipped'),
1189 b'updated': _(b'updated'),
1194 b'updated': _(b'updated'),
1190 }[action],
1195 }[action],
1191 b'phabricator.action.%s' % action,
1196 b'phabricator.action.%s' % action,
1192 )
1197 )
1193 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1198 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1194 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1199 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1195 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1200 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1196 ui.write(
1201 ui.write(
1197 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1202 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1198 )
1203 )
1199 drevids.append(newrevid)
1204 drevids.append(newrevid)
1200 lastrevphid = newrevphid
1205 lastrevphid = newrevphid
1201
1206
1202 # Update commit messages and remove tags
1207 # Update commit messages and remove tags
1203 if opts.get(b'amend'):
1208 if opts.get(b'amend'):
1204 unfi = repo.unfiltered()
1209 unfi = repo.unfiltered()
1205 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1210 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1206 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1211 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1207 wnode = unfi[b'.'].node()
1212 wnode = unfi[b'.'].node()
1208 mapping = {} # {oldnode: [newnode]}
1213 mapping = {} # {oldnode: [newnode]}
1209 for i, rev in enumerate(revs):
1214 for i, rev in enumerate(revs):
1210 old = unfi[rev]
1215 old = unfi[rev]
1211 drevid = drevids[i]
1216 drevid = drevids[i]
1212 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1217 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1213 newdesc = getdescfromdrev(drev)
1218 newdesc = getdescfromdrev(drev)
1214 # Make sure commit message contain "Differential Revision"
1219 # Make sure commit message contain "Differential Revision"
1215 if old.description() != newdesc:
1220 if old.description() != newdesc:
1216 if old.phase() == phases.public:
1221 if old.phase() == phases.public:
1217 ui.warn(
1222 ui.warn(
1218 _(b"warning: not updating public commit %s\n")
1223 _(b"warning: not updating public commit %s\n")
1219 % scmutil.formatchangeid(old)
1224 % scmutil.formatchangeid(old)
1220 )
1225 )
1221 continue
1226 continue
1222 parents = [
1227 parents = [
1223 mapping.get(old.p1().node(), (old.p1(),))[0],
1228 mapping.get(old.p1().node(), (old.p1(),))[0],
1224 mapping.get(old.p2().node(), (old.p2(),))[0],
1229 mapping.get(old.p2().node(), (old.p2(),))[0],
1225 ]
1230 ]
1226 new = context.metadataonlyctx(
1231 new = context.metadataonlyctx(
1227 repo,
1232 repo,
1228 old,
1233 old,
1229 parents=parents,
1234 parents=parents,
1230 text=newdesc,
1235 text=newdesc,
1231 user=old.user(),
1236 user=old.user(),
1232 date=old.date(),
1237 date=old.date(),
1233 extra=old.extra(),
1238 extra=old.extra(),
1234 )
1239 )
1235
1240
1236 newnode = new.commit()
1241 newnode = new.commit()
1237
1242
1238 mapping[old.node()] = [newnode]
1243 mapping[old.node()] = [newnode]
1239 # Update diff property
1244 # Update diff property
1240 # If it fails just warn and keep going, otherwise the DREV
1245 # If it fails just warn and keep going, otherwise the DREV
1241 # associations will be lost
1246 # associations will be lost
1242 try:
1247 try:
1243 writediffproperties(unfi[newnode], diffmap[old.node()])
1248 writediffproperties(unfi[newnode], diffmap[old.node()])
1244 except util.urlerr.urlerror:
1249 except util.urlerr.urlerror:
1245 ui.warnnoi18n(
1250 ui.warnnoi18n(
1246 b'Failed to update metadata for D%d\n' % drevid
1251 b'Failed to update metadata for D%d\n' % drevid
1247 )
1252 )
1248 # Remove local tags since it's no longer necessary
1253 # Remove local tags since it's no longer necessary
1249 tagname = b'D%d' % drevid
1254 tagname = b'D%d' % drevid
1250 if tagname in repo.tags():
1255 if tagname in repo.tags():
1251 tags.tag(
1256 tags.tag(
1252 repo,
1257 repo,
1253 tagname,
1258 tagname,
1254 nullid,
1259 nullid,
1255 message=None,
1260 message=None,
1256 user=None,
1261 user=None,
1257 date=None,
1262 date=None,
1258 local=True,
1263 local=True,
1259 )
1264 )
1260 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1265 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1261 if wnode in mapping:
1266 if wnode in mapping:
1262 unfi.setparents(mapping[wnode][0])
1267 unfi.setparents(mapping[wnode][0])
1263
1268
1264
1269
1265 # Map from "hg:meta" keys to header understood by "hg import". The order is
1270 # Map from "hg:meta" keys to header understood by "hg import". The order is
1266 # consistent with "hg export" output.
1271 # consistent with "hg export" output.
1267 _metanamemap = util.sortdict(
1272 _metanamemap = util.sortdict(
1268 [
1273 [
1269 (b'user', b'User'),
1274 (b'user', b'User'),
1270 (b'date', b'Date'),
1275 (b'date', b'Date'),
1271 (b'branch', b'Branch'),
1276 (b'branch', b'Branch'),
1272 (b'node', b'Node ID'),
1277 (b'node', b'Node ID'),
1273 (b'parent', b'Parent '),
1278 (b'parent', b'Parent '),
1274 ]
1279 ]
1275 )
1280 )
1276
1281
1277
1282
1278 def _confirmbeforesend(repo, revs, oldmap):
1283 def _confirmbeforesend(repo, revs, oldmap):
1279 url, token = readurltoken(repo.ui)
1284 url, token = readurltoken(repo.ui)
1280 ui = repo.ui
1285 ui = repo.ui
1281 for rev in revs:
1286 for rev in revs:
1282 ctx = repo[rev]
1287 ctx = repo[rev]
1283 desc = ctx.description().splitlines()[0]
1288 desc = ctx.description().splitlines()[0]
1284 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1289 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1285 if drevid:
1290 if drevid:
1286 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1291 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1287 else:
1292 else:
1288 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1293 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1289
1294
1290 ui.write(
1295 ui.write(
1291 _(b'%s - %s: %s\n')
1296 _(b'%s - %s: %s\n')
1292 % (
1297 % (
1293 drevdesc,
1298 drevdesc,
1294 ui.label(bytes(ctx), b'phabricator.node'),
1299 ui.label(bytes(ctx), b'phabricator.node'),
1295 ui.label(desc, b'phabricator.desc'),
1300 ui.label(desc, b'phabricator.desc'),
1296 )
1301 )
1297 )
1302 )
1298
1303
1299 if ui.promptchoice(
1304 if ui.promptchoice(
1300 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1305 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1301 ):
1306 ):
1302 return False
1307 return False
1303
1308
1304 return True
1309 return True
1305
1310
1306
1311
1307 _knownstatusnames = {
1312 _knownstatusnames = {
1308 b'accepted',
1313 b'accepted',
1309 b'needsreview',
1314 b'needsreview',
1310 b'needsrevision',
1315 b'needsrevision',
1311 b'closed',
1316 b'closed',
1312 b'abandoned',
1317 b'abandoned',
1313 b'changesplanned',
1318 b'changesplanned',
1314 }
1319 }
1315
1320
1316
1321
1317 def _getstatusname(drev):
1322 def _getstatusname(drev):
1318 """get normalized status name from a Differential Revision"""
1323 """get normalized status name from a Differential Revision"""
1319 return drev[b'statusName'].replace(b' ', b'').lower()
1324 return drev[b'statusName'].replace(b' ', b'').lower()
1320
1325
1321
1326
1322 # Small language to specify differential revisions. Support symbols: (), :X,
1327 # Small language to specify differential revisions. Support symbols: (), :X,
1323 # +, and -.
1328 # +, and -.
1324
1329
1325 _elements = {
1330 _elements = {
1326 # token-type: binding-strength, primary, prefix, infix, suffix
1331 # token-type: binding-strength, primary, prefix, infix, suffix
1327 b'(': (12, None, (b'group', 1, b')'), None, None),
1332 b'(': (12, None, (b'group', 1, b')'), None, None),
1328 b':': (8, None, (b'ancestors', 8), None, None),
1333 b':': (8, None, (b'ancestors', 8), None, None),
1329 b'&': (5, None, None, (b'and_', 5), None),
1334 b'&': (5, None, None, (b'and_', 5), None),
1330 b'+': (4, None, None, (b'add', 4), None),
1335 b'+': (4, None, None, (b'add', 4), None),
1331 b'-': (4, None, None, (b'sub', 4), None),
1336 b'-': (4, None, None, (b'sub', 4), None),
1332 b')': (0, None, None, None, None),
1337 b')': (0, None, None, None, None),
1333 b'symbol': (0, b'symbol', None, None, None),
1338 b'symbol': (0, b'symbol', None, None, None),
1334 b'end': (0, None, None, None, None),
1339 b'end': (0, None, None, None, None),
1335 }
1340 }
1336
1341
1337
1342
1338 def _tokenize(text):
1343 def _tokenize(text):
1339 view = memoryview(text) # zero-copy slice
1344 view = memoryview(text) # zero-copy slice
1340 special = b'():+-& '
1345 special = b'():+-& '
1341 pos = 0
1346 pos = 0
1342 length = len(text)
1347 length = len(text)
1343 while pos < length:
1348 while pos < length:
1344 symbol = b''.join(
1349 symbol = b''.join(
1345 itertools.takewhile(
1350 itertools.takewhile(
1346 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1351 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1347 )
1352 )
1348 )
1353 )
1349 if symbol:
1354 if symbol:
1350 yield (b'symbol', symbol, pos)
1355 yield (b'symbol', symbol, pos)
1351 pos += len(symbol)
1356 pos += len(symbol)
1352 else: # special char, ignore space
1357 else: # special char, ignore space
1353 if text[pos : pos + 1] != b' ':
1358 if text[pos : pos + 1] != b' ':
1354 yield (text[pos : pos + 1], None, pos)
1359 yield (text[pos : pos + 1], None, pos)
1355 pos += 1
1360 pos += 1
1356 yield (b'end', None, pos)
1361 yield (b'end', None, pos)
1357
1362
1358
1363
1359 def _parse(text):
1364 def _parse(text):
1360 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1365 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1361 if pos != len(text):
1366 if pos != len(text):
1362 raise error.ParseError(b'invalid token', pos)
1367 raise error.ParseError(b'invalid token', pos)
1363 return tree
1368 return tree
1364
1369
1365
1370
1366 def _parsedrev(symbol):
1371 def _parsedrev(symbol):
1367 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1372 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1368 if symbol.startswith(b'D') and symbol[1:].isdigit():
1373 if symbol.startswith(b'D') and symbol[1:].isdigit():
1369 return int(symbol[1:])
1374 return int(symbol[1:])
1370 if symbol.isdigit():
1375 if symbol.isdigit():
1371 return int(symbol)
1376 return int(symbol)
1372
1377
1373
1378
1374 def _prefetchdrevs(tree):
1379 def _prefetchdrevs(tree):
1375 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1380 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1376 drevs = set()
1381 drevs = set()
1377 ancestordrevs = set()
1382 ancestordrevs = set()
1378 op = tree[0]
1383 op = tree[0]
1379 if op == b'symbol':
1384 if op == b'symbol':
1380 r = _parsedrev(tree[1])
1385 r = _parsedrev(tree[1])
1381 if r:
1386 if r:
1382 drevs.add(r)
1387 drevs.add(r)
1383 elif op == b'ancestors':
1388 elif op == b'ancestors':
1384 r, a = _prefetchdrevs(tree[1])
1389 r, a = _prefetchdrevs(tree[1])
1385 drevs.update(r)
1390 drevs.update(r)
1386 ancestordrevs.update(r)
1391 ancestordrevs.update(r)
1387 ancestordrevs.update(a)
1392 ancestordrevs.update(a)
1388 else:
1393 else:
1389 for t in tree[1:]:
1394 for t in tree[1:]:
1390 r, a = _prefetchdrevs(t)
1395 r, a = _prefetchdrevs(t)
1391 drevs.update(r)
1396 drevs.update(r)
1392 ancestordrevs.update(a)
1397 ancestordrevs.update(a)
1393 return drevs, ancestordrevs
1398 return drevs, ancestordrevs
1394
1399
1395
1400
1396 def querydrev(repo, spec):
1401 def querydrev(repo, spec):
1397 """return a list of "Differential Revision" dicts
1402 """return a list of "Differential Revision" dicts
1398
1403
1399 spec is a string using a simple query language, see docstring in phabread
1404 spec is a string using a simple query language, see docstring in phabread
1400 for details.
1405 for details.
1401
1406
1402 A "Differential Revision dict" looks like:
1407 A "Differential Revision dict" looks like:
1403
1408
1404 {
1409 {
1405 "id": "2",
1410 "id": "2",
1406 "phid": "PHID-DREV-672qvysjcczopag46qty",
1411 "phid": "PHID-DREV-672qvysjcczopag46qty",
1407 "title": "example",
1412 "title": "example",
1408 "uri": "https://phab.example.com/D2",
1413 "uri": "https://phab.example.com/D2",
1409 "dateCreated": "1499181406",
1414 "dateCreated": "1499181406",
1410 "dateModified": "1499182103",
1415 "dateModified": "1499182103",
1411 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1416 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1412 "status": "0",
1417 "status": "0",
1413 "statusName": "Needs Review",
1418 "statusName": "Needs Review",
1414 "properties": [],
1419 "properties": [],
1415 "branch": null,
1420 "branch": null,
1416 "summary": "",
1421 "summary": "",
1417 "testPlan": "",
1422 "testPlan": "",
1418 "lineCount": "2",
1423 "lineCount": "2",
1419 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1424 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1420 "diffs": [
1425 "diffs": [
1421 "3",
1426 "3",
1422 "4",
1427 "4",
1423 ],
1428 ],
1424 "commits": [],
1429 "commits": [],
1425 "reviewers": [],
1430 "reviewers": [],
1426 "ccs": [],
1431 "ccs": [],
1427 "hashes": [],
1432 "hashes": [],
1428 "auxiliary": {
1433 "auxiliary": {
1429 "phabricator:projects": [],
1434 "phabricator:projects": [],
1430 "phabricator:depends-on": [
1435 "phabricator:depends-on": [
1431 "PHID-DREV-gbapp366kutjebt7agcd"
1436 "PHID-DREV-gbapp366kutjebt7agcd"
1432 ]
1437 ]
1433 },
1438 },
1434 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1439 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1435 "sourcePath": null
1440 "sourcePath": null
1436 }
1441 }
1437 """
1442 """
1438
1443
1439 def fetch(params):
1444 def fetch(params):
1440 """params -> single drev or None"""
1445 """params -> single drev or None"""
1441 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1446 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1442 if key in prefetched:
1447 if key in prefetched:
1443 return prefetched[key]
1448 return prefetched[key]
1444 drevs = callconduit(repo.ui, b'differential.query', params)
1449 drevs = callconduit(repo.ui, b'differential.query', params)
1445 # Fill prefetched with the result
1450 # Fill prefetched with the result
1446 for drev in drevs:
1451 for drev in drevs:
1447 prefetched[drev[b'phid']] = drev
1452 prefetched[drev[b'phid']] = drev
1448 prefetched[int(drev[b'id'])] = drev
1453 prefetched[int(drev[b'id'])] = drev
1449 if key not in prefetched:
1454 if key not in prefetched:
1450 raise error.Abort(
1455 raise error.Abort(
1451 _(b'cannot get Differential Revision %r') % params
1456 _(b'cannot get Differential Revision %r') % params
1452 )
1457 )
1453 return prefetched[key]
1458 return prefetched[key]
1454
1459
1455 def getstack(topdrevids):
1460 def getstack(topdrevids):
1456 """given a top, get a stack from the bottom, [id] -> [id]"""
1461 """given a top, get a stack from the bottom, [id] -> [id]"""
1457 visited = set()
1462 visited = set()
1458 result = []
1463 result = []
1459 queue = [{b'ids': [i]} for i in topdrevids]
1464 queue = [{b'ids': [i]} for i in topdrevids]
1460 while queue:
1465 while queue:
1461 params = queue.pop()
1466 params = queue.pop()
1462 drev = fetch(params)
1467 drev = fetch(params)
1463 if drev[b'id'] in visited:
1468 if drev[b'id'] in visited:
1464 continue
1469 continue
1465 visited.add(drev[b'id'])
1470 visited.add(drev[b'id'])
1466 result.append(int(drev[b'id']))
1471 result.append(int(drev[b'id']))
1467 auxiliary = drev.get(b'auxiliary', {})
1472 auxiliary = drev.get(b'auxiliary', {})
1468 depends = auxiliary.get(b'phabricator:depends-on', [])
1473 depends = auxiliary.get(b'phabricator:depends-on', [])
1469 for phid in depends:
1474 for phid in depends:
1470 queue.append({b'phids': [phid]})
1475 queue.append({b'phids': [phid]})
1471 result.reverse()
1476 result.reverse()
1472 return smartset.baseset(result)
1477 return smartset.baseset(result)
1473
1478
1474 # Initialize prefetch cache
1479 # Initialize prefetch cache
1475 prefetched = {} # {id or phid: drev}
1480 prefetched = {} # {id or phid: drev}
1476
1481
1477 tree = _parse(spec)
1482 tree = _parse(spec)
1478 drevs, ancestordrevs = _prefetchdrevs(tree)
1483 drevs, ancestordrevs = _prefetchdrevs(tree)
1479
1484
1480 # developer config: phabricator.batchsize
1485 # developer config: phabricator.batchsize
1481 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1486 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1482
1487
1483 # Prefetch Differential Revisions in batch
1488 # Prefetch Differential Revisions in batch
1484 tofetch = set(drevs)
1489 tofetch = set(drevs)
1485 for r in ancestordrevs:
1490 for r in ancestordrevs:
1486 tofetch.update(range(max(1, r - batchsize), r + 1))
1491 tofetch.update(range(max(1, r - batchsize), r + 1))
1487 if drevs:
1492 if drevs:
1488 fetch({b'ids': list(tofetch)})
1493 fetch({b'ids': list(tofetch)})
1489 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1494 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1490
1495
1491 # Walk through the tree, return smartsets
1496 # Walk through the tree, return smartsets
1492 def walk(tree):
1497 def walk(tree):
1493 op = tree[0]
1498 op = tree[0]
1494 if op == b'symbol':
1499 if op == b'symbol':
1495 drev = _parsedrev(tree[1])
1500 drev = _parsedrev(tree[1])
1496 if drev:
1501 if drev:
1497 return smartset.baseset([drev])
1502 return smartset.baseset([drev])
1498 elif tree[1] in _knownstatusnames:
1503 elif tree[1] in _knownstatusnames:
1499 drevs = [
1504 drevs = [
1500 r
1505 r
1501 for r in validids
1506 for r in validids
1502 if _getstatusname(prefetched[r]) == tree[1]
1507 if _getstatusname(prefetched[r]) == tree[1]
1503 ]
1508 ]
1504 return smartset.baseset(drevs)
1509 return smartset.baseset(drevs)
1505 else:
1510 else:
1506 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1511 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1507 elif op in {b'and_', b'add', b'sub'}:
1512 elif op in {b'and_', b'add', b'sub'}:
1508 assert len(tree) == 3
1513 assert len(tree) == 3
1509 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1514 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1510 elif op == b'group':
1515 elif op == b'group':
1511 return walk(tree[1])
1516 return walk(tree[1])
1512 elif op == b'ancestors':
1517 elif op == b'ancestors':
1513 return getstack(walk(tree[1]))
1518 return getstack(walk(tree[1]))
1514 else:
1519 else:
1515 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1520 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1516
1521
1517 return [prefetched[r] for r in walk(tree)]
1522 return [prefetched[r] for r in walk(tree)]
1518
1523
1519
1524
1520 def getdescfromdrev(drev):
1525 def getdescfromdrev(drev):
1521 """get description (commit message) from "Differential Revision"
1526 """get description (commit message) from "Differential Revision"
1522
1527
1523 This is similar to differential.getcommitmessage API. But we only care
1528 This is similar to differential.getcommitmessage API. But we only care
1524 about limited fields: title, summary, test plan, and URL.
1529 about limited fields: title, summary, test plan, and URL.
1525 """
1530 """
1526 title = drev[b'title']
1531 title = drev[b'title']
1527 summary = drev[b'summary'].rstrip()
1532 summary = drev[b'summary'].rstrip()
1528 testplan = drev[b'testPlan'].rstrip()
1533 testplan = drev[b'testPlan'].rstrip()
1529 if testplan:
1534 if testplan:
1530 testplan = b'Test Plan:\n%s' % testplan
1535 testplan = b'Test Plan:\n%s' % testplan
1531 uri = b'Differential Revision: %s' % drev[b'uri']
1536 uri = b'Differential Revision: %s' % drev[b'uri']
1532 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1537 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1533
1538
1534
1539
1535 def getdiffmeta(diff):
1540 def getdiffmeta(diff):
1536 """get commit metadata (date, node, user, p1) from a diff object
1541 """get commit metadata (date, node, user, p1) from a diff object
1537
1542
1538 The metadata could be "hg:meta", sent by phabsend, like:
1543 The metadata could be "hg:meta", sent by phabsend, like:
1539
1544
1540 "properties": {
1545 "properties": {
1541 "hg:meta": {
1546 "hg:meta": {
1542 "date": "1499571514 25200",
1547 "date": "1499571514 25200",
1543 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1548 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1544 "user": "Foo Bar <foo@example.com>",
1549 "user": "Foo Bar <foo@example.com>",
1545 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1550 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1546 }
1551 }
1547 }
1552 }
1548
1553
1549 Or converted from "local:commits", sent by "arc", like:
1554 Or converted from "local:commits", sent by "arc", like:
1550
1555
1551 "properties": {
1556 "properties": {
1552 "local:commits": {
1557 "local:commits": {
1553 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1558 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1554 "author": "Foo Bar",
1559 "author": "Foo Bar",
1555 "time": 1499546314,
1560 "time": 1499546314,
1556 "branch": "default",
1561 "branch": "default",
1557 "tag": "",
1562 "tag": "",
1558 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1563 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1559 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1564 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1560 "local": "1000",
1565 "local": "1000",
1561 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1566 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1562 "summary": "...",
1567 "summary": "...",
1563 "message": "...",
1568 "message": "...",
1564 "authorEmail": "foo@example.com"
1569 "authorEmail": "foo@example.com"
1565 }
1570 }
1566 }
1571 }
1567 }
1572 }
1568
1573
1569 Note: metadata extracted from "local:commits" will lose time zone
1574 Note: metadata extracted from "local:commits" will lose time zone
1570 information.
1575 information.
1571 """
1576 """
1572 props = diff.get(b'properties') or {}
1577 props = diff.get(b'properties') or {}
1573 meta = props.get(b'hg:meta')
1578 meta = props.get(b'hg:meta')
1574 if not meta:
1579 if not meta:
1575 if props.get(b'local:commits'):
1580 if props.get(b'local:commits'):
1576 commit = sorted(props[b'local:commits'].values())[0]
1581 commit = sorted(props[b'local:commits'].values())[0]
1577 meta = {}
1582 meta = {}
1578 if b'author' in commit and b'authorEmail' in commit:
1583 if b'author' in commit and b'authorEmail' in commit:
1579 meta[b'user'] = b'%s <%s>' % (
1584 meta[b'user'] = b'%s <%s>' % (
1580 commit[b'author'],
1585 commit[b'author'],
1581 commit[b'authorEmail'],
1586 commit[b'authorEmail'],
1582 )
1587 )
1583 if b'time' in commit:
1588 if b'time' in commit:
1584 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1589 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1585 if b'branch' in commit:
1590 if b'branch' in commit:
1586 meta[b'branch'] = commit[b'branch']
1591 meta[b'branch'] = commit[b'branch']
1587 node = commit.get(b'commit', commit.get(b'rev'))
1592 node = commit.get(b'commit', commit.get(b'rev'))
1588 if node:
1593 if node:
1589 meta[b'node'] = node
1594 meta[b'node'] = node
1590 if len(commit.get(b'parents', ())) >= 1:
1595 if len(commit.get(b'parents', ())) >= 1:
1591 meta[b'parent'] = commit[b'parents'][0]
1596 meta[b'parent'] = commit[b'parents'][0]
1592 else:
1597 else:
1593 meta = {}
1598 meta = {}
1594 if b'date' not in meta and b'dateCreated' in diff:
1599 if b'date' not in meta and b'dateCreated' in diff:
1595 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1600 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1596 if b'branch' not in meta and diff.get(b'branch'):
1601 if b'branch' not in meta and diff.get(b'branch'):
1597 meta[b'branch'] = diff[b'branch']
1602 meta[b'branch'] = diff[b'branch']
1598 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1603 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1599 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1604 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1600 return meta
1605 return meta
1601
1606
1602
1607
1603 def readpatch(repo, drevs, write):
1608 def readpatch(repo, drevs, write):
1604 """generate plain-text patch readable by 'hg import'
1609 """generate plain-text patch readable by 'hg import'
1605
1610
1606 write is usually ui.write. drevs is what "querydrev" returns, results of
1611 write is usually ui.write. drevs is what "querydrev" returns, results of
1607 "differential.query".
1612 "differential.query".
1608 """
1613 """
1609 # Prefetch hg:meta property for all diffs
1614 # Prefetch hg:meta property for all diffs
1610 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1615 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1611 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1616 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1612
1617
1613 # Generate patch for each drev
1618 # Generate patch for each drev
1614 for drev in drevs:
1619 for drev in drevs:
1615 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1620 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1616
1621
1617 diffid = max(int(v) for v in drev[b'diffs'])
1622 diffid = max(int(v) for v in drev[b'diffs'])
1618 body = callconduit(
1623 body = callconduit(
1619 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1624 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1620 )
1625 )
1621 desc = getdescfromdrev(drev)
1626 desc = getdescfromdrev(drev)
1622 header = b'# HG changeset patch\n'
1627 header = b'# HG changeset patch\n'
1623
1628
1624 # Try to preserve metadata from hg:meta property. Write hg patch
1629 # Try to preserve metadata from hg:meta property. Write hg patch
1625 # headers that can be read by the "import" command. See patchheadermap
1630 # headers that can be read by the "import" command. See patchheadermap
1626 # and extract in mercurial/patch.py for supported headers.
1631 # and extract in mercurial/patch.py for supported headers.
1627 meta = getdiffmeta(diffs[b'%d' % diffid])
1632 meta = getdiffmeta(diffs[b'%d' % diffid])
1628 for k in _metanamemap.keys():
1633 for k in _metanamemap.keys():
1629 if k in meta:
1634 if k in meta:
1630 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1635 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1631
1636
1632 content = b'%s%s\n%s' % (header, desc, body)
1637 content = b'%s%s\n%s' % (header, desc, body)
1633 write(content)
1638 write(content)
1634
1639
1635
1640
1636 @vcrcommand(
1641 @vcrcommand(
1637 b'phabread',
1642 b'phabread',
1638 [(b'', b'stack', False, _(b'read dependencies'))],
1643 [(b'', b'stack', False, _(b'read dependencies'))],
1639 _(b'DREVSPEC [OPTIONS]'),
1644 _(b'DREVSPEC [OPTIONS]'),
1640 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1645 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1641 )
1646 )
1642 def phabread(ui, repo, spec, **opts):
1647 def phabread(ui, repo, spec, **opts):
1643 """print patches from Phabricator suitable for importing
1648 """print patches from Phabricator suitable for importing
1644
1649
1645 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1650 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1646 the number ``123``. It could also have common operators like ``+``, ``-``,
1651 the number ``123``. It could also have common operators like ``+``, ``-``,
1647 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1652 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1648 select a stack.
1653 select a stack.
1649
1654
1650 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1655 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1651 could be used to filter patches by status. For performance reason, they
1656 could be used to filter patches by status. For performance reason, they
1652 only represent a subset of non-status selections and cannot be used alone.
1657 only represent a subset of non-status selections and cannot be used alone.
1653
1658
1654 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1659 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1655 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1660 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1656 stack up to D9.
1661 stack up to D9.
1657
1662
1658 If --stack is given, follow dependencies information and read all patches.
1663 If --stack is given, follow dependencies information and read all patches.
1659 It is equivalent to the ``:`` operator.
1664 It is equivalent to the ``:`` operator.
1660 """
1665 """
1661 opts = pycompat.byteskwargs(opts)
1666 opts = pycompat.byteskwargs(opts)
1662 if opts.get(b'stack'):
1667 if opts.get(b'stack'):
1663 spec = b':(%s)' % spec
1668 spec = b':(%s)' % spec
1664 drevs = querydrev(repo, spec)
1669 drevs = querydrev(repo, spec)
1665 readpatch(repo, drevs, ui.write)
1670 readpatch(repo, drevs, ui.write)
1666
1671
1667
1672
1668 @vcrcommand(
1673 @vcrcommand(
1669 b'phabupdate',
1674 b'phabupdate',
1670 [
1675 [
1671 (b'', b'accept', False, _(b'accept revisions')),
1676 (b'', b'accept', False, _(b'accept revisions')),
1672 (b'', b'reject', False, _(b'reject revisions')),
1677 (b'', b'reject', False, _(b'reject revisions')),
1673 (b'', b'abandon', False, _(b'abandon revisions')),
1678 (b'', b'abandon', False, _(b'abandon revisions')),
1674 (b'', b'reclaim', False, _(b'reclaim revisions')),
1679 (b'', b'reclaim', False, _(b'reclaim revisions')),
1675 (b'm', b'comment', b'', _(b'comment on the last revision')),
1680 (b'm', b'comment', b'', _(b'comment on the last revision')),
1676 ],
1681 ],
1677 _(b'DREVSPEC [OPTIONS]'),
1682 _(b'DREVSPEC [OPTIONS]'),
1678 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1683 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1679 )
1684 )
1680 def phabupdate(ui, repo, spec, **opts):
1685 def phabupdate(ui, repo, spec, **opts):
1681 """update Differential Revision in batch
1686 """update Differential Revision in batch
1682
1687
1683 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1688 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1684 """
1689 """
1685 opts = pycompat.byteskwargs(opts)
1690 opts = pycompat.byteskwargs(opts)
1686 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1691 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1687 if len(flags) > 1:
1692 if len(flags) > 1:
1688 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1693 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1689
1694
1690 actions = []
1695 actions = []
1691 for f in flags:
1696 for f in flags:
1692 actions.append({b'type': f, b'value': True})
1697 actions.append({b'type': f, b'value': True})
1693
1698
1694 drevs = querydrev(repo, spec)
1699 drevs = querydrev(repo, spec)
1695 for i, drev in enumerate(drevs):
1700 for i, drev in enumerate(drevs):
1696 if i + 1 == len(drevs) and opts.get(b'comment'):
1701 if i + 1 == len(drevs) and opts.get(b'comment'):
1697 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1702 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1698 if actions:
1703 if actions:
1699 params = {
1704 params = {
1700 b'objectIdentifier': drev[b'phid'],
1705 b'objectIdentifier': drev[b'phid'],
1701 b'transactions': actions,
1706 b'transactions': actions,
1702 }
1707 }
1703 callconduit(ui, b'differential.revision.edit', params)
1708 callconduit(ui, b'differential.revision.edit', params)
1704
1709
1705
1710
1706 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1711 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1707 def template_review(context, mapping):
1712 def template_review(context, mapping):
1708 """:phabreview: Object describing the review for this changeset.
1713 """:phabreview: Object describing the review for this changeset.
1709 Has attributes `url` and `id`.
1714 Has attributes `url` and `id`.
1710 """
1715 """
1711 ctx = context.resource(mapping, b'ctx')
1716 ctx = context.resource(mapping, b'ctx')
1712 m = _differentialrevisiondescre.search(ctx.description())
1717 m = _differentialrevisiondescre.search(ctx.description())
1713 if m:
1718 if m:
1714 return templateutil.hybriddict(
1719 return templateutil.hybriddict(
1715 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1720 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1716 )
1721 )
1717 else:
1722 else:
1718 tags = ctx.repo().nodetags(ctx.node())
1723 tags = ctx.repo().nodetags(ctx.node())
1719 for t in tags:
1724 for t in tags:
1720 if _differentialrevisiontagre.match(t):
1725 if _differentialrevisiontagre.match(t):
1721 url = ctx.repo().ui.config(b'phabricator', b'url')
1726 url = ctx.repo().ui.config(b'phabricator', b'url')
1722 if not url.endswith(b'/'):
1727 if not url.endswith(b'/'):
1723 url += b'/'
1728 url += b'/'
1724 url += t
1729 url += t
1725
1730
1726 return templateutil.hybriddict({b'url': url, b'id': t,})
1731 return templateutil.hybriddict({b'url': url, b'id': t,})
1727 return None
1732 return None
1728
1733
1729
1734
1730 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1735 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1731 def template_status(context, mapping):
1736 def template_status(context, mapping):
1732 """:phabstatus: String. Status of Phabricator differential.
1737 """:phabstatus: String. Status of Phabricator differential.
1733 """
1738 """
1734 ctx = context.resource(mapping, b'ctx')
1739 ctx = context.resource(mapping, b'ctx')
1735 repo = context.resource(mapping, b'repo')
1740 repo = context.resource(mapping, b'repo')
1736 ui = context.resource(mapping, b'ui')
1741 ui = context.resource(mapping, b'ui')
1737
1742
1738 rev = ctx.rev()
1743 rev = ctx.rev()
1739 try:
1744 try:
1740 drevid = getdrevmap(repo, [rev])[rev]
1745 drevid = getdrevmap(repo, [rev])[rev]
1741 except KeyError:
1746 except KeyError:
1742 return None
1747 return None
1743 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1748 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1744 for drev in drevs:
1749 for drev in drevs:
1745 if int(drev[b'id']) == drevid:
1750 if int(drev[b'id']) == drevid:
1746 return templateutil.hybriddict(
1751 return templateutil.hybriddict(
1747 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1752 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1748 )
1753 )
1749 return None
1754 return None
1750
1755
1751
1756
1752 @show.showview(b'phabstatus', csettopic=b'work')
1757 @show.showview(b'phabstatus', csettopic=b'work')
1753 def phabstatusshowview(ui, repo, displayer):
1758 def phabstatusshowview(ui, repo, displayer):
1754 """Phabricator differiential status"""
1759 """Phabricator differiential status"""
1755 revs = repo.revs('sort(_underway(), topo)')
1760 revs = repo.revs('sort(_underway(), topo)')
1756 drevmap = getdrevmap(repo, revs)
1761 drevmap = getdrevmap(repo, revs)
1757 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1762 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1758 for rev, drevid in pycompat.iteritems(drevmap):
1763 for rev, drevid in pycompat.iteritems(drevmap):
1759 if drevid is not None:
1764 if drevid is not None:
1760 drevids.add(drevid)
1765 drevids.add(drevid)
1761 revsbydrevid.setdefault(drevid, set([])).add(rev)
1766 revsbydrevid.setdefault(drevid, set([])).add(rev)
1762 else:
1767 else:
1763 unknownrevs.append(rev)
1768 unknownrevs.append(rev)
1764
1769
1765 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1770 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1766 drevsbyrev = {}
1771 drevsbyrev = {}
1767 for drev in drevs:
1772 for drev in drevs:
1768 for rev in revsbydrevid[int(drev[b'id'])]:
1773 for rev in revsbydrevid[int(drev[b'id'])]:
1769 drevsbyrev[rev] = drev
1774 drevsbyrev[rev] = drev
1770
1775
1771 def phabstatus(ctx):
1776 def phabstatus(ctx):
1772 drev = drevsbyrev[ctx.rev()]
1777 drev = drevsbyrev[ctx.rev()]
1773 status = ui.label(
1778 status = ui.label(
1774 b'%(statusName)s' % drev,
1779 b'%(statusName)s' % drev,
1775 b'phabricator.status.%s' % _getstatusname(drev),
1780 b'phabricator.status.%s' % _getstatusname(drev),
1776 )
1781 )
1777 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1782 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1778
1783
1779 revs -= smartset.baseset(unknownrevs)
1784 revs -= smartset.baseset(unknownrevs)
1780 revdag = graphmod.dagwalker(repo, revs)
1785 revdag = graphmod.dagwalker(repo, revs)
1781
1786
1782 ui.setconfig(b'experimental', b'graphshorten', True)
1787 ui.setconfig(b'experimental', b'graphshorten', True)
1783 displayer._exthook = phabstatus
1788 displayer._exthook = phabstatus
1784 nodelen = show.longestshortest(repo, revs)
1789 nodelen = show.longestshortest(repo, revs)
1785 logcmdutil.displaygraph(
1790 logcmdutil.displaygraph(
1786 ui,
1791 ui,
1787 repo,
1792 repo,
1788 revdag,
1793 revdag,
1789 displayer,
1794 displayer,
1790 graphmod.asciiedges,
1795 graphmod.asciiedges,
1791 props={b'nodelen': nodelen},
1796 props={b'nodelen': nodelen},
1792 )
1797 )
@@ -1,246 +1,246 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 > EOF
5 > EOF
6 $ hg init repo
6 $ hg init repo
7 $ cd repo
7 $ cd repo
8 $ cat >> .hg/hgrc <<EOF
8 $ cat >> .hg/hgrc <<EOF
9 > [phabricator]
9 > [phabricator]
10 > url = https://phab.mercurial-scm.org/
10 > url = https://phab.mercurial-scm.org/
11 > callsign = HG
11 > callsign = HG
12 >
12 >
13 > [auth]
13 > [auth]
14 > hgphab.schemes = https
14 > hgphab.schemes = https
15 > hgphab.prefix = phab.mercurial-scm.org
15 > hgphab.prefix = phab.mercurial-scm.org
16 > # When working on the extension and making phabricator interaction
16 > # When working on the extension and making phabricator interaction
17 > # changes, edit this to be a real phabricator token. When done, edit
17 > # changes, edit this to be a real phabricator token. When done, edit
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
19 > # token with this value.
19 > # token with this value.
20 > hgphab.phabtoken = cli-hahayouwish
20 > hgphab.phabtoken = cli-hahayouwish
21 > EOF
21 > EOF
22 $ VCR="$TESTDIR/phabricator"
22 $ VCR="$TESTDIR/phabricator"
23
23
24 Error is handled reasonably. We override the phabtoken here so that
24 Error is handled reasonably. We override the phabtoken here so that
25 when you're developing changes to phabricator.py you can edit the
25 when you're developing changes to phabricator.py you can edit the
26 above config and have a real token in the test but not have to edit
26 above config and have a real token in the test but not have to edit
27 this test.
27 this test.
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
31
31
32 Basic phabread:
32 Basic phabread:
33 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
33 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
34 # HG changeset patch
34 # HG changeset patch
35 # Date 1536771503 0
35 # Date 1536771503 0
36 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
36 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
37 exchangev2: start to implement pull with wire protocol v2
37 exchangev2: start to implement pull with wire protocol v2
38
38
39 Wire protocol version 2 will take a substantially different
39 Wire protocol version 2 will take a substantially different
40 approach to exchange than version 1 (at least as far as pulling
40 approach to exchange than version 1 (at least as far as pulling
41 is concerned).
41 is concerned).
42
42
43 This commit establishes a new exchangev2 module for holding
43 This commit establishes a new exchangev2 module for holding
44
44
45 phabupdate with an accept:
45 phabupdate with an accept:
46 $ hg phabupdate --accept D4564 \
46 $ hg phabupdate --accept D4564 \
47 > -m 'I think I like where this is headed. Will read rest of series later.'\
47 > -m 'I think I like where this is headed. Will read rest of series later.'\
48 > --test-vcr "$VCR/accept-4564.json"
48 > --test-vcr "$VCR/accept-4564.json"
49 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
49 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
50 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
50 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
51 [255]
51 [255]
52 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
52 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
53
53
54 Create a differential diff:
54 Create a differential diff:
55 $ HGENCODING=utf-8; export HGENCODING
55 $ HGENCODING=utf-8; export HGENCODING
56 $ echo alpha > alpha
56 $ echo alpha > alpha
57 $ hg ci --addremove -m 'create alpha for phabricator test €'
57 $ hg ci --addremove -m 'create alpha for phabricator test €'
58 adding alpha
58 adding alpha
59 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
59 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
60 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
60 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
61 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
61 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
62 $ echo more >> alpha
62 $ echo more >> alpha
63 $ HGEDITOR=true hg ci --amend
63 $ HGEDITOR=true hg ci --amend
64 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
64 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
65 $ echo beta > beta
65 $ echo beta > beta
66 $ hg ci --addremove -m 'create beta for phabricator test'
66 $ hg ci --addremove -m 'create beta for phabricator test'
67 adding beta
67 adding beta
68 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
68 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
69 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
69 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
70 D7916 - created - 9e6901f21d5b: create beta for phabricator test
70 D7916 - created - 9e6901f21d5b: create beta for phabricator test
71 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
71 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
72 $ unset HGENCODING
72 $ unset HGENCODING
73
73
74 The amend won't explode after posting a public commit. The local tag is left
74 The amend won't explode after posting a public commit. The local tag is left
75 behind to identify it.
75 behind to identify it.
76
76
77 $ echo 'public change' > beta
77 $ echo 'public change' > beta
78 $ hg ci -m 'create public change for phabricator testing'
78 $ hg ci -m 'create public change for phabricator testing'
79 $ hg phase --public .
79 $ hg phase --public .
80 $ echo 'draft change' > alpha
80 $ echo 'draft change' > alpha
81 $ hg ci -m 'create draft change for phabricator testing'
81 $ hg ci -m 'create draft change for phabricator testing'
82 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
82 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
83 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
83 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
84 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
84 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
85 warning: not updating public commit 2:7b4185ab5d16
85 warning: not updating public commit 2:7b4185ab5d16
86 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
86 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
87 $ hg tags -v
87 $ hg tags -v
88 tip 3:3244dc4a3334
88 tip 3:3244dc4a3334
89 D7917 2:7b4185ab5d16 local
89 D7917 2:7b4185ab5d16 local
90
90
91 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
91 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
92 > {
92 > {
93 > "constraints": {
93 > "constraints": {
94 > "isBot": true
94 > "isBot": true
95 > }
95 > }
96 > }
96 > }
97 > EOF
97 > EOF
98 {
98 {
99 "cursor": {
99 "cursor": {
100 "after": null,
100 "after": null,
101 "before": null,
101 "before": null,
102 "limit": 100,
102 "limit": 100,
103 "order": null
103 "order": null
104 },
104 },
105 "data": [],
105 "data": [],
106 "maps": {},
106 "maps": {},
107 "query": {
107 "query": {
108 "queryKey": null
108 "queryKey": null
109 }
109 }
110 }
110 }
111
111
112 Template keywords
112 Template keywords
113 $ hg log -T'{rev} {phabreview|json}\n'
113 $ hg log -T'{rev} {phabreview|json}\n'
114 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
114 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
115 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
115 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
116 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
116 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
117 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
117 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
118
118
119 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
119 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
120 3 https://phab.mercurial-scm.org/D7918 D7918
120 3 https://phab.mercurial-scm.org/D7918 D7918
121 2 https://phab.mercurial-scm.org/D7917 D7917
121 2 https://phab.mercurial-scm.org/D7917 D7917
122 1 https://phab.mercurial-scm.org/D7916 D7916
122 1 https://phab.mercurial-scm.org/D7916 D7916
123 0 https://phab.mercurial-scm.org/D7915 D7915
123 0 https://phab.mercurial-scm.org/D7915 D7915
124
124
125 Commenting when phabsending:
125 Commenting when phabsending:
126 $ echo comment > comment
126 $ echo comment > comment
127 $ hg ci --addremove -m "create comment for phabricator test"
127 $ hg ci --addremove -m "create comment for phabricator test"
128 adding comment
128 adding comment
129 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
129 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
130 D7919 - created - d5dddca9023d: create comment for phabricator test
130 D7919 - created - d5dddca9023d: create comment for phabricator test
131 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
131 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
132 $ echo comment2 >> comment
132 $ echo comment2 >> comment
133 $ hg ci --amend
133 $ hg ci --amend
134 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
134 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
135 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
135 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
136 D7919 - updated - 1849d7828727: create comment for phabricator test
136 D7919 - updated - 1849d7828727: create comment for phabricator test
137
137
138 Phabsending a skipped commit:
138 Phabsending a skipped commit:
139 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
139 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
140 D7919 - skipped - 1849d7828727: create comment for phabricator test
140 D7919 - skipped - 1849d7828727: create comment for phabricator test
141
141
142 Phabreading a DREV with a local:commits time as a string:
142 Phabreading a DREV with a local:commits time as a string:
143 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
143 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
144 # HG changeset patch
144 # HG changeset patch
145 # User Pulkit Goyal <7895pulkit@gmail.com>
145 # User Pulkit Goyal <7895pulkit@gmail.com>
146 # Date 1509404054 -19800
146 # Date 1509404054 -19800
147 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
147 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
148 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
148 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
149 repoview: add a new attribute _visibilityexceptions and related API
149 repoview: add a new attribute _visibilityexceptions and related API
150
150
151 Currently we don't have a defined way in core to make some hidden revisions
151 Currently we don't have a defined way in core to make some hidden revisions
152 visible in filtered repo. Extensions to achieve the purpose of unhiding some
152 visible in filtered repo. Extensions to achieve the purpose of unhiding some
153 hidden commits, wrap repoview.pinnedrevs() function.
153 hidden commits, wrap repoview.pinnedrevs() function.
154
154
155 To make the above task simple and have well defined API, this patch adds a new
155 To make the above task simple and have well defined API, this patch adds a new
156 attribute '_visibilityexceptions' to repoview class which will contains
156 attribute '_visibilityexceptions' to repoview class which will contains
157 the hidden revs which should be exception.
157 the hidden revs which should be exception.
158 This will allow to set different exceptions for different repoview objects
158 This will allow to set different exceptions for different repoview objects
159 backed by the same unfiltered repo.
159 backed by the same unfiltered repo.
160
160
161 This patch also adds API to add revs to the attribute set and get them.
161 This patch also adds API to add revs to the attribute set and get them.
162
162
163 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
163 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
164
164
165 Differential Revision: https://phab.mercurial-scm.org/D1285
165 Differential Revision: https://phab.mercurial-scm.org/D1285
166 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
166 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
167 --- a/mercurial/repoview.py
167 --- a/mercurial/repoview.py
168 +++ b/mercurial/repoview.py
168 +++ b/mercurial/repoview.py
169 @@ * @@ (glob)
169 @@ * @@ (glob)
170 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
170 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
171 """
171 """
172
172
173 + # hidden revs which should be visible
173 + # hidden revs which should be visible
174 + _visibilityexceptions = set()
174 + _visibilityexceptions = set()
175 +
175 +
176 def __init__(self, repo, filtername):
176 def __init__(self, repo, filtername):
177 object.__setattr__(self, r'_unfilteredrepo', repo)
177 object.__setattr__(self, r'_unfilteredrepo', repo)
178 object.__setattr__(self, r'filtername', filtername)
178 object.__setattr__(self, r'filtername', filtername)
179 @@ -231,6 +234,14 @@
179 @@ -231,6 +234,14 @@
180 return self
180 return self
181 return self.unfiltered().filtered(name)
181 return self.unfiltered().filtered(name)
182
182
183 + def addvisibilityexceptions(self, revs):
183 + def addvisibilityexceptions(self, revs):
184 + """adds hidden revs which should be visible to set of exceptions"""
184 + """adds hidden revs which should be visible to set of exceptions"""
185 + self._visibilityexceptions.update(revs)
185 + self._visibilityexceptions.update(revs)
186 +
186 +
187 + def getvisibilityexceptions(self):
187 + def getvisibilityexceptions(self):
188 + """returns the set of hidden revs which should be visible"""
188 + """returns the set of hidden revs which should be visible"""
189 + return self._visibilityexceptions
189 + return self._visibilityexceptions
190 +
190 +
191 # everything access are forwarded to the proxied repo
191 # everything access are forwarded to the proxied repo
192 def __getattr__(self, attr):
192 def __getattr__(self, attr):
193 return getattr(self._unfilteredrepo, attr)
193 return getattr(self._unfilteredrepo, attr)
194 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
194 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
195 --- a/mercurial/localrepo.py
195 --- a/mercurial/localrepo.py
196 +++ b/mercurial/localrepo.py
196 +++ b/mercurial/localrepo.py
197 @@ -570,6 +570,14 @@
197 @@ -570,6 +570,14 @@
198 def close(self):
198 def close(self):
199 self._writecaches()
199 self._writecaches()
200
200
201 + def addvisibilityexceptions(self, exceptions):
201 + def addvisibilityexceptions(self, exceptions):
202 + # should be called on a filtered repository
202 + # should be called on a filtered repository
203 + pass
203 + pass
204 +
204 +
205 + def getvisibilityexceptions(self):
205 + def getvisibilityexceptions(self):
206 + # should be called on a filtered repository
206 + # should be called on a filtered repository
207 + return set()
207 + return set()
208 +
208 +
209 def _loadextensions(self):
209 def _loadextensions(self):
210 extensions.loadall(self.ui)
210 extensions.loadall(self.ui)
211
211
212
212
213 A bad .arcconfig doesn't error out
213 A bad .arcconfig doesn't error out
214 $ echo 'garbage' > .arcconfig
214 $ echo 'garbage' > .arcconfig
215 $ hg config phabricator --debug
215 $ hg config phabricator --debug
216 invalid JSON in $TESTTMP/repo/.arcconfig
216 invalid JSON in $TESTTMP/repo/.arcconfig
217 read config from: */.hgrc (glob)
217 read config from: */.hgrc (glob)
218 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
218 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
219 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
219 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
220
220
221 The .arcconfig content overrides global config
221 The .arcconfig content overrides global config
222 $ cat >> $HGRCPATH << EOF
222 $ cat >> $HGRCPATH << EOF
223 > [phabricator]
223 > [phabricator]
224 > url = global
224 > url = global
225 > callsign = global
225 > callsign = global
226 > EOF
226 > EOF
227 $ cp $TESTDIR/../.arcconfig .
227 $ cp $TESTDIR/../.arcconfig .
228 $ mv .hg/hgrc .hg/hgrc.bak
228 $ mv .hg/hgrc .hg/hgrc.bak
229 $ hg config phabricator --debug
229 $ hg config phabricator --debug
230 read config from: */.hgrc (glob)
230 read config from: */.hgrc (glob)
231 */.hgrc:*: phabricator.url=global (glob)
232 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
231 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
232 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
233
233
234 But it doesn't override local config
234 But it doesn't override local config
235 $ cat >> .hg/hgrc << EOF
235 $ cat >> .hg/hgrc << EOF
236 > [phabricator]
236 > [phabricator]
237 > url = local
237 > url = local
238 > callsign = local
238 > callsign = local
239 > EOF
239 > EOF
240 $ hg config phabricator --debug
240 $ hg config phabricator --debug
241 read config from: */.hgrc (glob)
241 read config from: */.hgrc (glob)
242 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
242 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
243 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
243 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
244 $ mv .hg/hgrc.bak .hg/hgrc
244 $ mv .hg/hgrc.bak .hg/hgrc
245
245
246 $ cd ..
246 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now