py3: pass a bytestring into querydrev instead of a string that'll TypeError...
Ian Moody
r43220:6fb281f3 default
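The only functional change in this revision is the call to ``querydrev`` inside ``phabsend``, in the branch that skips an unchanged revision: the spec is now built with ``b'%d' % revid`` instead of ``str(revid)``. The extension parses that spec as a byte string (see ``_tokenize`` and ``_parsedrev`` further down), so on Python 3 a unicode ``str`` spec ends up in byte-string operations and raises the TypeError mentioned in the commit message. A minimal standalone sketch of the bytes/str mismatch, not taken from the changeset itself::

    revid = 1234

    spec_ok = b'%d' % revid   # bytes on Python 2 and Python 3 (3.5+)
    spec_bad = str(revid)     # bytes on Python 2, but unicode str on Python 3

    assert isinstance(spec_ok, bytes)

    # Formatting the str spec into a byte string, as the extension does with
    # its b'...' literals, is the kind of operation that fails on Python 3:
    try:
        b'D%s' % spec_bad
    except TypeError as exc:
        print('str spec breaks byte-string handling: %s' % exc)
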
@@ -1,1094 +1,1094 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 # be specifying the version(s) of Mercurial they are tested with, or
80 # be specifying the version(s) of Mercurial they are tested with, or
81 # leave the attribute unspecified.
81 # leave the attribute unspecified.
82 testedwith = 'ships-with-hg-core'
82 testedwith = 'ships-with-hg-core'
83
83
84 cmdtable = {}
84 cmdtable = {}
85 command = registrar.command(cmdtable)
85 command = registrar.command(cmdtable)
86
86
87 configtable = {}
87 configtable = {}
88 configitem = registrar.configitem(configtable)
88 configitem = registrar.configitem(configtable)
89
89
90 # developer config: phabricator.batchsize
90 # developer config: phabricator.batchsize
91 configitem(b'phabricator', b'batchsize',
91 configitem(b'phabricator', b'batchsize',
92 default=12,
92 default=12,
93 )
93 )
94 configitem(b'phabricator', b'callsign',
94 configitem(b'phabricator', b'callsign',
95 default=None,
95 default=None,
96 )
96 )
97 configitem(b'phabricator', b'curlcmd',
97 configitem(b'phabricator', b'curlcmd',
98 default=None,
98 default=None,
99 )
99 )
100 # developer config: phabricator.repophid
100 # developer config: phabricator.repophid
101 configitem(b'phabricator', b'repophid',
101 configitem(b'phabricator', b'repophid',
102 default=None,
102 default=None,
103 )
103 )
104 configitem(b'phabricator', b'url',
104 configitem(b'phabricator', b'url',
105 default=None,
105 default=None,
106 )
106 )
107 configitem(b'phabsend', b'confirm',
107 configitem(b'phabsend', b'confirm',
108 default=False,
108 default=False,
109 )
109 )
110
110
111 colortable = {
111 colortable = {
112 b'phabricator.action.created': b'green',
112 b'phabricator.action.created': b'green',
113 b'phabricator.action.skipped': b'magenta',
113 b'phabricator.action.skipped': b'magenta',
114 b'phabricator.action.updated': b'magenta',
114 b'phabricator.action.updated': b'magenta',
115 b'phabricator.desc': b'',
115 b'phabricator.desc': b'',
116 b'phabricator.drev': b'bold',
116 b'phabricator.drev': b'bold',
117 b'phabricator.node': b'',
117 b'phabricator.node': b'',
118 }
118 }
119
119
120 _VCR_FLAGS = [
120 _VCR_FLAGS = [
121 (b'', b'test-vcr', b'',
121 (b'', b'test-vcr', b'',
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 b', otherwise will mock all http requests using the specified vcr file.'
123 b', otherwise will mock all http requests using the specified vcr file.'
124 b' (ADVANCED)'
124 b' (ADVANCED)'
125 )),
125 )),
126 ]
126 ]
127
127
128 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
128 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
129 fullflags = flags + _VCR_FLAGS
129 fullflags = flags + _VCR_FLAGS
130 def hgmatcher(r1, r2):
130 def hgmatcher(r1, r2):
131 if r1.uri != r2.uri or r1.method != r2.method:
131 if r1.uri != r2.uri or r1.method != r2.method:
132 return False
132 return False
133 r1params = r1.body.split(b'&')
133 r1params = r1.body.split(b'&')
134 r2params = r2.body.split(b'&')
134 r2params = r2.body.split(b'&')
135 return set(r1params) == set(r2params)
135 return set(r1params) == set(r2params)
136
136
137 def sanitiserequest(request):
137 def sanitiserequest(request):
138 request.body = re.sub(
138 request.body = re.sub(
139 r'cli-[a-z0-9]+',
139 r'cli-[a-z0-9]+',
140 r'cli-hahayouwish',
140 r'cli-hahayouwish',
141 request.body
141 request.body
142 )
142 )
143 return request
143 return request
144
144
145 def sanitiseresponse(response):
145 def sanitiseresponse(response):
146 if r'set-cookie' in response[r'headers']:
146 if r'set-cookie' in response[r'headers']:
147 del response[r'headers'][r'set-cookie']
147 del response[r'headers'][r'set-cookie']
148 return response
148 return response
149
149
150 def decorate(fn):
150 def decorate(fn):
151 def inner(*args, **kwargs):
151 def inner(*args, **kwargs):
152 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
152 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
153 if cassette:
153 if cassette:
154 import hgdemandimport
154 import hgdemandimport
155 with hgdemandimport.deactivated():
155 with hgdemandimport.deactivated():
156 import vcr as vcrmod
156 import vcr as vcrmod
157 import vcr.stubs as stubs
157 import vcr.stubs as stubs
158 vcr = vcrmod.VCR(
158 vcr = vcrmod.VCR(
159 serializer=r'json',
159 serializer=r'json',
160 before_record_request=sanitiserequest,
160 before_record_request=sanitiserequest,
161 before_record_response=sanitiseresponse,
161 before_record_response=sanitiseresponse,
162 custom_patches=[
162 custom_patches=[
163 (urlmod, r'httpconnection',
163 (urlmod, r'httpconnection',
164 stubs.VCRHTTPConnection),
164 stubs.VCRHTTPConnection),
165 (urlmod, r'httpsconnection',
165 (urlmod, r'httpsconnection',
166 stubs.VCRHTTPSConnection),
166 stubs.VCRHTTPSConnection),
167 ])
167 ])
168 vcr.register_matcher(r'hgmatcher', hgmatcher)
168 vcr.register_matcher(r'hgmatcher', hgmatcher)
169 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
169 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
170 return fn(*args, **kwargs)
170 return fn(*args, **kwargs)
171 return fn(*args, **kwargs)
171 return fn(*args, **kwargs)
172 inner.__name__ = fn.__name__
172 inner.__name__ = fn.__name__
173 inner.__doc__ = fn.__doc__
173 inner.__doc__ = fn.__doc__
174 return command(name, fullflags, spec, helpcategory=helpcategory,
174 return command(name, fullflags, spec, helpcategory=helpcategory,
175 optionalrepo=optionalrepo)(inner)
175 optionalrepo=optionalrepo)(inner)
176 return decorate
176 return decorate
177
177
178 def urlencodenested(params):
178 def urlencodenested(params):
179 """like urlencode, but works with nested parameters.
179 """like urlencode, but works with nested parameters.
180
180
181 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
181 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
182 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
182 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
183 urlencode. Note: the encoding is consistent with PHP's http_build_query.
183 urlencode. Note: the encoding is consistent with PHP's http_build_query.
184 """
184 """
185 flatparams = util.sortdict()
185 flatparams = util.sortdict()
186 def process(prefix, obj):
186 def process(prefix, obj):
187 if isinstance(obj, bool):
187 if isinstance(obj, bool):
188 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
188 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
189 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
189 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
190 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
190 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
191 if items is None:
191 if items is None:
192 flatparams[prefix] = obj
192 flatparams[prefix] = obj
193 else:
193 else:
194 for k, v in items(obj):
194 for k, v in items(obj):
195 if prefix:
195 if prefix:
196 process(b'%s[%s]' % (prefix, k), v)
196 process(b'%s[%s]' % (prefix, k), v)
197 else:
197 else:
198 process(k, v)
198 process(k, v)
199 process(b'', params)
199 process(b'', params)
200 return util.urlreq.urlencode(flatparams)
200 return util.urlreq.urlencode(flatparams)
201
201
202 def readurltoken(ui):
202 def readurltoken(ui):
203 """return conduit url, token and make sure they exist
203 """return conduit url, token and make sure they exist
204
204
205 Currently read from [auth] config section. In the future, it might
205 Currently read from [auth] config section. In the future, it might
206 make sense to read from .arcconfig and .arcrc as well.
206 make sense to read from .arcconfig and .arcrc as well.
207 """
207 """
208 url = ui.config(b'phabricator', b'url')
208 url = ui.config(b'phabricator', b'url')
209 if not url:
209 if not url:
210 raise error.Abort(_(b'config %s.%s is required')
210 raise error.Abort(_(b'config %s.%s is required')
211 % (b'phabricator', b'url'))
211 % (b'phabricator', b'url'))
212
212
213 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
213 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
214 token = None
214 token = None
215
215
216 if res:
216 if res:
217 group, auth = res
217 group, auth = res
218
218
219 ui.debug(b"using auth.%s.* for authentication\n" % group)
219 ui.debug(b"using auth.%s.* for authentication\n" % group)
220
220
221 token = auth.get(b'phabtoken')
221 token = auth.get(b'phabtoken')
222
222
223 if not token:
223 if not token:
224 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
224 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
225 % (url,))
225 % (url,))
226
226
227 return url, token
227 return url, token
228
228
229 def callconduit(ui, name, params):
229 def callconduit(ui, name, params):
230 """call Conduit API, params is a dict. return json.loads result, or None"""
230 """call Conduit API, params is a dict. return json.loads result, or None"""
231 host, token = readurltoken(ui)
231 host, token = readurltoken(ui)
232 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
232 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
233 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
233 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
234 params = params.copy()
234 params = params.copy()
235 params[b'api.token'] = token
235 params[b'api.token'] = token
236 data = urlencodenested(params)
236 data = urlencodenested(params)
237 curlcmd = ui.config(b'phabricator', b'curlcmd')
237 curlcmd = ui.config(b'phabricator', b'curlcmd')
238 if curlcmd:
238 if curlcmd:
239 sin, sout = procutil.popen2(b'%s -d @- %s'
239 sin, sout = procutil.popen2(b'%s -d @- %s'
240 % (curlcmd, procutil.shellquote(url)))
240 % (curlcmd, procutil.shellquote(url)))
241 sin.write(data)
241 sin.write(data)
242 sin.close()
242 sin.close()
243 body = sout.read()
243 body = sout.read()
244 else:
244 else:
245 urlopener = urlmod.opener(ui, authinfo)
245 urlopener = urlmod.opener(ui, authinfo)
246 request = util.urlreq.request(pycompat.strurl(url), data=data)
246 request = util.urlreq.request(pycompat.strurl(url), data=data)
247 with contextlib.closing(urlopener.open(request)) as rsp:
247 with contextlib.closing(urlopener.open(request)) as rsp:
248 body = rsp.read()
248 body = rsp.read()
249 ui.debug(b'Conduit Response: %s\n' % body)
249 ui.debug(b'Conduit Response: %s\n' % body)
250 parsed = pycompat.rapply(
250 parsed = pycompat.rapply(
251 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
251 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
252 else x,
252 else x,
253 json.loads(body)
253 json.loads(body)
254 )
254 )
255 if parsed.get(b'error_code'):
255 if parsed.get(b'error_code'):
256 msg = (_(b'Conduit Error (%s): %s')
256 msg = (_(b'Conduit Error (%s): %s')
257 % (parsed[b'error_code'], parsed[b'error_info']))
257 % (parsed[b'error_code'], parsed[b'error_info']))
258 raise error.Abort(msg)
258 raise error.Abort(msg)
259 return parsed[b'result']
259 return parsed[b'result']
260
260
261 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
261 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
262 def debugcallconduit(ui, repo, name):
262 def debugcallconduit(ui, repo, name):
263 """call Conduit API
263 """call Conduit API
264
264
265 Call parameters are read from stdin as a JSON blob. Result will be written
265 Call parameters are read from stdin as a JSON blob. Result will be written
266 to stdout as a JSON blob.
266 to stdout as a JSON blob.
267 """
267 """
268 # json.loads only accepts bytes from 3.6+
268 # json.loads only accepts bytes from 3.6+
269 rawparams = encoding.unifromlocal(ui.fin.read())
269 rawparams = encoding.unifromlocal(ui.fin.read())
270 # json.loads only returns unicode strings
270 # json.loads only returns unicode strings
271 params = pycompat.rapply(lambda x:
271 params = pycompat.rapply(lambda x:
272 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
272 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
273 json.loads(rawparams)
273 json.loads(rawparams)
274 )
274 )
275 # json.dumps only accepts unicode strings
275 # json.dumps only accepts unicode strings
276 result = pycompat.rapply(lambda x:
276 result = pycompat.rapply(lambda x:
277 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
277 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
278 callconduit(ui, name, params)
278 callconduit(ui, name, params)
279 )
279 )
280 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
280 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
281 ui.write(b'%s\n' % encoding.unitolocal(s))
281 ui.write(b'%s\n' % encoding.unitolocal(s))
282
282
283 def getrepophid(repo):
283 def getrepophid(repo):
284 """given callsign, return repository PHID or None"""
284 """given callsign, return repository PHID or None"""
285 # developer config: phabricator.repophid
285 # developer config: phabricator.repophid
286 repophid = repo.ui.config(b'phabricator', b'repophid')
286 repophid = repo.ui.config(b'phabricator', b'repophid')
287 if repophid:
287 if repophid:
288 return repophid
288 return repophid
289 callsign = repo.ui.config(b'phabricator', b'callsign')
289 callsign = repo.ui.config(b'phabricator', b'callsign')
290 if not callsign:
290 if not callsign:
291 return None
291 return None
292 query = callconduit(repo.ui, b'diffusion.repository.search',
292 query = callconduit(repo.ui, b'diffusion.repository.search',
293 {b'constraints': {b'callsigns': [callsign]}})
293 {b'constraints': {b'callsigns': [callsign]}})
294 if len(query[b'data']) == 0:
294 if len(query[b'data']) == 0:
295 return None
295 return None
296 repophid = query[b'data'][0][b'phid']
296 repophid = query[b'data'][0][b'phid']
297 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
297 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
298 return repophid
298 return repophid
299
299
300 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
300 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
301 _differentialrevisiondescre = re.compile(
301 _differentialrevisiondescre = re.compile(
302 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
302 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
303
303
304 def getoldnodedrevmap(repo, nodelist):
304 def getoldnodedrevmap(repo, nodelist):
305 """find previous nodes that has been sent to Phabricator
305 """find previous nodes that has been sent to Phabricator
306
306
307 return {node: (oldnode, Differential diff, Differential Revision ID)}
307 return {node: (oldnode, Differential diff, Differential Revision ID)}
308 for node in nodelist with known previous sent versions, or associated
308 for node in nodelist with known previous sent versions, or associated
309 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
309 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
310 be ``None``.
310 be ``None``.
311
311
312 Examines commit messages like "Differential Revision:" to get the
312 Examines commit messages like "Differential Revision:" to get the
313 association information.
313 association information.
314
314
315 If such commit message line is not found, examines all precursors and their
315 If such commit message line is not found, examines all precursors and their
316 tags. Tags with format like "D1234" are considered a match and the node
316 tags. Tags with format like "D1234" are considered a match and the node
317 with that tag, and the number after "D" (ex. 1234) will be returned.
317 with that tag, and the number after "D" (ex. 1234) will be returned.
318
318
319 The ``old node``, if not None, is guaranteed to be the last diff of
319 The ``old node``, if not None, is guaranteed to be the last diff of
320 corresponding Differential Revision, and exist in the repo.
320 corresponding Differential Revision, and exist in the repo.
321 """
321 """
322 unfi = repo.unfiltered()
322 unfi = repo.unfiltered()
323 nodemap = unfi.changelog.nodemap
323 nodemap = unfi.changelog.nodemap
324
324
325 result = {} # {node: (oldnode?, lastdiff?, drev)}
325 result = {} # {node: (oldnode?, lastdiff?, drev)}
326 toconfirm = {} # {node: (force, {precnode}, drev)}
326 toconfirm = {} # {node: (force, {precnode}, drev)}
327 for node in nodelist:
327 for node in nodelist:
328 ctx = unfi[node]
328 ctx = unfi[node]
329 # For tags like "D123", put them into "toconfirm" to verify later
329 # For tags like "D123", put them into "toconfirm" to verify later
330 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
330 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
331 for n in precnodes:
331 for n in precnodes:
332 if n in nodemap:
332 if n in nodemap:
333 for tag in unfi.nodetags(n):
333 for tag in unfi.nodetags(n):
334 m = _differentialrevisiontagre.match(tag)
334 m = _differentialrevisiontagre.match(tag)
335 if m:
335 if m:
336 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
336 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
337 continue
337 continue
338
338
339 # Check commit message
339 # Check commit message
340 m = _differentialrevisiondescre.search(ctx.description())
340 m = _differentialrevisiondescre.search(ctx.description())
341 if m:
341 if m:
342 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
342 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
343
343
344 # Double check if tags are genuine by collecting all old nodes from
344 # Double check if tags are genuine by collecting all old nodes from
345 # Phabricator, and expect precursors overlap with it.
345 # Phabricator, and expect precursors overlap with it.
346 if toconfirm:
346 if toconfirm:
347 drevs = [drev for force, precs, drev in toconfirm.values()]
347 drevs = [drev for force, precs, drev in toconfirm.values()]
348 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
348 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
349 {b'revisionIDs': drevs})
349 {b'revisionIDs': drevs})
350 getnode = lambda d: bin(
350 getnode = lambda d: bin(
351 getdiffmeta(d).get(b'node', b'')) or None
351 getdiffmeta(d).get(b'node', b'')) or None
352 for newnode, (force, precset, drev) in toconfirm.items():
352 for newnode, (force, precset, drev) in toconfirm.items():
353 diffs = [d for d in alldiffs.values()
353 diffs = [d for d in alldiffs.values()
354 if int(d[b'revisionID']) == drev]
354 if int(d[b'revisionID']) == drev]
355
355
356 # "precursors" as known by Phabricator
356 # "precursors" as known by Phabricator
357 phprecset = set(getnode(d) for d in diffs)
357 phprecset = set(getnode(d) for d in diffs)
358
358
359 # Ignore if precursors (Phabricator and local repo) do not overlap,
359 # Ignore if precursors (Phabricator and local repo) do not overlap,
360 # and force is not set (when commit message says nothing)
360 # and force is not set (when commit message says nothing)
361 if not force and not bool(phprecset & precset):
361 if not force and not bool(phprecset & precset):
362 tagname = b'D%d' % drev
362 tagname = b'D%d' % drev
363 tags.tag(repo, tagname, nullid, message=None, user=None,
363 tags.tag(repo, tagname, nullid, message=None, user=None,
364 date=None, local=True)
364 date=None, local=True)
365 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
365 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
366 b'Differential history\n') % drev)
366 b'Differential history\n') % drev)
367 continue
367 continue
368
368
369 # Find the last node using Phabricator metadata, and make sure it
369 # Find the last node using Phabricator metadata, and make sure it
370 # exists in the repo
370 # exists in the repo
371 oldnode = lastdiff = None
371 oldnode = lastdiff = None
372 if diffs:
372 if diffs:
373 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
373 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
374 oldnode = getnode(lastdiff)
374 oldnode = getnode(lastdiff)
375 if oldnode and oldnode not in nodemap:
375 if oldnode and oldnode not in nodemap:
376 oldnode = None
376 oldnode = None
377
377
378 result[newnode] = (oldnode, lastdiff, drev)
378 result[newnode] = (oldnode, lastdiff, drev)
379
379
380 return result
380 return result
381
381
382 def getdiff(ctx, diffopts):
382 def getdiff(ctx, diffopts):
383 """plain-text diff without header (user, commit message, etc)"""
383 """plain-text diff without header (user, commit message, etc)"""
384 output = util.stringio()
384 output = util.stringio()
385 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
385 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
386 None, opts=diffopts):
386 None, opts=diffopts):
387 output.write(chunk)
387 output.write(chunk)
388 return output.getvalue()
388 return output.getvalue()
389
389
390 def creatediff(ctx):
390 def creatediff(ctx):
391 """create a Differential Diff"""
391 """create a Differential Diff"""
392 repo = ctx.repo()
392 repo = ctx.repo()
393 repophid = getrepophid(repo)
393 repophid = getrepophid(repo)
394 # Create a "Differential Diff" via "differential.createrawdiff" API
394 # Create a "Differential Diff" via "differential.createrawdiff" API
395 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
395 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
396 if repophid:
396 if repophid:
397 params[b'repositoryPHID'] = repophid
397 params[b'repositoryPHID'] = repophid
398 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
398 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
399 if not diff:
399 if not diff:
400 raise error.Abort(_(b'cannot create diff for %s') % ctx)
400 raise error.Abort(_(b'cannot create diff for %s') % ctx)
401 return diff
401 return diff
402
402
403 def writediffproperties(ctx, diff):
403 def writediffproperties(ctx, diff):
404 """write metadata to diff so patches could be applied losslessly"""
404 """write metadata to diff so patches could be applied losslessly"""
405 params = {
405 params = {
406 b'diff_id': diff[b'id'],
406 b'diff_id': diff[b'id'],
407 b'name': b'hg:meta',
407 b'name': b'hg:meta',
408 b'data': templatefilters.json({
408 b'data': templatefilters.json({
409 b'user': ctx.user(),
409 b'user': ctx.user(),
410 b'date': b'%d %d' % ctx.date(),
410 b'date': b'%d %d' % ctx.date(),
411 b'branch': ctx.branch(),
411 b'branch': ctx.branch(),
412 b'node': ctx.hex(),
412 b'node': ctx.hex(),
413 b'parent': ctx.p1().hex(),
413 b'parent': ctx.p1().hex(),
414 }),
414 }),
415 }
415 }
416 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
416 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
417
417
418 params = {
418 params = {
419 b'diff_id': diff[b'id'],
419 b'diff_id': diff[b'id'],
420 b'name': b'local:commits',
420 b'name': b'local:commits',
421 b'data': templatefilters.json({
421 b'data': templatefilters.json({
422 ctx.hex(): {
422 ctx.hex(): {
423 b'author': stringutil.person(ctx.user()),
423 b'author': stringutil.person(ctx.user()),
424 b'authorEmail': stringutil.email(ctx.user()),
424 b'authorEmail': stringutil.email(ctx.user()),
425 b'time': int(ctx.date()[0]),
425 b'time': int(ctx.date()[0]),
426 b'commit': ctx.hex(),
426 b'commit': ctx.hex(),
427 b'parents': [ctx.p1().hex()],
427 b'parents': [ctx.p1().hex()],
428 b'branch': ctx.branch(),
428 b'branch': ctx.branch(),
429 },
429 },
430 }),
430 }),
431 }
431 }
432 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
432 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
433
433
434 def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
434 def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
435 oldnode=None, olddiff=None, actions=None,
435 oldnode=None, olddiff=None, actions=None,
436 comment=None):
436 comment=None):
437 """create or update a Differential Revision
437 """create or update a Differential Revision
438
438
439 If revid is None, create a new Differential Revision, otherwise update
439 If revid is None, create a new Differential Revision, otherwise update
440 revid. If parentrevphid is not None, set it as a dependency.
440 revid. If parentrevphid is not None, set it as a dependency.
441
441
442 If oldnode is not None, check if the patch content (without commit message
442 If oldnode is not None, check if the patch content (without commit message
443 and metadata) has changed before creating another diff.
443 and metadata) has changed before creating another diff.
444
444
445 If actions is not None, they will be appended to the transaction.
445 If actions is not None, they will be appended to the transaction.
446 """
446 """
447 repo = ctx.repo()
447 repo = ctx.repo()
448 if oldnode:
448 if oldnode:
449 diffopts = mdiff.diffopts(git=True, context=32767)
449 diffopts = mdiff.diffopts(git=True, context=32767)
450 oldctx = repo.unfiltered()[oldnode]
450 oldctx = repo.unfiltered()[oldnode]
451 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
451 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
452 else:
452 else:
453 neednewdiff = True
453 neednewdiff = True
454
454
455 transactions = []
455 transactions = []
456 if neednewdiff:
456 if neednewdiff:
457 diff = creatediff(ctx)
457 diff = creatediff(ctx)
458 transactions.append({b'type': b'update', b'value': diff[b'phid']})
458 transactions.append({b'type': b'update', b'value': diff[b'phid']})
459 if comment:
459 if comment:
460 transactions.append({b'type': b'comment', b'value': comment})
460 transactions.append({b'type': b'comment', b'value': comment})
461 else:
461 else:
462 # Even if we don't need to upload a new diff because the patch content
462 # Even if we don't need to upload a new diff because the patch content
463 # does not change. We might still need to update its metadata so
463 # does not change. We might still need to update its metadata so
464 # pushers could know the correct node metadata.
464 # pushers could know the correct node metadata.
465 assert olddiff
465 assert olddiff
466 diff = olddiff
466 diff = olddiff
467 writediffproperties(ctx, diff)
467 writediffproperties(ctx, diff)
468
468
469 # Set the parent Revision every time, so commit re-ordering is picked-up
469 # Set the parent Revision every time, so commit re-ordering is picked-up
470 if parentrevphid:
470 if parentrevphid:
471 transactions.append({b'type': b'parents.set',
471 transactions.append({b'type': b'parents.set',
472 b'value': [parentrevphid]})
472 b'value': [parentrevphid]})
473
473
474 if actions:
474 if actions:
475 transactions += actions
475 transactions += actions
476
476
477 # Parse commit message and update related fields.
477 # Parse commit message and update related fields.
478 desc = ctx.description()
478 desc = ctx.description()
479 info = callconduit(repo.ui, b'differential.parsecommitmessage',
479 info = callconduit(repo.ui, b'differential.parsecommitmessage',
480 {b'corpus': desc})
480 {b'corpus': desc})
481 for k, v in info[b'fields'].items():
481 for k, v in info[b'fields'].items():
482 if k in [b'title', b'summary', b'testPlan']:
482 if k in [b'title', b'summary', b'testPlan']:
483 transactions.append({b'type': k, b'value': v})
483 transactions.append({b'type': k, b'value': v})
484
484
485 params = {b'transactions': transactions}
485 params = {b'transactions': transactions}
486 if revid is not None:
486 if revid is not None:
487 # Update an existing Differential Revision
487 # Update an existing Differential Revision
488 params[b'objectIdentifier'] = revid
488 params[b'objectIdentifier'] = revid
489
489
490 revision = callconduit(repo.ui, b'differential.revision.edit', params)
490 revision = callconduit(repo.ui, b'differential.revision.edit', params)
491 if not revision:
491 if not revision:
492 raise error.Abort(_(b'cannot create revision for %s') % ctx)
492 raise error.Abort(_(b'cannot create revision for %s') % ctx)
493
493
494 return revision, diff
494 return revision, diff
495
495
496 def userphids(repo, names):
496 def userphids(repo, names):
497 """convert user names to PHIDs"""
497 """convert user names to PHIDs"""
498 names = [name.lower() for name in names]
498 names = [name.lower() for name in names]
499 query = {b'constraints': {b'usernames': names}}
499 query = {b'constraints': {b'usernames': names}}
500 result = callconduit(repo.ui, b'user.search', query)
500 result = callconduit(repo.ui, b'user.search', query)
501 # username not found is not an error of the API. So check if we have missed
501 # username not found is not an error of the API. So check if we have missed
502 # some names here.
502 # some names here.
503 data = result[b'data']
503 data = result[b'data']
504 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
504 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
505 unresolved = set(names) - resolved
505 unresolved = set(names) - resolved
506 if unresolved:
506 if unresolved:
507 raise error.Abort(_(b'unknown username: %s')
507 raise error.Abort(_(b'unknown username: %s')
508 % b' '.join(sorted(unresolved)))
508 % b' '.join(sorted(unresolved)))
509 return [entry[b'phid'] for entry in data]
509 return [entry[b'phid'] for entry in data]
510
510
511 @vcrcommand(b'phabsend',
511 @vcrcommand(b'phabsend',
512 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
512 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
513 (b'', b'amend', True, _(b'update commit messages')),
513 (b'', b'amend', True, _(b'update commit messages')),
514 (b'', b'reviewer', [], _(b'specify reviewers')),
514 (b'', b'reviewer', [], _(b'specify reviewers')),
515 (b'', b'blocker', [], _(b'specify blocking reviewers')),
515 (b'', b'blocker', [], _(b'specify blocking reviewers')),
516 (b'm', b'comment', b'',
516 (b'm', b'comment', b'',
517 _(b'add a comment to Revisions with new/updated Diffs')),
517 _(b'add a comment to Revisions with new/updated Diffs')),
518 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
518 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
519 _(b'REV [OPTIONS]'),
519 _(b'REV [OPTIONS]'),
520 helpcategory=command.CATEGORY_IMPORT_EXPORT)
520 helpcategory=command.CATEGORY_IMPORT_EXPORT)
521 def phabsend(ui, repo, *revs, **opts):
521 def phabsend(ui, repo, *revs, **opts):
522 """upload changesets to Phabricator
522 """upload changesets to Phabricator
523
523
524 If there are multiple revisions specified, they will be send as a stack
524 If there are multiple revisions specified, they will be send as a stack
525 with a linear dependencies relationship using the order specified by the
525 with a linear dependencies relationship using the order specified by the
526 revset.
526 revset.
527
527
528 For the first time uploading changesets, local tags will be created to
528 For the first time uploading changesets, local tags will be created to
529 maintain the association. After the first time, phabsend will check
529 maintain the association. After the first time, phabsend will check
530 obsstore and tags information so it can figure out whether to update an
530 obsstore and tags information so it can figure out whether to update an
531 existing Differential Revision, or create a new one.
531 existing Differential Revision, or create a new one.
532
532
533 If --amend is set, update commit messages so they have the
533 If --amend is set, update commit messages so they have the
534 ``Differential Revision`` URL, remove related tags. This is similar to what
534 ``Differential Revision`` URL, remove related tags. This is similar to what
535 arcanist will do, and is more desired in author-push workflows. Otherwise,
535 arcanist will do, and is more desired in author-push workflows. Otherwise,
536 use local tags to record the ``Differential Revision`` association.
536 use local tags to record the ``Differential Revision`` association.
537
537
538 The --confirm option lets you confirm changesets before sending them. You
538 The --confirm option lets you confirm changesets before sending them. You
539 can also add following to your configuration file to make it default
539 can also add following to your configuration file to make it default
540 behaviour::
540 behaviour::
541
541
542 [phabsend]
542 [phabsend]
543 confirm = true
543 confirm = true
544
544
545 phabsend will check obsstore and the above association to decide whether to
545 phabsend will check obsstore and the above association to decide whether to
546 update an existing Differential Revision, or create a new one.
546 update an existing Differential Revision, or create a new one.
547 """
547 """
548 opts = pycompat.byteskwargs(opts)
548 opts = pycompat.byteskwargs(opts)
549 revs = list(revs) + opts.get(b'rev', [])
549 revs = list(revs) + opts.get(b'rev', [])
550 revs = scmutil.revrange(repo, revs)
550 revs = scmutil.revrange(repo, revs)
551
551
552 if not revs:
552 if not revs:
553 raise error.Abort(_(b'phabsend requires at least one changeset'))
553 raise error.Abort(_(b'phabsend requires at least one changeset'))
554 if opts.get(b'amend'):
554 if opts.get(b'amend'):
555 cmdutil.checkunfinished(repo)
555 cmdutil.checkunfinished(repo)
556
556
557 # {newnode: (oldnode, olddiff, olddrev}
557 # {newnode: (oldnode, olddiff, olddrev}
558 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
558 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
559
559
560 confirm = ui.configbool(b'phabsend', b'confirm')
560 confirm = ui.configbool(b'phabsend', b'confirm')
561 confirm |= bool(opts.get(b'confirm'))
561 confirm |= bool(opts.get(b'confirm'))
562 if confirm:
562 if confirm:
563 confirmed = _confirmbeforesend(repo, revs, oldmap)
563 confirmed = _confirmbeforesend(repo, revs, oldmap)
564 if not confirmed:
564 if not confirmed:
565 raise error.Abort(_(b'phabsend cancelled'))
565 raise error.Abort(_(b'phabsend cancelled'))
566
566
567 actions = []
567 actions = []
568 reviewers = opts.get(b'reviewer', [])
568 reviewers = opts.get(b'reviewer', [])
569 blockers = opts.get(b'blocker', [])
569 blockers = opts.get(b'blocker', [])
570 phids = []
570 phids = []
571 if reviewers:
571 if reviewers:
572 phids.extend(userphids(repo, reviewers))
572 phids.extend(userphids(repo, reviewers))
573 if blockers:
573 if blockers:
574 phids.extend(map(
574 phids.extend(map(
575 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
575 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
576 ))
576 ))
577 if phids:
577 if phids:
578 actions.append({b'type': b'reviewers.add', b'value': phids})
578 actions.append({b'type': b'reviewers.add', b'value': phids})
579
579
580 drevids = [] # [int]
580 drevids = [] # [int]
581 diffmap = {} # {newnode: diff}
581 diffmap = {} # {newnode: diff}
582
582
583 # Send patches one by one so we know their Differential Revision PHIDs and
583 # Send patches one by one so we know their Differential Revision PHIDs and
584 # can provide dependency relationship
584 # can provide dependency relationship
585 lastrevphid = None
585 lastrevphid = None
586 for rev in revs:
586 for rev in revs:
587 ui.debug(b'sending rev %d\n' % rev)
587 ui.debug(b'sending rev %d\n' % rev)
588 ctx = repo[rev]
588 ctx = repo[rev]
589
589
590 # Get Differential Revision ID
590 # Get Differential Revision ID
591 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
591 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
592 if oldnode != ctx.node() or opts.get(b'amend'):
592 if oldnode != ctx.node() or opts.get(b'amend'):
593 # Create or update Differential Revision
593 # Create or update Differential Revision
594 revision, diff = createdifferentialrevision(
594 revision, diff = createdifferentialrevision(
595 ctx, revid, lastrevphid, oldnode, olddiff, actions,
595 ctx, revid, lastrevphid, oldnode, olddiff, actions,
596 opts.get(b'comment'))
596 opts.get(b'comment'))
597 diffmap[ctx.node()] = diff
597 diffmap[ctx.node()] = diff
598 newrevid = int(revision[b'object'][b'id'])
598 newrevid = int(revision[b'object'][b'id'])
599 newrevphid = revision[b'object'][b'phid']
599 newrevphid = revision[b'object'][b'phid']
600 if revid:
600 if revid:
601 action = b'updated'
601 action = b'updated'
602 else:
602 else:
603 action = b'created'
603 action = b'created'
604
604
605 # Create a local tag to note the association, if commit message
605 # Create a local tag to note the association, if commit message
606 # does not have it already
606 # does not have it already
607 m = _differentialrevisiondescre.search(ctx.description())
607 m = _differentialrevisiondescre.search(ctx.description())
608 if not m or int(m.group(r'id')) != newrevid:
608 if not m or int(m.group(r'id')) != newrevid:
609 tagname = b'D%d' % newrevid
609 tagname = b'D%d' % newrevid
610 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
610 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
611 date=None, local=True)
611 date=None, local=True)
612 else:
612 else:
613 # Nothing changed. But still set "newrevphid" so the next revision
613 # Nothing changed. But still set "newrevphid" so the next revision
614 # could depend on this one and "newrevid" for the summary line.
614 # could depend on this one and "newrevid" for the summary line.
615 newrevphid = querydrev(repo, str(revid))[0][b'phid']
615 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
616 newrevid = revid
616 newrevid = revid
617 action = b'skipped'
617 action = b'skipped'
618
618
619 actiondesc = ui.label(
619 actiondesc = ui.label(
620 {b'created': _(b'created'),
620 {b'created': _(b'created'),
621 b'skipped': _(b'skipped'),
621 b'skipped': _(b'skipped'),
622 b'updated': _(b'updated')}[action],
622 b'updated': _(b'updated')}[action],
623 b'phabricator.action.%s' % action)
623 b'phabricator.action.%s' % action)
624 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
624 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
625 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
625 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
626 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
626 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
627 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
627 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
628 desc))
628 desc))
629 drevids.append(newrevid)
629 drevids.append(newrevid)
630 lastrevphid = newrevphid
630 lastrevphid = newrevphid
631
631
632 # Update commit messages and remove tags
632 # Update commit messages and remove tags
633 if opts.get(b'amend'):
633 if opts.get(b'amend'):
634 unfi = repo.unfiltered()
634 unfi = repo.unfiltered()
635 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
635 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
636 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
636 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
637 wnode = unfi[b'.'].node()
637 wnode = unfi[b'.'].node()
638 mapping = {} # {oldnode: [newnode]}
638 mapping = {} # {oldnode: [newnode]}
639 for i, rev in enumerate(revs):
639 for i, rev in enumerate(revs):
640 old = unfi[rev]
640 old = unfi[rev]
641 drevid = drevids[i]
641 drevid = drevids[i]
642 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
642 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
643 newdesc = getdescfromdrev(drev)
643 newdesc = getdescfromdrev(drev)
644 # Make sure commit message contain "Differential Revision"
644 # Make sure commit message contain "Differential Revision"
645 if old.description() != newdesc:
645 if old.description() != newdesc:
646 if old.phase() == phases.public:
646 if old.phase() == phases.public:
647 ui.warn(_("warning: not updating public commit %s\n")
647 ui.warn(_("warning: not updating public commit %s\n")
648 % scmutil.formatchangeid(old))
648 % scmutil.formatchangeid(old))
649 continue
649 continue
650 parents = [
650 parents = [
651 mapping.get(old.p1().node(), (old.p1(),))[0],
651 mapping.get(old.p1().node(), (old.p1(),))[0],
652 mapping.get(old.p2().node(), (old.p2(),))[0],
652 mapping.get(old.p2().node(), (old.p2(),))[0],
653 ]
653 ]
654 new = context.metadataonlyctx(
654 new = context.metadataonlyctx(
655 repo, old, parents=parents, text=newdesc,
655 repo, old, parents=parents, text=newdesc,
656 user=old.user(), date=old.date(), extra=old.extra())
656 user=old.user(), date=old.date(), extra=old.extra())
657
657
658 newnode = new.commit()
658 newnode = new.commit()
659
659
660 mapping[old.node()] = [newnode]
660 mapping[old.node()] = [newnode]
661 # Update diff property
661 # Update diff property
662 # If it fails just warn and keep going, otherwise the DREV
662 # If it fails just warn and keep going, otherwise the DREV
663 # associations will be lost
663 # associations will be lost
664 try:
664 try:
665 writediffproperties(unfi[newnode], diffmap[old.node()])
665 writediffproperties(unfi[newnode], diffmap[old.node()])
666 except util.urlerr.urlerror:
666 except util.urlerr.urlerror:
667 ui.warn(b'Failed to update metadata for D%s\n' % drevid)
667 ui.warn(b'Failed to update metadata for D%s\n' % drevid)
668 # Remove local tags since it's no longer necessary
668 # Remove local tags since it's no longer necessary
669 tagname = b'D%d' % drevid
669 tagname = b'D%d' % drevid
670 if tagname in repo.tags():
670 if tagname in repo.tags():
671 tags.tag(repo, tagname, nullid, message=None, user=None,
671 tags.tag(repo, tagname, nullid, message=None, user=None,
672 date=None, local=True)
672 date=None, local=True)
673 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
673 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
674 if wnode in mapping:
674 if wnode in mapping:
675 unfi.setparents(mapping[wnode][0])
675 unfi.setparents(mapping[wnode][0])
676
676
677 # Map from "hg:meta" keys to header understood by "hg import". The order is
677 # Map from "hg:meta" keys to header understood by "hg import". The order is
678 # consistent with "hg export" output.
678 # consistent with "hg export" output.
679 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
679 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
680 (b'branch', b'Branch'), (b'node', b'Node ID'),
680 (b'branch', b'Branch'), (b'node', b'Node ID'),
681 (b'parent', b'Parent ')])
681 (b'parent', b'Parent ')])
682
682
683 def _confirmbeforesend(repo, revs, oldmap):
683 def _confirmbeforesend(repo, revs, oldmap):
684 url, token = readurltoken(repo.ui)
684 url, token = readurltoken(repo.ui)
685 ui = repo.ui
685 ui = repo.ui
686 for rev in revs:
686 for rev in revs:
687 ctx = repo[rev]
687 ctx = repo[rev]
688 desc = ctx.description().splitlines()[0]
688 desc = ctx.description().splitlines()[0]
689 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
689 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
690 if drevid:
690 if drevid:
691 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
691 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
692 else:
692 else:
693 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
693 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
694
694
695 ui.write(_(b'%s - %s: %s\n')
695 ui.write(_(b'%s - %s: %s\n')
696 % (drevdesc,
696 % (drevdesc,
697 ui.label(bytes(ctx), b'phabricator.node'),
697 ui.label(bytes(ctx), b'phabricator.node'),
698 ui.label(desc, b'phabricator.desc')))
698 ui.label(desc, b'phabricator.desc')))
699
699
700 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
700 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
701 b'$$ &Yes $$ &No') % url):
701 b'$$ &Yes $$ &No') % url):
702 return False
702 return False
703
703
704 return True
704 return True
705
705
706 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
706 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
707 b'abandoned'}
707 b'abandoned'}
708
708
709 def _getstatusname(drev):
709 def _getstatusname(drev):
710 """get normalized status name from a Differential Revision"""
710 """get normalized status name from a Differential Revision"""
711 return drev[b'statusName'].replace(b' ', b'').lower()
711 return drev[b'statusName'].replace(b' ', b'').lower()
712
712
713 # Small language to specify differential revisions. Support symbols: (), :X,
713 # Small language to specify differential revisions. Support symbols: (), :X,
714 # +, and -.
714 # +, and -.
715
715
716 _elements = {
716 _elements = {
717 # token-type: binding-strength, primary, prefix, infix, suffix
717 # token-type: binding-strength, primary, prefix, infix, suffix
718 b'(': (12, None, (b'group', 1, b')'), None, None),
718 b'(': (12, None, (b'group', 1, b')'), None, None),
719 b':': (8, None, (b'ancestors', 8), None, None),
719 b':': (8, None, (b'ancestors', 8), None, None),
720 b'&': (5, None, None, (b'and_', 5), None),
720 b'&': (5, None, None, (b'and_', 5), None),
721 b'+': (4, None, None, (b'add', 4), None),
721 b'+': (4, None, None, (b'add', 4), None),
722 b'-': (4, None, None, (b'sub', 4), None),
722 b'-': (4, None, None, (b'sub', 4), None),
723 b')': (0, None, None, None, None),
723 b')': (0, None, None, None, None),
724 b'symbol': (0, b'symbol', None, None, None),
724 b'symbol': (0, b'symbol', None, None, None),
725 b'end': (0, None, None, None, None),
725 b'end': (0, None, None, None, None),
726 }
726 }
727
727
728 def _tokenize(text):
728 def _tokenize(text):
729 view = memoryview(text) # zero-copy slice
729 view = memoryview(text) # zero-copy slice
730 special = b'():+-& '
730 special = b'():+-& '
731 pos = 0
731 pos = 0
732 length = len(text)
732 length = len(text)
733 while pos < length:
733 while pos < length:
734 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
734 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
735 pycompat.iterbytestr(view[pos:])))
735 pycompat.iterbytestr(view[pos:])))
736 if symbol:
736 if symbol:
737 yield (b'symbol', symbol, pos)
737 yield (b'symbol', symbol, pos)
738 pos += len(symbol)
738 pos += len(symbol)
739 else: # special char, ignore space
739 else: # special char, ignore space
740 if text[pos] != b' ':
740 if text[pos] != b' ':
741 yield (text[pos], None, pos)
741 yield (text[pos], None, pos)
742 pos += 1
742 pos += 1
743 yield (b'end', None, pos)
743 yield (b'end', None, pos)
744
744
745 def _parse(text):
745 def _parse(text):
746 tree, pos = parser.parser(_elements).parse(_tokenize(text))
746 tree, pos = parser.parser(_elements).parse(_tokenize(text))
747 if pos != len(text):
747 if pos != len(text):
748 raise error.ParseError(b'invalid token', pos)
748 raise error.ParseError(b'invalid token', pos)
749 return tree
749 return tree
750
750
751 def _parsedrev(symbol):
751 def _parsedrev(symbol):
752 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
752 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
753 if symbol.startswith(b'D') and symbol[1:].isdigit():
753 if symbol.startswith(b'D') and symbol[1:].isdigit():
754 return int(symbol[1:])
754 return int(symbol[1:])
755 if symbol.isdigit():
755 if symbol.isdigit():
756 return int(symbol)
756 return int(symbol)
757
757
758 def _prefetchdrevs(tree):
758 def _prefetchdrevs(tree):
759 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
759 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
760 drevs = set()
760 drevs = set()
761 ancestordrevs = set()
761 ancestordrevs = set()
762 op = tree[0]
762 op = tree[0]
763 if op == b'symbol':
763 if op == b'symbol':
764 r = _parsedrev(tree[1])
764 r = _parsedrev(tree[1])
765 if r:
765 if r:
766 drevs.add(r)
766 drevs.add(r)
767 elif op == b'ancestors':
767 elif op == b'ancestors':
768 r, a = _prefetchdrevs(tree[1])
768 r, a = _prefetchdrevs(tree[1])
769 drevs.update(r)
769 drevs.update(r)
770 ancestordrevs.update(r)
770 ancestordrevs.update(r)
771 ancestordrevs.update(a)
771 ancestordrevs.update(a)
772 else:
772 else:
773 for t in tree[1:]:
773 for t in tree[1:]:
774 r, a = _prefetchdrevs(t)
774 r, a = _prefetchdrevs(t)
775 drevs.update(r)
775 drevs.update(r)
776 ancestordrevs.update(a)
776 ancestordrevs.update(a)
777 return drevs, ancestordrevs
777 return drevs, ancestordrevs
778
778
779 def querydrev(repo, spec):
779 def querydrev(repo, spec):
780 """return a list of "Differential Revision" dicts
780 """return a list of "Differential Revision" dicts
781
781
782 spec is a string using a simple query language, see docstring in phabread
782 spec is a string using a simple query language, see docstring in phabread
783 for details.
783 for details.
784
784
785 A "Differential Revision dict" looks like:
785 A "Differential Revision dict" looks like:
786
786
787 {
787 {
788 "id": "2",
788 "id": "2",
789 "phid": "PHID-DREV-672qvysjcczopag46qty",
789 "phid": "PHID-DREV-672qvysjcczopag46qty",
790 "title": "example",
790 "title": "example",
791 "uri": "https://phab.example.com/D2",
791 "uri": "https://phab.example.com/D2",
792 "dateCreated": "1499181406",
792 "dateCreated": "1499181406",
793 "dateModified": "1499182103",
793 "dateModified": "1499182103",
794 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
794 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
795 "status": "0",
795 "status": "0",
796 "statusName": "Needs Review",
796 "statusName": "Needs Review",
797 "properties": [],
797 "properties": [],
798 "branch": null,
798 "branch": null,
799 "summary": "",
799 "summary": "",
800 "testPlan": "",
800 "testPlan": "",
801 "lineCount": "2",
801 "lineCount": "2",
802 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
802 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
803 "diffs": [
803 "diffs": [
804 "3",
804 "3",
805 "4",
805 "4",
806 ],
806 ],
807 "commits": [],
807 "commits": [],
808 "reviewers": [],
808 "reviewers": [],
809 "ccs": [],
809 "ccs": [],
810 "hashes": [],
810 "hashes": [],
811 "auxiliary": {
811 "auxiliary": {
812 "phabricator:projects": [],
812 "phabricator:projects": [],
813 "phabricator:depends-on": [
813 "phabricator:depends-on": [
814 "PHID-DREV-gbapp366kutjebt7agcd"
814 "PHID-DREV-gbapp366kutjebt7agcd"
815 ]
815 ]
816 },
816 },
817 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
817 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
818 "sourcePath": null
818 "sourcePath": null
819 }
819 }
820 """
820 """
821 def fetch(params):
821 def fetch(params):
822 """params -> single drev or None"""
822 """params -> single drev or None"""
823 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
823 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
824 if key in prefetched:
824 if key in prefetched:
825 return prefetched[key]
825 return prefetched[key]
826 drevs = callconduit(repo.ui, b'differential.query', params)
826 drevs = callconduit(repo.ui, b'differential.query', params)
827 # Fill prefetched with the result
827 # Fill prefetched with the result
828 for drev in drevs:
828 for drev in drevs:
829 prefetched[drev[b'phid']] = drev
829 prefetched[drev[b'phid']] = drev
830 prefetched[int(drev[b'id'])] = drev
830 prefetched[int(drev[b'id'])] = drev
831 if key not in prefetched:
831 if key not in prefetched:
832 raise error.Abort(_(b'cannot get Differential Revision %r')
832 raise error.Abort(_(b'cannot get Differential Revision %r')
833 % params)
833 % params)
834 return prefetched[key]
834 return prefetched[key]
835
835
836 def getstack(topdrevids):
836 def getstack(topdrevids):
837 """given a top, get a stack from the bottom, [id] -> [id]"""
837 """given a top, get a stack from the bottom, [id] -> [id]"""
838 visited = set()
838 visited = set()
839 result = []
839 result = []
840 queue = [{b'ids': [i]} for i in topdrevids]
840 queue = [{b'ids': [i]} for i in topdrevids]
841 while queue:
841 while queue:
842 params = queue.pop()
842 params = queue.pop()
843 drev = fetch(params)
843 drev = fetch(params)
844 if drev[b'id'] in visited:
844 if drev[b'id'] in visited:
845 continue
845 continue
846 visited.add(drev[b'id'])
846 visited.add(drev[b'id'])
847 result.append(int(drev[b'id']))
847 result.append(int(drev[b'id']))
848 auxiliary = drev.get(b'auxiliary', {})
848 auxiliary = drev.get(b'auxiliary', {})
849 depends = auxiliary.get(b'phabricator:depends-on', [])
849 depends = auxiliary.get(b'phabricator:depends-on', [])
850 for phid in depends:
850 for phid in depends:
851 queue.append({b'phids': [phid]})
851 queue.append({b'phids': [phid]})
852 result.reverse()
852 result.reverse()
853 return smartset.baseset(result)
853 return smartset.baseset(result)
854
854
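# Worked example (hypothetical revision numbers): if topdrevids == [6] and the
# "phabricator:depends-on" chain is D6 -> D5 -> D4, getstack() fetches 6, 5 and
# 4 in turn and the reversed result is baseset([4, 5, 6]), i.e. the bottom of
# the stack comes first, which is the order patches are later emitted in.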
855 # Initialize prefetch cache
855 # Initialize prefetch cache
856 prefetched = {} # {id or phid: drev}
856 prefetched = {} # {id or phid: drev}
857
857
858 tree = _parse(spec)
858 tree = _parse(spec)
859 drevs, ancestordrevs = _prefetchdrevs(tree)
859 drevs, ancestordrevs = _prefetchdrevs(tree)
860
860
861 # developer config: phabricator.batchsize
861 # developer config: phabricator.batchsize
862 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
862 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
863
863
864 # Prefetch Differential Revisions in batch
864 # Prefetch Differential Revisions in batch
865 tofetch = set(drevs)
865 tofetch = set(drevs)
866 for r in ancestordrevs:
866 for r in ancestordrevs:
867 tofetch.update(range(max(1, r - batchsize), r + 1))
867 tofetch.update(range(max(1, r - batchsize), r + 1))
868 if drevs:
868 if drevs:
869 fetch({b'ids': list(tofetch)})
869 fetch({b'ids': list(tofetch)})
870 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
870 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
871
871
872 # Walk through the tree, return smartsets
872 # Walk through the tree, return smartsets
873 def walk(tree):
873 def walk(tree):
874 op = tree[0]
874 op = tree[0]
875 if op == b'symbol':
875 if op == b'symbol':
876 drev = _parsedrev(tree[1])
876 drev = _parsedrev(tree[1])
877 if drev:
877 if drev:
878 return smartset.baseset([drev])
878 return smartset.baseset([drev])
879 elif tree[1] in _knownstatusnames:
879 elif tree[1] in _knownstatusnames:
880 drevs = [r for r in validids
880 drevs = [r for r in validids
881 if _getstatusname(prefetched[r]) == tree[1]]
881 if _getstatusname(prefetched[r]) == tree[1]]
882 return smartset.baseset(drevs)
882 return smartset.baseset(drevs)
883 else:
883 else:
884 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
884 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
885 elif op in {b'and_', b'add', b'sub'}:
885 elif op in {b'and_', b'add', b'sub'}:
886 assert len(tree) == 3
886 assert len(tree) == 3
887 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
887 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
888 elif op == b'group':
888 elif op == b'group':
889 return walk(tree[1])
889 return walk(tree[1])
890 elif op == b'ancestors':
890 elif op == b'ancestors':
891 return getstack(walk(tree[1]))
891 return getstack(walk(tree[1]))
892 else:
892 else:
893 raise error.ProgrammingError(b'illegal tree: %r' % tree)
893 raise error.ProgrammingError(b'illegal tree: %r' % tree)
894
894
895 return [prefetched[r] for r in walk(tree)]
895 return [prefetched[r] for r in walk(tree)]
896
896
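The spec handed to querydrev must be a byte string: _parse() feeds it to a tokenizer that takes a memoryview of the text, which rejects str under Python 3 (see the TypeError expectation removed from the test further down). A minimal usage sketch, assuming a repo whose ui is already configured for Conduit access:

    # Hypothetical spec; phabread/phabupdate build theirs the same way.
    drevs = querydrev(repo, b':D123 & needsreview')
    for drev in drevs:
        repo.ui.write(b'D%s  %s\n' % (drev[b'id'], drev[b'uri']))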
897 def getdescfromdrev(drev):
897 def getdescfromdrev(drev):
898 """get description (commit message) from "Differential Revision"
898 """get description (commit message) from "Differential Revision"
899
899
900 This is similar to the differential.getcommitmessage API, but we only care
900 This is similar to the differential.getcommitmessage API, but we only care
901 about a limited set of fields: title, summary, test plan, and URL.
901 about a limited set of fields: title, summary, test plan, and URL.
902 """
902 """
903 title = drev[b'title']
903 title = drev[b'title']
904 summary = drev[b'summary'].rstrip()
904 summary = drev[b'summary'].rstrip()
905 testplan = drev[b'testPlan'].rstrip()
905 testplan = drev[b'testPlan'].rstrip()
906 if testplan:
906 if testplan:
907 testplan = b'Test Plan:\n%s' % testplan
907 testplan = b'Test Plan:\n%s' % testplan
908 uri = b'Differential Revision: %s' % drev[b'uri']
908 uri = b'Differential Revision: %s' % drev[b'uri']
909 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
909 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
910
910
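A worked example of the assembly above, with hypothetical field values; the empty test plan is dropped by the filter:

    drev = {
        b'title': b'example',
        b'summary': b'A short summary.\n',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D2',
    }
    # getdescfromdrev(drev) returns a single byte string equal to:
    #   b'example\n\n'
    #   b'A short summary.\n\n'
    #   b'Differential Revision: https://phab.example.com/D2'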
911 def getdiffmeta(diff):
911 def getdiffmeta(diff):
912 """get commit metadata (date, node, user, p1) from a diff object
912 """get commit metadata (date, node, user, p1) from a diff object
913
913
914 The metadata could be "hg:meta", sent by phabsend, like:
914 The metadata could be "hg:meta", sent by phabsend, like:
915
915
916 "properties": {
916 "properties": {
917 "hg:meta": {
917 "hg:meta": {
918 "date": "1499571514 25200",
918 "date": "1499571514 25200",
919 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
919 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
920 "user": "Foo Bar <foo@example.com>",
920 "user": "Foo Bar <foo@example.com>",
921 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
921 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
922 }
922 }
923 }
923 }
924
924
925 Or converted from "local:commits", sent by "arc", like:
925 Or converted from "local:commits", sent by "arc", like:
926
926
927 "properties": {
927 "properties": {
928 "local:commits": {
928 "local:commits": {
929 "98c08acae292b2faf60a279b4189beb6cff1414d": {
929 "98c08acae292b2faf60a279b4189beb6cff1414d": {
930 "author": "Foo Bar",
930 "author": "Foo Bar",
931 "time": 1499546314,
931 "time": 1499546314,
932 "branch": "default",
932 "branch": "default",
933 "tag": "",
933 "tag": "",
934 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
934 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
935 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
935 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
936 "local": "1000",
936 "local": "1000",
937 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
937 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
938 "summary": "...",
938 "summary": "...",
939 "message": "...",
939 "message": "...",
940 "authorEmail": "foo@example.com"
940 "authorEmail": "foo@example.com"
941 }
941 }
942 }
942 }
943 }
943 }
944
944
945 Note: metadata extracted from "local:commits" will lose time zone
945 Note: metadata extracted from "local:commits" will lose time zone
946 information.
946 information.
947 """
947 """
948 props = diff.get(b'properties') or {}
948 props = diff.get(b'properties') or {}
949 meta = props.get(b'hg:meta')
949 meta = props.get(b'hg:meta')
950 if not meta:
950 if not meta:
951 if props.get(b'local:commits'):
951 if props.get(b'local:commits'):
952 commit = sorted(props[b'local:commits'].values())[0]
952 commit = sorted(props[b'local:commits'].values())[0]
953 meta = {}
953 meta = {}
954 if b'author' in commit and b'authorEmail' in commit:
954 if b'author' in commit and b'authorEmail' in commit:
955 meta[b'user'] = b'%s <%s>' % (commit[b'author'],
955 meta[b'user'] = b'%s <%s>' % (commit[b'author'],
956 commit[b'authorEmail'])
956 commit[b'authorEmail'])
957 if b'time' in commit:
957 if b'time' in commit:
958 meta[b'date'] = b'%d 0' % int(commit[b'time'])
958 meta[b'date'] = b'%d 0' % int(commit[b'time'])
959 if b'branch' in commit:
959 if b'branch' in commit:
960 meta[b'branch'] = commit[b'branch']
960 meta[b'branch'] = commit[b'branch']
961 node = commit.get(b'commit', commit.get(b'rev'))
961 node = commit.get(b'commit', commit.get(b'rev'))
962 if node:
962 if node:
963 meta[b'node'] = node
963 meta[b'node'] = node
964 if len(commit.get(b'parents', ())) >= 1:
964 if len(commit.get(b'parents', ())) >= 1:
965 meta[b'parent'] = commit[b'parents'][0]
965 meta[b'parent'] = commit[b'parents'][0]
966 else:
966 else:
967 meta = {}
967 meta = {}
968 if b'date' not in meta and b'dateCreated' in diff:
968 if b'date' not in meta and b'dateCreated' in diff:
969 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
969 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
970 if b'branch' not in meta and diff.get(b'branch'):
970 if b'branch' not in meta and diff.get(b'branch'):
971 meta[b'branch'] = diff[b'branch']
971 meta[b'branch'] = diff[b'branch']
972 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
972 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
973 meta[b'parent'] = diff[b'sourceControlBaseRevision']
973 meta[b'parent'] = diff[b'sourceControlBaseRevision']
974 return meta
974 return meta
975
975
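A sketch of the "local:commits" conversion path, reusing the values from the docstring above; note the hard-coded zero time zone offset in the date:

    diff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'time': 1499546314,
                    b'branch': b'default',
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                },
            },
        },
    }
    # getdiffmeta(diff) yields:
    # {b'user': b'Foo Bar <foo@example.com>',
    #  b'date': b'1499546314 0',
    #  b'branch': b'default',
    #  b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
    #  b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}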
976 def readpatch(repo, drevs, write):
976 def readpatch(repo, drevs, write):
977 """generate a plain-text patch readable by 'hg import'
977 """generate a plain-text patch readable by 'hg import'
978
978
979 write is usually ui.write. drevs is what "querydrev" returns, results of
979 write is usually ui.write. drevs is what "querydrev" returns, results of
980 "differential.query".
980 "differential.query".
981 """
981 """
982 # Prefetch hg:meta property for all diffs
982 # Prefetch hg:meta property for all diffs
983 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
983 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
984 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
984 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
985
985
986 # Generate patch for each drev
986 # Generate patch for each drev
987 for drev in drevs:
987 for drev in drevs:
988 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
988 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
989
989
990 diffid = max(int(v) for v in drev[b'diffs'])
990 diffid = max(int(v) for v in drev[b'diffs'])
991 body = callconduit(repo.ui, b'differential.getrawdiff',
991 body = callconduit(repo.ui, b'differential.getrawdiff',
992 {b'diffID': diffid})
992 {b'diffID': diffid})
993 desc = getdescfromdrev(drev)
993 desc = getdescfromdrev(drev)
994 header = b'# HG changeset patch\n'
994 header = b'# HG changeset patch\n'
995
995
996 # Try to preserve metadata from hg:meta property. Write hg patch
996 # Try to preserve metadata from hg:meta property. Write hg patch
997 # headers that can be read by the "import" command. See patchheadermap
997 # headers that can be read by the "import" command. See patchheadermap
998 # and extract in mercurial/patch.py for supported headers.
998 # and extract in mercurial/patch.py for supported headers.
999 meta = getdiffmeta(diffs[b'%d' % diffid])
999 meta = getdiffmeta(diffs[b'%d' % diffid])
1000 for k in _metanamemap.keys():
1000 for k in _metanamemap.keys():
1001 if k in meta:
1001 if k in meta:
1002 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1002 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1003
1003
1004 content = b'%s%s\n%s' % (header, desc, body)
1004 content = b'%s%s\n%s' % (header, desc, body)
1005 write(content)
1005 write(content)
1006
1006
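A minimal invocation sketch, mirroring what phabread does below. The "# User", "# Date", "# Branch", "# Node ID" and "# Parent" header lines visible in the test output further down come from _metanamemap, which is defined earlier in this file and not shown in this excerpt:

    drevs = querydrev(repo, b'D123')        # hypothetical revision number
    readpatch(repo, drevs, repo.ui.write)   # writes an 'hg import'-able patch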
1007 @vcrcommand(b'phabread',
1007 @vcrcommand(b'phabread',
1008 [(b'', b'stack', False, _(b'read dependencies'))],
1008 [(b'', b'stack', False, _(b'read dependencies'))],
1009 _(b'DREVSPEC [OPTIONS]'),
1009 _(b'DREVSPEC [OPTIONS]'),
1010 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1010 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1011 def phabread(ui, repo, spec, **opts):
1011 def phabread(ui, repo, spec, **opts):
1012 """print patches from Phabricator suitable for importing
1012 """print patches from Phabricator suitable for importing
1013
1013
1014 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1014 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1015 the number ``123``. It could also have common operators like ``+``, ``-``,
1015 the number ``123``. It could also have common operators like ``+``, ``-``,
1016 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1016 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1017 select a stack.
1017 select a stack.
1018
1018
1019 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1019 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1020 could be used to filter patches by status. For performance reasons, they
1020 could be used to filter patches by status. For performance reasons, they
1021 only filter revisions already selected by the rest of the query and cannot be used alone.
1021 only filter revisions already selected by the rest of the query and cannot be used alone.
1022
1022
1023 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1023 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1024 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1024 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1025 stack up to D9.
1025 stack up to D9.
1026
1026
1027 If --stack is given, follow dependency information and read all patches.
1027 If --stack is given, follow dependency information and read all patches.
1028 It is equivalent to the ``:`` operator.
1028 It is equivalent to the ``:`` operator.
1029 """
1029 """
1030 opts = pycompat.byteskwargs(opts)
1030 opts = pycompat.byteskwargs(opts)
1031 if opts.get(b'stack'):
1031 if opts.get(b'stack'):
1032 spec = b':(%s)' % spec
1032 spec = b':(%s)' % spec
1033 drevs = querydrev(repo, spec)
1033 drevs = querydrev(repo, spec)
1034 readpatch(repo, drevs, ui.write)
1034 readpatch(repo, drevs, ui.write)
1035
1035
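As the wrapping above shows, --stack is simply the ``:`` operator applied to the whole spec:

    # The --stack handling above makes these two calls equivalent
    # (hypothetical spec):
    spec = b'D6+8'
    querydrev(repo, b':(%s)' % spec)   # hg phabread --stack D6+8
    querydrev(repo, b':(D6+8)')        # hg phabread ':(D6+8)'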
1036 @vcrcommand(b'phabupdate',
1036 @vcrcommand(b'phabupdate',
1037 [(b'', b'accept', False, _(b'accept revisions')),
1037 [(b'', b'accept', False, _(b'accept revisions')),
1038 (b'', b'reject', False, _(b'reject revisions')),
1038 (b'', b'reject', False, _(b'reject revisions')),
1039 (b'', b'abandon', False, _(b'abandon revisions')),
1039 (b'', b'abandon', False, _(b'abandon revisions')),
1040 (b'', b'reclaim', False, _(b'reclaim revisions')),
1040 (b'', b'reclaim', False, _(b'reclaim revisions')),
1041 (b'm', b'comment', b'', _(b'comment on the last revision')),
1041 (b'm', b'comment', b'', _(b'comment on the last revision')),
1042 ], _(b'DREVSPEC [OPTIONS]'),
1042 ], _(b'DREVSPEC [OPTIONS]'),
1043 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1043 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1044 def phabupdate(ui, repo, spec, **opts):
1044 def phabupdate(ui, repo, spec, **opts):
1045 """update Differential Revisions in batch
1045 """update Differential Revisions in batch
1046
1046
1047 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1047 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1048 """
1048 """
1049 opts = pycompat.byteskwargs(opts)
1049 opts = pycompat.byteskwargs(opts)
1050 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1050 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1051 if len(flags) > 1:
1051 if len(flags) > 1:
1052 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1052 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1053
1053
1054 actions = []
1054 actions = []
1055 for f in flags:
1055 for f in flags:
1056 actions.append({b'type': f, b'value': b'true'})
1056 actions.append({b'type': f, b'value': b'true'})
1057
1057
1058 drevs = querydrev(repo, spec)
1058 drevs = querydrev(repo, spec)
1059 for i, drev in enumerate(drevs):
1059 for i, drev in enumerate(drevs):
1060 if i + 1 == len(drevs) and opts.get(b'comment'):
1060 if i + 1 == len(drevs) and opts.get(b'comment'):
1061 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1061 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1062 if actions:
1062 if actions:
1063 params = {b'objectIdentifier': drev[b'phid'],
1063 params = {b'objectIdentifier': drev[b'phid'],
1064 b'transactions': actions}
1064 b'transactions': actions}
1065 callconduit(ui, b'differential.revision.edit', params)
1065 callconduit(ui, b'differential.revision.edit', params)
1066
1066
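For illustration, a ``phabupdate --accept -m '...'`` run builds a transaction list like the following for the last selected revision (the PHID is the example value from the querydrev docstring, the comment text is hypothetical):

    params = {
        b'objectIdentifier': b'PHID-DREV-672qvysjcczopag46qty',
        b'transactions': [
            {b'type': b'accept', b'value': b'true'},
            {b'type': b'comment', b'value': b'Looks good to me.'},
        ],
    }
    callconduit(ui, b'differential.revision.edit', params)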
1067 templatekeyword = registrar.templatekeyword()
1067 templatekeyword = registrar.templatekeyword()
1068
1068
1069 @templatekeyword(b'phabreview', requires={b'ctx'})
1069 @templatekeyword(b'phabreview', requires={b'ctx'})
1070 def template_review(context, mapping):
1070 def template_review(context, mapping):
1071 """:phabreview: Object describing the review for this changeset.
1071 """:phabreview: Object describing the review for this changeset.
1072 Has attributes `url` and `id`.
1072 Has attributes `url` and `id`.
1073 """
1073 """
1074 ctx = context.resource(mapping, b'ctx')
1074 ctx = context.resource(mapping, b'ctx')
1075 m = _differentialrevisiondescre.search(ctx.description())
1075 m = _differentialrevisiondescre.search(ctx.description())
1076 if m:
1076 if m:
1077 return templateutil.hybriddict({
1077 return templateutil.hybriddict({
1078 b'url': m.group(r'url'),
1078 b'url': m.group(r'url'),
1079 b'id': b"D%s" % m.group(r'id'),
1079 b'id': b"D%s" % m.group(r'id'),
1080 })
1080 })
1081 else:
1081 else:
1082 tags = ctx.repo().nodetags(ctx.node())
1082 tags = ctx.repo().nodetags(ctx.node())
1083 for t in tags:
1083 for t in tags:
1084 if _differentialrevisiontagre.match(t):
1084 if _differentialrevisiontagre.match(t):
1085 url = ctx.repo().ui.config(b'phabricator', b'url')
1085 url = ctx.repo().ui.config(b'phabricator', b'url')
1086 if not url.endswith(b'/'):
1086 if not url.endswith(b'/'):
1087 url += b'/'
1087 url += b'/'
1088 url += t
1088 url += t
1089
1089
1090 return templateutil.hybriddict({
1090 return templateutil.hybriddict({
1091 b'url': url,
1091 b'url': url,
1092 b'id': t,
1092 b'id': t,
1093 })
1093 })
1094 return None
1094 return None
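The keyword first looks for a "Differential Revision:" line in the changeset description and otherwise falls back to a local D-number tag combined with the configured phabricator.url. For a description carrying https://phab.mercurial-scm.org/D1193 it renders as the dict seen in the template test below:

    # {phabreview|json}  ->  {"id": "D1193", "url": "https://phab.mercurial-scm.org/D1193"}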
@@ -1,209 +1,157 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 > EOF
5 > EOF
6 $ hg init repo
6 $ hg init repo
7 $ cd repo
7 $ cd repo
8 $ cat >> .hg/hgrc <<EOF
8 $ cat >> .hg/hgrc <<EOF
9 > [phabricator]
9 > [phabricator]
10 > url = https://phab.mercurial-scm.org/
10 > url = https://phab.mercurial-scm.org/
11 > callsign = HG
11 > callsign = HG
12 >
12 >
13 > [auth]
13 > [auth]
14 > hgphab.schemes = https
14 > hgphab.schemes = https
15 > hgphab.prefix = phab.mercurial-scm.org
15 > hgphab.prefix = phab.mercurial-scm.org
16 > # When working on the extension and making phabricator interaction
16 > # When working on the extension and making phabricator interaction
17 > # changes, edit this to be a real phabricator token. When done, edit
17 > # changes, edit this to be a real phabricator token. When done, edit
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
19 > # token with this value.
19 > # token with this value.
20 > hgphab.phabtoken = cli-hahayouwish
20 > hgphab.phabtoken = cli-hahayouwish
21 > EOF
21 > EOF
22 $ VCR="$TESTDIR/phabricator"
22 $ VCR="$TESTDIR/phabricator"
23
23
24 Errors are handled reasonably. We override the phabtoken here so that
24 Errors are handled reasonably. We override the phabtoken here so that
25 when you're developing changes to phabricator.py you can edit the
25 when you're developing changes to phabricator.py you can edit the
26 above config and have a real token in the test but not have to edit
26 above config and have a real token in the test but not have to edit
27 this test.
27 this test.
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
31
31
32 Basic phabread:
32 Basic phabread:
33 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
33 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
34 # HG changeset patch
34 # HG changeset patch
35 # Date 1536771503 0
35 # Date 1536771503 0
36 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
36 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
37 exchangev2: start to implement pull with wire protocol v2
37 exchangev2: start to implement pull with wire protocol v2
38
38
39 Wire protocol version 2 will take a substantially different
39 Wire protocol version 2 will take a substantially different
40 approach to exchange than version 1 (at least as far as pulling
40 approach to exchange than version 1 (at least as far as pulling
41 is concerned).
41 is concerned).
42
42
43 This commit establishes a new exchangev2 module for holding
43 This commit establishes a new exchangev2 module for holding
44
44
45 phabupdate with an accept:
45 phabupdate with an accept:
46 $ hg phabupdate --accept D4564 \
46 $ hg phabupdate --accept D4564 \
47 > -m 'I think I like where this is headed. Will read rest of series later.'\
47 > -m 'I think I like where this is headed. Will read rest of series later.'\
48 > --test-vcr "$VCR/accept-4564.json"
48 > --test-vcr "$VCR/accept-4564.json"
49
49
50 Create a differential diff:
50 Create a differential diff:
51 $ HGENCODING=utf-8; export HGENCODING
51 $ HGENCODING=utf-8; export HGENCODING
52 $ echo alpha > alpha
52 $ echo alpha > alpha
53 $ hg ci --addremove -m 'create alpha for phabricator test €'
53 $ hg ci --addremove -m 'create alpha for phabricator test €'
54 adding alpha
54 adding alpha
55 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
55 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
56 D1190 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
56 D1190 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
57 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
57 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
58 $ echo more >> alpha
58 $ echo more >> alpha
59 $ HGEDITOR=true hg ci --amend
59 $ HGEDITOR=true hg ci --amend
60 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a86ed7d85e86-b7a54f3b-amend.hg
60 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a86ed7d85e86-b7a54f3b-amend.hg
61 $ echo beta > beta
61 $ echo beta > beta
62 $ hg ci --addremove -m 'create beta for phabricator test'
62 $ hg ci --addremove -m 'create beta for phabricator test'
63 adding beta
63 adding beta
64 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
64 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
65 D1190 - updated - d940d39fb603: create alpha for phabricator test \xe2\x82\xac (esc)
65 D1190 - updated - d940d39fb603: create alpha for phabricator test \xe2\x82\xac (esc)
66 D1191 - created - 4b2486dfc8c7: create beta for phabricator test
66 D1191 - created - 4b2486dfc8c7: create beta for phabricator test
67 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/4b2486dfc8c7-d90584fa-phabsend.hg
67 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/4b2486dfc8c7-d90584fa-phabsend.hg
68 $ unset HGENCODING
68 $ unset HGENCODING
69
69
70 The amend won't explode after posting a public commit. The local tag is left
70 The amend won't explode after posting a public commit. The local tag is left
71 behind to identify it.
71 behind to identify it.
72
72
73 $ echo 'public change' > beta
73 $ echo 'public change' > beta
74 $ hg ci -m 'create public change for phabricator testing'
74 $ hg ci -m 'create public change for phabricator testing'
75 $ hg phase --public .
75 $ hg phase --public .
76 $ echo 'draft change' > alpha
76 $ echo 'draft change' > alpha
77 $ hg ci -m 'create draft change for phabricator testing'
77 $ hg ci -m 'create draft change for phabricator testing'
78 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
78 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
79 D1192 - created - 24ffd6bca53a: create public change for phabricator testing
79 D1192 - created - 24ffd6bca53a: create public change for phabricator testing
80 D1193 - created - ac331633be79: create draft change for phabricator testing
80 D1193 - created - ac331633be79: create draft change for phabricator testing
81 warning: not updating public commit 2:24ffd6bca53a
81 warning: not updating public commit 2:24ffd6bca53a
82 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ac331633be79-719b961c-phabsend.hg
82 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ac331633be79-719b961c-phabsend.hg
83 $ hg tags -v
83 $ hg tags -v
84 tip 3:a19f1434f9a5
84 tip 3:a19f1434f9a5
85 D1192 2:24ffd6bca53a local
85 D1192 2:24ffd6bca53a local
86
86
87 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
87 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
88 > {
88 > {
89 > "constraints": {
89 > "constraints": {
90 > "isBot": true
90 > "isBot": true
91 > }
91 > }
92 > }
92 > }
93 > EOF
93 > EOF
94 {
94 {
95 "cursor": {
95 "cursor": {
96 "after": null,
96 "after": null,
97 "before": null,
97 "before": null,
98 "limit": 100,
98 "limit": 100,
99 "order": null
99 "order": null
100 },
100 },
101 "data": [],
101 "data": [],
102 "maps": {},
102 "maps": {},
103 "query": {
103 "query": {
104 "queryKey": null
104 "queryKey": null
105 }
105 }
106 }
106 }
107
107
108 Template keywords
108 Template keywords
109 $ hg log -T'{rev} {phabreview|json}\n'
109 $ hg log -T'{rev} {phabreview|json}\n'
110 3 {"id": "D1193", "url": "https://phab.mercurial-scm.org/D1193"}
110 3 {"id": "D1193", "url": "https://phab.mercurial-scm.org/D1193"}
111 2 {"id": "D1192", "url": "https://phab.mercurial-scm.org/D1192"}
111 2 {"id": "D1192", "url": "https://phab.mercurial-scm.org/D1192"}
112 1 {"id": "D1191", "url": "https://phab.mercurial-scm.org/D1191"}
112 1 {"id": "D1191", "url": "https://phab.mercurial-scm.org/D1191"}
113 0 {"id": "D1190", "url": "https://phab.mercurial-scm.org/D1190"}
113 0 {"id": "D1190", "url": "https://phab.mercurial-scm.org/D1190"}
114
114
115 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
115 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
116 3 https://phab.mercurial-scm.org/D1193 D1193
116 3 https://phab.mercurial-scm.org/D1193 D1193
117 2 https://phab.mercurial-scm.org/D1192 D1192
117 2 https://phab.mercurial-scm.org/D1192 D1192
118 1 https://phab.mercurial-scm.org/D1191 D1191
118 1 https://phab.mercurial-scm.org/D1191 D1191
119 0 https://phab.mercurial-scm.org/D1190 D1190
119 0 https://phab.mercurial-scm.org/D1190 D1190
120
120
121 Commenting when phabsending:
121 Commenting when phabsending:
122 $ echo comment > comment
122 $ echo comment > comment
123 $ hg ci --addremove -m "create comment for phabricator test"
123 $ hg ci --addremove -m "create comment for phabricator test"
124 adding comment
124 adding comment
125 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
125 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
126 D1253 - created - a7ee4bac036a: create comment for phabricator test
126 D1253 - created - a7ee4bac036a: create comment for phabricator test
127 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a7ee4bac036a-8009b5a0-phabsend.hg
127 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a7ee4bac036a-8009b5a0-phabsend.hg
128 $ echo comment2 >> comment
128 $ echo comment2 >> comment
129 $ hg ci --amend
129 $ hg ci --amend
130 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/81fce7de1b7d-05339e5b-amend.hg
130 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/81fce7de1b7d-05339e5b-amend.hg
131 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
131 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
132 D1253 - updated - 1acd4b60af38: create comment for phabricator test
132 D1253 - updated - 1acd4b60af38: create comment for phabricator test
133
133
134 Phabsending a skipped commit:
134 Phabsending a skipped commit:
135 #if no-py3
136 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
135 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
137 D1253 - skipped - 1acd4b60af38: create comment for phabricator test
136 D1253 - skipped - 1acd4b60af38: create comment for phabricator test
138 #endif
139 BROKEN: shouldn't error under py3
140 #if py3
141 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
142 ** unknown exception encountered, please report by visiting
143 ** https://mercurial-scm.org/wiki/BugTracker
144 ** Python 3* (glob)
145 ** Mercurial Distributed SCM (version *) (glob)
146 ** Extensions loaded: phabricator
147 Traceback (most recent call last):
148 File "*/install/bin/hg", line *, in <module> (glob)
149 dispatch.run()
150 File "*/install/lib/python/mercurial/dispatch.py", line *, in run (glob)
151 status = dispatch(req)
152 File "*/install/lib/python/mercurial/dispatch.py", line *, in dispatch (glob)
153 ret = _runcatch(req) or 0
154 File "*/install/lib/python/mercurial/dispatch.py", line *, in _runcatch (glob)
155 return _callcatch(ui, _runcatchfunc)
156 File "*/install/lib/python/mercurial/dispatch.py", line *, in _callcatch (glob)
157 return scmutil.callcatch(ui, func)
158 File "*/install/lib/python/mercurial/scmutil.py", line *, in callcatch (glob)
159 return func()
160 File "*/install/lib/python/mercurial/dispatch.py", line *, in _runcatchfunc (glob)
161 return _dispatch(req)
162 File "*/install/lib/python/mercurial/dispatch.py", line *, in _dispatch (glob)
163 cmdpats, cmdoptions)
164 File "*/install/lib/python/mercurial/dispatch.py", line *, in runcommand (glob)
165 ret = _runcommand(ui, options, cmd, d)
166 File "*/install/lib/python/mercurial/dispatch.py", line *, in _runcommand (glob)
167 return cmdfunc()
168 File "*/install/lib/python/mercurial/dispatch.py", line *, in <lambda> (glob)
169 d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
170 File "*/install/lib/python/mercurial/util.py", line *, in check (glob)
171 return func(*args, **kwargs)
172 File "*/install/lib/python/hgext/phabricator.py", line *, in inner (glob)
173 return fn(*args, **kwargs)
174 File "*/install/lib/python/hgext/phabricator.py", line *, in phabsend (glob)
175 newrevphid = querydrev(repo, str(revid))[0][b'phid']
176 File "*/install/lib/python/hgext/phabricator.py", line *, in querydrev (glob)
177 tree = _parse(spec)
178 File "*/install/lib/python/hgext/phabricator.py", line *, in _parse (glob)
179 tree, pos = parser.parser(_elements).parse(_tokenize(text))
180 File "*/install/lib/python/mercurial/parser.py", line *, in parse (glob)
181 self._advance()
182 File "*/install/lib/python/mercurial/parser.py", line *, in _advance (glob)
183 self.current = next(self._iter, None)
184 File "*/install/lib/python/hgext/phabricator.py", line *, in _tokenize (glob)
185 view = memoryview(text) # zero-copy slice
186 TypeError: memoryview: a bytes-like object is required, not 'str'
187 [1]
188 #endif
189
137
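The expectation removed above documented the Python 3 failure being fixed: phabsend passed str(revid) into querydrev, whose tokenizer takes a memoryview of the spec and therefore needs bytes. With the spec now formatted as a byte string, the skipped-commit case behaves the same on both Python versions, so the #if no-py3 / #if py3 split is gone. A sketch of the assumed shape of the call-site fix (the changed line itself is outside this excerpt):

    # Assumed fix: format the revision number as bytes before querying.
    newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']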
190 Phabreading a DREV with a local:commits time as a string:
138 Phabreading a DREV with a local:commits time as a string:
191 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
139 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
192 # HG changeset patch
140 # HG changeset patch
193 # User test <test>
141 # User test <test>
194 # Date 1562019844 0
142 # Date 1562019844 0
195 # Branch default
143 # Branch default
196 # Node ID da5c8c6bf23a36b6e3af011bc3734460692c23ce
144 # Node ID da5c8c6bf23a36b6e3af011bc3734460692c23ce
197 # Parent 1f634396406d03e565ed645370e5fecd062cf215
145 # Parent 1f634396406d03e565ed645370e5fecd062cf215
198 test string time
146 test string time
199
147
200 Differential Revision: https://phab.mercurial-scm.org/D1285
148 Differential Revision: https://phab.mercurial-scm.org/D1285
201 diff --git a/test b/test
149 diff --git a/test b/test
202 new file mode 100644
150 new file mode 100644
203 --- /dev/null
151 --- /dev/null
204 +++ b/test
152 +++ b/test
205 @@ * @@ (glob)
153 @@ * @@ (glob)
206 +test
154 +test
207
155
208
156
209 $ cd ..
157 $ cd ..