##// END OF EJS Templates
phabricator: add a "phabstatus" template keyword...
Denis Laxalde -
r44292:79c01212 default
parent child Browse files
Show More
@@ -1,1723 +1,1745 b''
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

A "phabstatus" view for :hg:`show` is also provided; it displays status
information of Phabricator differentials associated with unfinished
changesets.

By default, Phabricator requires ``Test Plan`` which might prevent some
changesets from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that is not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 logcmdutil,
69 logcmdutil,
70 match,
70 match,
71 mdiff,
71 mdiff,
72 obsutil,
72 obsutil,
73 parser,
73 parser,
74 patch,
74 patch,
75 phases,
75 phases,
76 pycompat,
76 pycompat,
77 scmutil,
77 scmutil,
78 smartset,
78 smartset,
79 tags,
79 tags,
80 templatefilters,
80 templatefilters,
81 templateutil,
81 templateutil,
82 url as urlmod,
82 url as urlmod,
83 util,
83 util,
84 )
84 )
85 from mercurial.utils import (
85 from mercurial.utils import (
86 procutil,
86 procutil,
87 stringutil,
87 stringutil,
88 )
88 )
89 from . import show
89 from . import show
90
90
91
91
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# All commands/config items/template keywords are registered through one
# shared exthelper instance; the aliases below are what Mercurial's
# extension loader looks up by name.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect labels used when writing phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every command registered via vcrcommand(); lets
# tests record/replay HTTP traffic through the vcr library.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
147
147
148
148
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Works like ``@command`` but appends the hidden ``--test-vcr PATH``
    flag (``_VCR_FLAGS``).  When that flag is supplied, all HTTP requests
    made while the command runs are recorded to, or replayed from, the
    vcr cassette at PATH, keeping tests hermetic.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method, and body parameters agree;
        # JSON-valued parameters are compared structurally so key ordering
        # differences do not cause spurious mismatches.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the conduit API token before the request is written to the
        # cassette so recordings are safe to share.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Likewise drop server cookies from recorded responses.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does dynamic imports that confuse demandimport, so
                # import it with demandimport disabled.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
225
225
226
226
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        # Recursively flatten dicts and lists into PHP-style bracketed keys.
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            # Scalar leaf: record it under the accumulated key.
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
252
252
253
253
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises ``error.Abort`` when ``phabricator.url`` is unset or when no
    matching ``auth.*.phabtoken`` entry can be found for that URL.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Reuse the standard [auth] machinery to find credentials for the URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
282
282
283
283
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``).
    The request is sent either through the builtin HTTP opener or, when
    ``phabricator.curlcmd`` is configured, by piping through curl.
    Raises ``error.Abort`` when Conduit reports an error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Feed the form-encoded payload to curl on stdin (-d @-).
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert every unicode string in the decoded JSON back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
327
327
328
328
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
352
352
353
353
def getrepophid(repo):
    """given callsign, return repository PHID or None

    Looks up ``phabricator.repophid`` first; otherwise resolves
    ``phabricator.callsign`` via ``diffusion.repository.search`` and caches
    the answer back into the (in-memory) config.
    """
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache the resolved PHID so later calls skip the Conduit round trip.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
373
373
374
374
375 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
375 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
376 _differentialrevisiondescre = re.compile(
376 _differentialrevisiondescre = re.compile(
377 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
377 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
378 )
378 )
379
379
380
380
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is authoritative, skip the
                # precursor-overlap sanity check below.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by retagging it to nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
474
474
475
475
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.

    The commit message marker ("Differential Revision: ...") wins over
    "D123"-style local tags; revs with neither map to ``None``.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result
497
497
498
498
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)

    Concatenates the raw diff chunks for ``ctx`` against its first parent,
    honoring ``diffopts``.
    """
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()
507
507
508
508
class DiffChangeType(object):
    """Per-file change kinds understood by Differential diffs.

    Numeric values must stay in sync with Phabricator's own
    DifferentialChangeType constants.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
518
518
519
519
class DiffFileType(object):
    """File content kinds understood by Differential diffs.

    Numeric values must stay in sync with Phabricator's own
    DifferentialChangeType file-type constants.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
524
524
525
525
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names are camelCase because they are serialized verbatim into
    the Conduit API payload.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
539
539
540
540
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # camelCase attribute names are required because attr.asdict() output is
    # sent verbatim to the Conduit API, which expects these exact keys.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every 'new:'-prefixed metadata entry under the matching
        'old:' key (keys without the prefix are copied onto themselves)."""
        # Iterate over a snapshot of the keys since we mutate the dict.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the unix file mode of the old version of the file."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the unix file mode of the new version of the file."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk (converted to a bytes-keyed dict) and fold its
        line counts into this change's totals."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
580
580
581
581
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # camelCase attribute names are required because attr.asdict() output is
    # sent verbatim to the Conduit API, which expects these exact keys.
    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the file's current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
608
608
609
609
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    # Use an effectively-infinite context so the whole file appears in the
    # diff, matching what Phabricator expects for lossless application.
    opts = mdiff.diffopts(git=True, context=32767)
    matcher = match.exact([fname])
    _pfctx, _fctx, header, hunks = next(
        patch.diffhunks(ctx.repo(), ctx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, hunklines in hunks:
        oldoff, oldlen, newoff, newlen = ranges
        # The first line is the "@@ ... @@" marker; the corpus is the rest.
        body = b''.join(hunklines[1:])
        # Run diffstat over header + hunk to get add/delete line counts.
        statlines = list(header) + list(hunklines)
        _mf, _mt, added, deleted, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        hunk = phabhunk(oldoff, oldlen, newoff, newlen, body, added, deleted)
        pchange.addhunk(hunk)
638
638
639
639
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip chunks the server already holds (e.g. resumed uploads).
            if chunk[b'complete']:
                continue
            start, end = int(chunk[b'byteStart']), int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
665
665
666
666
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    ui = fctx.repo().ui
    sha = hashlib.sha256(fctx.data()).hexdigest()

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fctx.path(),
            b'contentLength': fctx.size(),
            b'contentHash': pycompat.bytestr(sha),
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was allocated but the content is missing: chunked upload.
            uploadchunks(fctx, fphid)
        else:
            # Small file: send it in a single request.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fctx.path(),
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
702
702
703
703
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents are identical.  If it's left as IMAGE/BINARY the web UI
        # might try to display it, so mark as TEXT and mirror the new-side
        # metadata onto the old side.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
725
725
726
726
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        guessed = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = guessed
        # Images get their own type so the web UI can render them inline.
        if guessed.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
739
739
740
740
# Copied from mercurial/patch.py
# Map a file's flag (b'l' symlink, b'x' executable, b'' regular) to the
# corresponding git file-mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
743
743
744
744
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # Check the file content in this revision, then (if it exists) the
        # first-parent version, since both sides end up in the diff corpus.
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    else:
        return False
760
760
761
761
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary / non-UTF-8 deletions carry no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
774
774
775
775
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only record modes when they changed (e.g. chmod +x).
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
794
794
795
795
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Note: mutates ``removed`` in place, dropping the source of each detected
    move so a later addremoved() call won't see it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        # renamed is a (originalname, filenode) tuple, or None/False
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared in this commit: this is a move.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Second+ destination from an already-moved source:
                # upgrade the source record to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Flush the synthesized source-side records after all adds are processed,
    # since a later add may still have upgraded a COPY/MOVE record above.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
859
859
860
860
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Build the diff object locally, then create it server-side via the
    # "differential.creatediff" Conduit API.
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed = ctx.p1().status(ctx)[:3]
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
886
886
887
887
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    # First property: "hg:meta", the fields needed to reconstruct the
    # changeset exactly on import.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # Second property: "local:commits", the arcanist-compatible commit map.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
924
924
925
925
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of Conduit result dicts.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of the old and new nodes to decide
        # whether a fresh Differential diff needs to be uploaded at all.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
995
995
996
996
def userphids(repo, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    data = result[b'data']
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1012
1012
1013
1013
1014 @vcrcommand(
1014 @vcrcommand(
1015 b'phabsend',
1015 b'phabsend',
1016 [
1016 [
1017 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1017 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1018 (b'', b'amend', True, _(b'update commit messages')),
1018 (b'', b'amend', True, _(b'update commit messages')),
1019 (b'', b'reviewer', [], _(b'specify reviewers')),
1019 (b'', b'reviewer', [], _(b'specify reviewers')),
1020 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1020 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1021 (
1021 (
1022 b'm',
1022 b'm',
1023 b'comment',
1023 b'comment',
1024 b'',
1024 b'',
1025 _(b'add a comment to Revisions with new/updated Diffs'),
1025 _(b'add a comment to Revisions with new/updated Diffs'),
1026 ),
1026 ),
1027 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1027 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1028 ],
1028 ],
1029 _(b'REV [OPTIONS]'),
1029 _(b'REV [OPTIONS]'),
1030 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1030 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1031 )
1031 )
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    # Revisions may come both as positional arguments and via --rev.
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line forces the prompt even when the
    # phabsend.confirm config knob is off.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        # Blocking reviewers are wrapped in the "blocking(PHID)" form
        # understood by the reviewers.add transaction.
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite parents through "mapping" so the amended stack
                    # stays linear on top of already-rewritten ancestors.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1221
1221
1222
1222
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): the trailing space in b'Parent ' looks intentional,
        # presumably to match "hg export" header spacing — confirm before
        # normalizing.
        (b'parent', b'Parent '),
    ]
)
1234
1234
1235
1235
def _confirmbeforesend(repo, revs, oldmap):
    """Show the changesets about to be sent and prompt for confirmation.

    Each revision is printed with its existing Differential Revision id
    (from ``oldmap``) or "NEW" when one will be created.  Returns True to
    proceed, False when the user declines the prompt.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # First line of the commit message serves as the summary.
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice returns the choice index: 0 is "Yes", non-zero means "No".
    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1263
1263
1264
1264
# Normalized Differential Revision status names (in the lowercased,
# space-free form produced by _getstatusname) that may be used as symbols
# in the drevspec query language.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1272
1272
1273
1273
1274 def _getstatusname(drev):
1274 def _getstatusname(drev):
1275 """get normalized status name from a Differential Revision"""
1275 """get normalized status name from a Differential Revision"""
1276 return drev[b'statusName'].replace(b' ', b'').lower()
1276 return drev[b'statusName'].replace(b' ', b'').lower()
1277
1277
1278
1278
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table consumed by mercurial's generic Pratt parser
# (parser.parser); each entry defines how a token binds and combines.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1293
1293
1294
1294
def _tokenize(text):
    """Yield (token-type, value, position) triples for a drevspec string.

    Symbols are maximal runs of non-special bytes and are yielded as
    (b'symbol', run, pos); special characters are yielded as themselves
    with a None value, except spaces which are skipped.  A final
    (b'end', None, pos) token terminates the stream.
    """
    specials = b'():+-& '
    view = memoryview(text)  # slice without copying
    size = len(text)
    offset = 0
    while offset < size:
        # Collect the longest run of non-special bytes starting at offset.
        run = b''.join(
            itertools.takewhile(
                lambda ch: ch not in specials,
                pycompat.iterbytestr(view[offset:]),
            )
        )
        if not run:
            # Special character: spaces are dropped, others are emitted.
            token = text[offset : offset + 1]
            if token != b' ':
                yield (token, None, offset)
            offset += 1
        else:
            yield (b'symbol', run, offset)
            offset += len(run)
    yield (b'end', None, offset)
1314
1314
1315
1315
def _parse(text):
    """Parse a drevspec string into a tree; abort on trailing input.

    Raises error.ParseError when the tokenizer/parser stops before the
    end of ``text``.
    """
    drevparser = parser.parser(_elements)
    tree, consumed = drevparser.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
1321
1321
1322
1322
1323 def _parsedrev(symbol):
1323 def _parsedrev(symbol):
1324 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1324 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1325 if symbol.startswith(b'D') and symbol[1:].isdigit():
1325 if symbol.startswith(b'D') and symbol[1:].isdigit():
1326 return int(symbol[1:])
1326 return int(symbol[1:])
1327 if symbol.isdigit():
1327 if symbol.isdigit():
1328 return int(symbol)
1328 return int(symbol)
1329
1329
1330
1330
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Walks the parsed drevspec tree iteratively.  Every numeric symbol
    contributes to the first set; symbols found anywhere under an
    b'ancestors' operator additionally land in the second set.
    """
    drevs = set()
    ancestordrevs = set()
    # Each stack entry is (node, under_ancestors): the flag is sticky for
    # the whole subtree below an b'ancestors' operator.
    pending = [(tree, False)]
    while pending:
        node, underancestors = pending.pop()
        kind = node[0]
        if kind == b'symbol':
            drev = _parsedrev(node[1])
            if drev:
                drevs.add(drev)
                if underancestors:
                    ancestordrevs.add(drev)
        elif kind == b'ancestors':
            pending.append((node[1], True))
        else:
            for child in node[1:]:
                pending.append((child, underancestors))
    return drevs, ancestordrevs
1351
1351
1352
1352
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        # Query key: a numeric id ("ids") or a PHID ("phids").
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" links towards the bottom of the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Fetch a window of ids below each ancestor top to reduce the
        # number of round-trips getstack would otherwise make.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status-name symbols select all prefetched revisions whose
                # normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1475
1475
1476
1476
def getdescfromdrev(drev):
    """Build a commit message from a "Differential Revision" dict.

    Similar to the differential.getcommitmessage API, but only the
    title, summary, test plan and URL fields are used.  Empty sections
    are dropped; the rest are joined by blank lines.
    """
    sections = [drev[b'title']]
    sections.append(drev[b'summary'].rstrip())
    plan = drev[b'testPlan'].rstrip()
    sections.append(b'Test Plan:\n%s' % plan if plan else b'')
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(part for part in sections if part)
1490
1490
1491
1491
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    Metadata is taken from the "hg:meta" diff property (written by
    phabsend) when present::

        "properties": {"hg:meta": {"date": ..., "node": ..., "user": ...,
                                   "parent": ...}}

    Otherwise it is reconstructed from the first entry of the
    "local:commits" property (written by arc), which maps node hashes to
    dicts carrying "author"/"authorEmail"/"time"/"branch"/"commit"/
    "rev"/"parents" fields.  Note: metadata extracted from
    "local:commits" loses time zone information.

    Whatever is still missing is backfilled from the diff object itself
    (creation date, branch, base revision).
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone is unknown here; record as UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
    # Backfill gaps from the diff object itself.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1558
1558
1559
1559
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    # Only the latest diff (highest id) of each drev is exported.
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
1591
1591
1592
1592
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is shorthand for the ":" stack-selection operator
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1623
1623
1624
1624
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # The comment, if any, is attached to the last revision only
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1661
1661
1662
1662
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line in the description
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    # Otherwise fall back to a local D<number> tag on this node
    for t in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(t):
            url = ctx.repo().ui.config(b'phabricator', b'url')
            if not url.endswith(b'/'):
                url += b'/'
            return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
1685
1685
1686
1686
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # getdrevmap() maps revisions with no associated Differential Revision
    # to None; bail out instead of querying conduit with a null id.
    if drevid is None:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1707
1708
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    # Map each unfinished revision to its Differential Revision id (or None)
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            # No Differential Revision associated; excluded from the graph
            unknownrevs.append(rev)

    # Fetch all statuses in one conduit round-trip, then index them by rev
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Extension hook run by the displayer for each changeset row
        drev = drevsbyrev[ctx.rev()]
        ui.write(b"\n%(uri)s %(statusName)s\n" % drev)

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now