phabricator: add support for using the vcr library to mock interactions...
Augie Fackler
r39686:d8f07b16 default
@@ -1,981 +1,1012 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration
7 """simple Phabricator integration
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 changesets from being sent. The requirement can be disabled by changing the
15 changesets from being sent. The requirement can be disabled by changing the
16 ``differential.require-test-plan-field`` config on the server side.
16 ``differential.require-test-plan-field`` config on the server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use the builtin HTTP library to
28 # curl command to use. If not set (default), use the builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import itertools
44 import itertools
45 import json
45 import json
46 import operator
46 import operator
47 import re
47 import re
48
48
49 from mercurial.node import bin, nullid
49 from mercurial.node import bin, nullid
50 from mercurial.i18n import _
50 from mercurial.i18n import _
51 from mercurial import (
51 from mercurial import (
52 cmdutil,
52 cmdutil,
53 context,
53 context,
54 encoding,
54 encoding,
55 error,
55 error,
56 httpconnection as httpconnectionmod,
56 httpconnection as httpconnectionmod,
57 mdiff,
57 mdiff,
58 obsutil,
58 obsutil,
59 parser,
59 parser,
60 patch,
60 patch,
61 registrar,
61 registrar,
62 scmutil,
62 scmutil,
63 smartset,
63 smartset,
64 tags,
64 tags,
65 url as urlmod,
65 url as urlmod,
66 util,
66 util,
67 )
67 )
68 from mercurial.utils import (
68 from mercurial.utils import (
69 procutil,
69 procutil,
70 stringutil,
70 stringutil,
71 )
71 )
72
72
73 cmdtable = {}
73 cmdtable = {}
74 command = registrar.command(cmdtable)
74 command = registrar.command(cmdtable)
75
75
76 configtable = {}
76 configtable = {}
77 configitem = registrar.configitem(configtable)
77 configitem = registrar.configitem(configtable)
78
78
79 # developer config: phabricator.batchsize
79 # developer config: phabricator.batchsize
80 configitem(b'phabricator', b'batchsize',
80 configitem(b'phabricator', b'batchsize',
81 default=12,
81 default=12,
82 )
82 )
83 configitem(b'phabricator', b'callsign',
83 configitem(b'phabricator', b'callsign',
84 default=None,
84 default=None,
85 )
85 )
86 configitem(b'phabricator', b'curlcmd',
86 configitem(b'phabricator', b'curlcmd',
87 default=None,
87 default=None,
88 )
88 )
89 # developer config: phabricator.repophid
89 # developer config: phabricator.repophid
90 configitem(b'phabricator', b'repophid',
90 configitem(b'phabricator', b'repophid',
91 default=None,
91 default=None,
92 )
92 )
93 configitem(b'phabricator', b'url',
93 configitem(b'phabricator', b'url',
94 default=None,
94 default=None,
95 )
95 )
96 configitem(b'phabsend', b'confirm',
96 configitem(b'phabsend', b'confirm',
97 default=False,
97 default=False,
98 )
98 )
99
99
100 colortable = {
100 colortable = {
101 b'phabricator.action.created': b'green',
101 b'phabricator.action.created': b'green',
102 b'phabricator.action.skipped': b'magenta',
102 b'phabricator.action.skipped': b'magenta',
103 b'phabricator.action.updated': b'magenta',
103 b'phabricator.action.updated': b'magenta',
104 b'phabricator.desc': b'',
104 b'phabricator.desc': b'',
105 b'phabricator.drev': b'bold',
105 b'phabricator.drev': b'bold',
106 b'phabricator.node': b'',
106 b'phabricator.node': b'',
107 }
107 }
108
108
109 _VCR_FLAGS = [
110 (b'', b'test-vcr', b'',
111 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
112 b', otherwise will mock all http requests using the specified vcr file.'
113 b' (ADVANCED)'
114 )),
115 ]
116
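# Descriptive note (added): vcrcommand wraps ``command`` so that decorated
# commands also accept the --test-vcr flag defined above. When a cassette
# path is passed, the ``vcr`` library is imported (with demandimport
# disabled) and urlmod's httpconnection/httpsconnection are patched so that
# HTTP traffic is recorded to, or replayed from, the cassette file.
# Illustrative use in a test environment with ``vcr`` installed:
#   $ hg phabsend -r . --test-vcr phabsend.json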
117 def vcrcommand(name, flags, spec):
118 fullflags = flags + _VCR_FLAGS
119 def decorate(fn):
120 def inner(*args, **kwargs):
121 cassette = kwargs.pop(r'test_vcr', None)
122 if cassette:
123 import hgdemandimport
124 with hgdemandimport.deactivated():
125 import vcr as vcrmod
126 import vcr.stubs as stubs
127 vcr = vcrmod.VCR(
128 serializer=r'json',
129 custom_patches=[
130 (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
131 (urlmod, 'httpsconnection', stubs.VCRHTTPSConnection),
132 ])
133 with vcr.use_cassette(cassette):
134 return fn(*args, **kwargs)
135 return fn(*args, **kwargs)
136 inner.__name__ = fn.__name__
137 return command(name, fullflags, spec)(inner)
138 return decorate
139
109 def urlencodenested(params):
140 def urlencodenested(params):
110 """like urlencode, but works with nested parameters.
141 """like urlencode, but works with nested parameters.
111
142
112 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
143 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
113 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
144 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
114 urlencode. Note: the encoding is consistent with PHP's http_build_query.
145 urlencode. Note: the encoding is consistent with PHP's http_build_query.
115 """
146 """
116 flatparams = util.sortdict()
147 flatparams = util.sortdict()
117 def process(prefix, obj):
148 def process(prefix, obj):
118 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
149 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
119 if items is None:
150 if items is None:
120 flatparams[prefix] = obj
151 flatparams[prefix] = obj
121 else:
152 else:
122 for k, v in items(obj):
153 for k, v in items(obj):
123 if prefix:
154 if prefix:
124 process(b'%s[%s]' % (prefix, k), v)
155 process(b'%s[%s]' % (prefix, k), v)
125 else:
156 else:
126 process(k, v)
157 process(k, v)
127 process(b'', params)
158 process(b'', params)
128 return util.urlreq.urlencode(flatparams)
159 return util.urlreq.urlencode(flatparams)
129
160
130 printed_token_warning = False
161 printed_token_warning = False
131
162
132 def readlegacytoken(repo, url):
163 def readlegacytoken(repo, url):
133 """Transitional support for old phabricator tokens.
164 """Transitional support for old phabricator tokens.
134
165
135 Remove before the 4.7 release.
166 Remove before the 4.7 release.
136 """
167 """
137 groups = {}
168 groups = {}
138 for key, val in repo.ui.configitems(b'phabricator.auth'):
169 for key, val in repo.ui.configitems(b'phabricator.auth'):
139 if b'.' not in key:
170 if b'.' not in key:
140 repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n")
171 repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n")
141 % key)
172 % key)
142 continue
173 continue
143 group, setting = key.rsplit(b'.', 1)
174 group, setting = key.rsplit(b'.', 1)
144 groups.setdefault(group, {})[setting] = val
175 groups.setdefault(group, {})[setting] = val
145
176
146 token = None
177 token = None
147 for group, auth in groups.iteritems():
178 for group, auth in groups.iteritems():
148 if url != auth.get(b'url'):
179 if url != auth.get(b'url'):
149 continue
180 continue
150 token = auth.get(b'token')
181 token = auth.get(b'token')
151 if token:
182 if token:
152 break
183 break
153
184
154 global printed_token_warning
185 global printed_token_warning
155
186
156 if token and not printed_token_warning:
187 if token and not printed_token_warning:
157 printed_token_warning = True
188 printed_token_warning = True
158 repo.ui.warn(_(b'phabricator.auth.token is deprecated - please '
189 repo.ui.warn(_(b'phabricator.auth.token is deprecated - please '
159 b'migrate to auth.phabtoken.\n'))
190 b'migrate to auth.phabtoken.\n'))
160 return token
191 return token
161
192
162 def readurltoken(repo):
193 def readurltoken(repo):
163 """return conduit url, token and make sure they exist
194 """return conduit url, token and make sure they exist
164
195
165 Currently read from [auth] config section. In the future, it might
196 Currently read from [auth] config section. In the future, it might
166 make sense to read from .arcconfig and .arcrc as well.
197 make sense to read from .arcconfig and .arcrc as well.
167 """
198 """
168 url = repo.ui.config(b'phabricator', b'url')
199 url = repo.ui.config(b'phabricator', b'url')
169 if not url:
200 if not url:
170 raise error.Abort(_(b'config %s.%s is required')
201 raise error.Abort(_(b'config %s.%s is required')
171 % (b'phabricator', b'url'))
202 % (b'phabricator', b'url'))
172
203
173 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
204 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
174 token = None
205 token = None
175
206
176 if res:
207 if res:
177 group, auth = res
208 group, auth = res
178
209
179 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
210 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
180
211
181 token = auth.get(b'phabtoken')
212 token = auth.get(b'phabtoken')
182
213
183 if not token:
214 if not token:
184 token = readlegacytoken(repo, url)
215 token = readlegacytoken(repo, url)
185 if not token:
216 if not token:
186 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
217 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
187 % (url,))
218 % (url,))
188
219
189 return url, token
220 return url, token
190
221
191 def callconduit(repo, name, params):
222 def callconduit(repo, name, params):
192 """call Conduit API, params is a dict. return json.loads result, or None"""
223 """call Conduit API, params is a dict. return json.loads result, or None"""
193 host, token = readurltoken(repo)
224 host, token = readurltoken(repo)
194 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
225 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
195 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
226 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
196 params = params.copy()
227 params = params.copy()
197 params[b'api.token'] = token
228 params[b'api.token'] = token
198 data = urlencodenested(params)
229 data = urlencodenested(params)
199 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
230 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
200 if curlcmd:
231 if curlcmd:
201 sin, sout = procutil.popen2(b'%s -d @- %s'
232 sin, sout = procutil.popen2(b'%s -d @- %s'
202 % (curlcmd, procutil.shellquote(url)))
233 % (curlcmd, procutil.shellquote(url)))
203 sin.write(data)
234 sin.write(data)
204 sin.close()
235 sin.close()
205 body = sout.read()
236 body = sout.read()
206 else:
237 else:
207 urlopener = urlmod.opener(repo.ui, authinfo)
238 urlopener = urlmod.opener(repo.ui, authinfo)
208 request = util.urlreq.request(url, data=data)
239 request = util.urlreq.request(url, data=data)
209 body = urlopener.open(request).read()
240 body = urlopener.open(request).read()
210 repo.ui.debug(b'Conduit Response: %s\n' % body)
241 repo.ui.debug(b'Conduit Response: %s\n' % body)
211 parsed = json.loads(body)
242 parsed = json.loads(body)
212 if parsed.get(r'error_code'):
243 if parsed.get(r'error_code'):
213 msg = (_(b'Conduit Error (%s): %s')
244 msg = (_(b'Conduit Error (%s): %s')
214 % (parsed[r'error_code'], parsed[r'error_info']))
245 % (parsed[r'error_code'], parsed[r'error_info']))
215 raise error.Abort(msg)
246 raise error.Abort(msg)
216 return parsed[r'result']
247 return parsed[r'result']
217
248
218 @command(b'debugcallconduit', [], _(b'METHOD'))
249 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
219 def debugcallconduit(ui, repo, name):
250 def debugcallconduit(ui, repo, name):
220 """call Conduit API
251 """call Conduit API
221
252
222 Call parameters are read from stdin as a JSON blob. Result will be written
253 Call parameters are read from stdin as a JSON blob. Result will be written
223 to stdout as a JSON blob.
254 to stdout as a JSON blob.
224 """
255 """
225 params = json.loads(ui.fin.read())
256 params = json.loads(ui.fin.read())
226 result = callconduit(repo, name, params)
257 result = callconduit(repo, name, params)
227 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
258 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
228 ui.write(b'%s\n' % s)
259 ui.write(b'%s\n' % s)
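# Illustrative session (assumes a configured [phabricator] url and token):
#   $ echo '{"ids": [123]}' | hg debugcallconduit differential.query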
229
260
230 def getrepophid(repo):
261 def getrepophid(repo):
231 """given callsign, return repository PHID or None"""
262 """given callsign, return repository PHID or None"""
232 # developer config: phabricator.repophid
263 # developer config: phabricator.repophid
233 repophid = repo.ui.config(b'phabricator', b'repophid')
264 repophid = repo.ui.config(b'phabricator', b'repophid')
234 if repophid:
265 if repophid:
235 return repophid
266 return repophid
236 callsign = repo.ui.config(b'phabricator', b'callsign')
267 callsign = repo.ui.config(b'phabricator', b'callsign')
237 if not callsign:
268 if not callsign:
238 return None
269 return None
239 query = callconduit(repo, b'diffusion.repository.search',
270 query = callconduit(repo, b'diffusion.repository.search',
240 {b'constraints': {b'callsigns': [callsign]}})
271 {b'constraints': {b'callsigns': [callsign]}})
241 if len(query[r'data']) == 0:
272 if len(query[r'data']) == 0:
242 return None
273 return None
243 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
274 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
244 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
275 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
245 return repophid
276 return repophid
246
277
247 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
278 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
248 _differentialrevisiondescre = re.compile(
279 _differentialrevisiondescre = re.compile(
249 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
280 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
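# The description regex above matches trailer lines such as (illustrative):
#   Differential Revision: https://phab.example.com/D123
# capturing both the full URL and the numeric id (here, 123).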
250
281
251 def getoldnodedrevmap(repo, nodelist):
282 def getoldnodedrevmap(repo, nodelist):
252 """find previous nodes that has been sent to Phabricator
283 """find previous nodes that has been sent to Phabricator
253
284
254 return {node: (oldnode, Differential diff, Differential Revision ID)}
285 return {node: (oldnode, Differential diff, Differential Revision ID)}
255 for node in nodelist with known previous sent versions, or associated
286 for node in nodelist with known previous sent versions, or associated
256 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
287 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
257 be ``None``.
288 be ``None``.
258
289
259 Examines commit messages like "Differential Revision:" to get the
290 Examines commit messages like "Differential Revision:" to get the
260 association information.
291 association information.
261
292
262 If such a commit message line is not found, examine all precursors and their
293 If such a commit message line is not found, examine all precursors and their
263 tags. Tags with a format like "D1234" are considered a match, and the node
294 tags. Tags with a format like "D1234" are considered a match, and the node
264 with that tag, and the number after "D" (ex. 1234), will be returned.
295 with that tag, and the number after "D" (ex. 1234), will be returned.
265
296
266 The ``old node``, if not None, is guaranteed to be the last diff of the
297 The ``old node``, if not None, is guaranteed to be the last diff of the
267 corresponding Differential Revision, and to exist in the repo.
298 corresponding Differential Revision, and to exist in the repo.
268 """
299 """
269 url, token = readurltoken(repo)
300 url, token = readurltoken(repo)
270 unfi = repo.unfiltered()
301 unfi = repo.unfiltered()
271 nodemap = unfi.changelog.nodemap
302 nodemap = unfi.changelog.nodemap
272
303
273 result = {} # {node: (oldnode?, lastdiff?, drev)}
304 result = {} # {node: (oldnode?, lastdiff?, drev)}
274 toconfirm = {} # {node: (force, {precnode}, drev)}
305 toconfirm = {} # {node: (force, {precnode}, drev)}
275 for node in nodelist:
306 for node in nodelist:
276 ctx = unfi[node]
307 ctx = unfi[node]
277 # For tags like "D123", put them into "toconfirm" to verify later
308 # For tags like "D123", put them into "toconfirm" to verify later
278 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
309 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
279 for n in precnodes:
310 for n in precnodes:
280 if n in nodemap:
311 if n in nodemap:
281 for tag in unfi.nodetags(n):
312 for tag in unfi.nodetags(n):
282 m = _differentialrevisiontagre.match(tag)
313 m = _differentialrevisiontagre.match(tag)
283 if m:
314 if m:
284 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
315 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
285 continue
316 continue
286
317
287 # Check commit message
318 # Check commit message
288 m = _differentialrevisiondescre.search(ctx.description())
319 m = _differentialrevisiondescre.search(ctx.description())
289 if m:
320 if m:
290 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
321 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
291
322
292 # Double check if tags are genuine by collecting all old nodes from
323 # Double check if tags are genuine by collecting all old nodes from
293 # Phabricator, and expect precursors to overlap with it.
324 # Phabricator, and expect precursors to overlap with it.
294 if toconfirm:
325 if toconfirm:
295 drevs = [drev for force, precs, drev in toconfirm.values()]
326 drevs = [drev for force, precs, drev in toconfirm.values()]
296 alldiffs = callconduit(unfi, b'differential.querydiffs',
327 alldiffs = callconduit(unfi, b'differential.querydiffs',
297 {b'revisionIDs': drevs})
328 {b'revisionIDs': drevs})
298 getnode = lambda d: bin(encoding.unitolocal(
329 getnode = lambda d: bin(encoding.unitolocal(
299 getdiffmeta(d).get(r'node', b''))) or None
330 getdiffmeta(d).get(r'node', b''))) or None
300 for newnode, (force, precset, drev) in toconfirm.items():
331 for newnode, (force, precset, drev) in toconfirm.items():
301 diffs = [d for d in alldiffs.values()
332 diffs = [d for d in alldiffs.values()
302 if int(d[r'revisionID']) == drev]
333 if int(d[r'revisionID']) == drev]
303
334
304 # "precursors" as known by Phabricator
335 # "precursors" as known by Phabricator
305 phprecset = set(getnode(d) for d in diffs)
336 phprecset = set(getnode(d) for d in diffs)
306
337
307 # Ignore if precursors (Phabricator and local repo) do not overlap,
338 # Ignore if precursors (Phabricator and local repo) do not overlap,
308 # and force is not set (when commit message says nothing)
339 # and force is not set (when commit message says nothing)
309 if not force and not bool(phprecset & precset):
340 if not force and not bool(phprecset & precset):
310 tagname = b'D%d' % drev
341 tagname = b'D%d' % drev
311 tags.tag(repo, tagname, nullid, message=None, user=None,
342 tags.tag(repo, tagname, nullid, message=None, user=None,
312 date=None, local=True)
343 date=None, local=True)
313 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
344 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
314 b'Differential history\n') % drev)
345 b'Differential history\n') % drev)
315 continue
346 continue
316
347
317 # Find the last node using Phabricator metadata, and make sure it
348 # Find the last node using Phabricator metadata, and make sure it
318 # exists in the repo
349 # exists in the repo
319 oldnode = lastdiff = None
350 oldnode = lastdiff = None
320 if diffs:
351 if diffs:
321 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
352 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
322 oldnode = getnode(lastdiff)
353 oldnode = getnode(lastdiff)
323 if oldnode and oldnode not in nodemap:
354 if oldnode and oldnode not in nodemap:
324 oldnode = None
355 oldnode = None
325
356
326 result[newnode] = (oldnode, lastdiff, drev)
357 result[newnode] = (oldnode, lastdiff, drev)
327
358
328 return result
359 return result
329
360
330 def getdiff(ctx, diffopts):
361 def getdiff(ctx, diffopts):
331 """plain-text diff without header (user, commit message, etc)"""
362 """plain-text diff without header (user, commit message, etc)"""
332 output = util.stringio()
363 output = util.stringio()
333 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
364 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
334 None, opts=diffopts):
365 None, opts=diffopts):
335 output.write(chunk)
366 output.write(chunk)
336 return output.getvalue()
367 return output.getvalue()
337
368
338 def creatediff(ctx):
369 def creatediff(ctx):
339 """create a Differential Diff"""
370 """create a Differential Diff"""
340 repo = ctx.repo()
371 repo = ctx.repo()
341 repophid = getrepophid(repo)
372 repophid = getrepophid(repo)
342 # Create a "Differential Diff" via "differential.createrawdiff" API
373 # Create a "Differential Diff" via "differential.createrawdiff" API
343 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
374 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
344 if repophid:
375 if repophid:
345 params[b'repositoryPHID'] = repophid
376 params[b'repositoryPHID'] = repophid
346 diff = callconduit(repo, b'differential.createrawdiff', params)
377 diff = callconduit(repo, b'differential.createrawdiff', params)
347 if not diff:
378 if not diff:
348 raise error.Abort(_(b'cannot create diff for %s') % ctx)
379 raise error.Abort(_(b'cannot create diff for %s') % ctx)
349 return diff
380 return diff
350
381
351 def writediffproperties(ctx, diff):
382 def writediffproperties(ctx, diff):
352 """write metadata to diff so patches could be applied losslessly"""
383 """write metadata to diff so patches could be applied losslessly"""
353 params = {
384 params = {
354 b'diff_id': diff[r'id'],
385 b'diff_id': diff[r'id'],
355 b'name': b'hg:meta',
386 b'name': b'hg:meta',
356 b'data': json.dumps({
387 b'data': json.dumps({
357 b'user': ctx.user(),
388 b'user': ctx.user(),
358 b'date': b'%d %d' % ctx.date(),
389 b'date': b'%d %d' % ctx.date(),
359 b'node': ctx.hex(),
390 b'node': ctx.hex(),
360 b'parent': ctx.p1().hex(),
391 b'parent': ctx.p1().hex(),
361 }),
392 }),
362 }
393 }
363 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
394 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
364
395
365 params = {
396 params = {
366 b'diff_id': diff[r'id'],
397 b'diff_id': diff[r'id'],
367 b'name': b'local:commits',
398 b'name': b'local:commits',
368 b'data': json.dumps({
399 b'data': json.dumps({
369 ctx.hex(): {
400 ctx.hex(): {
370 b'author': stringutil.person(ctx.user()),
401 b'author': stringutil.person(ctx.user()),
371 b'authorEmail': stringutil.email(ctx.user()),
402 b'authorEmail': stringutil.email(ctx.user()),
372 b'time': ctx.date()[0],
403 b'time': ctx.date()[0],
373 },
404 },
374 }),
405 }),
375 }
406 }
376 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
407 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
377
408
378 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
409 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
379 olddiff=None, actions=None):
410 olddiff=None, actions=None):
380 """create or update a Differential Revision
411 """create or update a Differential Revision
381
412
382 If revid is None, create a new Differential Revision, otherwise update
413 If revid is None, create a new Differential Revision, otherwise update
383 revid. If parentrevid is not None, set it as a dependency.
414 revid. If parentrevid is not None, set it as a dependency.
384
415
385 If oldnode is not None, check if the patch content (without commit message
416 If oldnode is not None, check if the patch content (without commit message
386 and metadata) has changed before creating another diff.
417 and metadata) has changed before creating another diff.
387
418
388 If actions is not None, they will be appended to the transaction.
419 If actions is not None, they will be appended to the transaction.
389 """
420 """
390 repo = ctx.repo()
421 repo = ctx.repo()
391 if oldnode:
422 if oldnode:
392 diffopts = mdiff.diffopts(git=True, context=32767)
423 diffopts = mdiff.diffopts(git=True, context=32767)
393 oldctx = repo.unfiltered()[oldnode]
424 oldctx = repo.unfiltered()[oldnode]
394 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
425 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
395 else:
426 else:
396 neednewdiff = True
427 neednewdiff = True
397
428
398 transactions = []
429 transactions = []
399 if neednewdiff:
430 if neednewdiff:
400 diff = creatediff(ctx)
431 diff = creatediff(ctx)
401 transactions.append({b'type': b'update', b'value': diff[r'phid']})
432 transactions.append({b'type': b'update', b'value': diff[r'phid']})
402 else:
433 else:
403 # Even if we don't need to upload a new diff because the patch content
434 # Even if we don't need to upload a new diff because the patch content
404 # does not change, we might still need to update its metadata so
435 # does not change, we might still need to update its metadata so
405 # pushers can know the correct node metadata.
436 # pushers can know the correct node metadata.
406 assert olddiff
437 assert olddiff
407 diff = olddiff
438 diff = olddiff
408 writediffproperties(ctx, diff)
439 writediffproperties(ctx, diff)
409
440
410 # Use a temporary summary to set dependency. There might be better ways but
441 # Use a temporary summary to set dependency. There might be better ways but
411 # I cannot find them for now. But do not do that if we are updating an
442 # I cannot find them for now. But do not do that if we are updating an
412 # existing revision (revid is not None) since that introduces visible
443 # existing revision (revid is not None) since that introduces visible
413 # churn (someone edited "Summary" twice) on the web page.
444 # churn (someone edited "Summary" twice) on the web page.
414 if parentrevid and revid is None:
445 if parentrevid and revid is None:
415 summary = b'Depends on D%s' % parentrevid
446 summary = b'Depends on D%s' % parentrevid
416 transactions += [{b'type': b'summary', b'value': summary},
447 transactions += [{b'type': b'summary', b'value': summary},
417 {b'type': b'summary', b'value': b' '}]
448 {b'type': b'summary', b'value': b' '}]
418
449
419 if actions:
450 if actions:
420 transactions += actions
451 transactions += actions
421
452
422 # Parse commit message and update related fields.
453 # Parse commit message and update related fields.
423 desc = ctx.description()
454 desc = ctx.description()
424 info = callconduit(repo, b'differential.parsecommitmessage',
455 info = callconduit(repo, b'differential.parsecommitmessage',
425 {b'corpus': desc})
456 {b'corpus': desc})
426 for k, v in info[r'fields'].items():
457 for k, v in info[r'fields'].items():
427 if k in [b'title', b'summary', b'testPlan']:
458 if k in [b'title', b'summary', b'testPlan']:
428 transactions.append({b'type': k, b'value': v})
459 transactions.append({b'type': k, b'value': v})
429
460
430 params = {b'transactions': transactions}
461 params = {b'transactions': transactions}
431 if revid is not None:
462 if revid is not None:
432 # Update an existing Differential Revision
463 # Update an existing Differential Revision
433 params[b'objectIdentifier'] = revid
464 params[b'objectIdentifier'] = revid
434
465
435 revision = callconduit(repo, b'differential.revision.edit', params)
466 revision = callconduit(repo, b'differential.revision.edit', params)
436 if not revision:
467 if not revision:
437 raise error.Abort(_(b'cannot create revision for %s') % ctx)
468 raise error.Abort(_(b'cannot create revision for %s') % ctx)
438
469
439 return revision, diff
470 return revision, diff
440
471
441 def userphids(repo, names):
472 def userphids(repo, names):
442 """convert user names to PHIDs"""
473 """convert user names to PHIDs"""
443 query = {b'constraints': {b'usernames': names}}
474 query = {b'constraints': {b'usernames': names}}
444 result = callconduit(repo, b'user.search', query)
475 result = callconduit(repo, b'user.search', query)
445 # A username not being found is not an error of the API, so check if we have
476 # A username not being found is not an error of the API, so check if we have
446 # missed some names here.
477 # missed some names here.
447 data = result[r'data']
478 data = result[r'data']
448 resolved = set(entry[r'fields'][r'username'] for entry in data)
479 resolved = set(entry[r'fields'][r'username'] for entry in data)
449 unresolved = set(names) - resolved
480 unresolved = set(names) - resolved
450 if unresolved:
481 if unresolved:
451 raise error.Abort(_(b'unknown username: %s')
482 raise error.Abort(_(b'unknown username: %s')
452 % b' '.join(sorted(unresolved)))
483 % b' '.join(sorted(unresolved)))
453 return [entry[r'phid'] for entry in data]
484 return [entry[r'phid'] for entry in data]
454
485
455 @command(b'phabsend',
486 @vcrcommand(b'phabsend',
456 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
487 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
457 (b'', b'amend', True, _(b'update commit messages')),
488 (b'', b'amend', True, _(b'update commit messages')),
458 (b'', b'reviewer', [], _(b'specify reviewers')),
489 (b'', b'reviewer', [], _(b'specify reviewers')),
459 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
490 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
460 _(b'REV [OPTIONS]'))
491 _(b'REV [OPTIONS]'))
461 def phabsend(ui, repo, *revs, **opts):
492 def phabsend(ui, repo, *revs, **opts):
462 """upload changesets to Phabricator
493 """upload changesets to Phabricator
463
494
464 If there are multiple revisions specified, they will be sent as a stack
495 If there are multiple revisions specified, they will be sent as a stack
465 with a linear dependency relationship, in the order specified by the
496 with a linear dependency relationship, in the order specified by the
466 revset.
497 revset.
467
498
468 The first time changesets are uploaded, local tags will be created to
499 The first time changesets are uploaded, local tags will be created to
469 maintain the association. After the first time, phabsend will check the
500 maintain the association. After the first time, phabsend will check the
470 obsstore and tag information so it can figure out whether to update an
501 obsstore and tag information so it can figure out whether to update an
471 existing Differential Revision, or create a new one.
502 existing Differential Revision, or create a new one.
472
503
473 If --amend is set, update commit messages so they have the
504 If --amend is set, update commit messages so they have the
474 ``Differential Revision`` URL, and remove the related tags. This is similar
505 ``Differential Revision`` URL, and remove the related tags. This is similar
475 to what arcanist does, and is preferable in author-push workflows. Otherwise,
506 to what arcanist does, and is preferable in author-push workflows. Otherwise,
476 use local tags to record the ``Differential Revision`` association.
507 use local tags to record the ``Differential Revision`` association.
477
508
478 The --confirm option lets you confirm changesets before sending them. You
509 The --confirm option lets you confirm changesets before sending them. You
479 can also add the following to your configuration file to make it the default
510 can also add the following to your configuration file to make it the default
480 behaviour::
511 behaviour::
481
512
482 [phabsend]
513 [phabsend]
483 confirm = true
514 confirm = true
484
515
485 phabsend will check obsstore and the above association to decide whether to
516 phabsend will check obsstore and the above association to decide whether to
486 update an existing Differential Revision, or create a new one.
517 update an existing Differential Revision, or create a new one.
487 """
518 """
488 revs = list(revs) + opts.get(b'rev', [])
519 revs = list(revs) + opts.get(b'rev', [])
489 revs = scmutil.revrange(repo, revs)
520 revs = scmutil.revrange(repo, revs)
490
521
491 if not revs:
522 if not revs:
492 raise error.Abort(_(b'phabsend requires at least one changeset'))
523 raise error.Abort(_(b'phabsend requires at least one changeset'))
493 if opts.get(b'amend'):
524 if opts.get(b'amend'):
494 cmdutil.checkunfinished(repo)
525 cmdutil.checkunfinished(repo)
495
526
496 # {newnode: (oldnode, olddiff, olddrev)}
527 # {newnode: (oldnode, olddiff, olddrev)}
497 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
528 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
498
529
499 confirm = ui.configbool(b'phabsend', b'confirm')
530 confirm = ui.configbool(b'phabsend', b'confirm')
500 confirm |= bool(opts.get(b'confirm'))
531 confirm |= bool(opts.get(b'confirm'))
501 if confirm:
532 if confirm:
502 confirmed = _confirmbeforesend(repo, revs, oldmap)
533 confirmed = _confirmbeforesend(repo, revs, oldmap)
503 if not confirmed:
534 if not confirmed:
504 raise error.Abort(_(b'phabsend cancelled'))
535 raise error.Abort(_(b'phabsend cancelled'))
505
536
506 actions = []
537 actions = []
507 reviewers = opts.get(b'reviewer', [])
538 reviewers = opts.get(b'reviewer', [])
508 if reviewers:
539 if reviewers:
509 phids = userphids(repo, reviewers)
540 phids = userphids(repo, reviewers)
510 actions.append({b'type': b'reviewers.add', b'value': phids})
541 actions.append({b'type': b'reviewers.add', b'value': phids})
511
542
512 drevids = [] # [int]
543 drevids = [] # [int]
513 diffmap = {} # {newnode: diff}
544 diffmap = {} # {newnode: diff}
514
545
515 # Send patches one by one so we know their Differential Revision IDs and
546 # Send patches one by one so we know their Differential Revision IDs and
516 # can provide dependency relationship
547 # can provide dependency relationship
517 lastrevid = None
548 lastrevid = None
518 for rev in revs:
549 for rev in revs:
519 ui.debug(b'sending rev %d\n' % rev)
550 ui.debug(b'sending rev %d\n' % rev)
520 ctx = repo[rev]
551 ctx = repo[rev]
521
552
522 # Get Differential Revision ID
553 # Get Differential Revision ID
523 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
554 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
524 if oldnode != ctx.node() or opts.get(b'amend'):
555 if oldnode != ctx.node() or opts.get(b'amend'):
525 # Create or update Differential Revision
556 # Create or update Differential Revision
526 revision, diff = createdifferentialrevision(
557 revision, diff = createdifferentialrevision(
527 ctx, revid, lastrevid, oldnode, olddiff, actions)
558 ctx, revid, lastrevid, oldnode, olddiff, actions)
528 diffmap[ctx.node()] = diff
559 diffmap[ctx.node()] = diff
529 newrevid = int(revision[r'object'][r'id'])
560 newrevid = int(revision[r'object'][r'id'])
530 if revid:
561 if revid:
531 action = b'updated'
562 action = b'updated'
532 else:
563 else:
533 action = b'created'
564 action = b'created'
534
565
535 # Create a local tag to note the association, if commit message
566 # Create a local tag to note the association, if commit message
536 # does not have it already
567 # does not have it already
537 m = _differentialrevisiondescre.search(ctx.description())
568 m = _differentialrevisiondescre.search(ctx.description())
538 if not m or int(m.group(b'id')) != newrevid:
569 if not m or int(m.group(b'id')) != newrevid:
539 tagname = b'D%d' % newrevid
570 tagname = b'D%d' % newrevid
540 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
571 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
541 date=None, local=True)
572 date=None, local=True)
542 else:
573 else:
543 # Nothing changed. But still set "newrevid" so the next revision
574 # Nothing changed. But still set "newrevid" so the next revision
544 # could depend on this one.
575 # could depend on this one.
545 newrevid = revid
576 newrevid = revid
546 action = b'skipped'
577 action = b'skipped'
547
578
548 actiondesc = ui.label(
579 actiondesc = ui.label(
549 {b'created': _(b'created'),
580 {b'created': _(b'created'),
550 b'skipped': _(b'skipped'),
581 b'skipped': _(b'skipped'),
551 b'updated': _(b'updated')}[action],
582 b'updated': _(b'updated')}[action],
552 b'phabricator.action.%s' % action)
583 b'phabricator.action.%s' % action)
553 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
584 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
554 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
585 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
555 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
586 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
556 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
587 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
557 desc))
588 desc))
558 drevids.append(newrevid)
589 drevids.append(newrevid)
559 lastrevid = newrevid
590 lastrevid = newrevid
560
591
561 # Update commit messages and remove tags
592 # Update commit messages and remove tags
562 if opts.get(b'amend'):
593 if opts.get(b'amend'):
563 unfi = repo.unfiltered()
594 unfi = repo.unfiltered()
564 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
595 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
565 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
596 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
566 wnode = unfi[b'.'].node()
597 wnode = unfi[b'.'].node()
567 mapping = {} # {oldnode: [newnode]}
598 mapping = {} # {oldnode: [newnode]}
568 for i, rev in enumerate(revs):
599 for i, rev in enumerate(revs):
569 old = unfi[rev]
600 old = unfi[rev]
570 drevid = drevids[i]
601 drevid = drevids[i]
571 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
602 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
572 newdesc = getdescfromdrev(drev)
603 newdesc = getdescfromdrev(drev)
573 newdesc = encoding.unitolocal(newdesc)
604 newdesc = encoding.unitolocal(newdesc)
574 # Make sure the commit message contains "Differential Revision"
605 # Make sure the commit message contains "Differential Revision"
575 if old.description() != newdesc:
606 if old.description() != newdesc:
576 parents = [
607 parents = [
577 mapping.get(old.p1().node(), (old.p1(),))[0],
608 mapping.get(old.p1().node(), (old.p1(),))[0],
578 mapping.get(old.p2().node(), (old.p2(),))[0],
609 mapping.get(old.p2().node(), (old.p2(),))[0],
579 ]
610 ]
580 new = context.metadataonlyctx(
611 new = context.metadataonlyctx(
581 repo, old, parents=parents, text=newdesc,
612 repo, old, parents=parents, text=newdesc,
582 user=old.user(), date=old.date(), extra=old.extra())
613 user=old.user(), date=old.date(), extra=old.extra())
583
614
584 newnode = new.commit()
615 newnode = new.commit()
585
616
586 mapping[old.node()] = [newnode]
617 mapping[old.node()] = [newnode]
587 # Update diff property
618 # Update diff property
588 writediffproperties(unfi[newnode], diffmap[old.node()])
619 writediffproperties(unfi[newnode], diffmap[old.node()])
589 # Remove the local tag since it's no longer necessary
620 # Remove the local tag since it's no longer necessary
590 tagname = b'D%d' % drevid
621 tagname = b'D%d' % drevid
591 if tagname in repo.tags():
622 if tagname in repo.tags():
592 tags.tag(repo, tagname, nullid, message=None, user=None,
623 tags.tag(repo, tagname, nullid, message=None, user=None,
593 date=None, local=True)
624 date=None, local=True)
594 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
625 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
595 if wnode in mapping:
626 if wnode in mapping:
596 unfi.setparents(mapping[wnode][0])
627 unfi.setparents(mapping[wnode][0])
597
628
598 # Map from "hg:meta" keys to header understood by "hg import". The order is
629 # Map from "hg:meta" keys to header understood by "hg import". The order is
599 # consistent with "hg export" output.
630 # consistent with "hg export" output.
600 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
631 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
601 (r'node', b'Node ID'), (r'parent', b'Parent ')])
632 (r'node', b'Node ID'), (r'parent', b'Parent ')])
602
633
603 def _confirmbeforesend(repo, revs, oldmap):
634 def _confirmbeforesend(repo, revs, oldmap):
604 url, token = readurltoken(repo)
635 url, token = readurltoken(repo)
605 ui = repo.ui
636 ui = repo.ui
606 for rev in revs:
637 for rev in revs:
607 ctx = repo[rev]
638 ctx = repo[rev]
608 desc = ctx.description().splitlines()[0]
639 desc = ctx.description().splitlines()[0]
609 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
640 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
610 if drevid:
641 if drevid:
611 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
642 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
612 else:
643 else:
613 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
644 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
614
645
615 ui.write(_(b'%s - %s: %s\n')
646 ui.write(_(b'%s - %s: %s\n')
616 % (drevdesc,
647 % (drevdesc,
617 ui.label(bytes(ctx), b'phabricator.node'),
648 ui.label(bytes(ctx), b'phabricator.node'),
618 ui.label(desc, b'phabricator.desc')))
649 ui.label(desc, b'phabricator.desc')))
619
650
620 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
651 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
621 b'$$ &Yes $$ &No') % url):
652 b'$$ &Yes $$ &No') % url):
622 return False
653 return False
623
654
624 return True
655 return True
625
656
626 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
657 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
627 b'abandoned'}
658 b'abandoned'}
628
659
629 def _getstatusname(drev):
660 def _getstatusname(drev):
630 """get normalized status name from a Differential Revision"""
661 """get normalized status name from a Differential Revision"""
631 return drev[r'statusName'].replace(b' ', b'').lower()
662 return drev[r'statusName'].replace(b' ', b'').lower()
632
663
633 # Small language to specify differential revisions. Support symbols: (), :X,
664 # Small language to specify differential revisions. Support symbols: (), :X,
634 # +, and -.
665 # +, and -.
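# Illustrative specs (added example): ``:D3`` selects D3 plus the stack it
# depends on, ``D1+D2`` selects both revisions, ``:D5-D2`` selects D5's
# stack without D2, and ``&`` intersects, e.g. ``:D5 & needsreview``.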
635
666
636 _elements = {
667 _elements = {
637 # token-type: binding-strength, primary, prefix, infix, suffix
668 # token-type: binding-strength, primary, prefix, infix, suffix
638 b'(': (12, None, (b'group', 1, b')'), None, None),
669 b'(': (12, None, (b'group', 1, b')'), None, None),
639 b':': (8, None, (b'ancestors', 8), None, None),
670 b':': (8, None, (b'ancestors', 8), None, None),
640 b'&': (5, None, None, (b'and_', 5), None),
671 b'&': (5, None, None, (b'and_', 5), None),
641 b'+': (4, None, None, (b'add', 4), None),
672 b'+': (4, None, None, (b'add', 4), None),
642 b'-': (4, None, None, (b'sub', 4), None),
673 b'-': (4, None, None, (b'sub', 4), None),
643 b')': (0, None, None, None, None),
674 b')': (0, None, None, None, None),
644 b'symbol': (0, b'symbol', None, None, None),
675 b'symbol': (0, b'symbol', None, None, None),
645 b'end': (0, None, None, None, None),
676 b'end': (0, None, None, None, None),
646 }
677 }
647
678
648 def _tokenize(text):
679 def _tokenize(text):
649 view = memoryview(text) # zero-copy slice
680 view = memoryview(text) # zero-copy slice
650 special = b'():+-& '
681 special = b'():+-& '
651 pos = 0
682 pos = 0
652 length = len(text)
683 length = len(text)
653 while pos < length:
684 while pos < length:
654 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
685 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
655 view[pos:]))
686 view[pos:]))
656 if symbol:
687 if symbol:
657 yield (b'symbol', symbol, pos)
688 yield (b'symbol', symbol, pos)
658 pos += len(symbol)
689 pos += len(symbol)
659 else: # special char, ignore space
690 else: # special char, ignore space
660 if text[pos] != b' ':
691 if text[pos] != b' ':
661 yield (text[pos], None, pos)
692 yield (text[pos], None, pos)
662 pos += 1
693 pos += 1
663 yield (b'end', None, pos)
694 yield (b'end', None, pos)
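# Illustrative tokenization of b':D3+D4' (added example):
#   (b':', None, 0), (b'symbol', b'D3', 1), (b'+', None, 3),
#   (b'symbol', b'D4', 4), (b'end', None, 6)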
664
695
665 def _parse(text):
696 def _parse(text):
666 tree, pos = parser.parser(_elements).parse(_tokenize(text))
697 tree, pos = parser.parser(_elements).parse(_tokenize(text))
667 if pos != len(text):
698 if pos != len(text):
668 raise error.ParseError(b'invalid token', pos)
699 raise error.ParseError(b'invalid token', pos)
669 return tree
700 return tree
670
701
671 def _parsedrev(symbol):
702 def _parsedrev(symbol):
672 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
703 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
673 if symbol.startswith(b'D') and symbol[1:].isdigit():
704 if symbol.startswith(b'D') and symbol[1:].isdigit():
674 return int(symbol[1:])
705 return int(symbol[1:])
675 if symbol.isdigit():
706 if symbol.isdigit():
676 return int(symbol)
707 return int(symbol)
677
708
678 def _prefetchdrevs(tree):
709 def _prefetchdrevs(tree):
679 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
710 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
680 drevs = set()
711 drevs = set()
681 ancestordrevs = set()
712 ancestordrevs = set()
682 op = tree[0]
713 op = tree[0]
683 if op == b'symbol':
714 if op == b'symbol':
684 r = _parsedrev(tree[1])
715 r = _parsedrev(tree[1])
685 if r:
716 if r:
686 drevs.add(r)
717 drevs.add(r)
687 elif op == b'ancestors':
718 elif op == b'ancestors':
688 r, a = _prefetchdrevs(tree[1])
719 r, a = _prefetchdrevs(tree[1])
689 drevs.update(r)
720 drevs.update(r)
690 ancestordrevs.update(r)
721 ancestordrevs.update(r)
691 ancestordrevs.update(a)
722 ancestordrevs.update(a)
692 else:
723 else:
693 for t in tree[1:]:
724 for t in tree[1:]:
694 r, a = _prefetchdrevs(t)
725 r, a = _prefetchdrevs(t)
695 drevs.update(r)
726 drevs.update(r)
696 ancestordrevs.update(a)
727 ancestordrevs.update(a)
697 return drevs, ancestordrevs
728 return drevs, ancestordrevs
698
729
699 def querydrev(repo, spec):
730 def querydrev(repo, spec):
700 """return a list of "Differential Revision" dicts
731 """return a list of "Differential Revision" dicts
701
732
702 spec is a string using a simple query language, see docstring in phabread
733 spec is a string using a simple query language, see docstring in phabread
703 for details.
734 for details.
704
735
705 A "Differential Revision dict" looks like:
736 A "Differential Revision dict" looks like:
706
737
707 {
738 {
708 "id": "2",
739 "id": "2",
709 "phid": "PHID-DREV-672qvysjcczopag46qty",
740 "phid": "PHID-DREV-672qvysjcczopag46qty",
710 "title": "example",
741 "title": "example",
711 "uri": "https://phab.example.com/D2",
742 "uri": "https://phab.example.com/D2",
712 "dateCreated": "1499181406",
743 "dateCreated": "1499181406",
713 "dateModified": "1499182103",
744 "dateModified": "1499182103",
714 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
745 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
715 "status": "0",
746 "status": "0",
716 "statusName": "Needs Review",
747 "statusName": "Needs Review",
717 "properties": [],
748 "properties": [],
718 "branch": null,
749 "branch": null,
719 "summary": "",
750 "summary": "",
720 "testPlan": "",
751 "testPlan": "",
721 "lineCount": "2",
752 "lineCount": "2",
722 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
753 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
723 "diffs": [
754 "diffs": [
724 "3",
755 "3",
725 "4",
756 "4",
726 ],
757 ],
727 "commits": [],
758 "commits": [],
728 "reviewers": [],
759 "reviewers": [],
729 "ccs": [],
760 "ccs": [],
730 "hashes": [],
761 "hashes": [],
731 "auxiliary": {
762 "auxiliary": {
732 "phabricator:projects": [],
763 "phabricator:projects": [],
733 "phabricator:depends-on": [
764 "phabricator:depends-on": [
734 "PHID-DREV-gbapp366kutjebt7agcd"
765 "PHID-DREV-gbapp366kutjebt7agcd"
735 ]
766 ]
736 },
767 },
737 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
768 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
738 "sourcePath": null
769 "sourcePath": null
739 }
770 }
740 """
771 """
741 def fetch(params):
772 def fetch(params):
742 """params -> single drev or None"""
773 """params -> single drev or None"""
743 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
774 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
744 if key in prefetched:
775 if key in prefetched:
745 return prefetched[key]
776 return prefetched[key]
746 drevs = callconduit(repo, b'differential.query', params)
777 drevs = callconduit(repo, b'differential.query', params)
747 # Fill prefetched with the result
778 # Fill prefetched with the result
748 for drev in drevs:
779 for drev in drevs:
749 prefetched[drev[r'phid']] = drev
780 prefetched[drev[r'phid']] = drev
750 prefetched[int(drev[r'id'])] = drev
781 prefetched[int(drev[r'id'])] = drev
751 if key not in prefetched:
782 if key not in prefetched:
752 raise error.Abort(_(b'cannot get Differential Revision %r')
783 raise error.Abort(_(b'cannot get Differential Revision %r')
753 % params)
784 % params)
754 return prefetched[key]
785 return prefetched[key]
755
786
756 def getstack(topdrevids):
787 def getstack(topdrevids):
757 """given a top, get a stack from the bottom, [id] -> [id]"""
788 """given a top, get a stack from the bottom, [id] -> [id]"""
758 visited = set()
789 visited = set()
759 result = []
790 result = []
760 queue = [{r'ids': [i]} for i in topdrevids]
791 queue = [{r'ids': [i]} for i in topdrevids]
761 while queue:
792 while queue:
762 params = queue.pop()
793 params = queue.pop()
763 drev = fetch(params)
794 drev = fetch(params)
764 if drev[r'id'] in visited:
795 if drev[r'id'] in visited:
765 continue
796 continue
766 visited.add(drev[r'id'])
797 visited.add(drev[r'id'])
767 result.append(int(drev[r'id']))
798 result.append(int(drev[r'id']))
768 auxiliary = drev.get(r'auxiliary', {})
799 auxiliary = drev.get(r'auxiliary', {})
769 depends = auxiliary.get(r'phabricator:depends-on', [])
800 depends = auxiliary.get(r'phabricator:depends-on', [])
770 for phid in depends:
801 for phid in depends:
771 queue.append({b'phids': [phid]})
802 queue.append({b'phids': [phid]})
772 result.reverse()
803 result.reverse()
773 return smartset.baseset(result)
804 return smartset.baseset(result)
774
805
775 # Initialize prefetch cache
806 # Initialize prefetch cache
776 prefetched = {} # {id or phid: drev}
807 prefetched = {} # {id or phid: drev}
777
808
778 tree = _parse(spec)
809 tree = _parse(spec)
779 drevs, ancestordrevs = _prefetchdrevs(tree)
810 drevs, ancestordrevs = _prefetchdrevs(tree)
780
811
781 # developer config: phabricator.batchsize
812 # developer config: phabricator.batchsize
782 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
813 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
783
814
784 # Prefetch Differential Revisions in batch
815 # Prefetch Differential Revisions in batch
785 tofetch = set(drevs)
816 tofetch = set(drevs)
786 for r in ancestordrevs:
817 for r in ancestordrevs:
787 tofetch.update(range(max(1, r - batchsize), r + 1))
818 tofetch.update(range(max(1, r - batchsize), r + 1))
788 if drevs:
819 if drevs:
789 fetch({r'ids': list(tofetch)})
820 fetch({r'ids': list(tofetch)})
790 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
821 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
791
822
792 # Walk through the tree, return smartsets
823 # Walk through the tree, return smartsets
793 def walk(tree):
824 def walk(tree):
794 op = tree[0]
825 op = tree[0]
795 if op == b'symbol':
826 if op == b'symbol':
796 drev = _parsedrev(tree[1])
827 drev = _parsedrev(tree[1])
797 if drev:
828 if drev:
798 return smartset.baseset([drev])
829 return smartset.baseset([drev])
799 elif tree[1] in _knownstatusnames:
830 elif tree[1] in _knownstatusnames:
800 drevs = [r for r in validids
831 drevs = [r for r in validids
801 if _getstatusname(prefetched[r]) == tree[1]]
832 if _getstatusname(prefetched[r]) == tree[1]]
802 return smartset.baseset(drevs)
833 return smartset.baseset(drevs)
803 else:
834 else:
804 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
835 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
805 elif op in {b'and_', b'add', b'sub'}:
836 elif op in {b'and_', b'add', b'sub'}:
806 assert len(tree) == 3
837 assert len(tree) == 3
807 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
838 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
808 elif op == b'group':
839 elif op == b'group':
809 return walk(tree[1])
840 return walk(tree[1])
810 elif op == b'ancestors':
841 elif op == b'ancestors':
811 return getstack(walk(tree[1]))
842 return getstack(walk(tree[1]))
812 else:
843 else:
813 raise error.ProgrammingError(b'illegal tree: %r' % tree)
844 raise error.ProgrammingError(b'illegal tree: %r' % tree)
814
845
815 return [prefetched[r] for r in walk(tree)]
846 return [prefetched[r] for r in walk(tree)]
816
847
817 def getdescfromdrev(drev):
848 def getdescfromdrev(drev):
818 """get description (commit message) from "Differential Revision"
849 """get description (commit message) from "Differential Revision"
819
850
820 This is similar to the differential.getcommitmessage API, but we only care
851 This is similar to the differential.getcommitmessage API, but we only care
821 about a limited set of fields: title, summary, test plan, and URL.
852 about a limited set of fields: title, summary, test plan, and URL.
822 """
853 """
823 title = drev[r'title']
854 title = drev[r'title']
824 summary = drev[r'summary'].rstrip()
855 summary = drev[r'summary'].rstrip()
825 testplan = drev[r'testPlan'].rstrip()
856 testplan = drev[r'testPlan'].rstrip()
826 if testplan:
857 if testplan:
827 testplan = b'Test Plan:\n%s' % testplan
858 testplan = b'Test Plan:\n%s' % testplan
828 uri = b'Differential Revision: %s' % drev[r'uri']
859 uri = b'Differential Revision: %s' % drev[r'uri']
829 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
860 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
830
861
831 def getdiffmeta(diff):
862 def getdiffmeta(diff):
832 """get commit metadata (date, node, user, p1) from a diff object
863 """get commit metadata (date, node, user, p1) from a diff object
833
864
834 The metadata could be "hg:meta", sent by phabsend, like:
865 The metadata could be "hg:meta", sent by phabsend, like:
835
866
836 "properties": {
867 "properties": {
837 "hg:meta": {
868 "hg:meta": {
838 "date": "1499571514 25200",
869 "date": "1499571514 25200",
839 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
870 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
840 "user": "Foo Bar <foo@example.com>",
871 "user": "Foo Bar <foo@example.com>",
841 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
872 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
842 }
873 }
843 }
874 }
844
875
845 Or converted from "local:commits", sent by "arc", like:
876 Or converted from "local:commits", sent by "arc", like:
846
877
847 "properties": {
878 "properties": {
848 "local:commits": {
879 "local:commits": {
849 "98c08acae292b2faf60a279b4189beb6cff1414d": {
880 "98c08acae292b2faf60a279b4189beb6cff1414d": {
850 "author": "Foo Bar",
881 "author": "Foo Bar",
851 "time": 1499546314,
882 "time": 1499546314,
852 "branch": "default",
883 "branch": "default",
853 "tag": "",
884 "tag": "",
854 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
885 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
855 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
886 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
856 "local": "1000",
887 "local": "1000",
857 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
888 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
858 "summary": "...",
889 "summary": "...",
859 "message": "...",
890 "message": "...",
860 "authorEmail": "foo@example.com"
891 "authorEmail": "foo@example.com"
861 }
892 }
862 }
893 }
863 }
894 }
864
895
865 Note: metadata extracted from "local:commits" will lose time zone
896 Note: metadata extracted from "local:commits" will lose time zone
866 information.
897 information.
867 """
898 """
868 props = diff.get(r'properties') or {}
899 props = diff.get(r'properties') or {}
869 meta = props.get(r'hg:meta')
900 meta = props.get(r'hg:meta')
870 if not meta and props.get(r'local:commits'):
901 if not meta and props.get(r'local:commits'):
871 commit = sorted(props[r'local:commits'].values())[0]
902 commit = sorted(props[r'local:commits'].values())[0]
872 meta = {
903 meta = {
873 r'date': r'%d 0' % commit[r'time'],
904 r'date': r'%d 0' % commit[r'time'],
874 r'node': commit[r'rev'],
905 r'node': commit[r'rev'],
875 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
906 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
876 }
907 }
877 if len(commit.get(r'parents', ())) >= 1:
908 if len(commit.get(r'parents', ())) >= 1:
878 meta[r'parent'] = commit[r'parents'][0]
909 meta[r'parent'] = commit[r'parents'][0]
879 return meta or {}
910 return meta or {}
880
911
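# A minimal sketch of the "local:commits" fallback above, reusing the
# hypothetical commit from the docstring:
#
#   getdiffmeta({r'properties': {r'local:commits': {
#       '98c08acae292b2faf60a279b4189beb6cff1414d': {
#           r'time': 1499546314,
#           r'rev': '98c08acae292b2faf60a279b4189beb6cff1414d',
#           r'author': 'Foo Bar',
#           r'authorEmail': 'foo@example.com',
#           r'parents': ['6d0abad76b30e4724a37ab8721d630394070fe16']}}}})
#
# returns (time zone information is lost, hence the trailing " 0"):
#
#   {'date': '1499546314 0',
#    'node': '98c08acae292b2faf60a279b4189beb6cff1414d',
#    'user': 'Foo Bar <foo@example.com>',
#    'parent': '6d0abad76b30e4724a37ab8721d630394070fe16'}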
881 def readpatch(repo, drevs, write):
912 def readpatch(repo, drevs, write):
882 """generate a plain-text patch readable by 'hg import'
913 """generate a plain-text patch readable by 'hg import'
883
914
884 write is usually ui.write. drevs is what "querydrev" returns, i.e. the
915 write is usually ui.write. drevs is what "querydrev" returns, i.e. the
885 results of "differential.query".
916 results of "differential.query".
886 """
917 """
887 # Prefetch hg:meta property for all diffs
918 # Prefetch hg:meta property for all diffs
888 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
919 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
889 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
920 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
890
921
891 # Generate patch for each drev
922 # Generate patch for each drev
892 for drev in drevs:
923 for drev in drevs:
893 repo.ui.note(_(b'reading D%s\n') % drev[r'id'])
924 repo.ui.note(_(b'reading D%s\n') % drev[r'id'])
894
925
895 diffid = max(int(v) for v in drev[r'diffs'])
926 diffid = max(int(v) for v in drev[r'diffs'])
896 body = callconduit(repo, b'differential.getrawdiff',
927 body = callconduit(repo, b'differential.getrawdiff',
897 {b'diffID': diffid})
928 {b'diffID': diffid})
898 desc = getdescfromdrev(drev)
929 desc = getdescfromdrev(drev)
899 header = b'# HG changeset patch\n'
930 header = b'# HG changeset patch\n'
900
931
901 # Try to preserve metadata from hg:meta property. Write hg patch
932 # Try to preserve metadata from hg:meta property. Write hg patch
902 # headers that can be read by the "import" command. See patchheadermap
933 # headers that can be read by the "import" command. See patchheadermap
903 # and extract in mercurial/patch.py for supported headers.
934 # and extract in mercurial/patch.py for supported headers.
904 meta = getdiffmeta(diffs[str(diffid)])
935 meta = getdiffmeta(diffs[str(diffid)])
905 for k in _metanamemap.keys():
936 for k in _metanamemap.keys():
906 if k in meta:
937 if k in meta:
907 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
938 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
908
939
909 content = b'%s%s\n%s' % (header, desc, body)
940 content = b'%s%s\n%s' % (header, desc, body)
910 write(encoding.unitolocal(content))
941 write(encoding.unitolocal(content))
911
942
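# A rough sketch of the emitted patch shape, assuming _metanamemap (defined
# earlier in the file, not shown here) maps the keys to the usual
# User/Date/Node ID/Parent headers; the values come from the hypothetical
# hg:meta example in the getdiffmeta() docstring:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message assembled by getdescfromdrev()>
#
#   <raw diff returned by differential.getrawdiff>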
912 @command(b'phabread',
943 @vcrcommand(b'phabread',
913 [(b'', b'stack', False, _(b'read dependencies'))],
944 [(b'', b'stack', False, _(b'read dependencies'))],
914 _(b'DREVSPEC [OPTIONS]'))
945 _(b'DREVSPEC [OPTIONS]'))
915 def phabread(ui, repo, spec, **opts):
946 def phabread(ui, repo, spec, **opts):
916 """print patches from Phabricator suitable for importing
947 """print patches from Phabricator suitable for importing
917
948
918 DREVSPEC may be a Differential Revision identifier, like ``D123``, or just
949 DREVSPEC may be a Differential Revision identifier, like ``D123``, or just
919 the number ``123``. It may also use common operators like ``+``, ``-``,
950 the number ``123``. It may also use common operators like ``+``, ``-``,
920 ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` selects a
951 ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` selects a
921 stack.
952 stack.
922
953
923 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
954 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
924 could be used to filter patches by status. For performance reasons, they
955 could be used to filter patches by status. For performance reasons, they
925 only represent a subset of non-status selections and cannot be used alone.
956 only represent a subset of non-status selections and cannot be used alone.
926
957
927 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
958 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
928 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
959 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
929 stack up to D9.
960 stack up to D9.
930
961
931 If --stack is given, follow dependency information and read all patches.
962 If --stack is given, follow dependency information and read all patches.
932 It is equivalent to the ``:`` operator.
963 It is equivalent to the ``:`` operator.
933 """
964 """
934 if opts.get(b'stack'):
965 if opts.get(b'stack'):
935 spec = b':(%s)' % spec
966 spec = b':(%s)' % spec
936 drevs = querydrev(repo, spec)
967 drevs = querydrev(repo, spec)
937 readpatch(repo, drevs, ui.write)
968 readpatch(repo, drevs, ui.write)
938
969
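# A few hypothetical invocations (D123 and D9 are made-up revision numbers),
# mirroring the DREVSPEC syntax described in the docstring above:
#
#   $ hg phabread D123 > D123.patch        # single revision, ready for import
#   $ hg import D123.patch
#   $ hg phabread --stack D123             # D123 plus everything it depends on
#   $ hg phabread ':D9 & needsreview'      # "Needs Review" part of D9's stack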
939 @command(b'phabupdate',
970 @vcrcommand(b'phabupdate',
940 [(b'', b'accept', False, _(b'accept revisions')),
971 [(b'', b'accept', False, _(b'accept revisions')),
941 (b'', b'reject', False, _(b'reject revisions')),
972 (b'', b'reject', False, _(b'reject revisions')),
942 (b'', b'abandon', False, _(b'abandon revisions')),
973 (b'', b'abandon', False, _(b'abandon revisions')),
943 (b'', b'reclaim', False, _(b'reclaim revisions')),
974 (b'', b'reclaim', False, _(b'reclaim revisions')),
944 (b'm', b'comment', b'', _(b'comment on the last revision')),
975 (b'm', b'comment', b'', _(b'comment on the last revision')),
945 ], _(b'DREVSPEC [OPTIONS]'))
976 ], _(b'DREVSPEC [OPTIONS]'))
946 def phabupdate(ui, repo, spec, **opts):
977 def phabupdate(ui, repo, spec, **opts):
947 """update Differential Revisions in batch
978 """update Differential Revisions in batch
948
979
949 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
980 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
950 """
981 """
951 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
982 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
952 if len(flags) > 1:
983 if len(flags) > 1:
953 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
984 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
954
985
955 actions = []
986 actions = []
956 for f in flags:
987 for f in flags:
957 actions.append({b'type': f, b'value': b'true'})
988 actions.append({b'type': f, b'value': b'true'})
958
989
959 drevs = querydrev(repo, spec)
990 drevs = querydrev(repo, spec)
960 for i, drev in enumerate(drevs):
991 for i, drev in enumerate(drevs):
961 if i + 1 == len(drevs) and opts.get(b'comment'):
992 if i + 1 == len(drevs) and opts.get(b'comment'):
962 actions.append({b'type': b'comment', b'value': opts[b'comment']})
993 actions.append({b'type': b'comment', b'value': opts[b'comment']})
963 if actions:
994 if actions:
964 params = {b'objectIdentifier': drev[r'phid'],
995 params = {b'objectIdentifier': drev[r'phid'],
965 b'transactions': actions}
996 b'transactions': actions}
966 callconduit(repo, b'differential.revision.edit', params)
997 callconduit(repo, b'differential.revision.edit', params)
967
998
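# A hypothetical invocation (D9 is a made-up revision number): accept every
# revision in the stack up to D9 and leave a comment on the last one.
#
#   $ hg phabupdate --accept ':D9' -m 'queued, thanks'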
968 templatekeyword = registrar.templatekeyword()
999 templatekeyword = registrar.templatekeyword()
969
1000
970 @templatekeyword(b'phabreview', requires={b'ctx'})
1001 @templatekeyword(b'phabreview', requires={b'ctx'})
971 def template_review(context, mapping):
1002 def template_review(context, mapping):
972 """:phabreview: Object describing the review for this changeset.
1003 """:phabreview: Object describing the review for this changeset.
973 Has attributes `url` and `id`.
1004 Has attributes `url` and `id`.
974 """
1005 """
975 ctx = context.resource(mapping, b'ctx')
1006 ctx = context.resource(mapping, b'ctx')
976 m = _differentialrevisiondescre.search(ctx.description())
1007 m = _differentialrevisiondescre.search(ctx.description())
977 if m:
1008 if m:
978 return {
1009 return {
979 b'url': m.group(b'url'),
1010 b'url': m.group(b'url'),
980 b'id': b"D{}".format(m.group(b'id')),
1011 b'id': b"D{}".format(m.group(b'id')),
981 }
1012 }
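# Intended template usage as described by the docstring above (the URL shown
# is invented, and the exact member-access syntax depends on how the
# templater exposes dict-valued keywords):
#
#   $ hg log -r . -T '{phabreview.url}\n'
#   https://phab.example.com/D123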