# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires a ``Test Plan``, which might prevent some
changesets from being sent. This requirement can be disabled by changing the
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that are not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
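
Example invocations (illustrative; see each command's help for the full set
of flags)::

    # send the current draft stack for review
    hg phabsend -r .

    # print D123 in a format suitable for :hg:`import`, and apply it
    hg phabread D123 | hg import -

    # abandon a Differential Revision
    hg phabupdate --abandon D123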
40 | 40 | """ |
|
41 | 41 | |
|
42 | 42 | from __future__ import absolute_import |
|
43 | 43 | |
|
44 | 44 | import base64 |
|
45 | 45 | import contextlib |
|
46 | 46 | import hashlib |
|
47 | 47 | import itertools |
|
48 | 48 | import json |
|
49 | 49 | import mimetypes |
|
50 | 50 | import operator |
|
51 | 51 | import re |
|
52 | 52 | |
|
53 | 53 | from mercurial.node import bin, nullid |
|
54 | 54 | from mercurial.i18n import _ |
|
55 | 55 | from mercurial.pycompat import getattr |
|
56 | 56 | from mercurial.thirdparty import attr |
|
57 | 57 | from mercurial import ( |
|
58 | 58 | cmdutil, |
|
59 | 59 | context, |
|
60 | 60 | encoding, |
|
61 | 61 | error, |
|
62 | 62 | exthelper, |
|
63 | 63 | httpconnection as httpconnectionmod, |
|
64 | 64 | match, |
|
65 | 65 | mdiff, |
|
66 | 66 | obsutil, |
|
67 | 67 | parser, |
|
68 | 68 | patch, |
|
69 | 69 | phases, |
|
70 | 70 | pycompat, |
|
71 | 71 | scmutil, |
|
72 | 72 | smartset, |
|
73 | 73 | tags, |
|
74 | 74 | templatefilters, |
|
75 | 75 | templateutil, |
|
76 | 76 | url as urlmod, |
|
77 | 77 | util, |
|
78 | 78 | ) |
|
79 | 79 | from mercurial.utils import ( |
|
80 | 80 | procutil, |
|
81 | 81 | stringutil, |
|
82 | 82 | ) |
|
83 | 83 | |
|
84 | 84 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
85 | 85 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
86 | 86 | # be specifying the version(s) of Mercurial they are tested with, or |
|
87 | 87 | # leave the attribute unspecified. |
|
88 | 88 | testedwith = b'ships-with-hg-core' |
|
89 | 89 | |
|
90 | 90 | eh = exthelper.exthelper() |
|
91 | 91 | |
|
92 | 92 | cmdtable = eh.cmdtable |
|
93 | 93 | command = eh.command |
|
94 | 94 | configtable = eh.configtable |
|
95 | 95 | templatekeyword = eh.templatekeyword |
|
96 | 96 | |
|
97 | 97 | # developer config: phabricator.batchsize |
|
98 | 98 | eh.configitem( |
|
99 | 99 | b'phabricator', b'batchsize', default=12, |
|
100 | 100 | ) |
|
101 | 101 | eh.configitem( |
|
102 | 102 | b'phabricator', b'callsign', default=None, |
|
103 | 103 | ) |
|
104 | 104 | eh.configitem( |
|
105 | 105 | b'phabricator', b'curlcmd', default=None, |
|
106 | 106 | ) |
|
107 | 107 | # developer config: phabricator.repophid |
|
108 | 108 | eh.configitem( |
|
109 | 109 | b'phabricator', b'repophid', default=None, |
|
110 | 110 | ) |
|
111 | 111 | eh.configitem( |
|
112 | 112 | b'phabricator', b'url', default=None, |
|
113 | 113 | ) |
|
114 | 114 | eh.configitem( |
|
115 | 115 | b'phabsend', b'confirm', default=False, |
|
116 | 116 | ) |
|
117 | 117 | |
|
118 | 118 | colortable = { |
|
119 | 119 | b'phabricator.action.created': b'green', |
|
120 | 120 | b'phabricator.action.skipped': b'magenta', |
|
121 | 121 | b'phabricator.action.updated': b'magenta', |
|
122 | 122 | b'phabricator.desc': b'', |
|
123 | 123 | b'phabricator.drev': b'bold', |
|
124 | 124 | b'phabricator.node': b'', |
|
125 | 125 | } |
|
126 | 126 | |
|
127 | 127 | _VCR_FLAGS = [ |
|
128 | 128 | ( |
|
129 | 129 | b'', |
|
130 | 130 | b'test-vcr', |
|
131 | 131 | b'', |
|
132 | 132 | _( |
|
133 | 133 | b'Path to a vcr file. If nonexistent, will record a new vcr transcript' |
|
134 | 134 | b', otherwise will mock all http requests using the specified vcr file.' |
|
135 | 135 | b' (ADVANCED)' |
|
136 | 136 | ), |
|
137 | 137 | ), |
|
138 | 138 | ] |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False): |
|
142 | 142 | fullflags = flags + _VCR_FLAGS |
|
143 | 143 | |
|
144 | 144 | def hgmatcher(r1, r2): |
|
145 | 145 | if r1.uri != r2.uri or r1.method != r2.method: |
|
146 | 146 | return False |
|
147 | 147 | r1params = util.urlreq.parseqs(r1.body) |
|
148 | 148 | r2params = util.urlreq.parseqs(r2.body) |
|
149 | 149 | for key in r1params: |
|
150 | 150 | if key not in r2params: |
|
151 | 151 | return False |
|
152 | 152 | value = r1params[key][0] |
|
153 | 153 | # we want to compare json payloads without worrying about ordering |
|
154 | 154 | if value.startswith(b'{') and value.endswith(b'}'): |
|
155 | 155 | r1json = json.loads(value) |
|
156 | 156 | r2json = json.loads(r2params[key][0]) |
|
157 | 157 | if r1json != r2json: |
|
158 | 158 | return False |
|
159 | 159 | elif r2params[key][0] != value: |
|
160 | 160 | return False |
|
161 | 161 | return True |
|
162 | 162 | |
|
163 | 163 | def sanitiserequest(request): |
|
164 | 164 | request.body = re.sub( |
|
165 | 165 | br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body |
|
166 | 166 | ) |
|
167 | 167 | return request |
|
168 | 168 | |
|
169 | 169 | def sanitiseresponse(response): |
|
170 | 170 | if r'set-cookie' in response[r'headers']: |
|
171 | 171 | del response[r'headers'][r'set-cookie'] |
|
172 | 172 | return response |
|
173 | 173 | |
|
174 | 174 | def decorate(fn): |
|
175 | 175 | def inner(*args, **kwargs): |
|
176 | 176 | cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None)) |
|
177 | 177 | if cassette: |
|
178 | 178 | import hgdemandimport |
|
179 | 179 | |
|
180 | 180 | with hgdemandimport.deactivated(): |
|
181 | 181 | import vcr as vcrmod |
|
182 | 182 | import vcr.stubs as stubs |
|
183 | 183 | |
|
184 | 184 | vcr = vcrmod.VCR( |
|
185 | 185 | serializer=r'json', |
|
186 | 186 | before_record_request=sanitiserequest, |
|
187 | 187 | before_record_response=sanitiseresponse, |
|
188 | 188 | custom_patches=[ |
|
189 | 189 | ( |
|
190 | 190 | urlmod, |
|
191 | 191 | r'httpconnection', |
|
192 | 192 | stubs.VCRHTTPConnection, |
|
193 | 193 | ), |
|
194 | 194 | ( |
|
195 | 195 | urlmod, |
|
196 | 196 | r'httpsconnection', |
|
197 | 197 | stubs.VCRHTTPSConnection, |
|
198 | 198 | ), |
|
199 | 199 | ], |
|
200 | 200 | ) |
|
201 | 201 | vcr.register_matcher(r'hgmatcher', hgmatcher) |
|
202 | 202 | with vcr.use_cassette(cassette, match_on=[r'hgmatcher']): |
|
203 | 203 | return fn(*args, **kwargs) |
|
204 | 204 | return fn(*args, **kwargs) |
|
205 | 205 | |
|
206 | 206 | inner.__name__ = fn.__name__ |
|
207 | 207 | inner.__doc__ = fn.__doc__ |
|
208 | 208 | return command( |
|
209 | 209 | name, |
|
210 | 210 | fullflags, |
|
211 | 211 | spec, |
|
212 | 212 | helpcategory=helpcategory, |
|
213 | 213 | optionalrepo=optionalrepo, |
|
214 | 214 | )(inner) |
|
215 | 215 | |
|
216 | 216 | return decorate |
|
217 | 217 | |
|
218 | 218 | |
|
219 | 219 | def urlencodenested(params): |
|
220 | 220 | """like urlencode, but works with nested parameters. |
|
221 | 221 | |
|
222 | 222 | For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be |
|
223 | 223 | flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to |
|
224 | 224 | urlencode. Note: the encoding is consistent with PHP's http_build_query. |
|
225 | 225 | """ |
|
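    # Illustrative (not a doctest): flattening then encoding
    #   urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
    # gives a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f, i.e. a[0]=b&a[1]=c&d[e]=f
    # with the brackets percent-encoded by urlencode.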
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)


def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated with %s') % (url,)
        )

    return url, token


def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
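
# Illustrative usage of callconduit (assumes [phabricator] url and an [auth]
# token are configured; user.whoami is a stock Conduit method that takes no
# parameters):
#   whoami = callconduit(ui, b'user.whoami', {})
#   ui.write(b'%s\n' % whoami[b'userName'])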


@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))


def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid


_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
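# An example trailer matched by _differentialrevisiondescre (illustrative):
#   Differential Revision: https://phab.example.com/D1234
# where group('url') is b'https://phab.example.com/D1234' and group('id')
# is b'1234'.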


def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If no such commit message line is found, examines all precursors and their
    tags. Tags with a format like "D1234" are considered a match, and the node
    with that tag plus the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of the
    corresponding Differential Revision, and to exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expecting precursors to overlap with them.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result


def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()


class DiffChangeType(object):
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8


class DiffFileType(object):
    TEXT = 1
    IMAGE = 2
    BINARY = 3


@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required


@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and is owned
    by a Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines


@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )


def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )


def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phabricator requests chunking of files over 8MiB, and splits them into
    4MiB chunks.
    """
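    # Illustrative arithmetic, assuming the 4MiB chunks noted above: a 10MiB
    # file is allocated as three chunks (4 + 4 + 2 MiB); file.querychunks
    # lists them, and each incomplete one is base64-encoded and sent below.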
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(fctx.data()[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()


def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid


def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY the web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()


def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE


# Copied from mercurial/patch.py
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}


def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
        return False
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True


def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        fctx = ctx.p1()[fname]
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)


def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)


def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
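    # Illustrative: renaming a -> b records a MOVE_AWAY change for "a" (with
    # awayPaths=[b'b']) plus a MOVE_HERE change for "b"; also copying a -> c
    # then upgrades "a" to MULTICOPY with awayPaths [b'b', b'c'], while a pure
    # copy (original still present) records a COPY_AWAY change instead.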
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)


def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff


def writediffproperties(ctx, diff):
    """write metadata to diff so patches can be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)


def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # did not change, we might still need to update its metadata so
        # pushers know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff


def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # An unknown username is not an error of the API, so check if we have
    # missed some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]


@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependency relationship, in the order specified by the
    revset.

    When changesets are uploaded for the first time, local tags will be
    created to maintain the association. After the first time, phabsend will
    check obsstore and tags information so it can figure out whether to update
    an existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, and remove the related tags. This is
    similar to what arcanist does, and is preferable in author-push workflows.
    Otherwise, use local tags to record the ``Differential Revision``
    association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it the
    default behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether
    to update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide the dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove the local tag since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])


# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
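# Illustrative: with the map above, hg:meta entries like
#   {b'user': b'Alice <alice@example.com>', b'node': b'ffffffffffff...'}
# become patch header lines such as "# User Alice <alice@example.com>" and
# "# Node ID ffffffffffff..." when a patch is rendered for "hg import"
# (the rendering itself happens in phabread's output code, not shown here).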


def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True


_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}


def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()


# Small language to specify differential revisions. Supported symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
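# Illustrative spec using the grammar above: b':D8 + D9 - D10' selects the
# stack that D8 transitively depends on (walked via "phabricator:depends-on"),
# including D8 itself, then adds D9 and removes D10 from the result.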


def _tokenize(text):
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
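
# Illustrative token stream: _tokenize(b':D3 + D4') yields (b':', None, 0),
# (b'symbol', b'D3', 1), (b'+', None, 4), (b'symbol', b'D4', 6) and finally
# (b'end', None, 8); spaces are skipped without producing tokens.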


def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree


def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)


def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs
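
# Illustrative: for the spec b':D3 + D4', _prefetchdrevs(_parse(spec)) returns
# ({3, 4}, {3}): both revisions should be fetched, and D3's ancestor stack
# should be walked.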


def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)
1397 | 1397 | |
|
1398 | 1398 | # Initialize prefetch cache |
|
1399 | 1399 | prefetched = {} # {id or phid: drev} |
|
1400 | 1400 | |
|
1401 | 1401 | tree = _parse(spec) |
|
1402 | 1402 | drevs, ancestordrevs = _prefetchdrevs(tree) |
|
1403 | 1403 | |
|
1404 | 1404 | # developer config: phabricator.batchsize |
|
1405 | 1405 | batchsize = repo.ui.configint(b'phabricator', b'batchsize') |
|
1406 | 1406 | |
|
1407 | 1407 | # Prefetch Differential Revisions in batch |
|
1408 | 1408 | tofetch = set(drevs) |
|
1409 | 1409 | for r in ancestordrevs: |
|
1410 | 1410 | tofetch.update(range(max(1, r - batchsize), r + 1)) |
|
1411 | 1411 | if drevs: |
|
1412 | 1412 | fetch({b'ids': list(tofetch)}) |
|
1413 | 1413 | validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs)) |
|
1414 | 1414 | |
|
1415 | 1415 | # Walk through the tree, return smartsets |
|
1416 | 1416 | def walk(tree): |
|
1417 | 1417 | op = tree[0] |
|
1418 | 1418 | if op == b'symbol': |
|
1419 | 1419 | drev = _parsedrev(tree[1]) |
|
1420 | 1420 | if drev: |
|
1421 | 1421 | return smartset.baseset([drev]) |
|
1422 | 1422 | elif tree[1] in _knownstatusnames: |
|
1423 | 1423 | drevs = [ |
|
1424 | 1424 | r |
|
1425 | 1425 | for r in validids |
|
1426 | 1426 | if _getstatusname(prefetched[r]) == tree[1] |
|
1427 | 1427 | ] |
|
1428 | 1428 | return smartset.baseset(drevs) |
|
1429 | 1429 | else: |
|
1430 | 1430 | raise error.Abort(_(b'unknown symbol: %s') % tree[1]) |
|
1431 | 1431 | elif op in {b'and_', b'add', b'sub'}: |
|
1432 | 1432 | assert len(tree) == 3 |
|
1433 | 1433 | return getattr(operator, op)(walk(tree[1]), walk(tree[2])) |
|
1434 | 1434 | elif op == b'group': |
|
1435 | 1435 | return walk(tree[1]) |
|
1436 | 1436 | elif op == b'ancestors': |
|
1437 | 1437 | return getstack(walk(tree[1])) |
|
1438 | 1438 | else: |
|
1439 | 1439 | raise error.ProgrammingError(b'illegal tree: %r' % tree) |
|
1440 | 1440 | |
|
1441 | 1441 | return [prefetched[r] for r in walk(tree)] |
|
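# Illustrative usage (hypothetical revision numbers):
#
#   drevs = querydrev(repo, b':D6+8-(2+D4)')
#
# resolves the stack ending at D6, adds D8, removes D2 and D4, and returns
# the matching "Differential Revision" dicts.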

def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about limited fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
|
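# Illustrative sketch (not part of the original module): a drev with title
# "example", an empty summary, test plan "ran the test suite", and uri
# "https://phab.example.com/D2" produces (empty fields are dropped):
#
#   example
#
#   Test Plan:
#   ran the test suite
#
#   Differential Revision: https://phab.example.com/D2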

def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
|
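# Illustrative sketch (not part of the original module): for the
# "local:commits" example in the docstring above, getdiffmeta returns
# roughly:
#
#   {
#       b'user': b'Foo Bar <foo@example.com>',
#       b'date': b'1499546314 0',  # time zone information is lost
#       b'branch': b'default',
#       b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#       b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
#   }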

def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
|
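# Illustrative sketch (not part of the original module): assuming _metanamemap
# maps b'user'/b'date'/b'node'/b'parent' to their patch header names, a patch
# generated from the hg:meta example above would start with:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#
# followed by the commit description and the raw diff body.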

@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also have common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used
    to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
|
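# Illustrative usage (hypothetical revision number):
#
#   hg phabread --stack D123 | hg import -
#
# reads D123 together with its dependencies and applies the patches locally.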

@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
|
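# Illustrative usage (hypothetical revision numbers):
#
#   hg phabupdate --accept :D3 -m 'LGTM'
#
# accepts the stack ending at D3 and leaves the comment on the last revision.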

@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
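
# Illustrative usage (a sketch, assuming the installed templater supports
# member access on the returned dict):
#
#   hg log -r . -T '{phabreview.url}\n'
#
# prints the review URL recorded for the working directory parent.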