procutil: bulk-replace function calls to point to new module
Yuya Nishihara
r37138:a8a902d7 default
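
This changeset is mechanical: process-management helpers (setbinary, popen2,
shellquote, getuser, getpid, ...) moved from ``mercurial.util`` to the new
``mercurial.utils.procutil`` module, and every call site is rewritten to
import and use the new module. A minimal sketch of the pattern applied
throughout the diff below, using only functions that actually appear in it::

    # before: process helpers reached through mercurial.util
    import sys
    from mercurial import util

    util.setbinary(sys.stdout)
    user = util.getuser()

    # after: the same helpers via the new mercurial.utils.procutil module
    import sys
    from mercurial.utils import (
        procutil,
    )

    procutil.setbinary(sys.stdout)
    user = procutil.getuser()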

The requested changes are too big and the content was truncated; the diff below is partial.

diff --git a/contrib/dumprevlog b/contrib/dumprevlog
@@ -1,35 +1,37 @@
1 1 #!/usr/bin/env python
2 2 # Dump revlogs as raw data stream
3 3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
4 4
5 5 from __future__ import absolute_import, print_function
6 6
7 7 import sys
8 8 from mercurial import (
9 9 node,
10 10 revlog,
11 util,
11 )
12 from mercurial.utils import (
13 procutil,
12 14 )
13 15
14 16 for fp in (sys.stdin, sys.stdout, sys.stderr):
15 util.setbinary(fp)
17 procutil.setbinary(fp)
16 18
17 19 def binopen(path, mode='rb'):
18 20 if 'b' not in mode:
19 21 mode = mode + 'b'
20 22 return open(path, mode)
21 23
22 24 for f in sys.argv[1:]:
23 25 r = revlog.revlog(binopen, f)
24 26 print("file:", f)
25 27 for i in r:
26 28 n = r.node(i)
27 29 p = r.parents(n)
28 30 d = r.revision(n)
29 31 print("node:", node.hex(n))
30 32 print("linkrev:", r.linkrev(i))
31 33 print("parents:", node.hex(p[0]), node.hex(p[1]))
32 34 print("length:", len(d))
33 35 print("-start-")
34 36 print(d)
35 37 print("-end-")
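
For reference, each revision record that the loop above writes to the dump
stream has this shape (file name, hashes, and payload are illustrative; this
is the same format that undumprevlog, further below, parses back)::

    file: .hg/store/data/foo.txt.i
    node: 98c08acae292b2faf60a279b4189beb6cff1414d
    linkrev: 0
    parents: 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
    length: 12
    -start-
    hello world
    -end-
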
diff --git a/hgext/phabricator.py b/hgext/phabricator.py
@@ -1,924 +1,928 @@
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing the
16 16 ``differential.require-test-plan-field`` config on the server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # API token. Get it from https://$HOST/conduit/login/
25 25 # Deprecated: see [phabricator.auth] below
26 26 #token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [phabricator.auth]
39 39 example.url = https://phab.example.com/
40 40 # API token. Get it from https://$HOST/conduit/login/
41 41 example.token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
42 42 """
43 43
44 44 from __future__ import absolute_import
45 45
46 46 import itertools
47 47 import json
48 48 import operator
49 49 import re
50 50
51 51 from mercurial.node import bin, nullid
52 52 from mercurial.i18n import _
53 53 from mercurial import (
54 54 cmdutil,
55 55 context,
56 56 encoding,
57 57 error,
58 58 mdiff,
59 59 obsutil,
60 60 parser,
61 61 patch,
62 62 registrar,
63 63 scmutil,
64 64 smartset,
65 65 tags,
66 66 url as urlmod,
67 67 util,
68 68 )
69 from mercurial.utils import (
70 procutil,
71 )
69 72
70 73 cmdtable = {}
71 74 command = registrar.command(cmdtable)
72 75
73 76 colortable = {
74 77 'phabricator.action.created': 'green',
75 78 'phabricator.action.skipped': 'magenta',
76 79 'phabricator.action.updated': 'magenta',
77 80 'phabricator.desc': '',
78 81 'phabricator.drev': 'bold',
79 82 'phabricator.node': '',
80 83 }
81 84
82 85 def urlencodenested(params):
83 86 """like urlencode, but works with nested parameters.
84 87
85 88 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
86 89 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
87 90 urlencode. Note: the encoding is consistent with PHP's http_build_query.
88 91 """
89 92 flatparams = util.sortdict()
90 93 def process(prefix, obj):
91 94 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
92 95 if items is None:
93 96 flatparams[prefix] = obj
94 97 else:
95 98 for k, v in items(obj):
96 99 if prefix:
97 100 process('%s[%s]' % (prefix, k), v)
98 101 else:
99 102 process(k, v)
100 103 process('', params)
101 104 return util.urlreq.urlencode(flatparams)
102 105
103 106 printed_token_warning = False
104 107
105 108 def readlegacytoken(repo):
106 109 """Transitional support for old phabricator tokens.
107 110
108 111 Remove before the 4.6 release.
109 112 """
110 113 global printed_token_warning
111 114 token = repo.ui.config('phabricator', 'token')
112 115 if token and not printed_token_warning:
113 116 printed_token_warning = True
114 117 repo.ui.warn(_('phabricator.token is deprecated - please '
115 118 'migrate to the phabricator.auth section.\n'))
116 119 return token
117 120
118 121 def readurltoken(repo):
119 122 """return conduit url, token and make sure they exist
120 123
121 124 Currently read from [phabricator] config section. In the future, it might
122 125 make sense to read from .arcconfig and .arcrc as well.
123 126 """
124 127 url = repo.ui.config('phabricator', 'url')
125 128 if not url:
126 129 raise error.Abort(_('config %s.%s is required')
127 130 % ('phabricator', 'url'))
128 131
129 132 groups = {}
130 133 for key, val in repo.ui.configitems('phabricator.auth'):
131 134 if '.' not in key:
132 135 repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n")
133 136 % key)
134 137 continue
135 138 group, setting = key.rsplit('.', 1)
136 139 groups.setdefault(group, {})[setting] = val
137 140
138 141 token = None
139 142 for group, auth in groups.iteritems():
140 143 if url != auth.get('url'):
141 144 continue
142 145 token = auth.get('token')
143 146 if token:
144 147 break
145 148
146 149 if not token:
147 150 token = readlegacytoken(repo)
148 151 if not token:
149 152 raise error.Abort(_('Can\'t find conduit token associated to %s')
150 153 % (url,))
151 154
152 155 return url, token
153 156
154 157 def callconduit(repo, name, params):
155 158 """call Conduit API, params is a dict. return json.loads result, or None"""
156 159 host, token = readurltoken(repo)
157 160 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
158 161 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
159 162 params = params.copy()
160 163 params['api.token'] = token
161 164 data = urlencodenested(params)
162 165 curlcmd = repo.ui.config('phabricator', 'curlcmd')
163 166 if curlcmd:
164 sin, sout = util.popen2('%s -d @- %s' % (curlcmd, util.shellquote(url)))
167 sin, sout = procutil.popen2('%s -d @- %s'
168 % (curlcmd, procutil.shellquote(url)))
165 169 sin.write(data)
166 170 sin.close()
167 171 body = sout.read()
168 172 else:
169 173 urlopener = urlmod.opener(repo.ui, authinfo)
170 174 request = util.urlreq.request(url, data=data)
171 175 body = urlopener.open(request).read()
172 176 repo.ui.debug('Conduit Response: %s\n' % body)
173 177 parsed = json.loads(body)
174 178 if parsed.get(r'error_code'):
175 179 msg = (_('Conduit Error (%s): %s')
176 180 % (parsed[r'error_code'], parsed[r'error_info']))
177 181 raise error.Abort(msg)
178 182 return parsed[r'result']
179 183
180 184 @command('debugcallconduit', [], _('METHOD'))
181 185 def debugcallconduit(ui, repo, name):
182 186 """call Conduit API
183 187
184 188 Call parameters are read from stdin as a JSON blob. Result will be written
185 189 to stdout as a JSON blob.
186 190 """
187 191 params = json.loads(ui.fin.read())
188 192 result = callconduit(repo, name, params)
189 193 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
190 194 ui.write('%s\n' % s)
191 195
192 196 def getrepophid(repo):
193 197 """given callsign, return repository PHID or None"""
194 198 # developer config: phabricator.repophid
195 199 repophid = repo.ui.config('phabricator', 'repophid')
196 200 if repophid:
197 201 return repophid
198 202 callsign = repo.ui.config('phabricator', 'callsign')
199 203 if not callsign:
200 204 return None
201 205 query = callconduit(repo, 'diffusion.repository.search',
202 206 {'constraints': {'callsigns': [callsign]}})
203 207 if len(query[r'data']) == 0:
204 208 return None
205 209 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
206 210 repo.ui.setconfig('phabricator', 'repophid', repophid)
207 211 return repophid
208 212
209 213 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
210 214 _differentialrevisiondescre = re.compile(
211 215 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
212 216
213 217 def getoldnodedrevmap(repo, nodelist):
214 218 """find previous nodes that has been sent to Phabricator
215 219
216 220 return {node: (oldnode, Differential diff, Differential Revision ID)}
217 221 for node in nodelist with known previously sent versions, or associated
218 222 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
219 223 be ``None``.
220 224
221 225 Examines commit messages like "Differential Revision:" to get the
222 226 association information.
223 227
224 228 If no such commit message line is found, examines all precursors and their
225 229 tags. Tags with a format like "D1234" are considered a match, and the node
226 230 with that tag, along with the number after "D" (e.g. 1234), will be returned.
227 231
228 232 The ``old node``, if not None, is guaranteed to be the last diff of the
229 233 corresponding Differential Revision, and to exist in the repo.
230 234 """
231 235 url, token = readurltoken(repo)
232 236 unfi = repo.unfiltered()
233 237 nodemap = unfi.changelog.nodemap
234 238
235 239 result = {} # {node: (oldnode?, lastdiff?, drev)}
236 240 toconfirm = {} # {node: (force, {precnode}, drev)}
237 241 for node in nodelist:
238 242 ctx = unfi[node]
239 243 # For tags like "D123", put them into "toconfirm" to verify later
240 244 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
241 245 for n in precnodes:
242 246 if n in nodemap:
243 247 for tag in unfi.nodetags(n):
244 248 m = _differentialrevisiontagre.match(tag)
245 249 if m:
246 250 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
247 251 continue
248 252
249 253 # Check commit message
250 254 m = _differentialrevisiondescre.search(ctx.description())
251 255 if m:
252 256 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
253 257
254 258 # Double check if tags are genuine by collecting all old nodes from
255 259 # Phabricator, and expecting the precursors to overlap with them.
256 260 if toconfirm:
257 261 drevs = [drev for force, precs, drev in toconfirm.values()]
258 262 alldiffs = callconduit(unfi, 'differential.querydiffs',
259 263 {'revisionIDs': drevs})
260 264 getnode = lambda d: bin(encoding.unitolocal(
261 265 getdiffmeta(d).get(r'node', ''))) or None
262 266 for newnode, (force, precset, drev) in toconfirm.items():
263 267 diffs = [d for d in alldiffs.values()
264 268 if int(d[r'revisionID']) == drev]
265 269
266 270 # "precursors" as known by Phabricator
267 271 phprecset = set(getnode(d) for d in diffs)
268 272
269 273 # Ignore if precursors (Phabricator and local repo) do not overlap,
270 274 # and force is not set (when commit message says nothing)
271 275 if not force and not bool(phprecset & precset):
272 276 tagname = 'D%d' % drev
273 277 tags.tag(repo, tagname, nullid, message=None, user=None,
274 278 date=None, local=True)
275 279 unfi.ui.warn(_('D%s: local tag removed - does not match '
276 280 'Differential history\n') % drev)
277 281 continue
278 282
279 283 # Find the last node using Phabricator metadata, and make sure it
280 284 # exists in the repo
281 285 oldnode = lastdiff = None
282 286 if diffs:
283 287 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
284 288 oldnode = getnode(lastdiff)
285 289 if oldnode and oldnode not in nodemap:
286 290 oldnode = None
287 291
288 292 result[newnode] = (oldnode, lastdiff, drev)
289 293
290 294 return result
291 295
292 296 def getdiff(ctx, diffopts):
293 297 """plain-text diff without header (user, commit message, etc)"""
294 298 output = util.stringio()
295 299 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
296 300 None, opts=diffopts):
297 301 output.write(chunk)
298 302 return output.getvalue()
299 303
300 304 def creatediff(ctx):
301 305 """create a Differential Diff"""
302 306 repo = ctx.repo()
303 307 repophid = getrepophid(repo)
304 308 # Create a "Differential Diff" via "differential.createrawdiff" API
305 309 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
306 310 if repophid:
307 311 params['repositoryPHID'] = repophid
308 312 diff = callconduit(repo, 'differential.createrawdiff', params)
309 313 if not diff:
310 314 raise error.Abort(_('cannot create diff for %s') % ctx)
311 315 return diff
312 316
313 317 def writediffproperties(ctx, diff):
314 318 """write metadata to diff so patches could be applied losslessly"""
315 319 params = {
316 320 'diff_id': diff[r'id'],
317 321 'name': 'hg:meta',
318 322 'data': json.dumps({
319 323 'user': ctx.user(),
320 324 'date': '%d %d' % ctx.date(),
321 325 'node': ctx.hex(),
322 326 'parent': ctx.p1().hex(),
323 327 }),
324 328 }
325 329 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
326 330
327 331 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
328 332 olddiff=None, actions=None):
329 333 """create or update a Differential Revision
330 334
331 335 If revid is None, create a new Differential Revision, otherwise update
332 336 revid. If parentrevid is not None, set it as a dependency.
333 337
334 338 If oldnode is not None, check if the patch content (without commit message
335 339 and metadata) has changed before creating another diff.
336 340
337 341 If actions is not None, they will be appended to the transaction.
338 342 """
339 343 repo = ctx.repo()
340 344 if oldnode:
341 345 diffopts = mdiff.diffopts(git=True, context=32767)
342 346 oldctx = repo.unfiltered()[oldnode]
343 347 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
344 348 else:
345 349 neednewdiff = True
346 350
347 351 transactions = []
348 352 if neednewdiff:
349 353 diff = creatediff(ctx)
350 354 transactions.append({'type': 'update', 'value': diff[r'phid']})
351 355 else:
352 356 # Even if we don't need to upload a new diff because the patch content
353 357 # has not changed, we might still need to update its metadata so
354 358 # pushers know the correct node metadata.
355 359 assert olddiff
356 360 diff = olddiff
357 361 writediffproperties(ctx, diff)
358 362
359 363 # Use a temporary summary to set the dependency. There might be better ways,
360 364 # but I cannot find them for now. Do not do this when updating an
361 365 # existing revision (revid is not None), since that introduces visible
362 366 # churn (someone edited "Summary" twice) on the web page.
363 367 if parentrevid and revid is None:
364 368 summary = 'Depends on D%s' % parentrevid
365 369 transactions += [{'type': 'summary', 'value': summary},
366 370 {'type': 'summary', 'value': ' '}]
367 371
368 372 if actions:
369 373 transactions += actions
370 374
371 375 # Parse commit message and update related fields.
372 376 desc = ctx.description()
373 377 info = callconduit(repo, 'differential.parsecommitmessage',
374 378 {'corpus': desc})
375 379 for k, v in info[r'fields'].items():
376 380 if k in ['title', 'summary', 'testPlan']:
377 381 transactions.append({'type': k, 'value': v})
378 382
379 383 params = {'transactions': transactions}
380 384 if revid is not None:
381 385 # Update an existing Differential Revision
382 386 params['objectIdentifier'] = revid
383 387
384 388 revision = callconduit(repo, 'differential.revision.edit', params)
385 389 if not revision:
386 390 raise error.Abort(_('cannot create revision for %s') % ctx)
387 391
388 392 return revision, diff
389 393
390 394 def userphids(repo, names):
391 395 """convert user names to PHIDs"""
392 396 query = {'constraints': {'usernames': names}}
393 397 result = callconduit(repo, 'user.search', query)
394 398 # A username not being found is not an API error, so check here whether we
395 399 # have missed some names.
396 400 data = result[r'data']
397 401 resolved = set(entry[r'fields'][r'username'] for entry in data)
398 402 unresolved = set(names) - resolved
399 403 if unresolved:
400 404 raise error.Abort(_('unknown username: %s')
401 405 % ' '.join(sorted(unresolved)))
402 406 return [entry[r'phid'] for entry in data]
403 407
404 408 @command('phabsend',
405 409 [('r', 'rev', [], _('revisions to send'), _('REV')),
406 410 ('', 'amend', True, _('update commit messages')),
407 411 ('', 'reviewer', [], _('specify reviewers')),
408 412 ('', 'confirm', None, _('ask for confirmation before sending'))],
409 413 _('REV [OPTIONS]'))
410 414 def phabsend(ui, repo, *revs, **opts):
411 415 """upload changesets to Phabricator
412 416
413 417 If multiple revisions are specified, they will be sent as a stack
414 418 with a linear dependency relationship, in the order specified by the
415 419 revset.
416 420
417 421 When uploading changesets for the first time, local tags will be created to
418 422 maintain the association. After that, phabsend will check the
419 423 obsstore and tag information to figure out whether to update an
420 424 existing Differential Revision or create a new one.
421 425
422 426 If --amend is set, update commit messages so they have the
423 427 ``Differential Revision`` URL, and remove the related tags. This is similar
424 428 to what arcanist does, and is preferable in author-push workflows. Otherwise,
425 429 local tags are used to record the ``Differential Revision`` association.
426 430
427 431 The --confirm option lets you confirm changesets before sending them. You
428 432 can also add the following to your configuration file to make it the default
429 433 behaviour::
430 434
431 435 [phabsend]
432 436 confirm = true
433 437
434 438 phabsend will check obsstore and the above association to decide whether to
435 439 update an existing Differential Revision, or create a new one.
436 440 """
437 441 revs = list(revs) + opts.get('rev', [])
438 442 revs = scmutil.revrange(repo, revs)
439 443
440 444 if not revs:
441 445 raise error.Abort(_('phabsend requires at least one changeset'))
442 446 if opts.get('amend'):
443 447 cmdutil.checkunfinished(repo)
444 448
445 449 # {newnode: (oldnode, olddiff, olddrev}
446 450 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
447 451
448 452 confirm = ui.configbool('phabsend', 'confirm')
449 453 confirm |= bool(opts.get('confirm'))
450 454 if confirm:
451 455 confirmed = _confirmbeforesend(repo, revs, oldmap)
452 456 if not confirmed:
453 457 raise error.Abort(_('phabsend cancelled'))
454 458
455 459 actions = []
456 460 reviewers = opts.get('reviewer', [])
457 461 if reviewers:
458 462 phids = userphids(repo, reviewers)
459 463 actions.append({'type': 'reviewers.add', 'value': phids})
460 464
461 465 drevids = [] # [int]
462 466 diffmap = {} # {newnode: diff}
463 467
464 468 # Send patches one by one so we know their Differential Revision IDs and
465 469 # can provide dependency relationship
466 470 lastrevid = None
467 471 for rev in revs:
468 472 ui.debug('sending rev %d\n' % rev)
469 473 ctx = repo[rev]
470 474
471 475 # Get Differential Revision ID
472 476 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
473 477 if oldnode != ctx.node() or opts.get('amend'):
474 478 # Create or update Differential Revision
475 479 revision, diff = createdifferentialrevision(
476 480 ctx, revid, lastrevid, oldnode, olddiff, actions)
477 481 diffmap[ctx.node()] = diff
478 482 newrevid = int(revision[r'object'][r'id'])
479 483 if revid:
480 484 action = 'updated'
481 485 else:
482 486 action = 'created'
483 487
484 488 # Create a local tag to note the association, if commit message
485 489 # does not have it already
486 490 m = _differentialrevisiondescre.search(ctx.description())
487 491 if not m or int(m.group('id')) != newrevid:
488 492 tagname = 'D%d' % newrevid
489 493 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
490 494 date=None, local=True)
491 495 else:
492 496 # Nothing changed. But still set "newrevid" so the next revision
493 497 # could depend on this one.
494 498 newrevid = revid
495 499 action = 'skipped'
496 500
497 501 actiondesc = ui.label(
498 502 {'created': _('created'),
499 503 'skipped': _('skipped'),
500 504 'updated': _('updated')}[action],
501 505 'phabricator.action.%s' % action)
502 506 drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
503 507 nodedesc = ui.label(bytes(ctx), 'phabricator.node')
504 508 desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
505 509 ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
506 510 desc))
507 511 drevids.append(newrevid)
508 512 lastrevid = newrevid
509 513
510 514 # Update commit messages and remove tags
511 515 if opts.get('amend'):
512 516 unfi = repo.unfiltered()
513 517 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
514 518 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
515 519 wnode = unfi['.'].node()
516 520 mapping = {} # {oldnode: [newnode]}
517 521 for i, rev in enumerate(revs):
518 522 old = unfi[rev]
519 523 drevid = drevids[i]
520 524 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
521 525 newdesc = getdescfromdrev(drev)
522 526 # Make sure the commit message contains "Differential Revision"
523 527 if old.description() != newdesc:
524 528 parents = [
525 529 mapping.get(old.p1().node(), (old.p1(),))[0],
526 530 mapping.get(old.p2().node(), (old.p2(),))[0],
527 531 ]
528 532 new = context.metadataonlyctx(
529 533 repo, old, parents=parents, text=newdesc,
530 534 user=old.user(), date=old.date(), extra=old.extra())
531 535 newnode = new.commit()
532 536 mapping[old.node()] = [newnode]
533 537 # Update diff property
534 538 writediffproperties(unfi[newnode], diffmap[old.node()])
535 539 # Remove local tags as they are no longer necessary
536 540 tagname = 'D%d' % drevid
537 541 if tagname in repo.tags():
538 542 tags.tag(repo, tagname, nullid, message=None, user=None,
539 543 date=None, local=True)
540 544 scmutil.cleanupnodes(repo, mapping, 'phabsend')
541 545 if wnode in mapping:
542 546 unfi.setparents(mapping[wnode][0])
543 547
544 548 # Map from "hg:meta" keys to header understood by "hg import". The order is
545 549 # consistent with "hg export" output.
546 550 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
547 551 (r'node', 'Node ID'), (r'parent', 'Parent ')])
548 552
549 553 def _confirmbeforesend(repo, revs, oldmap):
550 554 url, token = readurltoken(repo)
551 555 ui = repo.ui
552 556 for rev in revs:
553 557 ctx = repo[rev]
554 558 desc = ctx.description().splitlines()[0]
555 559 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
556 560 if drevid:
557 561 drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
558 562 else:
559 563 drevdesc = ui.label(_('NEW'), 'phabricator.drev')
560 564
561 565 ui.write(_('%s - %s: %s\n') % (drevdesc,
562 566 ui.label(bytes(ctx), 'phabricator.node'),
563 567 ui.label(desc, 'phabricator.desc')))
564 568
565 569 if ui.promptchoice(_('Send the above changes to %s (yn)?'
566 570 '$$ &Yes $$ &No') % url):
567 571 return False
568 572
569 573 return True
570 574
571 575 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
572 576 'abandoned'}
573 577
574 578 def _getstatusname(drev):
575 579 """get normalized status name from a Differential Revision"""
576 580 return drev[r'statusName'].replace(' ', '').lower()
577 581
578 582 # Small language to specify differential revisions. Support symbols: (), :X,
579 583 # +, and -.
580 584
581 585 _elements = {
582 586 # token-type: binding-strength, primary, prefix, infix, suffix
583 587 '(': (12, None, ('group', 1, ')'), None, None),
584 588 ':': (8, None, ('ancestors', 8), None, None),
585 589 '&': (5, None, None, ('and_', 5), None),
586 590 '+': (4, None, None, ('add', 4), None),
587 591 '-': (4, None, None, ('sub', 4), None),
588 592 ')': (0, None, None, None, None),
589 593 'symbol': (0, 'symbol', None, None, None),
590 594 'end': (0, None, None, None, None),
591 595 }
592 596
593 597 def _tokenize(text):
594 598 view = memoryview(text) # zero-copy slice
595 599 special = '():+-& '
596 600 pos = 0
597 601 length = len(text)
598 602 while pos < length:
599 603 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
600 604 view[pos:]))
601 605 if symbol:
602 606 yield ('symbol', symbol, pos)
603 607 pos += len(symbol)
604 608 else: # special char, ignore space
605 609 if text[pos] != ' ':
606 610 yield (text[pos], None, pos)
607 611 pos += 1
608 612 yield ('end', None, pos)
609 613
610 614 def _parse(text):
611 615 tree, pos = parser.parser(_elements).parse(_tokenize(text))
612 616 if pos != len(text):
613 617 raise error.ParseError('invalid token', pos)
614 618 return tree
615 619
616 620 def _parsedrev(symbol):
617 621 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
618 622 if symbol.startswith('D') and symbol[1:].isdigit():
619 623 return int(symbol[1:])
620 624 if symbol.isdigit():
621 625 return int(symbol)
622 626
623 627 def _prefetchdrevs(tree):
624 628 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
625 629 drevs = set()
626 630 ancestordrevs = set()
627 631 op = tree[0]
628 632 if op == 'symbol':
629 633 r = _parsedrev(tree[1])
630 634 if r:
631 635 drevs.add(r)
632 636 elif op == 'ancestors':
633 637 r, a = _prefetchdrevs(tree[1])
634 638 drevs.update(r)
635 639 ancestordrevs.update(r)
636 640 ancestordrevs.update(a)
637 641 else:
638 642 for t in tree[1:]:
639 643 r, a = _prefetchdrevs(t)
640 644 drevs.update(r)
641 645 ancestordrevs.update(a)
642 646 return drevs, ancestordrevs
643 647
644 648 def querydrev(repo, spec):
645 649 """return a list of "Differential Revision" dicts
646 650
647 651 spec is a string using a simple query language, see docstring in phabread
648 652 for details.
649 653
650 654 A "Differential Revision dict" looks like:
651 655
652 656 {
653 657 "id": "2",
654 658 "phid": "PHID-DREV-672qvysjcczopag46qty",
655 659 "title": "example",
656 660 "uri": "https://phab.example.com/D2",
657 661 "dateCreated": "1499181406",
658 662 "dateModified": "1499182103",
659 663 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
660 664 "status": "0",
661 665 "statusName": "Needs Review",
662 666 "properties": [],
663 667 "branch": null,
664 668 "summary": "",
665 669 "testPlan": "",
666 670 "lineCount": "2",
667 671 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
668 672 "diffs": [
669 673 "3",
670 674 "4",
671 675 ],
672 676 "commits": [],
673 677 "reviewers": [],
674 678 "ccs": [],
675 679 "hashes": [],
676 680 "auxiliary": {
677 681 "phabricator:projects": [],
678 682 "phabricator:depends-on": [
679 683 "PHID-DREV-gbapp366kutjebt7agcd"
680 684 ]
681 685 },
682 686 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
683 687 "sourcePath": null
684 688 }
685 689 """
686 690 def fetch(params):
687 691 """params -> single drev or None"""
688 692 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
689 693 if key in prefetched:
690 694 return prefetched[key]
691 695 drevs = callconduit(repo, 'differential.query', params)
692 696 # Fill prefetched with the result
693 697 for drev in drevs:
694 698 prefetched[drev[r'phid']] = drev
695 699 prefetched[int(drev[r'id'])] = drev
696 700 if key not in prefetched:
697 701 raise error.Abort(_('cannot get Differential Revision %r') % params)
698 702 return prefetched[key]
699 703
700 704 def getstack(topdrevids):
701 705 """given a top, get a stack from the bottom, [id] -> [id]"""
702 706 visited = set()
703 707 result = []
704 708 queue = [{r'ids': [i]} for i in topdrevids]
705 709 while queue:
706 710 params = queue.pop()
707 711 drev = fetch(params)
708 712 if drev[r'id'] in visited:
709 713 continue
710 714 visited.add(drev[r'id'])
711 715 result.append(int(drev[r'id']))
712 716 auxiliary = drev.get(r'auxiliary', {})
713 717 depends = auxiliary.get(r'phabricator:depends-on', [])
714 718 for phid in depends:
715 719 queue.append({'phids': [phid]})
716 720 result.reverse()
717 721 return smartset.baseset(result)
718 722
719 723 # Initialize prefetch cache
720 724 prefetched = {} # {id or phid: drev}
721 725
722 726 tree = _parse(spec)
723 727 drevs, ancestordrevs = _prefetchdrevs(tree)
724 728
725 729 # developer config: phabricator.batchsize
726 730 batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
727 731
728 732 # Prefetch Differential Revisions in batch
729 733 tofetch = set(drevs)
730 734 for r in ancestordrevs:
731 735 tofetch.update(range(max(1, r - batchsize), r + 1))
732 736 if drevs:
733 737 fetch({r'ids': list(tofetch)})
734 738 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
735 739
736 740 # Walk through the tree, return smartsets
737 741 def walk(tree):
738 742 op = tree[0]
739 743 if op == 'symbol':
740 744 drev = _parsedrev(tree[1])
741 745 if drev:
742 746 return smartset.baseset([drev])
743 747 elif tree[1] in _knownstatusnames:
744 748 drevs = [r for r in validids
745 749 if _getstatusname(prefetched[r]) == tree[1]]
746 750 return smartset.baseset(drevs)
747 751 else:
748 752 raise error.Abort(_('unknown symbol: %s') % tree[1])
749 753 elif op in {'and_', 'add', 'sub'}:
750 754 assert len(tree) == 3
751 755 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
752 756 elif op == 'group':
753 757 return walk(tree[1])
754 758 elif op == 'ancestors':
755 759 return getstack(walk(tree[1]))
756 760 else:
757 761 raise error.ProgrammingError('illegal tree: %r' % tree)
758 762
759 763 return [prefetched[r] for r in walk(tree)]
760 764
761 765 def getdescfromdrev(drev):
762 766 """get description (commit message) from "Differential Revision"
763 767
764 768 This is similar to the differential.getcommitmessage API, but we only care
765 769 about a few fields: title, summary, test plan, and URL.
766 770 """
767 771 title = drev[r'title']
768 772 summary = drev[r'summary'].rstrip()
769 773 testplan = drev[r'testPlan'].rstrip()
770 774 if testplan:
771 775 testplan = 'Test Plan:\n%s' % testplan
772 776 uri = 'Differential Revision: %s' % drev[r'uri']
773 777 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
774 778
775 779 def getdiffmeta(diff):
776 780 """get commit metadata (date, node, user, p1) from a diff object
777 781
778 782 The metadata could be "hg:meta", sent by phabsend, like:
779 783
780 784 "properties": {
781 785 "hg:meta": {
782 786 "date": "1499571514 25200",
783 787 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
784 788 "user": "Foo Bar <foo@example.com>",
785 789 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
786 790 }
787 791 }
788 792
789 793 Or converted from "local:commits", sent by "arc", like:
790 794
791 795 "properties": {
792 796 "local:commits": {
793 797 "98c08acae292b2faf60a279b4189beb6cff1414d": {
794 798 "author": "Foo Bar",
795 799 "time": 1499546314,
796 800 "branch": "default",
797 801 "tag": "",
798 802 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
799 803 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
800 804 "local": "1000",
801 805 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
802 806 "summary": "...",
803 807 "message": "...",
804 808 "authorEmail": "foo@example.com"
805 809 }
806 810 }
807 811 }
808 812
809 813 Note: metadata extracted from "local:commits" will lose time zone
810 814 information.
811 815 """
812 816 props = diff.get(r'properties') or {}
813 817 meta = props.get(r'hg:meta')
814 818 if not meta and props.get(r'local:commits'):
815 819 commit = sorted(props[r'local:commits'].values())[0]
816 820 meta = {
817 821 r'date': r'%d 0' % commit[r'time'],
818 822 r'node': commit[r'rev'],
819 823 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
820 824 }
821 825 if len(commit.get(r'parents', ())) >= 1:
822 826 meta[r'parent'] = commit[r'parents'][0]
823 827 return meta or {}
824 828
825 829 def readpatch(repo, drevs, write):
826 830 """generate plain-text patch readable by 'hg import'
827 831
828 832 write is usually ui.write. drevs is what "querydrev" returns, results of
829 833 "differential.query".
830 834 """
831 835 # Prefetch hg:meta property for all diffs
832 836 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
833 837 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
834 838
835 839 # Generate patch for each drev
836 840 for drev in drevs:
837 841 repo.ui.note(_('reading D%s\n') % drev[r'id'])
838 842
839 843 diffid = max(int(v) for v in drev[r'diffs'])
840 844 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
841 845 desc = getdescfromdrev(drev)
842 846 header = '# HG changeset patch\n'
843 847
844 848 # Try to preserve metadata from hg:meta property. Write hg patch
845 849 # headers that can be read by the "import" command. See patchheadermap
846 850 # and extract in mercurial/patch.py for supported headers.
847 851 meta = getdiffmeta(diffs[str(diffid)])
848 852 for k in _metanamemap.keys():
849 853 if k in meta:
850 854 header += '# %s %s\n' % (_metanamemap[k], meta[k])
851 855
852 856 content = '%s%s\n%s' % (header, desc, body)
853 857 write(encoding.unitolocal(content))
854 858
855 859 @command('phabread',
856 860 [('', 'stack', False, _('read dependencies'))],
857 861 _('DREVSPEC [OPTIONS]'))
858 862 def phabread(ui, repo, spec, **opts):
859 863 """print patches from Phabricator suitable for importing
860 864
861 865 DREVSPEC can be a Differential Revision identity, like ``D123``, or just
862 866 the number ``123``. It can also use common operators like ``+``, ``-``,
863 867 ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` can be used to
864 868 select a stack.
865 869
866 870 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
867 871 can be used to filter patches by status. For performance reasons, they
868 872 only represent a subset of non-status selections and cannot be used alone.
869 873
870 874 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
871 875 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
872 876 stack up to D9.
873 877
874 878 If --stack is given, follow dependency information and read all patches.
875 879 It is equivalent to the ``:`` operator.
876 880 """
877 881 if opts.get('stack'):
878 882 spec = ':(%s)' % spec
879 883 drevs = querydrev(repo, spec)
880 884 readpatch(repo, drevs, ui.write)
881 885
882 886 @command('phabupdate',
883 887 [('', 'accept', False, _('accept revisions')),
884 888 ('', 'reject', False, _('reject revisions')),
885 889 ('', 'abandon', False, _('abandon revisions')),
886 890 ('', 'reclaim', False, _('reclaim revisions')),
887 891 ('m', 'comment', '', _('comment on the last revision')),
888 892 ], _('DREVSPEC [OPTIONS]'))
889 893 def phabupdate(ui, repo, spec, **opts):
890 894 """update Differential Revision in batch
891 895
892 896 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
893 897 """
894 898 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
895 899 if len(flags) > 1:
896 900 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
897 901
898 902 actions = []
899 903 for f in flags:
900 904 actions.append({'type': f, 'value': 'true'})
901 905
902 906 drevs = querydrev(repo, spec)
903 907 for i, drev in enumerate(drevs):
904 908 if i + 1 == len(drevs) and opts.get('comment'):
905 909 actions.append({'type': 'comment', 'value': opts['comment']})
906 910 if actions:
907 911 params = {'objectIdentifier': drev[r'phid'],
908 912 'transactions': actions}
909 913 callconduit(repo, 'differential.revision.edit', params)
910 914
911 915 templatekeyword = registrar.templatekeyword()
912 916
913 917 @templatekeyword('phabreview', requires={'ctx'})
914 918 def template_review(context, mapping):
915 919 """:phabreview: Object describing the review for this changeset.
916 920 Has attributes `url` and `id`.
917 921 """
918 922 ctx = context.resource(mapping, 'ctx')
919 923 m = _differentialrevisiondescre.search(ctx.description())
920 924 if m:
921 925 return {
922 926 'url': m.group('url'),
923 927 'id': "D{}".format(m.group('id')),
924 928 }
diff --git a/contrib/simplemerge b/contrib/simplemerge
@@ -1,81 +1,83 @@
1 1 #!/usr/bin/env python
2 2 from __future__ import absolute_import
3 3
4 4 import getopt
5 5 import sys
6 6
7 7 import hgdemandimport
8 8 hgdemandimport.enable()
9 9
10 10 from mercurial.i18n import _
11 11 from mercurial import (
12 12 context,
13 13 error,
14 14 fancyopts,
15 15 simplemerge,
16 16 ui as uimod,
17 util,
17 )
18 from mercurial.utils import (
19 procutil,
18 20 )
19 21
20 22 options = [('L', 'label', [], _('labels to use on conflict markers')),
21 23 ('a', 'text', None, _('treat all files as text')),
22 24 ('p', 'print', None,
23 25 _('print results instead of overwriting LOCAL')),
24 26 ('', 'no-minimal', None, _('no effect (DEPRECATED)')),
25 27 ('h', 'help', None, _('display help and exit')),
26 28 ('q', 'quiet', None, _('suppress output'))]
27 29
28 30 usage = _('''simplemerge [OPTS] LOCAL BASE OTHER
29 31
30 32 Simple three-way file merge utility with a minimal feature set.
31 33
32 34 Apply to LOCAL the changes necessary to go from BASE to OTHER.
33 35
34 36 By default, LOCAL is overwritten with the results of this operation.
35 37 ''')
36 38
37 39 class ParseError(Exception):
38 40 """Exception raised on errors in parsing the command line."""
39 41
40 42 def showhelp():
41 43 sys.stdout.write(usage)
42 44 sys.stdout.write('\noptions:\n')
43 45
44 46 out_opts = []
45 47 for shortopt, longopt, default, desc in options:
46 48 out_opts.append(('%2s%s' % (shortopt and '-%s' % shortopt,
47 49 longopt and ' --%s' % longopt),
48 50 '%s' % desc))
49 51 opts_len = max([len(opt[0]) for opt in out_opts])
50 52 for first, second in out_opts:
51 53 sys.stdout.write(' %-*s %s\n' % (opts_len, first, second))
52 54
53 55 try:
54 56 for fp in (sys.stdin, sys.stdout, sys.stderr):
55 util.setbinary(fp)
57 procutil.setbinary(fp)
56 58
57 59 opts = {}
58 60 try:
59 61 args = fancyopts.fancyopts(sys.argv[1:], options, opts)
60 62 except getopt.GetoptError as e:
61 63 raise ParseError(e)
62 64 if opts['help']:
63 65 showhelp()
64 66 sys.exit(0)
65 67 if len(args) != 3:
66 68 raise ParseError(_('wrong number of arguments'))
67 69 local, base, other = args
68 70 sys.exit(simplemerge.simplemerge(uimod.ui.load(),
69 71 context.arbitraryfilectx(local),
70 72 context.arbitraryfilectx(base),
71 73 context.arbitraryfilectx(other),
72 74 **opts))
73 75 except ParseError as e:
74 76 sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
75 77 showhelp()
76 78 sys.exit(1)
77 79 except error.Abort as e:
78 80 sys.stderr.write("abort: %s\n" % e)
79 81 sys.exit(255)
80 82 except KeyboardInterrupt:
81 83 sys.exit(255)
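
A quick usage sketch of the script above (file names and labels are
illustrative): by default LOCAL is overwritten, while -p prints the merge
result instead::

    $ simplemerge local.txt base.txt other.txt
    $ simplemerge -p -L mine -L base -L theirs \
          local.txt base.txt other.txt > merged.txt
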
diff --git a/contrib/undumprevlog b/contrib/undumprevlog
@@ -1,46 +1,48 @@
1 1 #!/usr/bin/env python
2 2 # Undump a dump from dumprevlog
3 3 # $ hg init
4 4 # $ undumprevlog < repo.dump
5 5
6 6 from __future__ import absolute_import, print_function
7 7
8 8 import sys
9 9 from mercurial import (
10 10 node,
11 11 revlog,
12 12 transaction,
13 util,
14 13 vfs as vfsmod,
15 14 )
15 from mercurial.utils import (
16 procutil,
17 )
16 18
17 19 for fp in (sys.stdin, sys.stdout, sys.stderr):
18 util.setbinary(fp)
20 procutil.setbinary(fp)
19 21
20 22 opener = vfsmod.vfs('.', False)
21 23 tr = transaction.transaction(sys.stderr.write, opener, {'store': opener},
22 24 "undump.journal")
23 25 while True:
24 26 l = sys.stdin.readline()
25 27 if not l:
26 28 break
27 29 if l.startswith("file:"):
28 30 f = l[6:-1]
29 31 r = revlog.revlog(opener, f)
30 32 print(f)
31 33 elif l.startswith("node:"):
32 34 n = node.bin(l[6:-1])
33 35 elif l.startswith("linkrev:"):
34 36 lr = int(l[9:-1])
35 37 elif l.startswith("parents:"):
36 38 p = l[9:-1].split()
37 39 p1 = node.bin(p[0])
38 40 p2 = node.bin(p[1])
39 41 elif l.startswith("length:"):
40 42 length = int(l[8:-1])
41 43 sys.stdin.readline() # start marker
42 44 d = sys.stdin.read(length)
43 45 sys.stdin.readline() # end marker
44 46 r.addrevision(d, tr, lr, p1, p2)
45 47
46 48 tr.close()
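
Per the usage comments in dumprevlog and undumprevlog, the two scripts
round-trip a repository's revlogs (paths illustrative)::

    $ cd orig
    $ find .hg/store/ -name "*.i" | xargs dumprevlog > ../repo.dump
    $ cd .. && hg init restored && cd restored
    $ undumprevlog < ../repo.dump
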
diff --git a/hgext/acl.py b/hgext/acl.py
@@ -1,377 +1,380 @@
1 1 # acl.py - changeset access control for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''hooks for controlling repository access
9 9
10 10 This hook makes it possible to allow or deny write access to given
11 11 branches and paths of a repository when receiving incoming changesets
12 12 via pretxnchangegroup and pretxncommit.
13 13
14 14 The authorization is matched based on the local user name on the
15 15 system where the hook runs, and not the committer of the original
16 16 changeset (since the latter is merely informative).
17 17
18 18 The acl hook is best used along with a restricted shell like hgsh,
19 19 preventing authenticating users from doing anything other than pushing
20 20 or pulling. The hook is not safe to use if users have interactive
21 21 shell access, as they can then disable the hook. Nor is it safe if
22 22 remote users share an account, because then there is no way to
23 23 distinguish them.
24 24
25 25 The order in which access checks are performed is:
26 26
27 27 1) Deny list for branches (section ``acl.deny.branches``)
28 28 2) Allow list for branches (section ``acl.allow.branches``)
29 29 3) Deny list for paths (section ``acl.deny``)
30 30 4) Allow list for paths (section ``acl.allow``)
31 31
32 32 The allow and deny sections take key-value pairs.
33 33
34 34 Branch-based Access Control
35 35 ---------------------------
36 36
37 37 Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to
38 38 have branch-based access control. Keys in these sections can be
39 39 either:
40 40
41 41 - a branch name, or
42 42 - an asterisk, to match any branch;
43 43
44 44 The corresponding values can be either:
45 45
46 46 - a comma-separated list containing users and groups, or
47 47 - an asterisk, to match anyone;
48 48
49 49 You can add the "!" prefix to a user or group name to invert the sense
50 50 of the match.
51 51
52 52 Path-based Access Control
53 53 -------------------------
54 54
55 55 Use the ``acl.deny`` and ``acl.allow`` sections to have path-based
56 56 access control. Keys in these sections accept a subtree pattern (with
57 57 a glob syntax by default). The corresponding values follow the same
58 58 syntax as the other sections above.
59 59
60 60 Groups
61 61 ------
62 62
63 63 Group names must be prefixed with an ``@`` symbol. Specifying a group
64 64 name has the same effect as specifying all the users in that group.
65 65
66 66 You can define group members in the ``acl.groups`` section.
67 67 If a group name is not defined there, and Mercurial is running under
68 68 a Unix-like system, the list of users will be taken from the OS.
69 69 Otherwise, an exception will be raised.
70 70
71 71 Example Configuration
72 72 ---------------------
73 73
74 74 ::
75 75
76 76 [hooks]
77 77
78 78 # Use this if you want to check access restrictions at commit time
79 79 pretxncommit.acl = python:hgext.acl.hook
80 80
81 81 # Use this if you want to check access restrictions for pull, push,
82 82 # bundle and serve.
83 83 pretxnchangegroup.acl = python:hgext.acl.hook
84 84
85 85 [acl]
86 86 # Allow or deny access for incoming changes only if their source is
87 87 # listed here, let them pass otherwise. Source is "serve" for all
88 88 # remote access (http or ssh), "push", "pull" or "bundle" when the
89 89 # related commands are run locally.
90 90 # Default: serve
91 91 sources = serve
92 92
93 93 [acl.deny.branches]
94 94
95 95 # Everyone is denied access to the frozen branch:
96 96 frozen-branch = *
97 97
98 98 # A bad user is denied on all branches:
99 99 * = bad-user
100 100
101 101 [acl.allow.branches]
102 102
103 103 # A few users are allowed on branch-a:
104 104 branch-a = user-1, user-2, user-3
105 105
106 106 # Only one user is allowed on branch-b:
107 107 branch-b = user-1
108 108
109 109 # The super user is allowed on any branch:
110 110 * = super-user
111 111
112 112 # Everyone is allowed on branch-for-tests:
113 113 branch-for-tests = *
114 114
115 115 [acl.deny]
116 116 # This list is checked first. If a match is found, acl.allow is not
117 117 # checked. All users are granted access if acl.deny is not present.
118 118 # Format for both lists: glob pattern = user, ..., @group, ...
119 119
120 120 # To match everyone, use an asterisk for the user:
121 121 # my/glob/pattern = *
122 122
123 123 # user6 will not have write access to any file:
124 124 ** = user6
125 125
126 126 # Group "hg-denied" will not have write access to any file:
127 127 ** = @hg-denied
128 128
129 129 # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite
130 130 # everyone being able to change all other files. See below.
131 131 src/main/resources/DONT-TOUCH-THIS.txt = *
132 132
133 133 [acl.allow]
134 134 # if acl.allow is not present, all users are allowed by default
135 135 # empty acl.allow = no users allowed
136 136
137 137 # User "doc_writer" has write access to any file under the "docs"
138 138 # folder:
139 139 docs/** = doc_writer
140 140
141 141 # User "jack" and group "designers" have write access to any file
142 142 # under the "images" folder:
143 143 images/** = jack, @designers
144 144
145 145 # Everyone (except for "user6" and "@hg-denied" - see acl.deny above)
146 146 # will have write access to any file under the "resources" folder
147 147 # (except for 1 file. See acl.deny):
148 148 src/main/resources/** = *
149 149
150 150 .hgtags = release_engineer
151 151
152 152 Examples using the "!" prefix
153 153 .............................
154 154
155 155 Suppose there's a branch that only a given user (or group) should be able to
156 156 push to, and you don't want to restrict access to any other branch that may
157 157 be created.
158 158
159 159 The "!" prefix allows you to prevent anyone except a given user or group from
160 160 pushing changesets to a given branch or path.
161 161
162 162 In the examples below, we will:
163 163 1) Deny access to branch "ring" to anyone but user "gollum"
164 164 2) Deny access to branch "lake" to anyone but members of the group "hobbit"
165 165 3) Deny access to a file to anyone but user "gollum"
166 166
167 167 ::
168 168
169 169 [acl.allow.branches]
170 170 # Empty
171 171
172 172 [acl.deny.branches]
173 173
174 174 # 1) only 'gollum' can commit to branch 'ring';
175 175 # 'gollum' and anyone else can still commit to any other branch.
176 176 ring = !gollum
177 177
178 178 # 2) only members of the group 'hobbit' can commit to branch 'lake';
179 179 # 'hobbit' members and anyone else can still commit to any other branch.
180 180 lake = !@hobbit
181 181
182 182 # You can also deny access based on file paths:
183 183
184 184 [acl.allow]
185 185 # Empty
186 186
187 187 [acl.deny]
188 188 # 3) only 'gollum' can change the file below;
189 189 # 'gollum' and anyone else can still change any other file.
190 190 /misty/mountains/cave/ring = !gollum
191 191
192 192 '''
193 193
194 194 from __future__ import absolute_import
195 195
196 196 from mercurial.i18n import _
197 197 from mercurial import (
198 198 error,
199 199 extensions,
200 200 match,
201 201 registrar,
202 202 util,
203 203 )
204 from mercurial.utils import (
205 procutil,
206 )
204 207
205 208 urlreq = util.urlreq
206 209
207 210 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
208 211 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
209 212 # be specifying the version(s) of Mercurial they are tested with, or
210 213 # leave the attribute unspecified.
211 214 testedwith = 'ships-with-hg-core'
212 215
213 216 configtable = {}
214 217 configitem = registrar.configitem(configtable)
215 218
216 219 # deprecated config: acl.config
217 220 configitem('acl', 'config',
218 221 default=None,
219 222 )
220 223 configitem('acl.groups', '.*',
221 224 default=None,
222 225 generic=True,
223 226 )
224 227 configitem('acl.deny.branches', '.*',
225 228 default=None,
226 229 generic=True,
227 230 )
228 231 configitem('acl.allow.branches', '.*',
229 232 default=None,
230 233 generic=True,
231 234 )
232 235 configitem('acl.deny', '.*',
233 236 default=None,
234 237 generic=True,
235 238 )
236 239 configitem('acl.allow', '.*',
237 240 default=None,
238 241 generic=True,
239 242 )
240 243 configitem('acl', 'sources',
241 244 default=lambda: ['serve'],
242 245 )
243 246
244 247 def _getusers(ui, group):
245 248
246 249 # First, try to use group definition from section [acl.groups]
247 250 hgrcusers = ui.configlist('acl.groups', group)
248 251 if hgrcusers:
249 252 return hgrcusers
250 253
251 254 ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
252 255 # If no users found in group definition, get users from OS-level group
253 256 try:
254 257 return util.groupmembers(group)
255 258 except KeyError:
256 259 raise error.Abort(_("group '%s' is undefined") % group)
257 260
258 261 def _usermatch(ui, user, usersorgroups):
259 262
260 263 if usersorgroups == '*':
261 264 return True
262 265
263 266 for ug in usersorgroups.replace(',', ' ').split():
264 267
265 268 if ug.startswith('!'):
266 269 # Test for excluded user or group. Format:
267 270 # if ug is a user name: !username
268 271 # if ug is a group name: !@groupname
269 272 ug = ug[1:]
270 273 if not ug.startswith('@') and user != ug \
271 274 or ug.startswith('@') and user not in _getusers(ui, ug[1:]):
272 275 return True
273 276
274 277 # Test for user or group. Format:
275 278 # if ug is a user name: username
276 279 # if ug is a group name: @groupname
277 280 elif user == ug \
278 281 or ug.startswith('@') and user in _getusers(ui, ug[1:]):
279 282 return True
280 283
281 284 return False
282 285
283 286 def buildmatch(ui, repo, user, key):
284 287 '''return tuple of (match function, list enabled).'''
285 288 if not ui.has_section(key):
286 289 ui.debug('acl: %s not enabled\n' % key)
287 290 return None
288 291
289 292 pats = [pat for pat, users in ui.configitems(key)
290 293 if _usermatch(ui, user, users)]
291 294 ui.debug('acl: %s enabled, %d entries for user %s\n' %
292 295 (key, len(pats), user))
293 296
294 297 # Branch-based ACL
295 298 if not repo:
296 299 if pats:
297 300 # If there's an asterisk (meaning "any branch"), always return True;
298 301 # Otherwise, test if b is in pats
299 302 if '*' in pats:
300 303 return util.always
301 304 return lambda b: b in pats
302 305 return util.never
303 306
304 307 # Path-based ACL
305 308 if pats:
306 309 return match.match(repo.root, '', pats)
307 310 return util.never
308 311
309 312 def ensureenabled(ui):
310 313 """make sure the extension is enabled when used as hook
311 314
312 315 When acl is used through hooks, the extension is never formally loaded and
313 316 enabled. This has some side effects; for example, the config declaration is
314 317 never loaded. This function ensures the extension is enabled when running
315 318 hooks.
316 319 """
317 320 if 'acl' in ui._knownconfig:
318 321 return
319 322 ui.setconfig('extensions', 'acl', '', source='internal')
320 323 extensions.loadall(ui, ['acl'])
321 324
322 325 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
323 326
324 327 ensureenabled(ui)
325 328
326 329 if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
327 330 raise error.Abort(_('config error - hook type "%s" cannot stop '
328 331 'incoming changesets nor commits') % hooktype)
329 332 if (hooktype == 'pretxnchangegroup' and
330 333 source not in ui.configlist('acl', 'sources')):
331 334 ui.debug('acl: changes have source "%s" - skipping\n' % source)
332 335 return
333 336
334 337 user = None
335 338 if source == 'serve' and r'url' in kwargs:
336 339 url = kwargs[r'url'].split(':')
337 340 if url[0] == 'remote' and url[1].startswith('http'):
338 341 user = urlreq.unquote(url[3])
339 342
340 343 if user is None:
341 user = util.getuser()
344 user = procutil.getuser()
342 345
343 346 ui.debug('acl: checking access for user "%s"\n' % user)
344 347
345 348 # deprecated config: acl.config
346 349 cfg = ui.config('acl', 'config')
347 350 if cfg:
348 351 ui.readconfig(cfg, sections=['acl.groups', 'acl.allow.branches',
349 352 'acl.deny.branches', 'acl.allow', 'acl.deny'])
350 353
351 354 allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
352 355 denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
353 356 allow = buildmatch(ui, repo, user, 'acl.allow')
354 357 deny = buildmatch(ui, repo, user, 'acl.deny')
355 358
356 359 for rev in xrange(repo[node].rev(), len(repo)):
357 360 ctx = repo[rev]
358 361 branch = ctx.branch()
359 362 if denybranches and denybranches(branch):
360 363 raise error.Abort(_('acl: user "%s" denied on branch "%s"'
361 364 ' (changeset "%s")')
362 365 % (user, branch, ctx))
363 366 if allowbranches and not allowbranches(branch):
364 367 raise error.Abort(_('acl: user "%s" not allowed on branch "%s"'
365 368 ' (changeset "%s")')
366 369 % (user, branch, ctx))
367 370 ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
368 371 % (ctx, branch))
369 372
370 373 for f in ctx.files():
371 374 if deny and deny(f):
372 375 raise error.Abort(_('acl: user "%s" denied on "%s"'
373 376 ' (changeset "%s")') % (user, f, ctx))
374 377 if allow and not allow(f):
375 378 raise error.Abort(_('acl: user "%s" not allowed on "%s"'
376 379 ' (changeset "%s")') % (user, f, ctx))
377 380 ui.debug('acl: path access granted: "%s"\n' % ctx)
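
As a compact illustration of the hook wiring and rule syntax documented
above (branch and user names are illustrative)::

    [hooks]
    pretxnchangegroup.acl = python:hgext.acl.hook

    [acl]
    sources = serve

    [acl.deny.branches]
    # deny everyone but release-manager on branch 'stable'
    stable = !release-manager
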
diff --git a/hgext/blackbox.py b/hgext/blackbox.py
@@ -1,251 +1,254 @@
1 1 # blackbox.py - log repository events to a file for post-mortem debugging
2 2 #
3 3 # Copyright 2010 Nicolas Dumazet
4 4 # Copyright 2013 Facebook, Inc.
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """log repository events to a blackbox for debugging
10 10
11 11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 12 The events that get logged can be configured via the blackbox.track config key.
13 13
14 14 Examples::
15 15
16 16 [blackbox]
17 17 track = *
18 18 # dirty is *EXPENSIVE* (slow);
19 19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
20 20 dirty = True
21 21 # record the source of log messages
22 22 logsource = True
23 23
24 24 [blackbox]
25 25 track = command, commandfinish, commandexception, exthook, pythonhook
26 26
27 27 [blackbox]
28 28 track = incoming
29 29
30 30 [blackbox]
31 31 # limit the size of a log file
32 32 maxsize = 1.5 MB
33 33 # rotate up to N log files when the current one gets too big
34 34 maxfiles = 3
35 35
36 36 """
37 37
38 38 from __future__ import absolute_import
39 39
40 40 import errno
41 41 import re
42 42
43 43 from mercurial.i18n import _
44 44 from mercurial.node import hex
45 45
46 46 from mercurial import (
47 47 encoding,
48 48 registrar,
49 49 ui as uimod,
50 50 util,
51 51 )
52 from mercurial.utils import dateutil
52 from mercurial.utils import (
53 dateutil,
54 procutil,
55 )
53 56
54 57 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
55 58 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
56 59 # be specifying the version(s) of Mercurial they are tested with, or
57 60 # leave the attribute unspecified.
58 61 testedwith = 'ships-with-hg-core'
59 62
60 63 cmdtable = {}
61 64 command = registrar.command(cmdtable)
62 65
63 66 configtable = {}
64 67 configitem = registrar.configitem(configtable)
65 68
66 69 configitem('blackbox', 'dirty',
67 70 default=False,
68 71 )
69 72 configitem('blackbox', 'maxsize',
70 73 default='1 MB',
71 74 )
72 75 configitem('blackbox', 'logsource',
73 76 default=False,
74 77 )
75 78 configitem('blackbox', 'maxfiles',
76 79 default=7,
77 80 )
78 81 configitem('blackbox', 'track',
79 82 default=lambda: ['*'],
80 83 )
81 84
82 85 lastui = None
83 86
84 87 def _openlogfile(ui, vfs):
85 88 def rotate(oldpath, newpath):
86 89 try:
87 90 vfs.unlink(newpath)
88 91 except OSError as err:
89 92 if err.errno != errno.ENOENT:
90 93 ui.debug("warning: cannot remove '%s': %s\n" %
91 94 (newpath, err.strerror))
92 95 try:
93 96 if newpath:
94 97 vfs.rename(oldpath, newpath)
95 98 except OSError as err:
96 99 if err.errno != errno.ENOENT:
97 100 ui.debug("warning: cannot rename '%s' to '%s': %s\n" %
98 101 (newpath, oldpath, err.strerror))
99 102
100 103 maxsize = ui.configbytes('blackbox', 'maxsize')
101 104 name = 'blackbox.log'
102 105 if maxsize > 0:
103 106 try:
104 107 st = vfs.stat(name)
105 108 except OSError:
106 109 pass
107 110 else:
108 111 if st.st_size >= maxsize:
109 112 path = vfs.join(name)
110 113 maxfiles = ui.configint('blackbox', 'maxfiles')
111 114 for i in xrange(maxfiles - 1, 1, -1):
112 115 rotate(oldpath='%s.%d' % (path, i - 1),
113 116 newpath='%s.%d' % (path, i))
114 117 rotate(oldpath=path,
115 118 newpath=maxfiles > 0 and path + '.1')
116 119 return vfs(name, 'a')
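# A minimal sketch of one rotation pass, assuming maxfiles = 3 and a full
# blackbox.log (filenames follow the scheme used above; illustration only):
#
#   blackbox.log.1 -> blackbox.log.2   (any existing .2 is unlinked first)
#   blackbox.log   -> blackbox.log.1
#   blackbox.log                       (reopened empty, in append mode)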
117 120
118 121 def wrapui(ui):
119 122 class blackboxui(ui.__class__):
120 123 @property
121 124 def _bbvfs(self):
122 125 vfs = None
123 126 repo = getattr(self, '_bbrepo', None)
124 127 if repo:
125 128 vfs = repo.vfs
126 129 if not vfs.isdir('.'):
127 130 vfs = None
128 131 return vfs
129 132
130 133 @util.propertycache
131 134 def track(self):
132 135 return self.configlist('blackbox', 'track')
133 136
134 137 def debug(self, *msg, **opts):
135 138 super(blackboxui, self).debug(*msg, **opts)
136 139 if self.debugflag:
137 140 self.log('debug', '%s', ''.join(msg))
138 141
139 142 def log(self, event, *msg, **opts):
140 143 global lastui
141 144 super(blackboxui, self).log(event, *msg, **opts)
142 145
143 146 if '*' not in self.track and event not in self.track:
144 147 return
145 148
146 149 if self._bbvfs:
147 150 ui = self
148 151 else:
149 152 # certain ui instances exist outside the context of
150 153 # a repo, so just default to the last blackbox that
151 154 # was seen.
152 155 ui = lastui
153 156
154 157 if not ui:
155 158 return
156 159 vfs = ui._bbvfs
157 160 if not vfs:
158 161 return
159 162
160 163 repo = getattr(ui, '_bbrepo', None)
161 164 if not lastui or repo:
162 165 lastui = ui
163 166 if getattr(ui, '_bbinlog', False):
164 167 # recursion and failure guard
165 168 return
166 169 ui._bbinlog = True
167 170 default = self.configdate('devel', 'default-date')
168 171 date = dateutil.datestr(default, '%Y/%m/%d %H:%M:%S')
169 user = util.getuser()
170 pid = '%d' % util.getpid()
172 user = procutil.getuser()
173 pid = '%d' % procutil.getpid()
171 174 formattedmsg = msg[0] % msg[1:]
172 175 rev = '(unknown)'
173 176 changed = ''
174 177 if repo:
175 178 ctx = repo[None]
176 179 parents = ctx.parents()
177 180 rev = ('+'.join([hex(p.node()) for p in parents]))
178 181 if (ui.configbool('blackbox', 'dirty') and
179 182 ctx.dirty(missing=True, merge=False, branch=False)):
180 183 changed = '+'
181 184 if ui.configbool('blackbox', 'logsource'):
182 185 src = ' [%s]' % event
183 186 else:
184 187 src = ''
185 188 try:
186 189 fmt = '%s %s @%s%s (%s)%s> %s'
187 190 args = (date, user, rev, changed, pid, src, formattedmsg)
188 191 with _openlogfile(ui, vfs) as fp:
189 192 fp.write(fmt % args)
190 193 except (IOError, OSError) as err:
191 194 self.debug('warning: cannot write to blackbox.log: %s\n' %
192 195 encoding.strtolocal(err.strerror))
193 196 # do not restore _bbinlog intentionally to avoid failed
194 197 # logging again
195 198 else:
196 199 ui._bbinlog = False
197 200
198 201 def setrepo(self, repo):
199 202 self._bbrepo = repo
200 203
201 204 ui.__class__ = blackboxui
202 205 uimod.ui = blackboxui
203 206
204 207 def uisetup(ui):
205 208 wrapui(ui)
206 209
207 210 def reposetup(ui, repo):
208 211 # During 'hg pull' a httppeer repo is created to represent the remote repo.
209 212 # It doesn't have a .hg directory to put a blackbox in, so we don't do
210 213 # the blackbox setup for it.
211 214 if not repo.local():
212 215 return
213 216
214 217 if util.safehasattr(ui, 'setrepo'):
215 218 ui.setrepo(repo)
216 219
217 220 # Set lastui even if ui.log is not called. This gives blackbox a
218 221 # fallback place to log.
219 222 global lastui
220 223 if lastui is None:
221 224 lastui = ui
222 225
223 226 repo._wlockfreeprefix.add('blackbox.log')
224 227
225 228 @command('^blackbox',
226 229 [('l', 'limit', 10, _('the number of events to show')),
227 230 ],
228 231 _('hg blackbox [OPTION]...'))
229 232 def blackbox(ui, repo, *revs, **opts):
230 233 '''view the recent repository events
231 234 '''
232 235
233 236 if not repo.vfs.exists('blackbox.log'):
234 237 return
235 238
236 239 limit = opts.get(r'limit')
237 240 fp = repo.vfs('blackbox.log', 'r')
238 241 lines = fp.read().split('\n')
239 242
240 243 count = 0
241 244 output = []
242 245 for line in reversed(lines):
243 246 if count >= limit:
244 247 break
245 248
246 249 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
247 250 if re.match(r'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
248 251 count += 1
249 252 output.append(line)
250 253
251 254 ui.status('\n'.join(reversed(output)))
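# For illustration, a resulting blackbox.log entry (all values made up,
# with blackbox.logsource enabled) looks like:
#
#   2013/01/23 19:13:36 root @0000000000000000000000000000000000000000 (1234) [command]> serve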
@@ -1,1128 +1,1129
1 1 # bugzilla.py - bugzilla integration for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 # Copyright 2011-4 Jim Hague <jim.hague@acm.org>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''hooks for integrating with the Bugzilla bug tracker
10 10
11 11 This hook extension adds comments on bugs in Bugzilla when changesets
12 12 that refer to bugs by Bugzilla ID are seen. The comment is formatted using
13 13 the Mercurial template mechanism.
14 14
15 15 The bug references can optionally include an update for Bugzilla of the
16 16 hours spent working on the bug. Bugs can also be marked fixed.
17 17
18 18 Four basic modes of access to Bugzilla are provided:
19 19
20 20 1. Access via the Bugzilla REST-API. Requires bugzilla 5.0 or later.
21 21
22 22 2. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.
23 23
24 24 3. Check data via the Bugzilla XMLRPC interface and submit bug change
25 25 via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.
26 26
27 27 4. Writing directly to the Bugzilla database. Only Bugzilla installations
28 28 using MySQL are supported. Requires Python MySQLdb.
29 29
30 30 Writing directly to the database is susceptible to schema changes, and
31 31 relies on a Bugzilla contrib script to send out bug change
32 32 notification emails. This script runs as the user running Mercurial,
33 33 must be run on the host with the Bugzilla install, and requires
34 34 permission to read Bugzilla configuration details and the necessary
35 35 MySQL user and password to have full access rights to the Bugzilla
36 36 database. For these reasons this access mode is now considered
37 37 deprecated, and will not be updated for new Bugzilla versions going
38 38 forward. Only adding comments is supported in this access mode.
39 39
40 40 Access via XMLRPC needs a Bugzilla username and password to be specified
41 41 in the configuration. Comments are added under that username. Since the
42 42 configuration must be readable by all Mercurial users, it is recommended
43 43 that the rights of that user are restricted in Bugzilla to the minimum
44 44 necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.
45 45
46 46 Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
47 47 email to the Bugzilla email interface to submit comments to bugs.
48 48 The From: address in the email is set to the email address of the Mercurial
49 49 user, so the comment appears to come from the Mercurial user. In the event
50 50 that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
51 51 user, the email associated with the Bugzilla username used to log into
52 52 Bugzilla is used instead as the source of the comment. Marking bugs fixed
53 53 works on all supported Bugzilla versions.
54 54
55 55 Access via the REST-API needs either a Bugzilla username and password
56 56 or an apikey specified in the configuration. Comments are made under
57 57 the given username or the user associated with the apikey in Bugzilla.
58 58
59 59 Configuration items common to all access modes:
60 60
61 61 bugzilla.version
62 62 The access type to use. Values recognized are:
63 63
64 64 :``restapi``: Bugzilla REST-API, Bugzilla 5.0 and later.
65 65 :``xmlrpc``: Bugzilla XMLRPC interface.
66 66 :``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
67 67 :``3.0``: MySQL access, Bugzilla 3.0 and later.
68 68 :``2.18``: MySQL access, Bugzilla 2.18 and up to but not
69 69 including 3.0.
70 70 :``2.16``: MySQL access, Bugzilla 2.16 and up to but not
71 71 including 2.18.
72 72
73 73 bugzilla.regexp
74 74 Regular expression to match bug IDs for update in changeset commit message.
75 75 It must contain one "()" named group ``<ids>`` containing the bug
76 76 IDs separated by non-digit characters. It may also contain
77 77 a named group ``<hours>`` with a floating-point number giving the
78 78 hours worked on the bug. If no named groups are present, the first
79 79 "()" group is assumed to contain the bug IDs, and work time is not
80 80 updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
81 81 ``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
82 82 variations thereof, followed by an hours number prefixed by ``h`` or
83 83 ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
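
For illustration, the default ``regexp`` can be exercised directly with
Python's ``re`` module (the commit message below is made up)::

    >>> import re
    >>> pat = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
    ...        r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
    ...        r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
    >>> m = re.search(pat, 'Bugs 1234 and 5678 hours 1.5', re.IGNORECASE)
    >>> m.group('ids'), m.group('hours')
    ('1234 and 5678 ', '1.5')
    >>> [i for i in re.split(r'\D+', m.group('ids')) if i]
    ['1234', '5678']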
84 84
85 85 bugzilla.fixregexp
86 86 Regular expression to match bug IDs for marking fixed in changeset
87 87 commit message. This must contain a "()" named group ``<ids>`` containing
88 88 the bug IDs separated by non-digit characters. It may also contain
89 89 a named group ``<hours>`` with a floating-point number giving the
90 90 hours worked on the bug. If no named groups are present, the first
91 91 "()" group is assumed to contain the bug IDs, and work time is not
92 92 updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
93 93 ``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
94 94 variations thereof, followed by an hours number prefixed by ``h`` or
95 95 ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
96 96
97 97 bugzilla.fixstatus
98 98 The status to set a bug to when marking fixed. Default ``RESOLVED``.
99 99
100 100 bugzilla.fixresolution
101 101 The resolution to set a bug to when marking fixed. Default ``FIXED``.
102 102
103 103 bugzilla.style
104 104 The style file to use when formatting comments.
105 105
106 106 bugzilla.template
107 107 Template to use when formatting comments. Overrides style if
108 108 specified. In addition to the usual Mercurial keywords, the
109 109 extension specifies:
110 110
111 111 :``{bug}``: The Bugzilla bug ID.
112 112 :``{root}``: The full pathname of the Mercurial repository.
113 113 :``{webroot}``: Stripped pathname of the Mercurial repository.
114 114 :``{hgweb}``: Base URL for browsing Mercurial repositories.
115 115
116 116 Default ``changeset {node|short} in repo {root} refers to bug
117 117 {bug}.\\ndetails:\\n\\t{desc|tabindent}``
118 118
119 119 bugzilla.strip
120 120 The number of path separator characters to strip from the front of
121 121 the Mercurial repository path (``{root}`` in templates) to produce
122 122 ``{webroot}``. For example, a repository with ``{root}``
123 123 ``/var/local/my-project`` with a strip of 2 gives a value for
124 124 ``{webroot}`` of ``my-project``. Default 0.
125 125
126 126 web.baseurl
127 127 Base URL for browsing Mercurial repositories. Referenced from
128 128 templates as ``{hgweb}``.
129 129
130 130 Configuration items common to XMLRPC+email and MySQL access modes:
131 131
132 132 bugzilla.usermap
133 133 Path of file containing Mercurial committer email to Bugzilla user email
134 134 mappings. If specified, the file should contain one mapping per
135 135 line::
136 136
137 137 committer = Bugzilla user
138 138
139 139 See also the ``[usermap]`` section.
140 140
141 141 The ``[usermap]`` section is used to specify mappings of Mercurial
142 142 committer email to Bugzilla user email. See also ``bugzilla.usermap``.
143 143 Contains entries of the form ``committer = Bugzilla user``.
144 144
145 145 XMLRPC and REST-API access mode configuration:
146 146
147 147 bugzilla.bzurl
148 148 The base URL for the Bugzilla installation.
149 149 Default ``http://localhost/bugzilla``.
150 150
151 151 bugzilla.user
152 152 The username to use to log into Bugzilla via XMLRPC. Default
153 153 ``bugs``.
154 154
155 155 bugzilla.password
156 156 The password for Bugzilla login.
157 157
158 158 REST-API access mode uses the options listed above as well as:
159 159
160 160 bugzilla.apikey
161 161 An apikey generated on the Bugzilla instance for api access.
162 162 Using an apikey removes the need to store the user and password
163 163 options.
164 164
165 165 XMLRPC+email access mode uses the XMLRPC access mode configuration items,
166 166 and also:
167 167
168 168 bugzilla.bzemail
169 169 The Bugzilla email address.
170 170
171 171 In addition, the Mercurial email settings must be configured. See the
172 172 documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.
173 173
174 174 MySQL access mode configuration:
175 175
176 176 bugzilla.host
177 177 Hostname of the MySQL server holding the Bugzilla database.
178 178 Default ``localhost``.
179 179
180 180 bugzilla.db
181 181 Name of the Bugzilla database in MySQL. Default ``bugs``.
182 182
183 183 bugzilla.user
184 184 Username to use to access MySQL server. Default ``bugs``.
185 185
186 186 bugzilla.password
187 187 Password to use to access MySQL server.
188 188
189 189 bugzilla.timeout
190 190 Database connection timeout (seconds). Default 5.
191 191
192 192 bugzilla.bzuser
193 193 Fallback Bugzilla user name to record comments with, if changeset
194 194 committer cannot be found as a Bugzilla user.
195 195
196 196 bugzilla.bzdir
197 197 Bugzilla install directory. Used by default notify. Default
198 198 ``/var/www/html/bugzilla``.
199 199
200 200 bugzilla.notify
201 201 The command to run to get Bugzilla to send bug change notification
202 202 emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
203 203 id) and ``user`` (committer bugzilla email). Default depends on
204 204 version; from 2.18 it is "cd %(bzdir)s && perl -T
205 205 contrib/sendbugmail.pl %(id)s %(user)s".
206 206
207 207 Activating the extension::
208 208
209 209 [extensions]
210 210 bugzilla =
211 211
212 212 [hooks]
213 213 # run bugzilla hook on every change pulled or pushed in here
214 214 incoming.bugzilla = python:hgext.bugzilla.hook
215 215
216 216 Example configurations:
217 217
218 218 XMLRPC example configuration. This uses the Bugzilla at
219 219 ``http://my-project.org/bugzilla``, logging in as user
220 220 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
221 221 collection of Mercurial repositories in ``/var/local/hg/repos/``,
222 222 with a web interface at ``http://my-project.org/hg``. ::
223 223
224 224 [bugzilla]
225 225 bzurl=http://my-project.org/bugzilla
226 226 user=bugmail@my-project.org
227 227 password=plugh
228 228 version=xmlrpc
229 229 template=Changeset {node|short} in {root|basename}.
230 230 {hgweb}/{webroot}/rev/{node|short}\\n
231 231 {desc}\\n
232 232 strip=5
233 233
234 234 [web]
235 235 baseurl=http://my-project.org/hg
236 236
237 237 XMLRPC+email example configuration. This uses the Bugzilla at
238 238 ``http://my-project.org/bugzilla``, logging in as user
239 239 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
240 240 collection of Mercurial repositories in ``/var/local/hg/repos/``,
241 241 with a web interface at ``http://my-project.org/hg``. Bug comments
242 242 are sent to the Bugzilla email address
243 243 ``bugzilla@my-project.org``. ::
244 244
245 245 [bugzilla]
246 246 bzurl=http://my-project.org/bugzilla
247 247 user=bugmail@my-project.org
248 248 password=plugh
249 249 version=xmlrpc+email
250 250 bzemail=bugzilla@my-project.org
251 251 template=Changeset {node|short} in {root|basename}.
252 252 {hgweb}/{webroot}/rev/{node|short}\\n
253 253 {desc}\\n
254 254 strip=5
255 255
256 256 [web]
257 257 baseurl=http://my-project.org/hg
258 258
259 259 [usermap]
260 260 user@emaildomain.com=user.name@bugzilladomain.com
261 261
262 262 MySQL example configuration. This has a local Bugzilla 3.2 installation
263 263 in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
264 264 the Bugzilla database name is ``bugs`` and MySQL is
265 265 accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
266 266 with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
267 267 with a web interface at ``http://my-project.org/hg``. ::
268 268
269 269 [bugzilla]
270 270 host=localhost
271 271 password=XYZZY
272 272 version=3.0
273 273 bzuser=unknown@domain.com
274 274 bzdir=/opt/bugzilla-3.2
275 275 template=Changeset {node|short} in {root|basename}.
276 276 {hgweb}/{webroot}/rev/{node|short}\\n
277 277 {desc}\\n
278 278 strip=5
279 279
280 280 [web]
281 281 baseurl=http://my-project.org/hg
282 282
283 283 [usermap]
284 284 user@emaildomain.com=user.name@bugzilladomain.com
285 285
286 286 All the above add a comment to the Bugzilla bug record of the form::
287 287
288 288 Changeset 3b16791d6642 in repository-name.
289 289 http://my-project.org/hg/repository-name/rev/3b16791d6642
290 290
291 291 Changeset commit comment. Bug 1234.
292 292 '''
293 293
294 294 from __future__ import absolute_import
295 295
296 296 import json
297 297 import re
298 298 import time
299 299
300 300 from mercurial.i18n import _
301 301 from mercurial.node import short
302 302 from mercurial import (
303 303 error,
304 304 logcmdutil,
305 305 mail,
306 306 registrar,
307 307 url,
308 308 util,
309 309 )
310 310 from mercurial.utils import (
311 procutil,
311 312 stringutil,
312 313 )
313 314
314 315 xmlrpclib = util.xmlrpclib
315 316
316 317 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
317 318 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
318 319 # be specifying the version(s) of Mercurial they are tested with, or
319 320 # leave the attribute unspecified.
320 321 testedwith = 'ships-with-hg-core'
321 322
322 323 configtable = {}
323 324 configitem = registrar.configitem(configtable)
324 325
325 326 configitem('bugzilla', 'apikey',
326 327 default='',
327 328 )
328 329 configitem('bugzilla', 'bzdir',
329 330 default='/var/www/html/bugzilla',
330 331 )
331 332 configitem('bugzilla', 'bzemail',
332 333 default=None,
333 334 )
334 335 configitem('bugzilla', 'bzurl',
335 336 default='http://localhost/bugzilla/',
336 337 )
337 338 configitem('bugzilla', 'bzuser',
338 339 default=None,
339 340 )
340 341 configitem('bugzilla', 'db',
341 342 default='bugs',
342 343 )
343 344 configitem('bugzilla', 'fixregexp',
344 345 default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
345 346 r'(?:nos?\.?|num(?:ber)?s?)?\s*'
346 347 r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
347 348 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
348 349 )
349 350 configitem('bugzilla', 'fixresolution',
350 351 default='FIXED',
351 352 )
352 353 configitem('bugzilla', 'fixstatus',
353 354 default='RESOLVED',
354 355 )
355 356 configitem('bugzilla', 'host',
356 357 default='localhost',
357 358 )
358 359 configitem('bugzilla', 'notify',
359 360 default=configitem.dynamicdefault,
360 361 )
361 362 configitem('bugzilla', 'password',
362 363 default=None,
363 364 )
364 365 configitem('bugzilla', 'regexp',
365 366 default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
366 367 r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
367 368 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
368 369 )
369 370 configitem('bugzilla', 'strip',
370 371 default=0,
371 372 )
372 373 configitem('bugzilla', 'style',
373 374 default=None,
374 375 )
375 376 configitem('bugzilla', 'template',
376 377 default=None,
377 378 )
378 379 configitem('bugzilla', 'timeout',
379 380 default=5,
380 381 )
381 382 configitem('bugzilla', 'user',
382 383 default='bugs',
383 384 )
384 385 configitem('bugzilla', 'usermap',
385 386 default=None,
386 387 )
387 388 configitem('bugzilla', 'version',
388 389 default=None,
389 390 )
390 391
391 392 class bzaccess(object):
392 393 '''Base class for access to Bugzilla.'''
393 394
394 395 def __init__(self, ui):
395 396 self.ui = ui
396 397 usermap = self.ui.config('bugzilla', 'usermap')
397 398 if usermap:
398 399 self.ui.readconfig(usermap, sections=['usermap'])
399 400
400 401 def map_committer(self, user):
401 402 '''map name of committer to Bugzilla user name.'''
402 403 for committer, bzuser in self.ui.configitems('usermap'):
403 404 if committer.lower() == user.lower():
404 405 return bzuser
405 406 return user
406 407
407 408 # Methods to be implemented by access classes.
408 409 #
409 410 # 'bugs' is a dict keyed on bug id, where values are a dict holding
410 411 # updates to bug state. Recognized dict keys are:
411 412 #
412 413 # 'hours': Value, float containing work hours to be updated.
413 414 # 'fix': If key present, bug is to be marked fixed. Value ignored.
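#
# For example (values illustrative), a commit message "Fixes 1234 h 2"
# would arrive here as:
#
#   bugs = {1234: {'fix': None, 'hours': 2.0}}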
414 415
415 416 def filter_real_bug_ids(self, bugs):
416 417 '''remove bug IDs that do not exist in Bugzilla from bugs.'''
417 418
418 419 def filter_cset_known_bug_ids(self, node, bugs):
419 420 '''remove bug IDs where node occurs in comment text from bugs.'''
420 421
421 422 def updatebug(self, bugid, newstate, text, committer):
422 423 '''update the specified bug. Add comment text and set new states.
423 424
424 425 If possible add the comment as being from the committer of
425 426 the changeset. Otherwise use the default Bugzilla user.
426 427 '''
427 428
428 429 def notify(self, bugs, committer):
429 430 '''Force sending of Bugzilla notification emails.
430 431
431 432 Only required if the access method does not trigger notification
432 433 emails automatically.
433 434 '''
434 435
435 436 # Bugzilla via direct access to MySQL database.
436 437 class bzmysql(bzaccess):
437 438 '''Support for direct MySQL access to Bugzilla.
438 439
439 440 The earliest Bugzilla version this is tested with is version 2.16.
440 441
441 442 If your Bugzilla is version 3.4 or above, you are strongly
442 443 recommended to use the XMLRPC access method instead.
443 444 '''
444 445
445 446 @staticmethod
446 447 def sql_buglist(ids):
447 448 '''return SQL-friendly list of bug ids'''
448 449 return '(' + ','.join(map(str, ids)) + ')'
449 450
450 451 _MySQLdb = None
451 452
452 453 def __init__(self, ui):
453 454 try:
454 455 import MySQLdb as mysql
455 456 bzmysql._MySQLdb = mysql
456 457 except ImportError as err:
457 458 raise error.Abort(_('python mysql support not available: %s') % err)
458 459
459 460 bzaccess.__init__(self, ui)
460 461
461 462 host = self.ui.config('bugzilla', 'host')
462 463 user = self.ui.config('bugzilla', 'user')
463 464 passwd = self.ui.config('bugzilla', 'password')
464 465 db = self.ui.config('bugzilla', 'db')
465 466 timeout = int(self.ui.config('bugzilla', 'timeout'))
466 467 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
467 468 (host, db, user, '*' * len(passwd)))
468 469 self.conn = bzmysql._MySQLdb.connect(host=host,
469 470 user=user, passwd=passwd,
470 471 db=db,
471 472 connect_timeout=timeout)
472 473 self.cursor = self.conn.cursor()
473 474 self.longdesc_id = self.get_longdesc_id()
474 475 self.user_ids = {}
475 476 self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
476 477
477 478 def run(self, *args, **kwargs):
478 479 '''run a query.'''
479 480 self.ui.note(_('query: %s %s\n') % (args, kwargs))
480 481 try:
481 482 self.cursor.execute(*args, **kwargs)
482 483 except bzmysql._MySQLdb.MySQLError:
483 484 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
484 485 raise
485 486
486 487 def get_longdesc_id(self):
487 488 '''get identity of longdesc field'''
488 489 self.run('select fieldid from fielddefs where name = "longdesc"')
489 490 ids = self.cursor.fetchall()
490 491 if len(ids) != 1:
491 492 raise error.Abort(_('unknown database schema'))
492 493 return ids[0][0]
493 494
494 495 def filter_real_bug_ids(self, bugs):
495 496 '''filter not-existing bugs from set.'''
496 497 self.run('select bug_id from bugs where bug_id in %s' %
497 498 bzmysql.sql_buglist(bugs.keys()))
498 499 existing = [id for (id,) in self.cursor.fetchall()]
499 500 for id in bugs.keys():
500 501 if id not in existing:
501 502 self.ui.status(_('bug %d does not exist\n') % id)
502 503 del bugs[id]
503 504
504 505 def filter_cset_known_bug_ids(self, node, bugs):
505 506 '''filter bug ids that already refer to this changeset from set.'''
506 507 self.run('''select bug_id from longdescs where
507 508 bug_id in %s and thetext like "%%%s%%"''' %
508 509 (bzmysql.sql_buglist(bugs.keys()), short(node)))
509 510 for (id,) in self.cursor.fetchall():
510 511 self.ui.status(_('bug %d already knows about changeset %s\n') %
511 512 (id, short(node)))
512 513 del bugs[id]
513 514
514 515 def notify(self, bugs, committer):
515 516 '''tell bugzilla to send mail.'''
516 517 self.ui.status(_('telling bugzilla to send mail:\n'))
517 518 (user, userid) = self.get_bugzilla_user(committer)
518 519 for id in bugs.keys():
519 520 self.ui.status(_(' bug %s\n') % id)
520 521 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
521 522 bzdir = self.ui.config('bugzilla', 'bzdir')
522 523 try:
523 524 # Backwards-compatible with old notify string, which
524 525 # took one string. This will throw with a new format
525 526 # string.
526 527 cmd = cmdfmt % id
527 528 except TypeError:
528 529 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
529 530 self.ui.note(_('running notify command %s\n') % cmd)
530 fp = util.popen('(%s) 2>&1' % cmd)
531 fp = procutil.popen('(%s) 2>&1' % cmd)
531 532 out = fp.read()
532 533 ret = fp.close()
533 534 if ret:
534 535 self.ui.warn(out)
535 536 raise error.Abort(_('bugzilla notify command %s') %
536 util.explainexit(ret)[0])
537 procutil.explainexit(ret)[0])
537 538 self.ui.status(_('done\n'))
538 539
539 540 def get_user_id(self, user):
540 541 '''look up numeric bugzilla user id.'''
541 542 try:
542 543 return self.user_ids[user]
543 544 except KeyError:
544 545 try:
545 546 userid = int(user)
546 547 except ValueError:
547 548 self.ui.note(_('looking up user %s\n') % user)
548 549 self.run('''select userid from profiles
549 550 where login_name like %s''', user)
550 551 all = self.cursor.fetchall()
551 552 if len(all) != 1:
552 553 raise KeyError(user)
553 554 userid = int(all[0][0])
554 555 self.user_ids[user] = userid
555 556 return userid
556 557
557 558 def get_bugzilla_user(self, committer):
558 559 '''See if committer is a registered bugzilla user. Return
559 560 bugzilla username and userid if so. If not, return default
560 561 bugzilla username and userid.'''
561 562 user = self.map_committer(committer)
562 563 try:
563 564 userid = self.get_user_id(user)
564 565 except KeyError:
565 566 try:
566 567 defaultuser = self.ui.config('bugzilla', 'bzuser')
567 568 if not defaultuser:
568 569 raise error.Abort(_('cannot find bugzilla user id for %s') %
569 570 user)
570 571 userid = self.get_user_id(defaultuser)
571 572 user = defaultuser
572 573 except KeyError:
573 574 raise error.Abort(_('cannot find bugzilla user id for %s or %s')
574 575 % (user, defaultuser))
575 576 return (user, userid)
576 577
577 578 def updatebug(self, bugid, newstate, text, committer):
578 579 '''update bug state with comment text.
579 580
580 581 Try adding comment as committer of changeset, otherwise as
581 582 default bugzilla user.'''
582 583 if len(newstate) > 0:
583 584 self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
584 585
585 586 (user, userid) = self.get_bugzilla_user(committer)
586 587 now = time.strftime(r'%Y-%m-%d %H:%M:%S')
587 588 self.run('''insert into longdescs
588 589 (bug_id, who, bug_when, thetext)
589 590 values (%s, %s, %s, %s)''',
590 591 (bugid, userid, now, text))
591 592 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
592 593 values (%s, %s, %s, %s)''',
593 594 (bugid, userid, now, self.longdesc_id))
594 595 self.conn.commit()
595 596
596 597 class bzmysql_2_18(bzmysql):
597 598 '''support for bugzilla 2.18 series.'''
598 599
599 600 def __init__(self, ui):
600 601 bzmysql.__init__(self, ui)
601 602 self.default_notify = \
602 603 "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
603 604
604 605 class bzmysql_3_0(bzmysql_2_18):
605 606 '''support for bugzilla 3.0 series.'''
606 607
607 608 def __init__(self, ui):
608 609 bzmysql_2_18.__init__(self, ui)
609 610
610 611 def get_longdesc_id(self):
611 612 '''get identity of longdesc field'''
612 613 self.run('select id from fielddefs where name = "longdesc"')
613 614 ids = self.cursor.fetchall()
614 615 if len(ids) != 1:
615 616 raise error.Abort(_('unknown database schema'))
616 617 return ids[0][0]
617 618
618 619 # Bugzilla via XMLRPC interface.
619 620
620 621 class cookietransportrequest(object):
621 622 """A Transport request method that retains cookies over its lifetime.
622 623
623 624 The regular xmlrpclib transports ignore cookies. Which causes
624 625 a bit of a problem when you need a cookie-based login, as with
625 626 the Bugzilla XMLRPC interface prior to 4.4.3.
626 627
627 628 So this is a helper for defining a Transport which looks for
628 629 cookies being set in responses and saves them to add to all future
629 630 requests.
630 631 """
631 632
632 633 # Inspiration drawn from
633 634 # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
634 635 # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
635 636
636 637 cookies = []
637 638 def send_cookies(self, connection):
638 639 if self.cookies:
639 640 for cookie in self.cookies:
640 641 connection.putheader("Cookie", cookie)
641 642
642 643 def request(self, host, handler, request_body, verbose=0):
643 644 self.verbose = verbose
644 645 self.accept_gzip_encoding = False
645 646
646 647 # issue XML-RPC request
647 648 h = self.make_connection(host)
648 649 if verbose:
649 650 h.set_debuglevel(1)
650 651
651 652 self.send_request(h, handler, request_body)
652 653 self.send_host(h, host)
653 654 self.send_cookies(h)
654 655 self.send_user_agent(h)
655 656 self.send_content(h, request_body)
656 657
657 658 # Deal with differences between Python 2.6 and 2.7.
658 659 # In the former h is an HTTP(S). In the latter it's an
659 660 # HTTP(S)Connection. Luckily, the 2.6 implementation of
660 661 # HTTP(S) has an underlying HTTP(S)Connection, so extract
661 662 # that and use it.
662 663 try:
663 664 response = h.getresponse()
664 665 except AttributeError:
665 666 response = h._conn.getresponse()
666 667
667 668 # Add any cookie definitions to our list.
668 669 for header in response.msg.getallmatchingheaders("Set-Cookie"):
669 670 val = header.split(": ", 1)[1]
670 671 cookie = val.split(";", 1)[0]
671 672 self.cookies.append(cookie)
672 673
673 674 if response.status != 200:
674 675 raise xmlrpclib.ProtocolError(host + handler, response.status,
675 676 response.reason, response.msg.headers)
676 677
677 678 payload = response.read()
678 679 parser, unmarshaller = self.getparser()
679 680 parser.feed(payload)
680 681 parser.close()
681 682
682 683 return unmarshaller.close()
683 684
684 685 # The explicit calls to the underlying xmlrpclib __init__() methods are
685 686 # necessary. The xmlrpclib.Transport classes are old-style classes, and
686 687 # it turns out their __init__() doesn't get called when doing multiple
687 688 # inheritance with a new-style class.
688 689 class cookietransport(cookietransportrequest, xmlrpclib.Transport):
689 690 def __init__(self, use_datetime=0):
690 691 if util.safehasattr(xmlrpclib.Transport, "__init__"):
691 692 xmlrpclib.Transport.__init__(self, use_datetime)
692 693
693 694 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
694 695 def __init__(self, use_datetime=0):
695 696 if util.safehasattr(xmlrpclib.Transport, "__init__"):
696 697 xmlrpclib.SafeTransport.__init__(self, use_datetime)
697 698
698 699 class bzxmlrpc(bzaccess):
699 700 """Support for access to Bugzilla via the Bugzilla XMLRPC API.
700 701
701 702 Requires a minimum Bugzilla version 3.4.
702 703 """
703 704
704 705 def __init__(self, ui):
705 706 bzaccess.__init__(self, ui)
706 707
707 708 bzweb = self.ui.config('bugzilla', 'bzurl')
708 709 bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
709 710
710 711 user = self.ui.config('bugzilla', 'user')
711 712 passwd = self.ui.config('bugzilla', 'password')
712 713
713 714 self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
714 715 self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
715 716
716 717 self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
717 718 ver = self.bzproxy.Bugzilla.version()['version'].split('.')
718 719 self.bzvermajor = int(ver[0])
719 720 self.bzverminor = int(ver[1])
720 721 login = self.bzproxy.User.login({'login': user, 'password': passwd,
721 722 'restrict_login': True})
722 723 self.bztoken = login.get('token', '')
723 724
724 725 def transport(self, uri):
725 726 if util.urlreq.urlparse(uri, "http")[0] == "https":
726 727 return cookiesafetransport()
727 728 else:
728 729 return cookietransport()
729 730
730 731 def get_bug_comments(self, id):
731 732 """Return a string with all comment text for a bug."""
732 733 c = self.bzproxy.Bug.comments({'ids': [id],
733 734 'include_fields': ['text'],
734 735 'token': self.bztoken})
735 736 return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
736 737
737 738 def filter_real_bug_ids(self, bugs):
738 739 probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
739 740 'include_fields': [],
740 741 'permissive': True,
741 742 'token': self.bztoken,
742 743 })
743 744 for badbug in probe['faults']:
744 745 id = badbug['id']
745 746 self.ui.status(_('bug %d does not exist\n') % id)
746 747 del bugs[id]
747 748
748 749 def filter_cset_known_bug_ids(self, node, bugs):
749 750 for id in sorted(bugs.keys()):
750 751 if self.get_bug_comments(id).find(short(node)) != -1:
751 752 self.ui.status(_('bug %d already knows about changeset %s\n') %
752 753 (id, short(node)))
753 754 del bugs[id]
754 755
755 756 def updatebug(self, bugid, newstate, text, committer):
756 757 args = {}
757 758 if 'hours' in newstate:
758 759 args['work_time'] = newstate['hours']
759 760
760 761 if self.bzvermajor >= 4:
761 762 args['ids'] = [bugid]
762 763 args['comment'] = {'body' : text}
763 764 if 'fix' in newstate:
764 765 args['status'] = self.fixstatus
765 766 args['resolution'] = self.fixresolution
766 767 args['token'] = self.bztoken
767 768 self.bzproxy.Bug.update(args)
768 769 else:
769 770 if 'fix' in newstate:
770 771 self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
771 772 "to mark bugs fixed\n"))
772 773 args['id'] = bugid
773 774 args['comment'] = text
774 775 self.bzproxy.Bug.add_comment(args)
775 776
776 777 class bzxmlrpcemail(bzxmlrpc):
777 778 """Read data from Bugzilla via XMLRPC, send updates via email.
778 779
779 780 Advantages of sending updates via email:
780 781 1. Comments can be added as any user, not just logged in user.
781 782 2. Bug statuses or other fields not accessible via XMLRPC can
782 783 potentially be updated.
783 784
784 785 There is no XMLRPC function to change bug status before Bugzilla
785 786 4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
786 787 But bugs can be marked fixed via email from 3.4 onwards.
787 788 """
788 789
789 790 # The email interface changes subtly between 3.4 and 3.6. In 3.4,
790 791 # in-email fields are specified as '@<fieldname> = <value>'. In
791 792 # 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
792 793 # in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
793 794 # compatibility, but rather than rely on this use the new format for
794 795 # 4.0 onwards.
795 796
796 797 def __init__(self, ui):
797 798 bzxmlrpc.__init__(self, ui)
798 799
799 800 self.bzemail = self.ui.config('bugzilla', 'bzemail')
800 801 if not self.bzemail:
801 802 raise error.Abort(_("configuration 'bzemail' missing"))
802 803 mail.validateconfig(self.ui)
803 804
804 805 def makecommandline(self, fieldname, value):
805 806 if self.bzvermajor >= 4:
806 807 return "@%s %s" % (fieldname, str(value))
807 808 else:
808 809 if fieldname == "id":
809 810 fieldname = "bug_id"
810 811 return "@%s = %s" % (fieldname, str(value))
811 812
812 813 def send_bug_modify_email(self, bugid, commands, comment, committer):
813 814 '''send modification message to Bugzilla bug via email.
814 815
815 816 The message format is documented in the Bugzilla email_in.pl
816 817 specification. commands is a list of command lines, comment is the
817 818 comment text.
818 819
819 820 To stop users from crafting commit comments with
820 821 Bugzilla commands, specify the bug ID via the message body, rather
821 822 than the subject line, and leave a blank line after it.
822 823 '''
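        # A minimal sketch of the resulting message body (Bugzilla 4.0+
        # command syntax; field values and bug ID are illustrative):
        #
        #   @work_time 1.5
        #   @bug_status RESOLVED
        #   @resolution FIXED
        #   @id 1234
        #
        #   changeset abcdef012345 in repo myrepo refers to bug 1234.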
823 824 user = self.map_committer(committer)
824 825 matches = self.bzproxy.User.get({'match': [user],
825 826 'token': self.bztoken})
826 827 if not matches['users']:
827 828 user = self.ui.config('bugzilla', 'user')
828 829 matches = self.bzproxy.User.get({'match': [user],
829 830 'token': self.bztoken})
830 831 if not matches['users']:
831 832 raise error.Abort(_("default bugzilla user %s email not found")
832 833 % user)
833 834 user = matches['users'][0]['email']
834 835 commands.append(self.makecommandline("id", bugid))
835 836
836 837 text = "\n".join(commands) + "\n\n" + comment
837 838
838 839 _charsets = mail._charsets(self.ui)
839 840 user = mail.addressencode(self.ui, user, _charsets)
840 841 bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
841 842 msg = mail.mimeencode(self.ui, text, _charsets)
842 843 msg['From'] = user
843 844 msg['To'] = bzemail
844 845 msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
845 846 sendmail = mail.connect(self.ui)
846 847 sendmail(user, bzemail, msg.as_string())
847 848
848 849 def updatebug(self, bugid, newstate, text, committer):
849 850 cmds = []
850 851 if 'hours' in newstate:
851 852 cmds.append(self.makecommandline("work_time", newstate['hours']))
852 853 if 'fix' in newstate:
853 854 cmds.append(self.makecommandline("bug_status", self.fixstatus))
854 855 cmds.append(self.makecommandline("resolution", self.fixresolution))
855 856 self.send_bug_modify_email(bugid, cmds, text, committer)
856 857
857 858 class NotFound(LookupError):
858 859 pass
859 860
860 861 class bzrestapi(bzaccess):
861 862 """Read and write bugzilla data using the REST API available since
862 863 Bugzilla 5.0.
863 864 """
864 865 def __init__(self, ui):
865 866 bzaccess.__init__(self, ui)
866 867 bz = self.ui.config('bugzilla', 'bzurl')
867 868 self.bzroot = '/'.join([bz, 'rest'])
868 869 self.apikey = self.ui.config('bugzilla', 'apikey')
869 870 self.user = self.ui.config('bugzilla', 'user')
870 871 self.passwd = self.ui.config('bugzilla', 'password')
871 872 self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
872 873 self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
873 874
874 875 def apiurl(self, targets, include_fields=None):
875 876 url = '/'.join([self.bzroot] + [str(t) for t in targets])
876 877 qv = {}
877 878 if self.apikey:
878 879 qv['api_key'] = self.apikey
879 880 elif self.user and self.passwd:
880 881 qv['login'] = self.user
881 882 qv['password'] = self.passwd
882 883 if include_fields:
883 884 qv['include_fields'] = include_fields
884 885 if qv:
885 886 url = '%s?%s' % (url, util.urlreq.urlencode(qv))
886 887 return url
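    # For illustration (hypothetical host and key), apiurl(('bug', 1234))
    # with an apikey configured yields something like:
    #
    #   https://bugzilla.example.org/rest/bug/1234?api_key=XXXX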
887 888
888 889 def _fetch(self, burl):
889 890 try:
890 891 resp = url.open(self.ui, burl)
891 892 return json.loads(resp.read())
892 893 except util.urlerr.httperror as inst:
893 894 if inst.code == 401:
894 895 raise error.Abort(_('authorization failed'))
895 896 if inst.code == 404:
896 897 raise NotFound()
897 898 else:
898 899 raise
899 900
900 901 def _submit(self, burl, data, method='POST'):
901 902 data = json.dumps(data)
902 903 if method == 'PUT':
903 904 class putrequest(util.urlreq.request):
904 905 def get_method(self):
905 906 return 'PUT'
906 907 request_type = putrequest
907 908 else:
908 909 request_type = util.urlreq.request
909 910 req = request_type(burl, data,
910 911 {'Content-Type': 'application/json'})
911 912 try:
912 913 resp = url.opener(self.ui).open(req)
913 914 return json.loads(resp.read())
914 915 except util.urlerr.httperror as inst:
915 916 if inst.code == 401:
916 917 raise error.Abort(_('authorization failed'))
917 918 if inst.code == 404:
918 919 raise NotFound()
919 920 else:
920 921 raise
921 922
922 923 def filter_real_bug_ids(self, bugs):
923 924 '''remove bug IDs that do not exist in Bugzilla from bugs.'''
924 925 badbugs = set()
925 926 for bugid in bugs:
926 927 burl = self.apiurl(('bug', bugid), include_fields='status')
927 928 try:
928 929 self._fetch(burl)
929 930 except NotFound:
930 931 badbugs.add(bugid)
931 932 for bugid in badbugs:
932 933 del bugs[bugid]
933 934
934 935 def filter_cset_known_bug_ids(self, node, bugs):
935 936 '''remove bug IDs where node occurs in comment text from bugs.'''
936 937 sn = short(node)
937 938 for bugid in bugs.keys():
938 939 burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text')
939 940 result = self._fetch(burl)
940 941 comments = result['bugs'][str(bugid)]['comments']
941 942 if any(sn in c['text'] for c in comments):
942 943 self.ui.status(_('bug %d already knows about changeset %s\n') %
943 944 (bugid, sn))
944 945 del bugs[bugid]
945 946
946 947 def updatebug(self, bugid, newstate, text, committer):
947 948 '''update the specified bug. Add comment text and set new states.
948 949
949 950 If possible add the comment as being from the committer of
950 951 the changeset. Otherwise use the default Bugzilla user.
951 952 '''
952 953 bugmod = {}
953 954 if 'hours' in newstate:
954 955 bugmod['work_time'] = newstate['hours']
955 956 if 'fix' in newstate:
956 957 bugmod['status'] = self.fixstatus
957 958 bugmod['resolution'] = self.fixresolution
958 959 if bugmod:
959 960 # if we have to change the bugs state do it here
960 961 bugmod['comment'] = {
961 962 'comment': text,
962 963 'is_private': False,
963 964 'is_markdown': False,
964 965 }
965 966 burl = self.apiurl(('bug', bugid))
966 967 self._submit(burl, bugmod, method='PUT')
967 968 self.ui.debug('updated bug %s\n' % bugid)
968 969 else:
969 970 burl = self.apiurl(('bug', bugid, 'comment'))
970 971 self._submit(burl, {
971 972 'comment': text,
972 973 'is_private': False,
973 974 'is_markdown': False,
974 975 })
975 976 self.ui.debug('added comment to bug %s\n' % bugid)
976 977
977 978 def notify(self, bugs, committer):
978 979 '''Force sending of Bugzilla notification emails.
979 980
980 981 Only required if the access method does not trigger notification
981 982 emails automatically.
982 983 '''
983 984 pass
984 985
985 986 class bugzilla(object):
986 987 # supported versions of bugzilla. different versions have
987 988 # different schemas.
988 989 _versions = {
989 990 '2.16': bzmysql,
990 991 '2.18': bzmysql_2_18,
991 992 '3.0': bzmysql_3_0,
992 993 'xmlrpc': bzxmlrpc,
993 994 'xmlrpc+email': bzxmlrpcemail,
994 995 'restapi': bzrestapi,
995 996 }
996 997
997 998 def __init__(self, ui, repo):
998 999 self.ui = ui
999 1000 self.repo = repo
1000 1001
1001 1002 bzversion = self.ui.config('bugzilla', 'version')
1002 1003 try:
1003 1004 bzclass = bugzilla._versions[bzversion]
1004 1005 except KeyError:
1005 1006 raise error.Abort(_('bugzilla version %s not supported') %
1006 1007 bzversion)
1007 1008 self.bzdriver = bzclass(self.ui)
1008 1009
1009 1010 self.bug_re = re.compile(
1010 1011 self.ui.config('bugzilla', 'regexp'), re.IGNORECASE)
1011 1012 self.fix_re = re.compile(
1012 1013 self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE)
1013 1014 self.split_re = re.compile(r'\D+')
1014 1015
1015 1016 def find_bugs(self, ctx):
1016 1017 '''return bugs dictionary created from commit comment.
1017 1018
1018 1019 Extract bug info from changeset comments. Filter out any that are
1019 1020 not known to Bugzilla, and any that already have a reference to
1020 1021 the given changeset in their comments.
1021 1022 '''
1022 1023 start = 0
1023 1024 hours = 0.0
1024 1025 bugs = {}
1025 1026 bugmatch = self.bug_re.search(ctx.description(), start)
1026 1027 fixmatch = self.fix_re.search(ctx.description(), start)
1027 1028 while True:
1028 1029 bugattribs = {}
1029 1030 if not bugmatch and not fixmatch:
1030 1031 break
1031 1032 if not bugmatch:
1032 1033 m = fixmatch
1033 1034 elif not fixmatch:
1034 1035 m = bugmatch
1035 1036 else:
1036 1037 if bugmatch.start() < fixmatch.start():
1037 1038 m = bugmatch
1038 1039 else:
1039 1040 m = fixmatch
1040 1041 start = m.end()
1041 1042 if m is bugmatch:
1042 1043 bugmatch = self.bug_re.search(ctx.description(), start)
1043 1044 if 'fix' in bugattribs:
1044 1045 del bugattribs['fix']
1045 1046 else:
1046 1047 fixmatch = self.fix_re.search(ctx.description(), start)
1047 1048 bugattribs['fix'] = None
1048 1049
1049 1050 try:
1050 1051 ids = m.group('ids')
1051 1052 except IndexError:
1052 1053 ids = m.group(1)
1053 1054 try:
1054 1055 hours = float(m.group('hours'))
1055 1056 bugattribs['hours'] = hours
1056 1057 except IndexError:
1057 1058 pass
1058 1059 except TypeError:
1059 1060 pass
1060 1061 except ValueError:
1061 1062 self.ui.status(_("%s: invalid hours\n") % m.group('hours'))
1062 1063
1063 1064 for id in self.split_re.split(ids):
1064 1065 if not id:
1065 1066 continue
1066 1067 bugs[int(id)] = bugattribs
1067 1068 if bugs:
1068 1069 self.bzdriver.filter_real_bug_ids(bugs)
1069 1070 if bugs:
1070 1071 self.bzdriver.filter_cset_known_bug_ids(ctx.node(), bugs)
1071 1072 return bugs
1072 1073
1073 1074 def update(self, bugid, newstate, ctx):
1074 1075 '''update bugzilla bug with reference to changeset.'''
1075 1076
1076 1077 def webroot(root):
1077 1078 '''strip leading prefix of repo root and turn into
1078 1079 url-safe path.'''
1079 1080 count = int(self.ui.config('bugzilla', 'strip'))
1080 1081 root = util.pconvert(root)
1081 1082 while count > 0:
1082 1083 c = root.find('/')
1083 1084 if c == -1:
1084 1085 break
1085 1086 root = root[c + 1:]
1086 1087 count -= 1
1087 1088 return root
1088 1089
1089 1090 mapfile = None
1090 1091 tmpl = self.ui.config('bugzilla', 'template')
1091 1092 if not tmpl:
1092 1093 mapfile = self.ui.config('bugzilla', 'style')
1093 1094 if not mapfile and not tmpl:
1094 1095 tmpl = _('changeset {node|short} in repo {root} refers '
1095 1096 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
1096 1097 spec = logcmdutil.templatespec(tmpl, mapfile)
1097 1098 t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
1098 1099 self.ui.pushbuffer()
1099 1100 t.show(ctx, changes=ctx.changeset(),
1100 1101 bug=str(bugid),
1101 1102 hgweb=self.ui.config('web', 'baseurl'),
1102 1103 root=self.repo.root,
1103 1104 webroot=webroot(self.repo.root))
1104 1105 data = self.ui.popbuffer()
1105 1106 self.bzdriver.updatebug(bugid, newstate, data,
1106 1107 stringutil.email(ctx.user()))
1107 1108
1108 1109 def notify(self, bugs, committer):
1109 1110 '''ensure Bugzilla users are notified of bug change.'''
1110 1111 self.bzdriver.notify(bugs, committer)
1111 1112
1112 1113 def hook(ui, repo, hooktype, node=None, **kwargs):
1113 1114 '''add comment to bugzilla for each changeset that refers to a
1114 1115 bugzilla bug id. only add a comment once per bug, so the same change
1115 1116 seen multiple times does not fill the bug with duplicate data.'''
1116 1117 if node is None:
1117 1118 raise error.Abort(_('hook type %s does not pass a changeset id') %
1118 1119 hooktype)
1119 1120 try:
1120 1121 bz = bugzilla(ui, repo)
1121 1122 ctx = repo[node]
1122 1123 bugs = bz.find_bugs(ctx)
1123 1124 if bugs:
1124 1125 for bug in bugs:
1125 1126 bz.update(bug, bugs[bug], ctx)
1126 1127 bz.notify(bugs, stringutil.email(ctx.user()))
1127 1128 except Exception as e:
1128 1129 raise error.Abort(_('Bugzilla error: %s') % e)
@@ -1,548 +1,551
1 1 # common.py - common code for the convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import base64
10 10 import datetime
11 11 import errno
12 12 import os
13 13 import re
14 14 import shlex
15 15 import subprocess
16 16
17 17 from mercurial.i18n import _
18 18 from mercurial import (
19 19 encoding,
20 20 error,
21 21 phases,
22 22 pycompat,
23 23 util,
24 24 )
25 from mercurial.utils import (
26 procutil,
27 )
25 28
26 29 pickle = util.pickle
27 30 propertycache = util.propertycache
28 31
29 32 def _encodeornone(d):
30 33 if d is None:
31 34 return
32 35 return d.encode('latin1')
33 36
34 37 class _shlexpy3proxy(object):
35 38
36 39 def __init__(self, l):
37 40 self._l = l
38 41
39 42 def __iter__(self):
40 43 return (_encodeornone(v) for v in self._l)
41 44
42 45 def get_token(self):
43 46 return _encodeornone(self._l.get_token())
44 47
45 48 @property
46 49 def infile(self):
47 50 return self._l.infile or '<unknown>'
48 51
49 52 @property
50 53 def lineno(self):
51 54 return self._l.lineno
52 55
53 56 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
54 57 if data is None:
55 58 if pycompat.ispy3:
56 59 data = open(filepath, 'r', encoding=r'latin1')
57 60 else:
58 61 data = open(filepath, 'r')
59 62 else:
60 63 if filepath is not None:
61 64 raise error.ProgrammingError(
62 65 'shlexer only accepts data or filepath, not both')
63 66 if pycompat.ispy3:
64 67 data = data.decode('latin1')
65 68 l = shlex.shlex(data, infile=filepath, posix=True)
66 69 if whitespace is not None:
67 70 l.whitespace_split = True
68 71 if pycompat.ispy3:
69 72 l.whitespace += whitespace.decode('latin1')
70 73 else:
71 74 l.whitespace += whitespace
72 75 if wordchars is not None:
73 76 if pycompat.ispy3:
74 77 l.wordchars += wordchars.decode('latin1')
75 78 else:
76 79 l.wordchars += wordchars
77 80 if pycompat.ispy3:
78 81 return _shlexpy3proxy(l)
79 82 return l
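# A minimal usage sketch (input is illustrative; tokens come back as byte
# strings on both Python 2 and 3):
#
# >>> lex = shlexer(data=b'a "b c" d')
# >>> lex.get_token()   # -> b'a'; the quoted "b c" comes back as one token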
80 83
81 84 def encodeargs(args):
82 85 def encodearg(s):
83 86 lines = base64.encodestring(s)
84 87 lines = [l.splitlines()[0] for l in lines]
85 88 return ''.join(lines)
86 89
87 90 s = pickle.dumps(args)
88 91 return encodearg(s)
89 92
90 93 def decodeargs(s):
91 94 s = base64.decodestring(s)
92 95 return pickle.loads(s)
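# encodeargs() strips the newlines that base64.encodestring() inserts, so
# the result is a single line; decodeargs() reverses it. A minimal
# round-trip sketch (any picklable value works):
#
# >>> decodeargs(encodeargs(['tip', 42])) == ['tip', 42]
# True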
93 96
94 97 class MissingTool(Exception):
95 98 pass
96 99
97 100 def checktool(exe, name=None, abort=True):
98 101 name = name or exe
99 if not util.findexe(exe):
102 if not procutil.findexe(exe):
100 103 if abort:
101 104 exc = error.Abort
102 105 else:
103 106 exc = MissingTool
104 107 raise exc(_('cannot find required "%s" tool') % name)
105 108
106 109 class NoRepo(Exception):
107 110 pass
108 111
109 112 SKIPREV = 'SKIP'
110 113
111 114 class commit(object):
112 115 def __init__(self, author, date, desc, parents, branch=None, rev=None,
113 116 extra=None, sortkey=None, saverev=True, phase=phases.draft,
114 117 optparents=None):
115 118 self.author = author or 'unknown'
116 119 self.date = date or '0 0'
117 120 self.desc = desc
118 121 self.parents = parents # will be converted and used as parents
119 122 self.optparents = optparents or [] # will be used if already converted
120 123 self.branch = branch
121 124 self.rev = rev
122 125 self.extra = extra or {}
123 126 self.sortkey = sortkey
124 127 self.saverev = saverev
125 128 self.phase = phase
126 129
127 130 class converter_source(object):
128 131 """Conversion source interface"""
129 132
130 133 def __init__(self, ui, repotype, path=None, revs=None):
131 134 """Initialize conversion source (or raise NoRepo("message")
132 135 exception if path is not a valid repository)"""
133 136 self.ui = ui
134 137 self.path = path
135 138 self.revs = revs
136 139 self.repotype = repotype
137 140
138 141 self.encoding = 'utf-8'
139 142
140 143 def checkhexformat(self, revstr, mapname='splicemap'):
141 144 """ fails if revstr is not a 40 byte hex. mercurial and git both uses
142 145 such format for their revision numbering
143 146 """
144 147 if not re.match(r'[0-9a-fA-F]{40}$', revstr):
145 148 raise error.Abort(_('%s entry %s is not a valid revision'
146 149 ' identifier') % (mapname, revstr))
147 150
148 151 def before(self):
149 152 pass
150 153
151 154 def after(self):
152 155 pass
153 156
154 157 def targetfilebelongstosource(self, targetfilename):
155 158 """Returns true if the given targetfile belongs to the source repo. This
156 159 is useful when only a subdirectory of the target belongs to the source
157 160 repo."""
158 161 # For normal full repo converts, this is always True.
159 162 return True
160 163
161 164 def setrevmap(self, revmap):
162 165 """set the map of already-converted revisions"""
163 166
164 167 def getheads(self):
165 168 """Return a list of this repository's heads"""
166 169 raise NotImplementedError
167 170
168 171 def getfile(self, name, rev):
169 172 """Return a pair (data, mode) where data is the file content
170 173 as a string and mode one of '', 'x' or 'l'. rev is the
171 174 identifier returned by a previous call to getchanges().
172 175 Data is None if file is missing/deleted in rev.
173 176 """
174 177 raise NotImplementedError
175 178
176 179 def getchanges(self, version, full):
177 180 """Returns a tuple of (files, copies, cleanp2).
178 181
179 182 files is a sorted list of (filename, id) tuples for all files
180 183 changed between version and its first parent returned by
181 184 getcommit(). If full, all files in that revision are returned.
182 185 id is the source revision id of the file.
183 186
184 187 copies is a dictionary of dest: source
185 188
186 189 cleanp2 is the set of filenames that are clean against p2.
187 190 (Files that are clean against p1 are already not in files (unless
188 191 full). This makes it possible to handle p2 clean files similarly.)
189 192 """
190 193 raise NotImplementedError
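        # For example (illustrative values), a revision that modified a.txt
        # and copied it to b.txt might yield:
        #
        #   files = [('a.txt', 'rev-id'), ('b.txt', 'rev-id')]
        #   copies = {'b.txt': 'a.txt'}
        #   cleanp2 = set()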
191 194
192 195 def getcommit(self, version):
193 196 """Return the commit object for version"""
194 197 raise NotImplementedError
195 198
196 199 def numcommits(self):
197 200 """Return the number of commits in this source.
198 201
199 202 If unknown, return None.
200 203 """
201 204 return None
202 205
203 206 def gettags(self):
204 207 """Return the tags as a dictionary of name: revision
205 208
206 209 Tag names must be UTF-8 strings.
207 210 """
208 211 raise NotImplementedError
209 212
210 213 def recode(self, s, encoding=None):
211 214 if not encoding:
212 215 encoding = self.encoding or 'utf-8'
213 216
214 217 if isinstance(s, unicode):
215 218 return s.encode("utf-8")
216 219 try:
217 220 return s.decode(encoding).encode("utf-8")
218 221 except UnicodeError:
219 222 try:
220 223 return s.decode("latin-1").encode("utf-8")
221 224 except UnicodeError:
222 225 return s.decode(encoding, "replace").encode("utf-8")
223 226
224 227 def getchangedfiles(self, rev, i):
225 228 """Return the files changed by rev compared to parent[i].
226 229
227 230 i is an index selecting one of the parents of rev. The return
228 231 value should be the list of files that are different in rev and
229 232 this parent.
230 233
231 234 If rev has no parents, i is None.
232 235
233 236 This function is only needed to support --filemap
234 237 """
235 238 raise NotImplementedError
236 239
237 240 def converted(self, rev, sinkrev):
238 241 '''Notify the source that a revision has been converted.'''
239 242
240 243 def hasnativeorder(self):
241 244 """Return true if this source has a meaningful, native revision
242 245 order. For instance, Mercurial revisions are stored sequentially
243 246 while there is no such global ordering with Darcs.
244 247 """
245 248 return False
246 249
247 250 def hasnativeclose(self):
248 251 """Return true if this source has ability to close branch.
249 252 """
250 253 return False
251 254
252 255 def lookuprev(self, rev):
253 256 """If rev is a meaningful revision reference in source, return
254 257 the referenced identifier in the same format used by getcommit().
255 258 return None otherwise.
256 259 """
257 260 return None
258 261
259 262 def getbookmarks(self):
260 263 """Return the bookmarks as a dictionary of name: revision
261 264
262 265 Bookmark names are to be UTF-8 strings.
263 266 """
264 267 return {}
265 268
266 269 def checkrevformat(self, revstr, mapname='splicemap'):
267 270 """revstr is a string that describes a revision in the given
268 271 source control system. Return true if revstr has correct
269 272 format.
270 273 """
271 274 return True
272 275
273 276 class converter_sink(object):
274 277 """Conversion sink (target) interface"""
275 278
276 279 def __init__(self, ui, repotype, path):
277 280 """Initialize conversion sink (or raise NoRepo("message")
278 281 exception if path is not a valid repository)
279 282
280 283 created is a list of paths to remove if a fatal error occurs
281 284 later"""
282 285 self.ui = ui
283 286 self.path = path
284 287 self.created = []
285 288 self.repotype = repotype
286 289
287 290 def revmapfile(self):
288 291 """Path to a file that will contain lines
289 292 source_rev_id sink_rev_id
290 293 mapping equivalent revision identifiers for each system."""
291 294 raise NotImplementedError
292 295
293 296 def authorfile(self):
294 297 """Path to a file that will contain lines
295 298 srcauthor=dstauthor
296 299 mapping equivalent author identifiers for each system."""
297 300 return None
298 301
299 302 def putcommit(self, files, copies, parents, commit, source, revmap, full,
300 303 cleanp2):
301 304 """Create a revision with all changed files listed in 'files'
302 305 and having listed parents. 'commit' is a commit object
303 306 containing at a minimum the author, date, and message for this
304 307 changeset. 'files' is a list of (path, version) tuples,
305 308 'copies' is a dictionary mapping destinations to sources,
306 309 'source' is the source repository, and 'revmap' is a mapfile
307 310 of source revisions to converted revisions. Only getfile() and
308 311 lookuprev() should be called on 'source'. 'full' means that 'files'
309 312 is complete and all other files should be removed.
310 313 'cleanp2' is a set of the filenames that are unchanged from p2
311 314 (only in the common merge case where there are two parents).
312 315
313 316 Note that the sink repository is not told to update itself to
314 317 a particular revision (or even what that revision would be)
315 318 before it receives the file data.
316 319 """
317 320 raise NotImplementedError
318 321
319 322 def puttags(self, tags):
320 323 """Put tags into sink.
321 324
322 325 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
323 326 Return a pair (tag_revision, tag_parent_revision), or (None, None)
324 327 if nothing was changed.
325 328 """
326 329 raise NotImplementedError
327 330
328 331 def setbranch(self, branch, pbranches):
329 332 """Set the current branch name. Called before the first putcommit
330 333 on the branch.
331 334 branch: branch name for subsequent commits
332 335 pbranches: (converted parent revision, parent branch) tuples"""
333 336
334 337 def setfilemapmode(self, active):
335 338 """Tell the destination that we're using a filemap
336 339
337 340 Some converter_sources (svn in particular) can claim that a file
338 341 was changed in a revision, even if there was no change. This method
339 342 tells the destination that we're using a filemap and that it should
340 343 filter empty revisions.
341 344 """
342 345
343 346 def before(self):
344 347 pass
345 348
346 349 def after(self):
347 350 pass
348 351
349 352 def putbookmarks(self, bookmarks):
350 353 """Put bookmarks into sink.
351 354
352 355 bookmarks: {bookmarkname: sink_rev_id, ...}
353 356 where bookmarkname is a UTF-8 string.
354 357 """
355 358
356 359 def hascommitfrommap(self, rev):
357 360 """Return False if a rev mentioned in a filemap is known to not be
358 361 present."""
359 362 raise NotImplementedError
360 363
361 364 def hascommitforsplicemap(self, rev):
362 365 """This method is for the special needs for splicemap handling and not
363 366 for general use. Returns True if the sink contains rev, aborts on some
364 367 special cases."""
365 368 raise NotImplementedError
366 369
367 370 class commandline(object):
368 371 def __init__(self, ui, command):
369 372 self.ui = ui
370 373 self.command = command
371 374
372 375 def prerun(self):
373 376 pass
374 377
375 378 def postrun(self):
376 379 pass
377 380
378 381 def _cmdline(self, cmd, *args, **kwargs):
379 382 kwargs = pycompat.byteskwargs(kwargs)
380 383 cmdline = [self.command, cmd] + list(args)
381 384 for k, v in kwargs.iteritems():
382 385 if len(k) == 1:
383 386 cmdline.append('-' + k)
384 387 else:
385 388 cmdline.append('--' + k.replace('_', '-'))
386 389 try:
387 390 if len(k) == 1:
388 391 cmdline.append('' + v)
389 392 else:
390 393 cmdline[-1] += '=' + v
391 394 except TypeError:
392 395 pass
393 cmdline = [util.shellquote(arg) for arg in cmdline]
396 cmdline = [procutil.shellquote(arg) for arg in cmdline]
394 397 if not self.ui.debugflag:
395 398 cmdline += ['2>', pycompat.bytestr(os.devnull)]
396 399 cmdline = ' '.join(cmdline)
397 400 return cmdline
398 401
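
For reference, the flag handling in _cmdline() above, restated as a standalone sketch. Key order is made deterministic here by sorting, which the real method (iterating a dict) does not guarantee; the command and values are illustrative:

    def cmdline(command, cmd, args, kwargs):
        out = [command, cmd] + list(args)
        for k, v in sorted(kwargs.items()):
            if len(k) == 1:
                out.append('-' + k)
            else:
                out.append('--' + k.replace('_', '-'))
            try:
                # a value of None raises TypeError, leaving a bare flag
                if len(k) == 1:
                    out.append('' + v)
                else:
                    out[-1] += '=' + v
            except TypeError:
                pass
        return out

    # cmdline('cvs', 'log', ['f.c'], {'r': '1.2', 'kk': None})
    #   -> ['cvs', 'log', 'f.c', '--kk', '-r', '1.2']

The real method then shellquotes every element and, unless --debug is in effect, appends a "2> /dev/null" redirection before joining the pieces into one shell string.
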
399 402 def _run(self, cmd, *args, **kwargs):
400 403 def popen(cmdline):
401 404 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
402 close_fds=util.closefds,
403 stdout=subprocess.PIPE)
405 close_fds=procutil.closefds,
406 stdout=subprocess.PIPE)
404 407 return p
405 408 return self._dorun(popen, cmd, *args, **kwargs)
406 409
407 410 def _run2(self, cmd, *args, **kwargs):
408 return self._dorun(util.popen2, cmd, *args, **kwargs)
411 return self._dorun(procutil.popen2, cmd, *args, **kwargs)
409 412
410 413 def _run3(self, cmd, *args, **kwargs):
411 return self._dorun(util.popen3, cmd, *args, **kwargs)
414 return self._dorun(procutil.popen3, cmd, *args, **kwargs)
412 415
413 416 def _dorun(self, openfunc, cmd, *args, **kwargs):
414 417 cmdline = self._cmdline(cmd, *args, **kwargs)
415 418 self.ui.debug('running: %s\n' % (cmdline,))
416 419 self.prerun()
417 420 try:
418 421 return openfunc(cmdline)
419 422 finally:
420 423 self.postrun()
421 424
422 425 def run(self, cmd, *args, **kwargs):
423 426 p = self._run(cmd, *args, **kwargs)
424 427 output = p.communicate()[0]
425 428 self.ui.debug(output)
426 429 return output, p.returncode
427 430
428 431 def runlines(self, cmd, *args, **kwargs):
429 432 p = self._run(cmd, *args, **kwargs)
430 433 output = p.stdout.readlines()
431 434 p.wait()
432 435 self.ui.debug(''.join(output))
433 436 return output, p.returncode
434 437
435 438 def checkexit(self, status, output=''):
436 439 if status:
437 440 if output:
438 441 self.ui.warn(_('%s error:\n') % self.command)
439 442 self.ui.warn(output)
440 msg = util.explainexit(status)[0]
443 msg = procutil.explainexit(status)[0]
441 444 raise error.Abort('%s %s' % (self.command, msg))
442 445
443 446 def run0(self, cmd, *args, **kwargs):
444 447 output, status = self.run(cmd, *args, **kwargs)
445 448 self.checkexit(status, output)
446 449 return output
447 450
448 451 def runlines0(self, cmd, *args, **kwargs):
449 452 output, status = self.runlines(cmd, *args, **kwargs)
450 453 self.checkexit(status, ''.join(output))
451 454 return output
452 455
453 456 @propertycache
454 457 def argmax(self):
455 458 # POSIX requires at least 4096 bytes for ARG_MAX
456 459 argmax = 4096
457 460 try:
458 461 argmax = os.sysconf("SC_ARG_MAX")
459 462 except (AttributeError, ValueError):
460 463 pass
461 464
462 465 # Windows shells impose their own limits on command line length,
463 466 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
464 467 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
465 468 # details about cmd.exe limitations.
466 469
467 470 # Since ARG_MAX is for command line _and_ environment, lower our limit
468 471 # (and keep Windows shells happy while doing this).
469 472 return argmax // 2 - 1
470 473
471 474 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
472 475 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
473 476 limit = self.argmax - cmdlen
474 477 numbytes = 0
475 478 fl = []
476 479 for fn in arglist:
477 480 b = len(fn) + 3
478 481 if numbytes + b < limit or len(fl) == 0:
479 482 fl.append(fn)
480 483 numbytes += b
481 484 else:
482 485 yield fl
483 486 fl = [fn]
484 487 numbytes = b
485 488 if fl:
486 489 yield fl
487 490
488 491 def xargs(self, arglist, cmd, *args, **kwargs):
489 492 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
490 493 self.run0(cmd, *(list(args) + l), **kwargs)
491 494
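
xargs() above runs the command in batches sized by _limit_arglist() so that each command line stays under argmax. The batching logic as a standalone generator (the "+ 3" mirrors the original's allowance for quoting and a separator):

    def limit_arglist(arglist, limit):
        """Yield chunks of arglist whose summed cost stays below limit,
        always emitting at least one item per chunk."""
        numbytes = 0
        chunk = []
        for fn in arglist:
            b = len(fn) + 3
            if numbytes + b < limit or not chunk:
                chunk.append(fn)
                numbytes += b
            else:
                yield chunk
                chunk = [fn]
                numbytes = b
        if chunk:
            yield chunk

    # list(limit_arglist(['a' * 10] * 5, 30)) yields chunks of 2, 2 and 1
    # items: each entry costs 13 bytes, and a third entry would exceed 30.
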
492 495 class mapfile(dict):
493 496 def __init__(self, ui, path):
494 497 super(mapfile, self).__init__()
495 498 self.ui = ui
496 499 self.path = path
497 500 self.fp = None
498 501 self.order = []
499 502 self._read()
500 503
501 504 def _read(self):
502 505 if not self.path:
503 506 return
504 507 try:
505 508 fp = open(self.path, 'rb')
506 509 except IOError as err:
507 510 if err.errno != errno.ENOENT:
508 511 raise
509 512 return
510 513 for i, line in enumerate(util.iterfile(fp)):
511 514 line = line.splitlines()[0].rstrip()
512 515 if not line:
513 516 # Ignore blank lines
514 517 continue
515 518 try:
516 519 key, value = line.rsplit(' ', 1)
517 520 except ValueError:
518 521 raise error.Abort(
519 522 _('syntax error in %s(%d): key/value pair expected')
520 523 % (self.path, i + 1))
521 524 if key not in self:
522 525 self.order.append(key)
523 526 super(mapfile, self).__setitem__(key, value)
524 527 fp.close()
525 528
526 529 def __setitem__(self, key, value):
527 530 if self.fp is None:
528 531 try:
529 532 self.fp = open(self.path, 'ab')
530 533 except IOError as err:
531 534 raise error.Abort(
532 535 _('could not open map file %r: %s') %
533 536 (self.path, encoding.strtolocal(err.strerror)))
534 537 self.fp.write(util.tonativeeol('%s %s\n' % (key, value)))
535 538 self.fp.flush()
536 539 super(mapfile, self).__setitem__(key, value)
537 540
538 541 def close(self):
539 542 if self.fp:
540 543 self.fp.close()
541 544 self.fp = None
542 545
543 546 def makedatetimestamp(t):
544 547 """Like dateutil.makedate() but for time t instead of current time"""
545 548 delta = (datetime.datetime.utcfromtimestamp(t) -
546 549 datetime.datetime.fromtimestamp(t))
547 550 tz = delta.days * 86400 + delta.seconds
548 551 return t, tz
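
A usage sketch for makedatetimestamp() (assuming this module is importable as hgext.convert.common; the timestamp is illustrative):

    from hgext.convert.common import makedatetimestamp

    t = 1199145600                  # 2008-01-01 00:00:00 UTC
    ts, tz = makedatetimestamp(t)
    assert ts == t
    # tz is the host's UTC offset *at time t*, in Mercurial's convention
    # of seconds west of UTC: 0 on a UTC host, -7200 on a UTC+2 host.
    # Using the offset valid at t, rather than at the current time as
    # dateutil.makedate() does, keeps conversions correct across DST
    # transitions.
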
@@ -1,298 +1,301
1 1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import errno
10 10 import os
11 11 import re
12 12 import socket
13 13
14 14 from mercurial.i18n import _
15 15 from mercurial import (
16 16 encoding,
17 17 error,
18 18 pycompat,
19 19 util,
20 20 )
21 from mercurial.utils import dateutil
21 from mercurial.utils import (
22 dateutil,
23 procutil,
24 )
22 25
23 26 from . import (
24 27 common,
25 28 cvsps,
26 29 )
27 30
28 31 stringio = util.stringio
29 32 checktool = common.checktool
30 33 commit = common.commit
31 34 converter_source = common.converter_source
32 35 makedatetimestamp = common.makedatetimestamp
33 36 NoRepo = common.NoRepo
34 37
35 38 class convert_cvs(converter_source):
36 39 def __init__(self, ui, repotype, path, revs=None):
37 40 super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
38 41
39 42 cvs = os.path.join(path, "CVS")
40 43 if not os.path.exists(cvs):
41 44 raise NoRepo(_("%s does not look like a CVS checkout") % path)
42 45
43 46 checktool('cvs')
44 47
45 48 self.changeset = None
46 49 self.files = {}
47 50 self.tags = {}
48 51 self.lastbranch = {}
49 52 self.socket = None
50 53 self.cvsroot = open(os.path.join(cvs, "Root"), 'rb').read()[:-1]
51 54 self.cvsrepo = open(os.path.join(cvs, "Repository"), 'rb').read()[:-1]
52 55 self.encoding = encoding.encoding
53 56
54 57 self._connect()
55 58
56 59 def _parse(self):
57 60 if self.changeset is not None:
58 61 return
59 62 self.changeset = {}
60 63
61 64 maxrev = 0
62 65 if self.revs:
63 66 if len(self.revs) > 1:
64 67 raise error.Abort(_('cvs source does not support specifying '
65 68 'multiple revs'))
66 69 # TODO: handle tags
67 70 try:
68 71 # patchset number?
69 72 maxrev = int(self.revs[0])
70 73 except ValueError:
71 74 raise error.Abort(_('revision %s is not a patchset number')
72 75 % self.revs[0])
73 76
74 77 d = pycompat.getcwd()
75 78 try:
76 79 os.chdir(self.path)
77 80 id = None
78 81
79 82 cache = 'update'
80 83 if not self.ui.configbool('convert', 'cvsps.cache'):
81 84 cache = None
82 85 db = cvsps.createlog(self.ui, cache=cache)
83 86 db = cvsps.createchangeset(self.ui, db,
84 87 fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
85 88 mergeto=self.ui.config('convert', 'cvsps.mergeto'),
86 89 mergefrom=self.ui.config('convert', 'cvsps.mergefrom'))
87 90
88 91 for cs in db:
89 92 if maxrev and cs.id > maxrev:
90 93 break
91 94 id = str(cs.id)
92 95 cs.author = self.recode(cs.author)
93 96 self.lastbranch[cs.branch] = id
94 97 cs.comment = self.recode(cs.comment)
95 98 if self.ui.configbool('convert', 'localtimezone'):
96 99 cs.date = makedatetimestamp(cs.date[0])
97 100 date = dateutil.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
98 101 self.tags.update(dict.fromkeys(cs.tags, id))
99 102
100 103 files = {}
101 104 for f in cs.entries:
102 105 files[f.file] = "%s%s" % ('.'.join([str(x)
103 106 for x in f.revision]),
104 107 ['', '(DEAD)'][f.dead])
105 108
106 109 # add current commit to set
107 110 c = commit(author=cs.author, date=date,
108 111 parents=[str(p.id) for p in cs.parents],
109 112 desc=cs.comment, branch=cs.branch or '')
110 113 self.changeset[id] = c
111 114 self.files[id] = files
112 115
113 116 self.heads = self.lastbranch.values()
114 117 finally:
115 118 os.chdir(d)
116 119
117 120 def _connect(self):
118 121 root = self.cvsroot
119 122 conntype = None
120 123 user, host = None, None
121 124 cmd = ['cvs', 'server']
122 125
123 126 self.ui.status(_("connecting to %s\n") % root)
124 127
125 128 if root.startswith(":pserver:"):
126 129 root = root[9:]
127 130 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
128 131 root)
129 132 if m:
130 133 conntype = "pserver"
131 134 user, passw, serv, port, root = m.groups()
132 135 if not user:
133 136 user = "anonymous"
134 137 if not port:
135 138 port = 2401
136 139 else:
137 140 port = int(port)
138 141 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
139 142 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
140 143
141 144 if not passw:
142 145 passw = "A"
143 146 cvspass = os.path.expanduser("~/.cvspass")
144 147 try:
145 148 pf = open(cvspass, 'rb')
146 149 for line in pf.read().splitlines():
147 150 part1, part2 = line.split(' ', 1)
148 151 # /1 :pserver:user@example.com:2401/cvsroot/foo
149 152 # Ah<Z
150 153 if part1 == '/1':
151 154 part1, part2 = part2.split(' ', 1)
152 155 format = format1
153 156 # :pserver:user@example.com:/cvsroot/foo Ah<Z
154 157 else:
155 158 format = format0
156 159 if part1 == format:
157 160 passw = part2
158 161 break
159 162 pf.close()
160 163 except IOError as inst:
161 164 if inst.errno != errno.ENOENT:
162 165 if not getattr(inst, 'filename', None):
163 166 inst.filename = cvspass
164 167 raise
165 168
166 169 sck = socket.socket()
167 170 sck.connect((serv, port))
168 171 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
169 172 "END AUTH REQUEST", ""]))
170 173 if sck.recv(128) != "I LOVE YOU\n":
171 174 raise error.Abort(_("CVS pserver authentication failed"))
172 175
173 176 self.writep = self.readp = sck.makefile('r+')
174 177
175 178 if not conntype and root.startswith(":local:"):
176 179 conntype = "local"
177 180 root = root[7:]
178 181
179 182 if not conntype:
180 183 # :ext:user@host/home/user/path/to/cvsroot
181 184 if root.startswith(":ext:"):
182 185 root = root[5:]
183 186 m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
184 187 # Do not take a Windows path "c:\foo\bar" for a connection string
185 188 if os.path.isdir(root) or not m:
186 189 conntype = "local"
187 190 else:
188 191 conntype = "rsh"
189 192 user, host, root = m.group(1), m.group(2), m.group(3)
190 193
191 194 if conntype != "pserver":
192 195 if conntype == "rsh":
193 196 rsh = encoding.environ.get("CVS_RSH") or "ssh"
194 197 if user:
195 198 cmd = [rsh, '-l', user, host] + cmd
196 199 else:
197 200 cmd = [rsh, host] + cmd
198 201
199 202 # popen2 does not support argument lists under Windows
200 cmd = [util.shellquote(arg) for arg in cmd]
201 cmd = util.quotecommand(' '.join(cmd))
202 self.writep, self.readp = util.popen2(cmd)
203 cmd = [procutil.shellquote(arg) for arg in cmd]
204 cmd = procutil.quotecommand(' '.join(cmd))
205 self.writep, self.readp = procutil.popen2(cmd)
203 206
204 207 self.realroot = root
205 208
206 209 self.writep.write("Root %s\n" % root)
207 210 self.writep.write("Valid-responses ok error Valid-requests Mode"
208 211 " M Mbinary E Checked-in Created Updated"
209 212 " Merged Removed\n")
210 213 self.writep.write("valid-requests\n")
211 214 self.writep.flush()
212 215 r = self.readp.readline()
213 216 if not r.startswith("Valid-requests"):
214 217 raise error.Abort(_('unexpected response from CVS server '
215 218 '(expected "Valid-requests", but got %r)')
216 219 % r)
217 220 if "UseUnchanged" in r:
218 221 self.writep.write("UseUnchanged\n")
219 222 self.writep.flush()
220 223 r = self.readp.readline()
221 224
222 225 def getheads(self):
223 226 self._parse()
224 227 return self.heads
225 228
226 229 def getfile(self, name, rev):
227 230
228 231 def chunkedread(fp, count):
229 232 # file-objects returned by socket.makefile() do not handle
230 233 # large read() requests very well.
231 234 chunksize = 65536
232 235 output = stringio()
233 236 while count > 0:
234 237 data = fp.read(min(count, chunksize))
235 238 if not data:
236 239 raise error.Abort(_("%d bytes missing from remote file")
237 240 % count)
238 241 count -= len(data)
239 242 output.write(data)
240 243 return output.getvalue()
241 244
242 245 self._parse()
243 246 if rev.endswith("(DEAD)"):
244 247 return None, None
245 248
246 249 args = ("-N -P -kk -r %s --" % rev).split()
247 250 args.append(self.cvsrepo + '/' + name)
248 251 for x in args:
249 252 self.writep.write("Argument %s\n" % x)
250 253 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
251 254 self.writep.flush()
252 255
253 256 data = ""
254 257 mode = None
255 258 while True:
256 259 line = self.readp.readline()
257 260 if line.startswith("Created ") or line.startswith("Updated "):
258 261 self.readp.readline() # path
259 262 self.readp.readline() # entries
260 263 mode = self.readp.readline()[:-1]
261 264 count = int(self.readp.readline()[:-1])
262 265 data = chunkedread(self.readp, count)
263 266 elif line.startswith(" "):
264 267 data += line[1:]
265 268 elif line.startswith("M "):
266 269 pass
267 270 elif line.startswith("Mbinary "):
268 271 count = int(self.readp.readline()[:-1])
269 272 data = chunkedread(self.readp, count)
270 273 else:
271 274 if line == "ok\n":
272 275 if mode is None:
273 276 raise error.Abort(_('malformed response from CVS'))
274 277 return (data, "x" in mode and "x" or "")
275 278 elif line.startswith("E "):
276 279 self.ui.warn(_("cvs server: %s\n") % line[2:])
277 280 elif line.startswith("Remove"):
278 281 self.readp.readline()
279 282 else:
280 283 raise error.Abort(_("unknown CVS response: %s") % line)
281 284
282 285 def getchanges(self, rev, full):
283 286 if full:
284 287 raise error.Abort(_("convert from cvs does not support --full"))
285 288 self._parse()
286 289 return sorted(self.files[rev].iteritems()), {}, set()
287 290
288 291 def getcommit(self, rev):
289 292 self._parse()
290 293 return self.changeset[rev]
291 294
292 295 def gettags(self):
293 296 self._parse()
294 297 return self.tags
295 298
296 299 def getchangedfiles(self, rev, i):
297 300 self._parse()
298 301 return sorted(self.files[rev])
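
The chunkedread() helper inside getfile() above exists because file objects returned by socket.makefile() handle very large read() requests poorly. The same loop as a self-contained sketch, with IOError standing in for Mercurial's error.Abort:

    import io

    def chunkedread(fp, count, chunksize=65536):
        """Read exactly count bytes from fp in bounded chunks."""
        output = io.BytesIO()
        while count > 0:
            data = fp.read(min(count, chunksize))
            if not data:
                raise IOError('%d bytes missing from remote file' % count)
            count -= len(data)
            output.write(data)
        return output.getvalue()

    assert chunkedread(io.BytesIO(b'abcdef'), 4) == b'abcd'
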
@@ -1,957 +1,958
1 1 # Mercurial built-in replacement for cvsps.
2 2 #
3 3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import os
10 10 import re
11 11
12 12 from mercurial.i18n import _
13 13 from mercurial import (
14 14 encoding,
15 15 error,
16 16 hook,
17 17 pycompat,
18 18 util,
19 19 )
20 20 from mercurial.utils import (
21 21 dateutil,
22 procutil,
22 23 stringutil,
23 24 )
24 25
25 26 pickle = util.pickle
26 27
27 28 class logentry(object):
28 29 '''Class logentry has the following attributes:
29 30 .author - author name as CVS knows it
30 31 .branch - name of branch this revision is on
31 32 .branches - revision tuple of branches starting at this revision
32 33 .comment - commit message
33 34 .commitid - CVS commitid or None
34 35 .date - the commit date as a (time, tz) tuple
35 36 .dead - true if file revision is dead
36 37 .file - Name of file
37 38 .lines - a tuple (+lines, -lines) or None
38 39 .parent - Previous revision of this entry
39 40 .rcs - name of file as returned from CVS
40 41 .revision - revision number as tuple
41 42 .tags - list of tags on the file
42 43 .synthetic - is this a synthetic "file ... added on ..." revision?
43 44 .mergepoint - the branch that has been merged from (if present in
44 45 rlog output) or None
45 46 .branchpoints - the branches that start at the current entry or empty
46 47 '''
47 48 def __init__(self, **entries):
48 49 self.synthetic = False
49 50 self.__dict__.update(entries)
50 51
51 52 def __repr__(self):
52 53 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
53 54 return "%s(%s)"%(type(self).__name__, ", ".join(items))
54 55
55 56 class logerror(Exception):
56 57 pass
57 58
58 59 def getrepopath(cvspath):
59 60 """Return the repository path from a CVS path.
60 61
61 62 >>> getrepopath(b'/foo/bar')
62 63 '/foo/bar'
63 64 >>> getrepopath(b'c:/foo/bar')
64 65 '/foo/bar'
65 66 >>> getrepopath(b':pserver:10/foo/bar')
66 67 '/foo/bar'
67 68 >>> getrepopath(b':pserver:10c:/foo/bar')
68 69 '/foo/bar'
69 70 >>> getrepopath(b':pserver:/foo/bar')
70 71 '/foo/bar'
71 72 >>> getrepopath(b':pserver:c:/foo/bar')
72 73 '/foo/bar'
73 74 >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
74 75 '/foo/bar'
75 76 >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
76 77 '/foo/bar'
77 78 >>> getrepopath(b'user@server/path/to/repository')
78 79 '/path/to/repository'
79 80 """
80 81 # According to CVS manual, CVS paths are expressed like:
81 82 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
82 83 #
83 84 # The CVS path is split into parts, and the position of the first
84 85 # occurrence of the '/' char after the '@' is located. The repository
85 86 # path is the rest of the string from that '/' on, including it.
86 87
87 88 parts = cvspath.split(':')
88 89 atposition = parts[-1].find('@')
89 90 start = 0
90 91
91 92 if atposition != -1:
92 93 start = atposition
93 94
94 95 repopath = parts[-1][parts[-1].find('/', start):]
95 96 return repopath
96 97
97 98 def createlog(ui, directory=None, root="", rlog=True, cache=None):
98 99 '''Collect the CVS rlog'''
99 100
100 101 # Because we store many duplicate commit log messages, reusing strings
101 102 # saves a lot of memory and pickle storage space.
102 103 _scache = {}
103 104 def scache(s):
104 105 "return a shared version of a string"
105 106 return _scache.setdefault(s, s)
106 107
107 108 ui.status(_('collecting CVS rlog\n'))
108 109
109 110 log = [] # list of logentry objects containing the CVS state
110 111
111 112 # patterns to match in CVS (r)log output, by state of use
112 113 re_00 = re.compile('RCS file: (.+)$')
113 114 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
114 115 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
115 116 re_03 = re.compile("(Cannot access.+CVSROOT)|"
116 117 "(can't create temporary directory.+)$")
117 118 re_10 = re.compile('Working file: (.+)$')
118 119 re_20 = re.compile('symbolic names:')
119 120 re_30 = re.compile('\t(.+): ([\\d.]+)$')
120 121 re_31 = re.compile('----------------------------$')
121 122 re_32 = re.compile('======================================='
122 123 '======================================$')
123 124 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
124 125 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
125 126 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
126 127 r'(\s+commitid:\s+([^;]+);)?'
127 128 r'(.*mergepoint:\s+([^;]+);)?')
128 129 re_70 = re.compile('branches: (.+);$')
129 130
130 131 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
131 132
132 133 prefix = '' # leading path to strip off what we get from CVS
133 134
134 135 if directory is None:
135 136 # Current working directory
136 137
137 138 # Get the real directory in the repository
138 139 try:
139 140 prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
140 141 directory = prefix
141 142 if prefix == ".":
142 143 prefix = ""
143 144 except IOError:
144 145 raise logerror(_('not a CVS sandbox'))
145 146
146 147 if prefix and not prefix.endswith(pycompat.ossep):
147 148 prefix += pycompat.ossep
148 149
149 150 # Use the Root file in the sandbox, if it exists
150 151 try:
151 152 root = open(os.path.join('CVS','Root'), 'rb').read().strip()
152 153 except IOError:
153 154 pass
154 155
155 156 if not root:
156 157 root = encoding.environ.get('CVSROOT', '')
157 158
158 159 # read log cache if one exists
159 160 oldlog = []
160 161 date = None
161 162
162 163 if cache:
163 164 cachedir = os.path.expanduser('~/.hg.cvsps')
164 165 if not os.path.exists(cachedir):
165 166 os.mkdir(cachedir)
166 167
167 168 # The cvsps cache pickle needs a uniquified name, based on the
168 169 # repository location. The address may have all sorts of nasties
169 170 # in it, slashes, colons and such. So here we take just the
170 171 # alphanumeric characters, concatenated in a way that does not
171 172 # mix up the various components, so that
172 173 # :pserver:user@server:/path
173 174 # and
174 175 # /pserver/user/server/path
175 176 # are mapped to different cache file names.
176 177 cachefile = root.split(":") + [directory, "cache"]
177 178 cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
178 179 cachefile = os.path.join(cachedir,
179 180 '.'.join([s for s in cachefile if s]))
180 181
181 182 if cache == 'update':
182 183 try:
183 184 ui.note(_('reading cvs log cache %s\n') % cachefile)
184 185 oldlog = pickle.load(open(cachefile, 'rb'))
185 186 for e in oldlog:
186 187 if not (util.safehasattr(e, 'branchpoints') and
187 188 util.safehasattr(e, 'commitid') and
188 189 util.safehasattr(e, 'mergepoint')):
189 190 ui.status(_('ignoring old cache\n'))
190 191 oldlog = []
191 192 break
192 193
193 194 ui.note(_('cache has %d log entries\n') % len(oldlog))
194 195 except Exception as e:
195 196 ui.note(_('error reading cache: %r\n') % e)
196 197
197 198 if oldlog:
198 199 date = oldlog[-1].date # last commit date as a (time,tz) tuple
199 200 date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
200 201
201 202 # build the CVS commandline
202 203 cmd = ['cvs', '-q']
203 204 if root:
204 205 cmd.append('-d%s' % root)
205 206 p = util.normpath(getrepopath(root))
206 207 if not p.endswith('/'):
207 208 p += '/'
208 209 if prefix:
209 210 # looks like normpath replaces "" by "."
210 211 prefix = p + util.normpath(prefix)
211 212 else:
212 213 prefix = p
213 214 cmd.append(['log', 'rlog'][rlog])
214 215 if date:
215 216 # no space between option and date string
216 217 cmd.append('-d>%s' % date)
217 218 cmd.append(directory)
218 219
219 220 # state machine begins here
220 221 tags = {} # dictionary of revisions on current file with their tags
221 222 branchmap = {} # mapping between branch names and revision numbers
222 223 rcsmap = {}
223 224 state = 0
224 225 store = False # set when a new record can be appended
225 226
226 cmd = [util.shellquote(arg) for arg in cmd]
227 cmd = [procutil.shellquote(arg) for arg in cmd]
227 228 ui.note(_("running %s\n") % (' '.join(cmd)))
228 229 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
229 230
230 pfp = util.popen(' '.join(cmd))
231 pfp = procutil.popen(' '.join(cmd))
231 232 peek = pfp.readline()
232 233 while True:
233 234 line = peek
234 235 if line == '':
235 236 break
236 237 peek = pfp.readline()
237 238 if line.endswith('\n'):
238 239 line = line[:-1]
239 240 #ui.debug('state=%d line=%r\n' % (state, line))
240 241
241 242 if state == 0:
242 243 # initial state, consume input until we see 'RCS file'
243 244 match = re_00.match(line)
244 245 if match:
245 246 rcs = match.group(1)
246 247 tags = {}
247 248 if rlog:
248 249 filename = util.normpath(rcs[:-2])
249 250 if filename.startswith(prefix):
250 251 filename = filename[len(prefix):]
251 252 if filename.startswith('/'):
252 253 filename = filename[1:]
253 254 if filename.startswith('Attic/'):
254 255 filename = filename[6:]
255 256 else:
256 257 filename = filename.replace('/Attic/', '/')
257 258 state = 2
258 259 continue
259 260 state = 1
260 261 continue
261 262 match = re_01.match(line)
262 263 if match:
263 264 raise logerror(match.group(1))
264 265 match = re_02.match(line)
265 266 if match:
266 267 raise logerror(match.group(2))
267 268 if re_03.match(line):
268 269 raise logerror(line)
269 270
270 271 elif state == 1:
271 272 # expect 'Working file' (only when using log instead of rlog)
272 273 match = re_10.match(line)
273 274 assert match, _('RCS file must be followed by working file')
274 275 filename = util.normpath(match.group(1))
275 276 state = 2
276 277
277 278 elif state == 2:
278 279 # expect 'symbolic names'
279 280 if re_20.match(line):
280 281 branchmap = {}
281 282 state = 3
282 283
283 284 elif state == 3:
284 285 # read the symbolic names and store as tags
285 286 match = re_30.match(line)
286 287 if match:
287 288 rev = [int(x) for x in match.group(2).split('.')]
288 289
289 290 # Convert magic branch number to an odd-numbered one
290 291 revn = len(rev)
291 292 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
292 293 rev = rev[:-2] + rev[-1:]
293 294 rev = tuple(rev)
294 295
295 296 if rev not in tags:
296 297 tags[rev] = []
297 298 tags[rev].append(match.group(1))
298 299 branchmap[match.group(1)] = match.group(2)
299 300
300 301 elif re_31.match(line):
301 302 state = 5
302 303 elif re_32.match(line):
303 304 state = 0
304 305
305 306 elif state == 4:
306 307 # expecting '------' separator before first revision
307 308 if re_31.match(line):
308 309 state = 5
309 310 else:
310 311 assert not re_32.match(line), _('must have at least '
311 312 'some revisions')
312 313
313 314 elif state == 5:
314 315 # expecting revision number and possibly (ignored) lock indication
315 316 # we create the logentry here from values stored in states 0 to 4,
316 317 # as this state is re-entered for subsequent revisions of a file.
317 318 match = re_50.match(line)
318 319 assert match, _('expected revision number')
319 320 e = logentry(rcs=scache(rcs),
320 321 file=scache(filename),
321 322 revision=tuple([int(x) for x in
322 323 match.group(1).split('.')]),
323 324 branches=[],
324 325 parent=None,
325 326 commitid=None,
326 327 mergepoint=None,
327 328 branchpoints=set())
328 329
329 330 state = 6
330 331
331 332 elif state == 6:
332 333 # expecting date, author, state, lines changed
333 334 match = re_60.match(line)
334 335 assert match, _('revision must be followed by date line')
335 336 d = match.group(1)
336 337 if d[2] == '/':
337 338 # Y2K
338 339 d = '19' + d
339 340
340 341 if len(d.split()) != 3:
341 342 # cvs log dates always in GMT
342 343 d = d + ' UTC'
343 344 e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
344 345 '%Y/%m/%d %H:%M:%S',
345 346 '%Y-%m-%d %H:%M:%S'])
346 347 e.author = scache(match.group(2))
347 348 e.dead = match.group(3).lower() == 'dead'
348 349
349 350 if match.group(5):
350 351 if match.group(6):
351 352 e.lines = (int(match.group(5)), int(match.group(6)))
352 353 else:
353 354 e.lines = (int(match.group(5)), 0)
354 355 elif match.group(6):
355 356 e.lines = (0, int(match.group(6)))
356 357 else:
357 358 e.lines = None
358 359
359 360 if match.group(7): # cvs 1.12 commitid
360 361 e.commitid = match.group(8)
361 362
362 363 if match.group(9): # cvsnt mergepoint
363 364 myrev = match.group(10).split('.')
364 365 if len(myrev) == 2: # head
365 366 e.mergepoint = 'HEAD'
366 367 else:
367 368 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
368 369 branches = [b for b in branchmap if branchmap[b] == myrev]
369 370 assert len(branches) == 1, ('unknown branch: %s'
370 371 % e.mergepoint)
371 372 e.mergepoint = branches[0]
372 373
373 374 e.comment = []
374 375 state = 7
375 376
376 377 elif state == 7:
377 378 # read the revision numbers of branches that start at this revision
378 379 # or store the commit log message otherwise
379 380 m = re_70.match(line)
380 381 if m:
381 382 e.branches = [tuple([int(y) for y in x.strip().split('.')])
382 383 for x in m.group(1).split(';')]
383 384 state = 8
384 385 elif re_31.match(line) and re_50.match(peek):
385 386 state = 5
386 387 store = True
387 388 elif re_32.match(line):
388 389 state = 0
389 390 store = True
390 391 else:
391 392 e.comment.append(line)
392 393
393 394 elif state == 8:
394 395 # store commit log message
395 396 if re_31.match(line):
396 397 cpeek = peek
397 398 if cpeek.endswith('\n'):
398 399 cpeek = cpeek[:-1]
399 400 if re_50.match(cpeek):
400 401 state = 5
401 402 store = True
402 403 else:
403 404 e.comment.append(line)
404 405 elif re_32.match(line):
405 406 state = 0
406 407 store = True
407 408 else:
408 409 e.comment.append(line)
409 410
410 411 # When a file is added on a branch B1, CVS creates a synthetic
411 412 # dead trunk revision 1.1 so that the branch has a root.
412 413 # Likewise, if you merge such a file to a later branch B2 (one
413 414 # that already existed when the file was added on B1), CVS
414 415 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
415 416 # these revisions now, but mark them synthetic so
416 417 # createchangeset() can take care of them.
417 418 if (store and
418 419 e.dead and
419 420 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
420 421 len(e.comment) == 1 and
421 422 file_added_re.match(e.comment[0])):
422 423 ui.debug('found synthetic revision in %s: %r\n'
423 424 % (e.rcs, e.comment[0]))
424 425 e.synthetic = True
425 426
426 427 if store:
427 428 # clean up the results and save in the log.
428 429 store = False
429 430 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
430 431 e.comment = scache('\n'.join(e.comment))
431 432
432 433 revn = len(e.revision)
433 434 if revn > 3 and (revn % 2) == 0:
434 435 e.branch = tags.get(e.revision[:-1], [None])[0]
435 436 else:
436 437 e.branch = None
437 438
438 439 # find the branches starting from this revision
439 440 branchpoints = set()
440 441 for branch, revision in branchmap.iteritems():
441 442 revparts = tuple([int(i) for i in revision.split('.')])
442 443 if len(revparts) < 2: # bad tags
443 444 continue
444 445 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
445 446 # normal branch
446 447 if revparts[:-2] == e.revision:
447 448 branchpoints.add(branch)
448 449 elif revparts == (1, 1, 1): # vendor branch
449 450 if revparts in e.branches:
450 451 branchpoints.add(branch)
451 452 e.branchpoints = branchpoints
452 453
453 454 log.append(e)
454 455
455 456 rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
456 457
457 458 if len(log) % 100 == 0:
458 459 ui.status(stringutil.ellipsis('%d %s' % (len(log), e.file), 80)
459 460 + '\n')
460 461
461 462 log.sort(key=lambda x: (x.rcs, x.revision))
462 463
463 464 # find parent revisions of individual files
464 465 versions = {}
465 466 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
466 467 rcs = e.rcs.replace('/Attic/', '/')
467 468 if rcs in rcsmap:
468 469 e.rcs = rcsmap[rcs]
469 470 branch = e.revision[:-1]
470 471 versions[(e.rcs, branch)] = e.revision
471 472
472 473 for e in log:
473 474 branch = e.revision[:-1]
474 475 p = versions.get((e.rcs, branch), None)
475 476 if p is None:
476 477 p = e.revision[:-2]
477 478 e.parent = p
478 479 versions[(e.rcs, branch)] = e.revision
479 480
480 481 # update the log cache
481 482 if cache:
482 483 if log:
483 484 # join up the old and new logs
484 485 log.sort(key=lambda x: x.date)
485 486
486 487 if oldlog and oldlog[-1].date >= log[0].date:
487 488 raise logerror(_('log cache overlaps with new log entries,'
488 489 ' re-run without cache.'))
489 490
490 491 log = oldlog + log
491 492
492 493 # write the new cachefile
493 494 ui.note(_('writing cvs log cache %s\n') % cachefile)
494 495 pickle.dump(log, open(cachefile, 'wb'))
495 496 else:
496 497 log = oldlog
497 498
498 499 ui.status(_('%d log entries\n') % len(log))
499 500
500 501 encodings = ui.configlist('convert', 'cvsps.logencoding')
501 502 if encodings:
502 503 def revstr(r):
503 504 # this is needed, because logentry.revision is a tuple of "int"
504 505 # (e.g. (1, 2) for "1.2")
505 506 return '.'.join(pycompat.maplist(pycompat.bytestr, r))
506 507
507 508 for entry in log:
508 509 comment = entry.comment
509 510 for e in encodings:
510 511 try:
511 512 entry.comment = comment.decode(e).encode('utf-8')
512 513 if ui.debugflag:
513 514 ui.debug("transcoding by %s: %s of %s\n" %
514 515 (e, revstr(entry.revision), entry.file))
515 516 break
516 517 except UnicodeDecodeError:
517 518 pass # try next encoding
518 519 except LookupError as inst: # unknown encoding, maybe
519 520 raise error.Abort(inst,
520 521 hint=_('check convert.cvsps.logencoding'
521 522 ' configuration'))
522 523 else:
523 524 raise error.Abort(_("no encoding can transcode"
524 525 " CVS log message for %s of %s")
525 526 % (revstr(entry.revision), entry.file),
526 527 hint=_('check convert.cvsps.logencoding'
527 528 ' configuration'))
528 529
529 530 hook.hook(ui, None, "cvslog", True, log=log)
530 531
531 532 return log
532 533
533 534
534 535 class changeset(object):
535 536 '''Class changeset has the following attributes:
536 537 .id - integer identifying this changeset (list index)
537 538 .author - author name as CVS knows it
538 539 .branch - name of branch this changeset is on, or None
539 540 .comment - commit message
540 541 .commitid - CVS commitid or None
541 542 .date - the commit date as a (time,tz) tuple
542 543 .entries - list of logentry objects in this changeset
543 544 .parents - list of one or two parent changesets
544 545 .tags - list of tags on this changeset
545 546 .synthetic - from synthetic revision "file ... added on branch ..."
546 547 .mergepoint - the branch that has been merged from or None
547 548 .branchpoints - the branches that start at the current entry or empty
548 549 '''
549 550 def __init__(self, **entries):
550 551 self.id = None
551 552 self.synthetic = False
552 553 self.__dict__.update(entries)
553 554
554 555 def __repr__(self):
555 556 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
556 557 return "%s(%s)"%(type(self).__name__, ", ".join(items))
557 558
558 559 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
559 560 '''Convert log into changesets.'''
560 561
561 562 ui.status(_('creating changesets\n'))
562 563
563 564 # try to order commitids by date
564 565 mindate = {}
565 566 for e in log:
566 567 if e.commitid:
567 568 mindate[e.commitid] = min(e.date, mindate.get(e.commitid))
568 569
569 570 # Merge changesets
570 571 log.sort(key=lambda x: (mindate.get(x.commitid), x.commitid, x.comment,
571 572 x.author, x.branch, x.date, x.branchpoints))
572 573
573 574 changesets = []
574 575 files = set()
575 576 c = None
576 577 for i, e in enumerate(log):
577 578
578 579 # Check if log entry belongs to the current changeset or not.
579 580
580 581 # Since CVS is file-centric, two different file revisions with
581 582 # different branchpoints should be treated as belonging to two
582 583 # different changesets (and the ordering is important and not
583 584 # honoured by cvsps at this point).
584 585 #
585 586 # Consider the following case:
586 587 # foo 1.1 branchpoints: [MYBRANCH]
587 588 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
588 589 #
589 590 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
590 591 # later version of foo may be in MYBRANCH2, so foo should be the
591 592 # first changeset and bar the next and MYBRANCH and MYBRANCH2
592 593 # should both start off of the bar changeset. No provisions are
593 594 # made to ensure that this is, in fact, what happens.
594 595 if not (c and e.branchpoints == c.branchpoints and
595 596 (# cvs commitids
596 597 (e.commitid is not None and e.commitid == c.commitid) or
597 598 (# no commitids, use fuzzy commit detection
598 599 (e.commitid is None or c.commitid is None) and
599 600 e.comment == c.comment and
600 601 e.author == c.author and
601 602 e.branch == c.branch and
602 603 ((c.date[0] + c.date[1]) <=
603 604 (e.date[0] + e.date[1]) <=
604 605 (c.date[0] + c.date[1]) + fuzz) and
605 606 e.file not in files))):
606 607 c = changeset(comment=e.comment, author=e.author,
607 608 branch=e.branch, date=e.date,
608 609 entries=[], mergepoint=e.mergepoint,
609 610 branchpoints=e.branchpoints, commitid=e.commitid)
610 611 changesets.append(c)
611 612
612 613 files = set()
613 614 if len(changesets) % 100 == 0:
614 615 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
615 616 ui.status(stringutil.ellipsis(t, 80) + '\n')
616 617
617 618 c.entries.append(e)
618 619 files.add(e.file)
619 620 c.date = e.date # changeset date is date of latest commit in it
620 621
621 622 # Mark synthetic changesets
622 623
623 624 for c in changesets:
624 625 # Synthetic revisions always get their own changeset, because
625 626 # the log message includes the filename. E.g. if you add file3
626 627 # and file4 on a branch, you get four log entries and three
627 628 # changesets:
628 629 # "File file3 was added on branch ..." (synthetic, 1 entry)
629 630 # "File file4 was added on branch ..." (synthetic, 1 entry)
630 631 # "Add file3 and file4 to fix ..." (real, 2 entries)
631 632 # Hence the check for 1 entry here.
632 633 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
633 634
634 635 # Sort files in each changeset
635 636
636 637 def entitycompare(l, r):
637 638 'Mimic cvsps sorting order'
638 639 l = l.file.split('/')
639 640 r = r.file.split('/')
640 641 nl = len(l)
641 642 nr = len(r)
642 643 n = min(nl, nr)
643 644 for i in range(n):
644 645 if i + 1 == nl and nl < nr:
645 646 return -1
646 647 elif i + 1 == nr and nl > nr:
647 648 return +1
648 649 elif l[i] < r[i]:
649 650 return -1
650 651 elif l[i] > r[i]:
651 652 return +1
652 653 return 0
653 654
654 655 for c in changesets:
655 656 c.entries.sort(entitycompare)
656 657
657 658 # Sort changesets by date
658 659
659 660 odd = set()
660 661 def cscmp(l, r):
661 662 d = sum(l.date) - sum(r.date)
662 663 if d:
663 664 return d
664 665
665 666 # detect vendor branches and initial commits on a branch
666 667 le = {}
667 668 for e in l.entries:
668 669 le[e.rcs] = e.revision
669 670 re = {}
670 671 for e in r.entries:
671 672 re[e.rcs] = e.revision
672 673
673 674 d = 0
674 675 for e in l.entries:
675 676 if re.get(e.rcs, None) == e.parent:
676 677 assert not d
677 678 d = 1
678 679 break
679 680
680 681 for e in r.entries:
681 682 if le.get(e.rcs, None) == e.parent:
682 683 if d:
683 684 odd.add((l, r))
684 685 d = -1
685 686 break
686 687 # By this point, the changesets are sufficiently compared that
687 688 # we don't really care about ordering. However, this leaves
688 689 # some race conditions in the tests, so we compare on the
689 690 # number of files modified, the files contained in each
690 691 # changeset, and the branchpoints in the change to ensure test
691 692 # output remains stable.
692 693
693 694 # recommended replacement for cmp from
694 695 # https://docs.python.org/3.0/whatsnew/3.0.html
695 696 c = lambda x, y: (x > y) - (x < y)
696 697 # Sort bigger changes first.
697 698 if not d:
698 699 d = c(len(l.entries), len(r.entries))
699 700 # Try sorting by filename in the change.
700 701 if not d:
701 702 d = c([e.file for e in l.entries], [e.file for e in r.entries])
702 703 # Try and put changes without a branch point before ones with
703 704 # a branch point.
704 705 if not d:
705 706 d = c(len(l.branchpoints), len(r.branchpoints))
706 707 return d
707 708
708 709 changesets.sort(cscmp)
709 710
710 711 # Collect tags
711 712
712 713 globaltags = {}
713 714 for c in changesets:
714 715 for e in c.entries:
715 716 for tag in e.tags:
716 717 # remember which is the latest changeset to have this tag
717 718 globaltags[tag] = c
718 719
719 720 for c in changesets:
720 721 tags = set()
721 722 for e in c.entries:
722 723 tags.update(e.tags)
723 724 # remember tags only if this is the latest changeset to have it
724 725 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
725 726
726 727 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
727 728 # by inserting dummy changesets with two parents, and handle
728 729 # {{mergefrombranch BRANCHNAME}} by setting two parents.
729 730
730 731 if mergeto is None:
731 732 mergeto = r'{{mergetobranch ([-\w]+)}}'
732 733 if mergeto:
733 734 mergeto = re.compile(mergeto)
734 735
735 736 if mergefrom is None:
736 737 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
737 738 if mergefrom:
738 739 mergefrom = re.compile(mergefrom)
739 740
740 741 versions = {} # changeset index where we saw any particular file version
741 742 branches = {} # changeset index where we saw a branch
742 743 n = len(changesets)
743 744 i = 0
744 745 while i < n:
745 746 c = changesets[i]
746 747
747 748 for f in c.entries:
748 749 versions[(f.rcs, f.revision)] = i
749 750
750 751 p = None
751 752 if c.branch in branches:
752 753 p = branches[c.branch]
753 754 else:
754 755 # first changeset on a new branch
755 756 # the parent is a changeset with the branch in its
756 757 # branchpoints such that it is the latest possible
757 758 # commit without any intervening, unrelated commits.
758 759
759 760 for candidate in xrange(i):
760 761 if c.branch not in changesets[candidate].branchpoints:
761 762 if p is not None:
762 763 break
763 764 continue
764 765 p = candidate
765 766
766 767 c.parents = []
767 768 if p is not None:
768 769 p = changesets[p]
769 770
770 771 # Ensure no changeset has a synthetic changeset as a parent.
771 772 while p.synthetic:
772 773 assert len(p.parents) <= 1, \
773 774 _('synthetic changeset cannot have multiple parents')
774 775 if p.parents:
775 776 p = p.parents[0]
776 777 else:
777 778 p = None
778 779 break
779 780
780 781 if p is not None:
781 782 c.parents.append(p)
782 783
783 784 if c.mergepoint:
784 785 if c.mergepoint == 'HEAD':
785 786 c.mergepoint = None
786 787 c.parents.append(changesets[branches[c.mergepoint]])
787 788
788 789 if mergefrom:
789 790 m = mergefrom.search(c.comment)
790 791 if m:
791 792 m = m.group(1)
792 793 if m == 'HEAD':
793 794 m = None
794 795 try:
795 796 candidate = changesets[branches[m]]
796 797 except KeyError:
797 798 ui.warn(_("warning: CVS commit message references "
798 799 "non-existent branch %r:\n%s\n")
799 800 % (m, c.comment))
800 801 if m in branches and c.branch != m and not candidate.synthetic:
801 802 c.parents.append(candidate)
802 803
803 804 if mergeto:
804 805 m = mergeto.search(c.comment)
805 806 if m:
806 807 if m.groups():
807 808 m = m.group(1)
808 809 if m == 'HEAD':
809 810 m = None
810 811 else:
811 812 m = None # if no group found then merge to HEAD
812 813 if m in branches and c.branch != m:
813 814 # insert empty changeset for merge
814 815 cc = changeset(
815 816 author=c.author, branch=m, date=c.date,
816 817 comment='convert-repo: CVS merge from branch %s'
817 818 % c.branch,
818 819 entries=[], tags=[],
819 820 parents=[changesets[branches[m]], c])
820 821 changesets.insert(i + 1, cc)
821 822 branches[m] = i + 1
822 823
823 824 # adjust our loop counters now we have inserted a new entry
824 825 n += 1
825 826 i += 2
826 827 continue
827 828
828 829 branches[c.branch] = i
829 830 i += 1
830 831
831 832 # Drop synthetic changesets (safe now that we have ensured no other
832 833 # changesets can have them as parents).
833 834 i = 0
834 835 while i < len(changesets):
835 836 if changesets[i].synthetic:
836 837 del changesets[i]
837 838 else:
838 839 i += 1
839 840
840 841 # Number changesets
841 842
842 843 for i, c in enumerate(changesets):
843 844 c.id = i + 1
844 845
845 846 if odd:
846 847 for l, r in odd:
847 848 if l.id is not None and r.id is not None:
848 849 ui.warn(_('changeset %d is both before and after %d\n')
849 850 % (l.id, r.id))
850 851
851 852 ui.status(_('%d changeset entries\n') % len(changesets))
852 853
853 854 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
854 855
855 856 return changesets
856 857
857 858
858 859 def debugcvsps(ui, *args, **opts):
859 860 '''Read CVS rlog for the current directory or named path in
860 861 repository, and convert the log to changesets based on matching
861 862 commit log entries and dates.
862 863 '''
863 864 opts = pycompat.byteskwargs(opts)
864 865 if opts["new_cache"]:
865 866 cache = "write"
866 867 elif opts["update_cache"]:
867 868 cache = "update"
868 869 else:
869 870 cache = None
870 871
871 872 revisions = opts["revisions"]
872 873
873 874 try:
874 875 if args:
875 876 log = []
876 877 for d in args:
877 878 log += createlog(ui, d, root=opts["root"], cache=cache)
878 879 else:
879 880 log = createlog(ui, root=opts["root"], cache=cache)
880 881 except logerror as e:
881 882 ui.write("%r\n"%e)
882 883 return
883 884
884 885 changesets = createchangeset(ui, log, opts["fuzz"])
885 886 del log
886 887
887 888 # Print changesets (optionally filtered)
888 889
889 890 off = len(revisions)
890 891 branches = {} # latest version number in each branch
891 892 ancestors = {} # parent branch
892 893 for cs in changesets:
893 894
894 895 if opts["ancestors"]:
895 896 if cs.branch not in branches and cs.parents and cs.parents[0].id:
896 897 ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
897 898 cs.parents[0].id)
898 899 branches[cs.branch] = cs.id
899 900
900 901 # limit by branches
901 902 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
902 903 continue
903 904
904 905 if not off:
905 906 # Note: trailing spaces on several lines here are needed to have
906 907 # bug-for-bug compatibility with cvsps.
907 908 ui.write('---------------------\n')
908 909 ui.write(('PatchSet %d \n' % cs.id))
909 910 ui.write(('Date: %s\n' % dateutil.datestr(cs.date,
910 911 '%Y/%m/%d %H:%M:%S %1%2')))
911 912 ui.write(('Author: %s\n' % cs.author))
912 913 ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
913 914 ui.write(('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
914 915 ','.join(cs.tags) or '(none)')))
915 916 if cs.branchpoints:
916 917 ui.write(('Branchpoints: %s \n') %
917 918 ', '.join(sorted(cs.branchpoints)))
918 919 if opts["parents"] and cs.parents:
919 920 if len(cs.parents) > 1:
920 921 ui.write(('Parents: %s\n' %
921 922 (','.join([str(p.id) for p in cs.parents]))))
922 923 else:
923 924 ui.write(('Parent: %d\n' % cs.parents[0].id))
924 925
925 926 if opts["ancestors"]:
926 927 b = cs.branch
927 928 r = []
928 929 while b:
929 930 b, c = ancestors[b]
930 931 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
931 932 if r:
932 933 ui.write(('Ancestors: %s\n' % (','.join(r))))
933 934
934 935 ui.write(('Log:\n'))
935 936 ui.write('%s\n\n' % cs.comment)
936 937 ui.write(('Members: \n'))
937 938 for f in cs.entries:
938 939 fn = f.file
939 940 if fn.startswith(opts["prefix"]):
940 941 fn = fn[len(opts["prefix"]):]
941 942 ui.write('\t%s:%s->%s%s \n' % (
942 943 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
943 944 '.'.join([str(x) for x in f.revision]),
944 945 ['', '(DEAD)'][f.dead]))
945 946 ui.write('\n')
946 947
947 948 # have we seen the start tag?
948 949 if revisions and off:
949 950 if revisions[0] == str(cs.id) or \
950 951 revisions[0] in cs.tags:
951 952 off = False
952 953
953 954 # see if we reached the end tag
954 955 if len(revisions) > 1 and not off:
955 956 if revisions[1] == str(cs.id) or \
956 957 revisions[1] in cs.tags:
957 958 break
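
Restated as a predicate, the grouping rule createchangeset() applies while walking the sorted log: entries sharing a CVS commitid always coalesce, and entries without commitids coalesce only when branchpoints, comment, author and branch all match, the dates fall within the fuzz window, and the file is not already present. A sketch, with attribute names as in the logentry and changeset classes above, and files being the set of filenames already folded into c:

    def samechangeset(e, c, files, fuzz=60):
        """Does log entry e belong to the changeset c is accumulating?"""
        if c is None or e.branchpoints != c.branchpoints:
            return False
        if e.commitid is not None and e.commitid == c.commitid:
            return True
        return ((e.commitid is None or c.commitid is None) and
                e.comment == c.comment and
                e.author == c.author and
                e.branch == c.branch and
                (c.date[0] + c.date[1]) <=
                (e.date[0] + e.date[1]) <=
                (c.date[0] + c.date[1]) + fuzz and
                e.file not in files)
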
@@ -1,353 +1,355
1 1 # gnuarch.py - GNU Arch support for the convert extension
2 2 #
3 3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 4 # and others
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 from __future__ import absolute_import
9 9
10 10 import email.parser as emailparser
11 11 import os
12 12 import shutil
13 13 import stat
14 14 import tempfile
15 15
16 16 from mercurial.i18n import _
17 17 from mercurial import (
18 18 encoding,
19 19 error,
20 util,
21 20 )
22 from mercurial.utils import dateutil
21 from mercurial.utils import (
22 dateutil,
23 procutil,
24 )
23 25 from . import common
24 26
25 27 class gnuarch_source(common.converter_source, common.commandline):
26 28
27 29 class gnuarch_rev(object):
28 30 def __init__(self, rev):
29 31 self.rev = rev
30 32 self.summary = ''
31 33 self.date = None
32 34 self.author = ''
33 35 self.continuationof = None
34 36 self.add_files = []
35 37 self.mod_files = []
36 38 self.del_files = []
37 39 self.ren_files = {}
38 40 self.ren_dirs = {}
39 41
40 42 def __init__(self, ui, repotype, path, revs=None):
41 43 super(gnuarch_source, self).__init__(ui, repotype, path, revs=revs)
42 44
43 45 if not os.path.exists(os.path.join(path, '{arch}')):
44 46 raise common.NoRepo(_("%s does not look like a GNU Arch repository")
45 47 % path)
46 48
47 49 # Could use checktool, but we want to check for baz or tla.
48 50 self.execmd = None
49 if util.findexe('baz'):
51 if procutil.findexe('baz'):
50 52 self.execmd = 'baz'
51 53 else:
52 if util.findexe('tla'):
54 if procutil.findexe('tla'):
53 55 self.execmd = 'tla'
54 56 else:
55 57 raise error.Abort(_('cannot find a GNU Arch tool'))
56 58
57 59 common.commandline.__init__(self, ui, self.execmd)
58 60
59 61 self.path = os.path.realpath(path)
60 62 self.tmppath = None
61 63
62 64 self.treeversion = None
63 65 self.lastrev = None
64 66 self.changes = {}
65 67 self.parents = {}
66 68 self.tags = {}
67 69 self.catlogparser = emailparser.Parser()
68 70 self.encoding = encoding.encoding
69 71 self.archives = []
70 72
71 73 def before(self):
72 74 # Get registered archives
73 75 self.archives = [i.rstrip('\n')
74 76 for i in self.runlines0('archives', '-n')]
75 77
76 78 if self.execmd == 'tla':
77 79 output = self.run0('tree-version', self.path)
78 80 else:
79 81 output = self.run0('tree-version', '-d', self.path)
80 82 self.treeversion = output.strip()
81 83
82 84 # Get name of temporary directory
83 85 version = self.treeversion.split('/')
84 86 self.tmppath = os.path.join(tempfile.gettempdir(),
85 87 'hg-%s' % version[1])
86 88
87 89 # Generate parents dictionary
88 90 self.parents[None] = []
89 91 treeversion = self.treeversion
90 92 child = None
91 93 while treeversion:
92 94 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
93 95
94 96 archive = treeversion.split('/')[0]
95 97 if archive not in self.archives:
96 98 self.ui.status(_('tree analysis stopped because it points to '
97 99 'an unregistered archive %s...\n') % archive)
98 100 break
99 101
100 102 # Get the complete list of revisions for that tree version
101 103 output, status = self.runlines('revisions', '-r', '-f', treeversion)
102 104 self.checkexit(status, 'failed retrieving revisions for %s'
103 105 % treeversion)
104 106
105 107 # No new iteration unless a revision has a continuation-of header
106 108 treeversion = None
107 109
108 110 for l in output:
109 111 rev = l.strip()
110 112 self.changes[rev] = self.gnuarch_rev(rev)
111 113 self.parents[rev] = []
112 114
113 115 # Read author, date and summary
114 116 catlog, status = self.run('cat-log', '-d', self.path, rev)
115 117 if status:
116 118 catlog = self.run0('cat-archive-log', rev)
117 119 self._parsecatlog(catlog, rev)
118 120
119 121 # Populate the parents map
120 122 self.parents[child].append(rev)
121 123
122 124 # Keep track of the current revision as the child of the next
123 125 # revision scanned
124 126 child = rev
125 127
126 128 # Check if we have to follow the usual incremental history
127 129 # or if we have to 'jump' to a different treeversion given
128 130 # by the continuation-of header.
129 131 if self.changes[rev].continuationof:
130 132 treeversion = '--'.join(
131 133 self.changes[rev].continuationof.split('--')[:-1])
132 134 break
133 135
134 136 # If we reached a base-0 revision w/o any continuation-of
135 137 # header, it means the tree history ends here.
136 138 if rev[-6:] == 'base-0':
137 139 break
138 140
139 141 def after(self):
140 142 self.ui.debug('cleaning up %s\n' % self.tmppath)
141 143 shutil.rmtree(self.tmppath, ignore_errors=True)
142 144
143 145 def getheads(self):
144 146 return self.parents[None]
145 147
146 148 def getfile(self, name, rev):
147 149 if rev != self.lastrev:
148 150 raise error.Abort(_('internal calling inconsistency'))
149 151
150 152 if not os.path.lexists(os.path.join(self.tmppath, name)):
151 153 return None, None
152 154
153 155 return self._getfile(name, rev)
154 156
155 157 def getchanges(self, rev, full):
156 158 if full:
157 159 raise error.Abort(_("convert from arch does not support --full"))
158 160 self._update(rev)
159 161 changes = []
160 162 copies = {}
161 163
162 164 for f in self.changes[rev].add_files:
163 165 changes.append((f, rev))
164 166
165 167 for f in self.changes[rev].mod_files:
166 168 changes.append((f, rev))
167 169
168 170 for f in self.changes[rev].del_files:
169 171 changes.append((f, rev))
170 172
171 173 for src in self.changes[rev].ren_files:
172 174 to = self.changes[rev].ren_files[src]
173 175 changes.append((src, rev))
174 176 changes.append((to, rev))
175 177 copies[to] = src
176 178
177 179 for src in self.changes[rev].ren_dirs:
178 180 to = self.changes[rev].ren_dirs[src]
179 181 chgs, cps = self._rendirchanges(src, to)
180 182 changes += [(f, rev) for f in chgs]
181 183 copies.update(cps)
182 184
183 185 self.lastrev = rev
184 186 return sorted(set(changes)), copies, set()
185 187
186 188 def getcommit(self, rev):
187 189 changes = self.changes[rev]
188 190 return common.commit(author=changes.author, date=changes.date,
189 191 desc=changes.summary, parents=self.parents[rev],
190 192 rev=rev)
191 193
192 194 def gettags(self):
193 195 return self.tags
194 196
195 197 def _execute(self, cmd, *args, **kwargs):
196 198 cmdline = [self.execmd, cmd]
197 199 cmdline += args
198 cmdline = [util.shellquote(arg) for arg in cmdline]
200 cmdline = [procutil.shellquote(arg) for arg in cmdline]
199 201 cmdline += ['>', os.devnull, '2>', os.devnull]
200 cmdline = util.quotecommand(' '.join(cmdline))
202 cmdline = procutil.quotecommand(' '.join(cmdline))
201 203 self.ui.debug(cmdline, '\n')
202 204 return os.system(cmdline)
203 205
204 206 def _update(self, rev):
205 207 self.ui.debug('applying revision %s...\n' % rev)
206 208 changeset, status = self.runlines('replay', '-d', self.tmppath,
207 209 rev)
208 210 if status:
209 211 # Something went wrong while merging (baz or tla
210 212 # issue?), get latest revision and try from there
211 213 shutil.rmtree(self.tmppath, ignore_errors=True)
212 214 self._obtainrevision(rev)
213 215 else:
214 216 old_rev = self.parents[rev][0]
215 217 self.ui.debug('computing changeset between %s and %s...\n'
216 218 % (old_rev, rev))
217 219 self._parsechangeset(changeset, rev)
218 220
219 221 def _getfile(self, name, rev):
220 222 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
221 223 if stat.S_ISLNK(mode):
222 224 data = os.readlink(os.path.join(self.tmppath, name))
223 225 if mode:
224 226 mode = 'l'
225 227 else:
226 228 mode = ''
227 229 else:
228 230 data = open(os.path.join(self.tmppath, name), 'rb').read()
229 231 mode = (mode & 0o111) and 'x' or ''
230 232 return data, mode
231 233
232 234 def _exclude(self, name):
233 235 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
234 236 for exc in exclude:
235 237 if name.find(exc) != -1:
236 238 return True
237 239 return False
238 240
239 241 def _readcontents(self, path):
240 242 files = []
241 243 contents = os.listdir(path)
242 244 while len(contents) > 0:
243 245 c = contents.pop()
244 246 p = os.path.join(path, c)
245 247 # os.walk could be used, but here we avoid internal GNU
246 248 # Arch files and directories, thus saving a lot of time.
247 249 if not self._exclude(p):
248 250 if os.path.isdir(p):
249 251 contents += [os.path.join(c, f) for f in os.listdir(p)]
250 252 else:
251 253 files.append(c)
252 254 return files
253 255
254 256 def _rendirchanges(self, src, dest):
255 257 changes = []
256 258 copies = {}
257 259 files = self._readcontents(os.path.join(self.tmppath, dest))
258 260 for f in files:
259 261 s = os.path.join(src, f)
260 262 d = os.path.join(dest, f)
261 263 changes.append(s)
262 264 changes.append(d)
263 265 copies[d] = s
264 266 return changes, copies
265 267
266 268 def _obtainrevision(self, rev):
267 269 self.ui.debug('obtaining revision %s...\n' % rev)
268 270 output = self._execute('get', rev, self.tmppath)
269 271 self.checkexit(output)
270 272 self.ui.debug('analyzing revision %s...\n' % rev)
271 273 files = self._readcontents(self.tmppath)
272 274 self.changes[rev].add_files += files
273 275
274 276 def _stripbasepath(self, path):
275 277 if path.startswith('./'):
276 278 return path[2:]
277 279 return path
278 280
279 281 def _parsecatlog(self, data, rev):
280 282 try:
281 283 catlog = self.catlogparser.parsestr(data)
282 284
283 285 # Commit date
284 286 self.changes[rev].date = dateutil.datestr(
285 287 dateutil.strdate(catlog['Standard-date'],
286 288 '%Y-%m-%d %H:%M:%S'))
287 289
288 290 # Commit author
289 291 self.changes[rev].author = self.recode(catlog['Creator'])
290 292
291 293 # Commit description
292 294 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
293 295 catlog.get_payload()))
294 296 self.changes[rev].summary = self.recode(self.changes[rev].summary)
295 297
296 298 # Commit revision origin when dealing with a branch or tag
297 299 if 'Continuation-of' in catlog:
298 300 self.changes[rev].continuationof = self.recode(
299 301 catlog['Continuation-of'])
300 302 except Exception:
301 303 raise error.Abort(_('could not parse cat-log of %s') % rev)
302 304
303 305 def _parsechangeset(self, data, rev):
304 306 for l in data:
305 307 l = l.strip()
306 308 # Added file (ignore added directory)
307 309 if l.startswith('A') and not l.startswith('A/'):
308 310 file = self._stripbasepath(l[1:].strip())
309 311 if not self._exclude(file):
310 312 self.changes[rev].add_files.append(file)
311 313 # Deleted file (ignore deleted directory)
312 314 elif l.startswith('D') and not l.startswith('D/'):
313 315 file = self._stripbasepath(l[1:].strip())
314 316 if not self._exclude(file):
315 317 self.changes[rev].del_files.append(file)
316 318 # Modified binary file
317 319 elif l.startswith('Mb'):
318 320 file = self._stripbasepath(l[2:].strip())
319 321 if not self._exclude(file):
320 322 self.changes[rev].mod_files.append(file)
321 323 # Modified link
322 324 elif l.startswith('M->'):
323 325 file = self._stripbasepath(l[3:].strip())
324 326 if not self._exclude(file):
325 327 self.changes[rev].mod_files.append(file)
326 328 # Modified file
327 329 elif l.startswith('M'):
328 330 file = self._stripbasepath(l[1:].strip())
329 331 if not self._exclude(file):
330 332 self.changes[rev].mod_files.append(file)
331 333 # Renamed file (or link)
332 334 elif l.startswith('=>'):
333 335 files = l[2:].strip().split(' ')
334 336 if len(files) == 1:
335 337 files = l[2:].strip().split('\t')
336 338 src = self._stripbasepath(files[0])
337 339 dst = self._stripbasepath(files[1])
338 340 if not self._exclude(src) and not self._exclude(dst):
339 341 self.changes[rev].ren_files[src] = dst
340 342 # Conversion from file to link or from link to file (modified)
341 343 elif l.startswith('ch'):
342 344 file = self._stripbasepath(l[2:].strip())
343 345 if not self._exclude(file):
344 346 self.changes[rev].mod_files.append(file)
345 347 # Renamed directory
346 348 elif l.startswith('/>'):
347 349 dirs = l[2:].strip().split(' ')
348 350 if len(dirs) == 1:
349 351 dirs = l[2:].strip().split('\t')
350 352 src = self._stripbasepath(dirs[0])
351 353 dst = self._stripbasepath(dirs[1])
352 354 if not self._exclude(src) and not self._exclude(dst):
353 355 self.changes[rev].ren_dirs[src] = dst
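
Note: every hunk in this changeset applies the same mechanical substitution: the file gains an import of the new mercurial.utils.procutil module, and the process-management helpers (setbinary, findexe, shellquote, quotecommand, popen, popen2, hgexecutable) keep their names while moving off mercurial.util. A minimal sketch of the new call-site shape, using only functions that appear in these hunks (run_quoted itself is a hypothetical helper, not part of the diff):

    from mercurial.utils import (
        procutil,
    )

    def run_quoted(executable, *args):
        # Hypothetical helper: quote each argument for the shell, as the
        # converter sources above do before handing the command line to
        # os.system() or popen(), then read the command's binary output.
        cmdline = [procutil.shellquote(a) for a in (executable,) + args]
        return procutil.popen(' '.join(cmdline), mode='rb')
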
@@ -1,377 +1,378
1 1 # Perforce source for convert extension.
2 2 #
3 3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import marshal
10 10 import re
11 11
12 12 from mercurial.i18n import _
13 13 from mercurial import (
14 14 error,
15 15 util,
16 16 )
17 17 from mercurial.utils import (
18 18 dateutil,
19 procutil,
19 20 stringutil,
20 21 )
21 22
22 23 from . import common
23 24
24 25 def loaditer(f):
25 26 "Yield the dictionary objects generated by p4"
26 27 try:
27 28 while True:
28 29 d = marshal.load(f)
29 30 if not d:
30 31 break
31 32 yield d
32 33 except EOFError:
33 34 pass
34 35
35 36 def decodefilename(filename):
36 37 """Perforce escapes special characters @, #, *, or %
37 38 with %40, %23, %2A, or %25 respectively
38 39
39 40 >>> decodefilename(b'portable-net45%252Bnetcore45%252Bwp8%252BMonoAndroid')
40 41 'portable-net45%2Bnetcore45%2Bwp8%2BMonoAndroid'
41 42 >>> decodefilename(b'//Depot/Directory/%2525/%2523/%23%40.%2A')
42 43 '//Depot/Directory/%25/%23/#@.*'
43 44 """
44 45 replacements = [('%2A', '*'), ('%23', '#'), ('%40', '@'), ('%25', '%')]
45 46 for k, v in replacements:
46 47 filename = filename.replace(k, v)
47 48 return filename
48 49
49 50 class p4_source(common.converter_source):
50 51 def __init__(self, ui, repotype, path, revs=None):
51 52 # avoid import cycle
52 53 from . import convcmd
53 54
54 55 super(p4_source, self).__init__(ui, repotype, path, revs=revs)
55 56
56 57 if "/" in path and not path.startswith('//'):
57 58 raise common.NoRepo(_('%s does not look like a P4 repository') %
58 59 path)
59 60
60 61 common.checktool('p4', abort=False)
61 62
62 63 self.revmap = {}
63 64 self.encoding = self.ui.config('convert', 'p4.encoding',
64 65 convcmd.orig_encoding)
65 66 self.re_type = re.compile(
66 67 "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
67 68 "(\+\w+)?$")
68 69 self.re_keywords = re.compile(
69 70 r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
70 71 r":[^$\n]*\$")
71 72 self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
72 73
73 74 if revs and len(revs) > 1:
74 75 raise error.Abort(_("p4 source does not support specifying "
75 76 "multiple revisions"))
76 77
77 78 def setrevmap(self, revmap):
78 79 """Sets the parsed revmap dictionary.
79 80
80 81 Revmap stores mappings from a source revision to a target revision.
81 82 It is set in convertcmd.convert and provided by the user as a file
82 83 on the commandline.
83 84
84 85 Revisions in the map are considered being present in the
85 86 repository and ignored during _parse(). This allows for incremental
86 87 imports if a revmap is provided.
87 88 """
88 89 self.revmap = revmap
89 90
90 91 def _parse_view(self, path):
91 92 "Read changes affecting the path"
92 cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
93 stdout = util.popen(cmd, mode='rb')
93 cmd = 'p4 -G changes -s submitted %s' % procutil.shellquote(path)
94 stdout = procutil.popen(cmd, mode='rb')
94 95 p4changes = {}
95 96 for d in loaditer(stdout):
96 97 c = d.get("change", None)
97 98 if c:
98 99 p4changes[c] = True
99 100 return p4changes
100 101
101 102 def _parse(self, ui, path):
102 103 "Prepare list of P4 filenames and revisions to import"
103 104 p4changes = {}
104 105 changeset = {}
105 106 files_map = {}
106 107 copies_map = {}
107 108 localname = {}
108 109 depotname = {}
109 110 heads = []
110 111
111 112 ui.status(_('reading p4 views\n'))
112 113
113 114 # read client spec or view
114 115 if "/" in path:
115 116 p4changes.update(self._parse_view(path))
116 117 if path.startswith("//") and path.endswith("/..."):
117 118 views = {path[:-3]:""}
118 119 else:
119 120 views = {"//": ""}
120 121 else:
121 cmd = 'p4 -G client -o %s' % util.shellquote(path)
122 clientspec = marshal.load(util.popen(cmd, mode='rb'))
122 cmd = 'p4 -G client -o %s' % procutil.shellquote(path)
123 clientspec = marshal.load(procutil.popen(cmd, mode='rb'))
123 124
124 125 views = {}
125 126 for client in clientspec:
126 127 if client.startswith("View"):
127 128 sview, cview = clientspec[client].split()
128 129 p4changes.update(self._parse_view(sview))
129 130 if sview.endswith("...") and cview.endswith("..."):
130 131 sview = sview[:-3]
131 132 cview = cview[:-3]
132 133 cview = cview[2:]
133 134 cview = cview[cview.find("/") + 1:]
134 135 views[sview] = cview
135 136
136 137 # list of changes that affect our source files
137 138 p4changes = p4changes.keys()
138 139 p4changes.sort(key=int)
139 140
140 141 # list with depot pathnames, longest first
141 142 vieworder = views.keys()
142 143 vieworder.sort(key=len, reverse=True)
143 144
144 145 # handle revision limiting
145 146 startrev = self.ui.config('convert', 'p4.startrev')
146 147
147 148 # now read the full changelists to get the list of file revisions
148 149 ui.status(_('collecting p4 changelists\n'))
149 150 lastid = None
150 151 for change in p4changes:
151 152 if startrev and int(change) < int(startrev):
152 153 continue
153 154 if self.revs and int(change) > int(self.revs[0]):
154 155 continue
155 156 if change in self.revmap:
156 157 # Ignore already present revisions, but set the parent pointer.
157 158 lastid = change
158 159 continue
159 160
160 161 if lastid:
161 162 parents = [lastid]
162 163 else:
163 164 parents = []
164 165
165 166 d = self._fetch_revision(change)
166 167 c = self._construct_commit(d, parents)
167 168
168 169 descarr = c.desc.splitlines(True)
169 170 if len(descarr) > 0:
170 171 shortdesc = descarr[0].rstrip('\r\n')
171 172 else:
172 173 shortdesc = '**empty changelist description**'
173 174
174 175 t = '%s %s' % (c.rev, repr(shortdesc)[1:-1])
175 176 ui.status(stringutil.ellipsis(t, 80) + '\n')
176 177
177 178 files = []
178 179 copies = {}
179 180 copiedfiles = []
180 181 i = 0
181 182 while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
182 183 oldname = d["depotFile%d" % i]
183 184 filename = None
184 185 for v in vieworder:
185 186 if oldname.lower().startswith(v.lower()):
186 187 filename = decodefilename(views[v] + oldname[len(v):])
187 188 break
188 189 if filename:
189 190 files.append((filename, d["rev%d" % i]))
190 191 depotname[filename] = oldname
191 192 if (d.get("action%d" % i) == "move/add"):
192 193 copiedfiles.append(filename)
193 194 localname[oldname] = filename
194 195 i += 1
195 196
196 197 # Collect information about copied files
197 198 for filename in copiedfiles:
198 199 oldname = depotname[filename]
199 200
200 201 flcmd = 'p4 -G filelog %s' \
201 % util.shellquote(oldname)
202 flstdout = util.popen(flcmd, mode='rb')
202 % procutil.shellquote(oldname)
203 flstdout = procutil.popen(flcmd, mode='rb')
203 204
204 205 copiedfilename = None
205 206 for d in loaditer(flstdout):
206 207 copiedoldname = None
207 208
208 209 i = 0
209 210 while ("change%d" % i) in d:
210 211 if (d["change%d" % i] == change and
211 212 d["action%d" % i] == "move/add"):
212 213 j = 0
213 214 while ("file%d,%d" % (i, j)) in d:
214 215 if d["how%d,%d" % (i, j)] == "moved from":
215 216 copiedoldname = d["file%d,%d" % (i, j)]
216 217 break
217 218 j += 1
218 219 i += 1
219 220
220 221 if copiedoldname and copiedoldname in localname:
221 222 copiedfilename = localname[copiedoldname]
222 223 break
223 224
224 225 if copiedfilename:
225 226 copies[filename] = copiedfilename
226 227 else:
227 228 ui.warn(_("cannot find source for copied file: %s@%s\n")
228 229 % (filename, change))
229 230
230 231 changeset[change] = c
231 232 files_map[change] = files
232 233 copies_map[change] = copies
233 234 lastid = change
234 235
235 236 if lastid and len(changeset) > 0:
236 237 heads = [lastid]
237 238
238 239 return {
239 240 'changeset': changeset,
240 241 'files': files_map,
241 242 'copies': copies_map,
242 243 'heads': heads,
243 244 'depotname': depotname,
244 245 }
245 246
246 247 @util.propertycache
247 248 def _parse_once(self):
248 249 return self._parse(self.ui, self.path)
249 250
250 251 @util.propertycache
251 252 def copies(self):
252 253 return self._parse_once['copies']
253 254
254 255 @util.propertycache
255 256 def files(self):
256 257 return self._parse_once['files']
257 258
258 259 @util.propertycache
259 260 def changeset(self):
260 261 return self._parse_once['changeset']
261 262
262 263 @util.propertycache
263 264 def heads(self):
264 265 return self._parse_once['heads']
265 266
266 267 @util.propertycache
267 268 def depotname(self):
268 269 return self._parse_once['depotname']
269 270
270 271 def getheads(self):
271 272 return self.heads
272 273
273 274 def getfile(self, name, rev):
274 275 cmd = 'p4 -G print %s' \
275 % util.shellquote("%s#%s" % (self.depotname[name], rev))
276 % procutil.shellquote("%s#%s" % (self.depotname[name], rev))
276 277
277 278 lasterror = None
278 279 while True:
279 stdout = util.popen(cmd, mode='rb')
280 stdout = procutil.popen(cmd, mode='rb')
280 281
281 282 mode = None
282 283 contents = []
283 284 keywords = None
284 285
285 286 for d in loaditer(stdout):
286 287 code = d["code"]
287 288 data = d.get("data")
288 289
289 290 if code == "error":
290 291 # if this is the first time error happened
291 292 # re-attempt getting the file
292 293 if not lasterror:
293 294 lasterror = IOError(d["generic"], data)
294 295 # this will exit inner-most for-loop
295 296 break
296 297 else:
297 298 raise lasterror
298 299
299 300 elif code == "stat":
300 301 action = d.get("action")
301 302 if action in ["purge", "delete", "move/delete"]:
302 303 return None, None
303 304 p4type = self.re_type.match(d["type"])
304 305 if p4type:
305 306 mode = ""
306 307 flags = ((p4type.group(1) or "")
307 308 + (p4type.group(3) or ""))
308 309 if "x" in flags:
309 310 mode = "x"
310 311 if p4type.group(2) == "symlink":
311 312 mode = "l"
312 313 if "ko" in flags:
313 314 keywords = self.re_keywords_old
314 315 elif "k" in flags:
315 316 keywords = self.re_keywords
316 317
317 318 elif code == "text" or code == "binary":
318 319 contents.append(data)
319 320
320 321 lasterror = None
321 322
322 323 if not lasterror:
323 324 break
324 325
325 326 if mode is None:
326 327 return None, None
327 328
328 329 contents = ''.join(contents)
329 330
330 331 if keywords:
331 332 contents = keywords.sub("$\\1$", contents)
332 333 if mode == "l" and contents.endswith("\n"):
333 334 contents = contents[:-1]
334 335
335 336 return contents, mode
336 337
337 338 def getchanges(self, rev, full):
338 339 if full:
339 340 raise error.Abort(_("convert from p4 does not support --full"))
340 341 return self.files[rev], self.copies[rev], set()
341 342
342 343 def _construct_commit(self, obj, parents=None):
343 344 """
344 345 Constructs a common.commit object from an unmarshalled
345 346 `p4 describe` output
346 347 """
347 348 desc = self.recode(obj.get("desc", ""))
348 349 date = (int(obj["time"]), 0) # timezone not set
349 350 if parents is None:
350 351 parents = []
351 352
352 353 return common.commit(author=self.recode(obj["user"]),
353 354 date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
354 355 parents=parents, desc=desc, branch=None, rev=obj['change'],
355 356 extra={"p4": obj['change'], "convert_revision": obj['change']})
356 357
357 358 def _fetch_revision(self, rev):
358 359 """Return an output of `p4 describe` including author, commit date as
359 360 a dictionary."""
360 361 cmd = "p4 -G describe -s %s" % rev
361 stdout = util.popen(cmd, mode='rb')
362 stdout = procutil.popen(cmd, mode='rb')
362 363 return marshal.load(stdout)
363 364
364 365 def getcommit(self, rev):
365 366 if rev in self.changeset:
366 367 return self.changeset[rev]
367 368 elif rev in self.revmap:
368 369 d = self._fetch_revision(rev)
369 370 return self._construct_commit(d, parents=None)
370 371 raise error.Abort(
371 372 _("cannot find %s in the revmap or parsed changesets") % rev)
372 373
373 374 def gettags(self):
374 375 return {}
375 376
376 377 def getchangedfiles(self, rev, i):
377 378 return sorted([x[0] for x in self.files[rev]])
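
Note: the p4 source drives "p4 -G", which emits a stream of marshal-encoded Python dictionaries; loaditer() above drains that stream until a falsy object or EOFError. A standalone sketch of the same protocol, assuming a working p4 client (submitted_changes is a hypothetical wrapper and the depot path argument is a placeholder):

    import marshal

    from mercurial.utils import (
        procutil,
    )

    def submitted_changes(path):
        # Mirrors _parse_view() above: ask p4 for submitted changelists and
        # yield one marshalled dict per changelist from the binary pipe.
        cmd = 'p4 -G changes -s submitted %s' % procutil.shellquote(path)
        stdout = procutil.popen(cmd, mode='rb')  # marshal is a byte protocol
        try:
            while True:
                d = marshal.load(stdout)
                if not d:
                    break
                yield d
        except EOFError:
            # p4 closed the pipe: the stream is exhausted
            pass
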
@@ -1,1360 +1,1361
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4 from __future__ import absolute_import
5 5
6 6 import os
7 7 import re
8 8 import tempfile
9 9 import xml.dom.minidom
10 10
11 11 from mercurial.i18n import _
12 12 from mercurial import (
13 13 encoding,
14 14 error,
15 15 pycompat,
16 16 util,
17 17 vfs as vfsmod,
18 18 )
19 19 from mercurial.utils import (
20 20 dateutil,
21 procutil,
21 22 stringutil,
22 23 )
23 24
24 25 from . import common
25 26
26 27 pickle = util.pickle
27 28 stringio = util.stringio
28 29 propertycache = util.propertycache
29 30 urlerr = util.urlerr
30 31 urlreq = util.urlreq
31 32
32 33 commandline = common.commandline
33 34 commit = common.commit
34 35 converter_sink = common.converter_sink
35 36 converter_source = common.converter_source
36 37 decodeargs = common.decodeargs
37 38 encodeargs = common.encodeargs
38 39 makedatetimestamp = common.makedatetimestamp
39 40 mapfile = common.mapfile
40 41 MissingTool = common.MissingTool
41 42 NoRepo = common.NoRepo
42 43
43 44 # Subversion stuff. Works best with very recent Python SVN bindings
44 45 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
45 46 # these bindings.
46 47
47 48 try:
48 49 import svn
49 50 import svn.client
50 51 import svn.core
51 52 import svn.ra
52 53 import svn.delta
53 54 from . import transport
54 55 import warnings
55 56 warnings.filterwarnings('ignore',
56 57 module='svn.core',
57 58 category=DeprecationWarning)
58 59 svn.core.SubversionException # trigger import to catch error
59 60
60 61 except ImportError:
61 62 svn = None
62 63
63 64 class SvnPathNotFound(Exception):
64 65 pass
65 66
66 67 def revsplit(rev):
67 68 """Parse a revision string and return (uuid, path, revnum).
68 69 >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
69 70 ... b'/proj%20B/mytrunk/mytrunk@1')
70 71 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
71 72 >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
72 73 ('', '', 1)
73 74 >>> revsplit(b'@7')
74 75 ('', '', 7)
75 76 >>> revsplit(b'7')
76 77 ('', '', 0)
77 78 >>> revsplit(b'bad')
78 79 ('', '', 0)
79 80 """
80 81 parts = rev.rsplit('@', 1)
81 82 revnum = 0
82 83 if len(parts) > 1:
83 84 revnum = int(parts[1])
84 85 parts = parts[0].split('/', 1)
85 86 uuid = ''
86 87 mod = ''
87 88 if len(parts) > 1 and parts[0].startswith('svn:'):
88 89 uuid = parts[0][4:]
89 90 mod = '/' + parts[1]
90 91 return uuid, mod, revnum
91 92
92 93 def quote(s):
93 94 # As of svn 1.7, many svn calls expect "canonical" paths. In
94 95 # theory, we should call svn.core.*canonicalize() on all paths
95 96 # before passing them to the API. Instead, we assume the base url
96 97 # is canonical and copy the behaviour of svn URL encoding function
97 98 # so we can extend it safely with new components. The "safe"
98 99 # characters were taken from the "svn_uri__char_validity" table in
99 100 # libsvn_subr/path.c.
100 101 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
101 102
102 103 def geturl(path):
103 104 try:
104 105 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
105 106 except svn.core.SubversionException:
106 107 # svn.client.url_from_path() fails with local repositories
107 108 pass
108 109 if os.path.isdir(path):
109 110 path = os.path.normpath(os.path.abspath(path))
110 111 if pycompat.iswindows:
111 112 path = '/' + util.normpath(path)
112 113 # Module URL is later compared with the repository URL returned
113 114 # by svn API, which is UTF-8.
114 115 path = encoding.tolocal(path)
115 116 path = 'file://%s' % quote(path)
116 117 return svn.core.svn_path_canonicalize(path)
117 118
118 119 def optrev(number):
119 120 optrev = svn.core.svn_opt_revision_t()
120 121 optrev.kind = svn.core.svn_opt_revision_number
121 122 optrev.value.number = number
122 123 return optrev
123 124
124 125 class changedpath(object):
125 126 def __init__(self, p):
126 127 self.copyfrom_path = p.copyfrom_path
127 128 self.copyfrom_rev = p.copyfrom_rev
128 129 self.action = p.action
129 130
130 131 def get_log_child(fp, url, paths, start, end, limit=0,
131 132 discover_changed_paths=True, strict_node_history=False):
132 133 protocol = -1
133 134 def receiver(orig_paths, revnum, author, date, message, pool):
134 135 paths = {}
135 136 if orig_paths is not None:
136 137 for k, v in orig_paths.iteritems():
137 138 paths[k] = changedpath(v)
138 139 pickle.dump((paths, revnum, author, date, message),
139 140 fp, protocol)
140 141
141 142 try:
142 143 # Use an ra of our own so that our parent can consume
143 144 # our results without confusing the server.
144 145 t = transport.SvnRaTransport(url=url)
145 146 svn.ra.get_log(t.ra, paths, start, end, limit,
146 147 discover_changed_paths,
147 148 strict_node_history,
148 149 receiver)
149 150 except IOError:
150 151 # Caller may interrupt the iteration
151 152 pickle.dump(None, fp, protocol)
152 153 except Exception as inst:
153 154 pickle.dump(stringutil.forcebytestr(inst), fp, protocol)
154 155 else:
155 156 pickle.dump(None, fp, protocol)
156 157 fp.flush()
157 158 # With a large history, the cleanup process goes crazy and suddenly
158 159 # consumes a *huge* amount of memory. The output file being closed,
159 160 # there is no need for clean termination.
160 161 os._exit(0)
161 162
162 163 def debugsvnlog(ui, **opts):
163 164 """Fetch SVN log in a subprocess and channel them back to parent to
164 165 avoid memory collection issues.
165 166 """
166 167 if svn is None:
167 168 raise error.Abort(_('debugsvnlog could not load Subversion python '
168 169 'bindings'))
169 170
170 171 args = decodeargs(ui.fin.read())
171 172 get_log_child(ui.fout, *args)
172 173
173 174 class logstream(object):
174 175 """Interruptible revision log iterator."""
175 176 def __init__(self, stdout):
176 177 self._stdout = stdout
177 178
178 179 def __iter__(self):
179 180 while True:
180 181 try:
181 182 entry = pickle.load(self._stdout)
182 183 except EOFError:
183 184 raise error.Abort(_('Mercurial failed to run itself, check'
184 185 ' hg executable is in PATH'))
185 186 try:
186 187 orig_paths, revnum, author, date, message = entry
187 188 except (TypeError, ValueError):
188 189 if entry is None:
189 190 break
190 191 raise error.Abort(_("log stream exception '%s'") % entry)
191 192 yield entry
192 193
193 194 def close(self):
194 195 if self._stdout:
195 196 self._stdout.close()
196 197 self._stdout = None
197 198
198 199 class directlogstream(list):
199 200 """Direct revision log iterator.
200 201 This can be used for debugging and development but it will probably leak
201 202 memory and is not suitable for real conversions."""
202 203 def __init__(self, url, paths, start, end, limit=0,
203 204 discover_changed_paths=True, strict_node_history=False):
204 205
205 206 def receiver(orig_paths, revnum, author, date, message, pool):
206 207 paths = {}
207 208 if orig_paths is not None:
208 209 for k, v in orig_paths.iteritems():
209 210 paths[k] = changedpath(v)
210 211 self.append((paths, revnum, author, date, message))
211 212
212 213 # Use an ra of our own so that our parent can consume
213 214 # our results without confusing the server.
214 215 t = transport.SvnRaTransport(url=url)
215 216 svn.ra.get_log(t.ra, paths, start, end, limit,
216 217 discover_changed_paths,
217 218 strict_node_history,
218 219 receiver)
219 220
220 221 def close(self):
221 222 pass
222 223
223 224 # Check to see if the given path is a local Subversion repo. Verify this by
224 225 # looking for several svn-specific files and directories in the given
225 226 # directory.
226 227 def filecheck(ui, path, proto):
227 228 for x in ('locks', 'hooks', 'format', 'db'):
228 229 if not os.path.exists(os.path.join(path, x)):
229 230 return False
230 231 return True
231 232
232 233 # Check to see if a given path is the root of an svn repo over http. We verify
233 234 # this by requesting a version-controlled URL we know can't exist and looking
234 235 # for the svn-specific "not found" XML.
235 236 def httpcheck(ui, path, proto):
236 237 try:
237 238 opener = urlreq.buildopener()
238 239 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path), 'rb')
239 240 data = rsp.read()
240 241 except urlerr.httperror as inst:
241 242 if inst.code != 404:
242 243 # Except for 404 we cannot know for sure this is not an svn repo
243 244 ui.warn(_('svn: cannot probe remote repository, assume it could '
244 245 'be a subversion repository. Use --source-type if you '
245 246 'know better.\n'))
246 247 return True
247 248 data = inst.fp.read()
248 249 except Exception:
249 250 # Could be urlerr.urlerror if the URL is invalid or anything else.
250 251 return False
251 252 return '<m:human-readable errcode="160013">' in data
252 253
253 254 protomap = {'http': httpcheck,
254 255 'https': httpcheck,
255 256 'file': filecheck,
256 257 }
257 258 def issvnurl(ui, url):
258 259 try:
259 260 proto, path = url.split('://', 1)
260 261 if proto == 'file':
261 262 if (pycompat.iswindows and path[:1] == '/'
262 263 and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
263 264 path = path[:2] + ':/' + path[6:]
264 265 path = urlreq.url2pathname(path)
265 266 except ValueError:
266 267 proto = 'file'
267 268 path = os.path.abspath(url)
268 269 if proto == 'file':
269 270 path = util.pconvert(path)
270 271 check = protomap.get(proto, lambda *args: False)
271 272 while '/' in path:
272 273 if check(ui, path, proto):
273 274 return True
274 275 path = path.rsplit('/', 1)[0]
275 276 return False
276 277
277 278 # SVN conversion code stolen from bzr-svn and tailor
278 279 #
279 280 # Subversion looks like a versioned filesystem, branches structures
280 281 # are defined by conventions and not enforced by the tool. First,
281 282 # we define the potential branches (modules) as "trunk" and "branches"
282 283 # children directories. Revisions are then identified by their
283 284 # module and revision number (and a repository identifier).
284 285 #
285 286 # The revision graph is really a tree (or a forest). By default, a
286 287 # revision parent is the previous revision in the same module. If the
287 288 # module directory is copied/moved from another module then the
288 289 # revision is the module root and its parent the source revision in
289 290 # the parent module. A revision has at most one parent.
290 291 #
291 292 class svn_source(converter_source):
292 293 def __init__(self, ui, repotype, url, revs=None):
293 294 super(svn_source, self).__init__(ui, repotype, url, revs=revs)
294 295
295 296 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
296 297 (os.path.exists(url) and
297 298 os.path.exists(os.path.join(url, '.svn'))) or
298 299 issvnurl(ui, url)):
299 300 raise NoRepo(_("%s does not look like a Subversion repository")
300 301 % url)
301 302 if svn is None:
302 303 raise MissingTool(_('could not load Subversion python bindings'))
303 304
304 305 try:
305 306 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
306 307 if version < (1, 4):
307 308 raise MissingTool(_('Subversion python bindings %d.%d found, '
308 309 '1.4 or later required') % version)
309 310 except AttributeError:
310 311 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
311 312 'or later required'))
312 313
313 314 self.lastrevs = {}
314 315
315 316 latest = None
316 317 try:
317 318 # Support file://path@rev syntax. Useful e.g. to convert
318 319 # deleted branches.
319 320 at = url.rfind('@')
320 321 if at >= 0:
321 322 latest = int(url[at + 1:])
322 323 url = url[:at]
323 324 except ValueError:
324 325 pass
325 326 self.url = geturl(url)
326 327 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
327 328 try:
328 329 self.transport = transport.SvnRaTransport(url=self.url)
329 330 self.ra = self.transport.ra
330 331 self.ctx = self.transport.client
331 332 self.baseurl = svn.ra.get_repos_root(self.ra)
332 333 # Module is either empty or a repository path starting with
333 334 # a slash and not ending with a slash.
334 335 self.module = urlreq.unquote(self.url[len(self.baseurl):])
335 336 self.prevmodule = None
336 337 self.rootmodule = self.module
337 338 self.commits = {}
338 339 self.paths = {}
339 340 self.uuid = svn.ra.get_uuid(self.ra)
340 341 except svn.core.SubversionException:
341 342 ui.traceback()
342 343 svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
343 344 svn.core.SVN_VER_MINOR,
344 345 svn.core.SVN_VER_MICRO)
345 346 raise NoRepo(_("%s does not look like a Subversion repository "
346 347 "to libsvn version %s")
347 348 % (self.url, svnversion))
348 349
349 350 if revs:
350 351 if len(revs) > 1:
351 352 raise error.Abort(_('subversion source does not support '
352 353 'specifying multiple revisions'))
353 354 try:
354 355 latest = int(revs[0])
355 356 except ValueError:
356 357 raise error.Abort(_('svn: revision %s is not an integer') %
357 358 revs[0])
358 359
359 360 trunkcfg = self.ui.config('convert', 'svn.trunk')
360 361 if trunkcfg is None:
361 362 trunkcfg = 'trunk'
362 363 self.trunkname = trunkcfg.strip('/')
363 364 self.startrev = self.ui.config('convert', 'svn.startrev')
364 365 try:
365 366 self.startrev = int(self.startrev)
366 367 if self.startrev < 0:
367 368 self.startrev = 0
368 369 except ValueError:
369 370 raise error.Abort(_('svn: start revision %s is not an integer')
370 371 % self.startrev)
371 372
372 373 try:
373 374 self.head = self.latest(self.module, latest)
374 375 except SvnPathNotFound:
375 376 self.head = None
376 377 if not self.head:
377 378 raise error.Abort(_('no revision found in module %s')
378 379 % self.module)
379 380 self.last_changed = self.revnum(self.head)
380 381
381 382 self._changescache = (None, None)
382 383
383 384 if os.path.exists(os.path.join(url, '.svn/entries')):
384 385 self.wc = url
385 386 else:
386 387 self.wc = None
387 388 self.convertfp = None
388 389
389 390 def setrevmap(self, revmap):
390 391 lastrevs = {}
391 392 for revid in revmap:
392 393 uuid, module, revnum = revsplit(revid)
393 394 lastrevnum = lastrevs.setdefault(module, revnum)
394 395 if revnum > lastrevnum:
395 396 lastrevs[module] = revnum
396 397 self.lastrevs = lastrevs
397 398
398 399 def exists(self, path, optrev):
399 400 try:
400 401 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
401 402 optrev, False, self.ctx)
402 403 return True
403 404 except svn.core.SubversionException:
404 405 return False
405 406
406 407 def getheads(self):
407 408
408 409 def isdir(path, revnum):
409 410 kind = self._checkpath(path, revnum)
410 411 return kind == svn.core.svn_node_dir
411 412
412 413 def getcfgpath(name, rev):
413 414 cfgpath = self.ui.config('convert', 'svn.' + name)
414 415 if cfgpath is not None and cfgpath.strip() == '':
415 416 return None
416 417 path = (cfgpath or name).strip('/')
417 418 if not self.exists(path, rev):
418 419 if self.module.endswith(path) and name == 'trunk':
419 420 # we are converting from inside this directory
420 421 return None
421 422 if cfgpath:
422 423 raise error.Abort(_('expected %s to be at %r, but not found'
423 424 ) % (name, path))
424 425 return None
425 426 self.ui.note(_('found %s at %r\n') % (name, path))
426 427 return path
427 428
428 429 rev = optrev(self.last_changed)
429 430 oldmodule = ''
430 431 trunk = getcfgpath('trunk', rev)
431 432 self.tags = getcfgpath('tags', rev)
432 433 branches = getcfgpath('branches', rev)
433 434
434 435 # If the project has a trunk or branches, we will extract heads
435 436 # from them. We keep the project root otherwise.
436 437 if trunk:
437 438 oldmodule = self.module or ''
438 439 self.module += '/' + trunk
439 440 self.head = self.latest(self.module, self.last_changed)
440 441 if not self.head:
441 442 raise error.Abort(_('no revision found in module %s')
442 443 % self.module)
443 444
444 445 # First head in the list is the module's head
445 446 self.heads = [self.head]
446 447 if self.tags is not None:
447 448 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
448 449
449 450 # Check if branches bring a few more heads to the list
450 451 if branches:
451 452 rpath = self.url.strip('/')
452 453 branchnames = svn.client.ls(rpath + '/' + quote(branches),
453 454 rev, False, self.ctx)
454 455 for branch in sorted(branchnames):
455 456 module = '%s/%s/%s' % (oldmodule, branches, branch)
456 457 if not isdir(module, self.last_changed):
457 458 continue
458 459 brevid = self.latest(module, self.last_changed)
459 460 if not brevid:
460 461 self.ui.note(_('ignoring empty branch %s\n') % branch)
461 462 continue
462 463 self.ui.note(_('found branch %s at %d\n') %
463 464 (branch, self.revnum(brevid)))
464 465 self.heads.append(brevid)
465 466
466 467 if self.startrev and self.heads:
467 468 if len(self.heads) > 1:
468 469 raise error.Abort(_('svn: start revision is not supported '
469 470 'with more than one branch'))
470 471 revnum = self.revnum(self.heads[0])
471 472 if revnum < self.startrev:
472 473 raise error.Abort(
473 474 _('svn: no revision found after start revision %d')
474 475 % self.startrev)
475 476
476 477 return self.heads
477 478
478 479 def _getchanges(self, rev, full):
479 480 (paths, parents) = self.paths[rev]
480 481 copies = {}
481 482 if parents:
482 483 files, self.removed, copies = self.expandpaths(rev, paths, parents)
483 484 if full or not parents:
484 485 # Perform a full checkout on roots
485 486 uuid, module, revnum = revsplit(rev)
486 487 entries = svn.client.ls(self.baseurl + quote(module),
487 488 optrev(revnum), True, self.ctx)
488 489 files = [n for n, e in entries.iteritems()
489 490 if e.kind == svn.core.svn_node_file]
490 491 self.removed = set()
491 492
492 493 files.sort()
493 494 files = zip(files, [rev] * len(files))
494 495 return (files, copies)
495 496
496 497 def getchanges(self, rev, full):
497 498 # reuse cache from getchangedfiles
498 499 if self._changescache[0] == rev and not full:
499 500 (files, copies) = self._changescache[1]
500 501 else:
501 502 (files, copies) = self._getchanges(rev, full)
502 503 # caller caches the result, so free it here to release memory
503 504 del self.paths[rev]
504 505 return (files, copies, set())
505 506
506 507 def getchangedfiles(self, rev, i):
507 508 # called from filemap - cache computed values for reuse in getchanges
508 509 (files, copies) = self._getchanges(rev, False)
509 510 self._changescache = (rev, (files, copies))
510 511 return [f[0] for f in files]
511 512
512 513 def getcommit(self, rev):
513 514 if rev not in self.commits:
514 515 uuid, module, revnum = revsplit(rev)
515 516 self.module = module
516 517 self.reparent(module)
517 518 # We assume that:
518 519 # - requests for revisions after "stop" come from the
519 520 # revision graph backward traversal. Cache all of them
520 521 # down to stop, they will be used eventually.
521 522 # - requests for revisions before "stop" come to get
522 523 # isolated branches parents. Just fetch what is needed.
523 524 stop = self.lastrevs.get(module, 0)
524 525 if revnum < stop:
525 526 stop = revnum + 1
526 527 self._fetch_revisions(revnum, stop)
527 528 if rev not in self.commits:
528 529 raise error.Abort(_('svn: revision %s not found') % revnum)
529 530 revcommit = self.commits[rev]
530 531 # caller caches the result, so free it here to release memory
531 532 del self.commits[rev]
532 533 return revcommit
533 534
534 535 def checkrevformat(self, revstr, mapname='splicemap'):
535 536 """ fails if revision format does not match the correct format"""
536 537 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
537 538 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
538 539 r'{12,12}(.*)\@[0-9]+$',revstr):
539 540 raise error.Abort(_('%s entry %s is not a valid revision'
540 541 ' identifier') % (mapname, revstr))
541 542
542 543 def numcommits(self):
543 544 return int(self.head.rsplit('@', 1)[1]) - self.startrev
544 545
545 546 def gettags(self):
546 547 tags = {}
547 548 if self.tags is None:
548 549 return tags
549 550
550 551 # svn tags are just a convention, project branches left in a
551 552 # 'tags' directory. There is no other relationship than
552 553 # ancestry, which is expensive to discover and makes them hard
553 554 # to update incrementally. Worse, past revisions may be
554 555 # referenced by tags far away in the future, requiring a deep
555 556 # history traversal on every calculation. Current code
556 557 # performs a single backward traversal, tracking moves within
557 558 # the tags directory (tag renaming) and recording a new tag
558 559 # every time a project is copied from outside the tags
559 560 # directory. It also lists deleted tags; this behaviour may
560 561 # change in the future.
561 562 pendings = []
562 563 tagspath = self.tags
563 564 start = svn.ra.get_latest_revnum(self.ra)
564 565 stream = self._getlog([self.tags], start, self.startrev)
565 566 try:
566 567 for entry in stream:
567 568 origpaths, revnum, author, date, message = entry
568 569 if not origpaths:
569 570 origpaths = []
570 571 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
571 572 in origpaths.iteritems() if e.copyfrom_path]
572 573 # Apply moves/copies from more specific to general
573 574 copies.sort(reverse=True)
574 575
575 576 srctagspath = tagspath
576 577 if copies and copies[-1][2] == tagspath:
577 578 # Track tags directory moves
578 579 srctagspath = copies.pop()[0]
579 580
580 581 for source, sourcerev, dest in copies:
581 582 if not dest.startswith(tagspath + '/'):
582 583 continue
583 584 for tag in pendings:
584 585 if tag[0].startswith(dest):
585 586 tagpath = source + tag[0][len(dest):]
586 587 tag[:2] = [tagpath, sourcerev]
587 588 break
588 589 else:
589 590 pendings.append([source, sourcerev, dest])
590 591
591 592 # Filter out tags with children coming from different
592 593 # parts of the repository like:
593 594 # /tags/tag.1 (from /trunk:10)
594 595 # /tags/tag.1/foo (from /branches/foo:12)
595 596 # Here /tags/tag.1 is discarded as well as its children.
596 597 # It happens with tools like cvs2svn. Such tags cannot
597 598 # be represented in mercurial.
598 599 addeds = dict((p, e.copyfrom_path) for p, e
599 600 in origpaths.iteritems()
600 601 if e.action == 'A' and e.copyfrom_path)
601 602 badroots = set()
602 603 for destroot in addeds:
603 604 for source, sourcerev, dest in pendings:
604 605 if (not dest.startswith(destroot + '/')
605 606 or source.startswith(addeds[destroot] + '/')):
606 607 continue
607 608 badroots.add(destroot)
608 609 break
609 610
610 611 for badroot in badroots:
611 612 pendings = [p for p in pendings if p[2] != badroot
612 613 and not p[2].startswith(badroot + '/')]
613 614
614 615 # Tell tag renamings from tag creations
615 616 renamings = []
616 617 for source, sourcerev, dest in pendings:
617 618 tagname = dest.split('/')[-1]
618 619 if source.startswith(srctagspath):
619 620 renamings.append([source, sourcerev, tagname])
620 621 continue
621 622 if tagname in tags:
622 623 # Keep the latest tag value
623 624 continue
624 625 # From revision may be fake, get one with changes
625 626 try:
626 627 tagid = self.latest(source, sourcerev)
627 628 if tagid and tagname not in tags:
628 629 tags[tagname] = tagid
629 630 except SvnPathNotFound:
630 631 # It happens when we are following directories
631 632 # we assumed were copied with their parents
632 633 # but were really created in the tag
633 634 # directory.
634 635 pass
635 636 pendings = renamings
636 637 tagspath = srctagspath
637 638 finally:
638 639 stream.close()
639 640 return tags
640 641
641 642 def converted(self, rev, destrev):
642 643 if not self.wc:
643 644 return
644 645 if self.convertfp is None:
645 646 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
646 647 'ab')
647 648 self.convertfp.write(util.tonativeeol('%s %d\n'
648 649 % (destrev, self.revnum(rev))))
649 650 self.convertfp.flush()
650 651
651 652 def revid(self, revnum, module=None):
652 653 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
653 654
654 655 def revnum(self, rev):
655 656 return int(rev.split('@')[-1])
656 657
657 658 def latest(self, path, stop=None):
658 659 """Find the latest revid affecting path, up to stop revision
659 660 number. If stop is None, default to repository latest
660 661 revision. It may return a revision in a different module,
661 662 since a branch may be moved without a change being
662 663 reported. Return None if computed module does not belong to
663 664 rootmodule subtree.
664 665 """
665 666 def findchanges(path, start, stop=None):
666 667 stream = self._getlog([path], start, stop or 1)
667 668 try:
668 669 for entry in stream:
669 670 paths, revnum, author, date, message = entry
670 671 if stop is None and paths:
671 672 # We do not know the latest changed revision,
672 673 # keep the first one with changed paths.
673 674 break
674 675 if revnum <= stop:
675 676 break
676 677
677 678 for p in paths:
678 679 if (not path.startswith(p) or
679 680 not paths[p].copyfrom_path):
680 681 continue
681 682 newpath = paths[p].copyfrom_path + path[len(p):]
682 683 self.ui.debug("branch renamed from %s to %s at %d\n" %
683 684 (path, newpath, revnum))
684 685 path = newpath
685 686 break
686 687 if not paths:
687 688 revnum = None
688 689 return revnum, path
689 690 finally:
690 691 stream.close()
691 692
692 693 if not path.startswith(self.rootmodule):
693 694 # Requests on foreign branches may be forbidden at server level
694 695 self.ui.debug('ignoring foreign branch %r\n' % path)
695 696 return None
696 697
697 698 if stop is None:
698 699 stop = svn.ra.get_latest_revnum(self.ra)
699 700 try:
700 701 prevmodule = self.reparent('')
701 702 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
702 703 self.reparent(prevmodule)
703 704 except svn.core.SubversionException:
704 705 dirent = None
705 706 if not dirent:
706 707 raise SvnPathNotFound(_('%s not found up to revision %d')
707 708 % (path, stop))
708 709
709 710 # stat() gives us the previous revision on this line of
710 711 # development, but it might be in *another module*. Fetch the
711 712 # log and detect renames down to the latest revision.
712 713 revnum, realpath = findchanges(path, stop, dirent.created_rev)
713 714 if revnum is None:
714 715 # Tools like svnsync can create empty revisions, when
715 716 # synchronizing only a subtree for instance. These empty
716 717 # revisions created_rev still have their original values
717 718 # despite all changes having disappeared and can be
718 719 # returned by ra.stat(), at least when stating the root
719 720 # module. In that case, do not trust created_rev and scan
720 721 # the whole history.
721 722 revnum, realpath = findchanges(path, stop)
722 723 if revnum is None:
723 724 self.ui.debug('ignoring empty branch %r\n' % realpath)
724 725 return None
725 726
726 727 if not realpath.startswith(self.rootmodule):
727 728 self.ui.debug('ignoring foreign branch %r\n' % realpath)
728 729 return None
729 730 return self.revid(revnum, realpath)
730 731
731 732 def reparent(self, module):
732 733 """Reparent the svn transport and return the previous parent."""
733 734 if self.prevmodule == module:
734 735 return module
735 736 svnurl = self.baseurl + quote(module)
736 737 prevmodule = self.prevmodule
737 738 if prevmodule is None:
738 739 prevmodule = ''
739 740 self.ui.debug("reparent to %s\n" % svnurl)
740 741 svn.ra.reparent(self.ra, svnurl)
741 742 self.prevmodule = module
742 743 return prevmodule
743 744
744 745 def expandpaths(self, rev, paths, parents):
745 746 changed, removed = set(), set()
746 747 copies = {}
747 748
748 749 new_module, revnum = revsplit(rev)[1:]
749 750 if new_module != self.module:
750 751 self.module = new_module
751 752 self.reparent(self.module)
752 753
753 754 for i, (path, ent) in enumerate(paths):
754 755 self.ui.progress(_('scanning paths'), i, item=path,
755 756 total=len(paths), unit=_('paths'))
756 757 entrypath = self.getrelpath(path)
757 758
758 759 kind = self._checkpath(entrypath, revnum)
759 760 if kind == svn.core.svn_node_file:
760 761 changed.add(self.recode(entrypath))
761 762 if not ent.copyfrom_path or not parents:
762 763 continue
763 764 # Copy sources not in parent revisions cannot be
764 765 # represented, ignore their origin for now
765 766 pmodule, prevnum = revsplit(parents[0])[1:]
766 767 if ent.copyfrom_rev < prevnum:
767 768 continue
768 769 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
769 770 if not copyfrom_path:
770 771 continue
771 772 self.ui.debug("copied to %s from %s@%s\n" %
772 773 (entrypath, copyfrom_path, ent.copyfrom_rev))
773 774 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
774 775 elif kind == 0: # gone, but had better be a deleted *file*
775 776 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
776 777 pmodule, prevnum = revsplit(parents[0])[1:]
777 778 parentpath = pmodule + "/" + entrypath
778 779 fromkind = self._checkpath(entrypath, prevnum, pmodule)
779 780
780 781 if fromkind == svn.core.svn_node_file:
781 782 removed.add(self.recode(entrypath))
782 783 elif fromkind == svn.core.svn_node_dir:
783 784 oroot = parentpath.strip('/')
784 785 nroot = path.strip('/')
785 786 children = self._iterfiles(oroot, prevnum)
786 787 for childpath in children:
787 788 childpath = childpath.replace(oroot, nroot)
788 789 childpath = self.getrelpath("/" + childpath, pmodule)
789 790 if childpath:
790 791 removed.add(self.recode(childpath))
791 792 else:
792 793 self.ui.debug('unknown path in revision %d: %s\n' % \
793 794 (revnum, path))
794 795 elif kind == svn.core.svn_node_dir:
795 796 if ent.action == 'M':
796 797 # If the directory just had a prop change,
797 798 # then we shouldn't need to look for its children.
798 799 continue
799 800 if ent.action == 'R' and parents:
800 801 # If a directory is replacing a file, mark the previous
801 802 # file as deleted
802 803 pmodule, prevnum = revsplit(parents[0])[1:]
803 804 pkind = self._checkpath(entrypath, prevnum, pmodule)
804 805 if pkind == svn.core.svn_node_file:
805 806 removed.add(self.recode(entrypath))
806 807 elif pkind == svn.core.svn_node_dir:
807 808 # We do not know what files were kept or removed,
808 809 # mark them all as changed.
809 810 for childpath in self._iterfiles(pmodule, prevnum):
810 811 childpath = self.getrelpath("/" + childpath)
811 812 if childpath:
812 813 changed.add(self.recode(childpath))
813 814
814 815 for childpath in self._iterfiles(path, revnum):
815 816 childpath = self.getrelpath("/" + childpath)
816 817 if childpath:
817 818 changed.add(self.recode(childpath))
818 819
819 820 # Handle directory copies
820 821 if not ent.copyfrom_path or not parents:
821 822 continue
822 823 # Copy sources not in parent revisions cannot be
823 824 # represented, ignore their origin for now
824 825 pmodule, prevnum = revsplit(parents[0])[1:]
825 826 if ent.copyfrom_rev < prevnum:
826 827 continue
827 828 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
828 829 if not copyfrompath:
829 830 continue
830 831 self.ui.debug("mark %s came from %s:%d\n"
831 832 % (path, copyfrompath, ent.copyfrom_rev))
832 833 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
833 834 for childpath in children:
834 835 childpath = self.getrelpath("/" + childpath, pmodule)
835 836 if not childpath:
836 837 continue
837 838 copytopath = path + childpath[len(copyfrompath):]
838 839 copytopath = self.getrelpath(copytopath)
839 840 copies[self.recode(copytopath)] = self.recode(childpath)
840 841
841 842 self.ui.progress(_('scanning paths'), None)
842 843 changed.update(removed)
843 844 return (list(changed), removed, copies)
844 845
845 846 def _fetch_revisions(self, from_revnum, to_revnum):
846 847 if from_revnum < to_revnum:
847 848 from_revnum, to_revnum = to_revnum, from_revnum
848 849
849 850 self.child_cset = None
850 851
851 852 def parselogentry(orig_paths, revnum, author, date, message):
852 853 """Return the parsed commit object or None, and True if
853 854 the revision is a branch root.
854 855 """
855 856 self.ui.debug("parsing revision %d (%d changes)\n" %
856 857 (revnum, len(orig_paths)))
857 858
858 859 branched = False
859 860 rev = self.revid(revnum)
860 861 # branch log might return entries for a parent we already have
861 862
862 863 if rev in self.commits or revnum < to_revnum:
863 864 return None, branched
864 865
865 866 parents = []
866 867 # check whether this revision is the start of a branch or part
867 868 # of a branch renaming
868 869 orig_paths = sorted(orig_paths.iteritems())
869 870 root_paths = [(p, e) for p, e in orig_paths
870 871 if self.module.startswith(p)]
871 872 if root_paths:
872 873 path, ent = root_paths[-1]
873 874 if ent.copyfrom_path:
874 875 branched = True
875 876 newpath = ent.copyfrom_path + self.module[len(path):]
876 877 # ent.copyfrom_rev may not be the actual last revision
877 878 previd = self.latest(newpath, ent.copyfrom_rev)
878 879 if previd is not None:
879 880 prevmodule, prevnum = revsplit(previd)[1:]
880 881 if prevnum >= self.startrev:
881 882 parents = [previd]
882 883 self.ui.note(
883 884 _('found parent of branch %s at %d: %s\n') %
884 885 (self.module, prevnum, prevmodule))
885 886 else:
886 887 self.ui.debug("no copyfrom path, don't know what to do.\n")
887 888
888 889 paths = []
889 890 # filter out unrelated paths
890 891 for path, ent in orig_paths:
891 892 if self.getrelpath(path) is None:
892 893 continue
893 894 paths.append((path, ent))
894 895
895 896 # Example SVN datetime. Includes microseconds.
896 897 # ISO-8601 conformant
897 898 # '2007-01-04T17:35:00.902377Z'
898 899 date = dateutil.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
899 900 if self.ui.configbool('convert', 'localtimezone'):
900 901 date = makedatetimestamp(date[0])
901 902
902 903 if message:
903 904 log = self.recode(message)
904 905 else:
905 906 log = ''
906 907
907 908 if author:
908 909 author = self.recode(author)
909 910 else:
910 911 author = ''
911 912
912 913 try:
913 914 branch = self.module.split("/")[-1]
914 915 if branch == self.trunkname:
915 916 branch = None
916 917 except IndexError:
917 918 branch = None
918 919
919 920 cset = commit(author=author,
920 921 date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
921 922 desc=log,
922 923 parents=parents,
923 924 branch=branch,
924 925 rev=rev)
925 926
926 927 self.commits[rev] = cset
927 928 # The parents list is *shared* among self.paths and the
928 929 # commit object. Both will be updated below.
929 930 self.paths[rev] = (paths, cset.parents)
930 931 if self.child_cset and not self.child_cset.parents:
931 932 self.child_cset.parents[:] = [rev]
932 933 self.child_cset = cset
933 934 return cset, branched
934 935
935 936 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
936 937 (self.module, from_revnum, to_revnum))
937 938
938 939 try:
939 940 firstcset = None
940 941 lastonbranch = False
941 942 stream = self._getlog([self.module], from_revnum, to_revnum)
942 943 try:
943 944 for entry in stream:
944 945 paths, revnum, author, date, message = entry
945 946 if revnum < self.startrev:
946 947 lastonbranch = True
947 948 break
948 949 if not paths:
949 950 self.ui.debug('revision %d has no entries\n' % revnum)
950 951 # If we ever leave the loop on an empty
951 952 # revision, do not try to get a parent branch
952 953 lastonbranch = lastonbranch or revnum == 0
953 954 continue
954 955 cset, lastonbranch = parselogentry(paths, revnum, author,
955 956 date, message)
956 957 if cset:
957 958 firstcset = cset
958 959 if lastonbranch:
959 960 break
960 961 finally:
961 962 stream.close()
962 963
963 964 if not lastonbranch and firstcset and not firstcset.parents:
964 965 # The first revision of the sequence (the last fetched one)
965 966 # has invalid parents if not a branch root. Find the parent
966 967 # revision now, if any.
967 968 try:
968 969 firstrevnum = self.revnum(firstcset.rev)
969 970 if firstrevnum > 1:
970 971 latest = self.latest(self.module, firstrevnum - 1)
971 972 if latest:
972 973 firstcset.parents.append(latest)
973 974 except SvnPathNotFound:
974 975 pass
975 976 except svn.core.SubversionException as xxx_todo_changeme:
976 977 (inst, num) = xxx_todo_changeme.args
977 978 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
978 979 raise error.Abort(_('svn: branch has no revision %s')
979 980 % to_revnum)
980 981 raise
981 982
982 983 def getfile(self, file, rev):
983 984 # TODO: ra.get_file transmits the whole file instead of diffs.
984 985 if file in self.removed:
985 986 return None, None
986 987 mode = ''
987 988 try:
988 989 new_module, revnum = revsplit(rev)[1:]
989 990 if self.module != new_module:
990 991 self.module = new_module
991 992 self.reparent(self.module)
992 993 io = stringio()
993 994 info = svn.ra.get_file(self.ra, file, revnum, io)
994 995 data = io.getvalue()
995 996 # ra.get_file() seems to keep a reference on the input buffer
996 997 # preventing collection. Release it explicitly.
997 998 io.close()
998 999 if isinstance(info, list):
999 1000 info = info[-1]
1000 1001 mode = ("svn:executable" in info) and 'x' or ''
1001 1002 mode = ("svn:special" in info) and 'l' or mode
1002 1003 except svn.core.SubversionException as e:
1003 1004 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
1004 1005 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
1005 1006 if e.apr_err in notfound: # File not found
1006 1007 return None, None
1007 1008 raise
1008 1009 if mode == 'l':
1009 1010 link_prefix = "link "
1010 1011 if data.startswith(link_prefix):
1011 1012 data = data[len(link_prefix):]
1012 1013 return data, mode
1013 1014
1014 1015 def _iterfiles(self, path, revnum):
1015 1016 """Enumerate all files in path at revnum, recursively."""
1016 1017 path = path.strip('/')
1017 1018 pool = svn.core.Pool()
1018 1019 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1019 1020 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1020 1021 if path:
1021 1022 path += '/'
1022 1023 return ((path + p) for p, e in entries.iteritems()
1023 1024 if e.kind == svn.core.svn_node_file)
1024 1025
1025 1026 def getrelpath(self, path, module=None):
1026 1027 if module is None:
1027 1028 module = self.module
1028 1029 # Given the repository url of this wc, say
1029 1030 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1030 1031 # extract the "entry" portion (a relative path) from what
1031 1032 # svn log --xml says, i.e.
1032 1033 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1033 1034 # that is to say "tests/PloneTestCase.py"
1034 1035 if path.startswith(module):
1035 1036 relative = path.rstrip('/')[len(module):]
1036 1037 if relative.startswith('/'):
1037 1038 return relative[1:]
1038 1039 elif relative == '':
1039 1040 return relative
1040 1041
1041 1042 # The path is outside our tracked tree...
1042 1043 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1043 1044 return None
1044 1045
1045 1046 def _checkpath(self, path, revnum, module=None):
1046 1047 if module is not None:
1047 1048 prevmodule = self.reparent('')
1048 1049 path = module + '/' + path
1049 1050 try:
1050 1051 # ra.check_path does not like leading slashes very much; they lead
1051 1052 # to PROPFIND subversion errors
1052 1053 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1053 1054 finally:
1054 1055 if module is not None:
1055 1056 self.reparent(prevmodule)
1056 1057
1057 1058 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1058 1059 strict_node_history=False):
1059 1060 # Normalize path names; svn >= 1.5 only wants paths relative to
1060 1061 # the supplied URL
1061 1062 relpaths = []
1062 1063 for p in paths:
1063 1064 if not p.startswith('/'):
1064 1065 p = self.module + '/' + p
1065 1066 relpaths.append(p.strip('/'))
1066 1067 args = [self.baseurl, relpaths, start, end, limit,
1067 1068 discover_changed_paths, strict_node_history]
1068 1069 # developer config: convert.svn.debugsvnlog
1069 1070 if not self.ui.configbool('convert', 'svn.debugsvnlog'):
1070 1071 return directlogstream(*args)
1071 1072 arg = encodeargs(args)
1072 hgexe = util.hgexecutable()
1073 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1074 stdin, stdout = util.popen2(util.quotecommand(cmd))
1073 hgexe = procutil.hgexecutable()
1074 cmd = '%s debugsvnlog' % procutil.shellquote(hgexe)
1075 stdin, stdout = procutil.popen2(procutil.quotecommand(cmd))
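# A sketch of the round trip set up here (roughly; helper names are the
# ones used elsewhere in this module): the pickled argument list is
# written to the child's stdin, the spawned "hg debugsvnlog" replays the
# log request with the Subversion bindings, and logstream() below lazily
# decodes the entries the child pickles back onto stdout.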
1075 1076 stdin.write(arg)
1076 1077 try:
1077 1078 stdin.close()
1078 1079 except IOError:
1079 1080 raise error.Abort(_('Mercurial failed to run itself, check'
1080 1081 ' hg executable is in PATH'))
1081 1082 return logstream(stdout)
1082 1083
1083 1084 pre_revprop_change = '''#!/bin/sh
1084 1085
1085 1086 REPOS="$1"
1086 1087 REV="$2"
1087 1088 USER="$3"
1088 1089 PROPNAME="$4"
1089 1090 ACTION="$5"
1090 1091
1091 1092 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1092 1093 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1093 1094 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1094 1095
1095 1096 echo "Changing prohibited revision property" >&2
1096 1097 exit 1
1097 1098 '''
1098 1099
1099 1100 class svn_sink(converter_sink, commandline):
1100 1101 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1101 1102 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1102 1103
1103 1104 def prerun(self):
1104 1105 if self.wc:
1105 1106 os.chdir(self.wc)
1106 1107
1107 1108 def postrun(self):
1108 1109 if self.wc:
1109 1110 os.chdir(self.cwd)
1110 1111
1111 1112 def join(self, name):
1112 1113 return os.path.join(self.wc, '.svn', name)
1113 1114
1114 1115 def revmapfile(self):
1115 1116 return self.join('hg-shamap')
1116 1117
1117 1118 def authorfile(self):
1118 1119 return self.join('hg-authormap')
1119 1120
1120 1121 def __init__(self, ui, repotype, path):
1121 1122
1122 1123 converter_sink.__init__(self, ui, repotype, path)
1123 1124 commandline.__init__(self, ui, 'svn')
1124 1125 self.delete = []
1125 1126 self.setexec = []
1126 1127 self.delexec = []
1127 1128 self.copies = []
1128 1129 self.wc = None
1129 1130 self.cwd = pycompat.getcwd()
1130 1131
1131 1132 created = False
1132 1133 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1133 1134 self.wc = os.path.realpath(path)
1134 1135 self.run0('update')
1135 1136 else:
1136 1137 if not re.search(br'^(file|http|https|svn|svn\+ssh)\://', path):
1137 1138 path = os.path.realpath(path)
1138 1139 if os.path.isdir(os.path.dirname(path)):
1139 1140 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1140 1141 ui.status(_('initializing svn repository %r\n') %
1141 1142 os.path.basename(path))
1142 1143 commandline(ui, 'svnadmin').run0('create', path)
1143 1144 created = path
1144 1145 path = util.normpath(path)
1145 1146 if not path.startswith('/'):
1146 1147 path = '/' + path
1147 1148 path = 'file://' + path
1148 1149
1149 1150 wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
1150 1151 '-wc')
1151 1152 ui.status(_('initializing svn working copy %r\n')
1152 1153 % os.path.basename(wcpath))
1153 1154 self.run0('checkout', path, wcpath)
1154 1155
1155 1156 self.wc = wcpath
1156 1157 self.opener = vfsmod.vfs(self.wc)
1157 1158 self.wopener = vfsmod.vfs(self.wc)
1158 1159 self.childmap = mapfile(ui, self.join('hg-childmap'))
1159 1160 if util.checkexec(self.wc):
1160 1161 self.is_exec = util.isexec
1161 1162 else:
1162 1163 self.is_exec = None
1163 1164
1164 1165 if created:
1165 1166 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1166 1167 fp = open(hook, 'wb')
1167 1168 fp.write(pre_revprop_change)
1168 1169 fp.close()
1169 1170 util.setflags(hook, False, True)
1170 1171
1171 1172 output = self.run0('info')
1172 1173 self.uuid = self.uuid_re.search(output).group(1).strip()
1173 1174
1174 1175 def wjoin(self, *names):
1175 1176 return os.path.join(self.wc, *names)
1176 1177
1177 1178 @propertycache
1178 1179 def manifest(self):
1179 1180 # As of svn 1.7, the "add" command fails when receiving
1180 1181 # already tracked entries, so we have to track and filter them
1181 1182 # ourselves.
1182 1183 m = set()
1183 1184 output = self.run0('ls', recursive=True, xml=True)
1184 1185 doc = xml.dom.minidom.parseString(output)
1185 1186 for e in doc.getElementsByTagName('entry'):
1186 1187 for n in e.childNodes:
1187 1188 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1188 1189 continue
1189 1190 name = ''.join(c.data for c in n.childNodes
1190 1191 if c.nodeType == c.TEXT_NODE)
1191 1192 # Entries are compared with names coming from
1192 1193 # mercurial, so bytes with undefined encoding. Our
1193 1194 # best bet is to assume they are in local
1194 1195 # encoding. They will be passed to command line calls
1195 1196 # later anyway, so they better be.
1196 1197 m.add(encoding.unitolocal(name))
1197 1198 break
1198 1199 return m
1199 1200
1200 1201 def putfile(self, filename, flags, data):
1201 1202 if 'l' in flags:
1202 1203 self.wopener.symlink(data, filename)
1203 1204 else:
1204 1205 try:
1205 1206 if os.path.islink(self.wjoin(filename)):
1206 1207 os.unlink(filename)
1207 1208 except OSError:
1208 1209 pass
1209 1210 self.wopener.write(filename, data)
1210 1211
1211 1212 if self.is_exec:
1212 1213 if self.is_exec(self.wjoin(filename)):
1213 1214 if 'x' not in flags:
1214 1215 self.delexec.append(filename)
1215 1216 else:
1216 1217 if 'x' in flags:
1217 1218 self.setexec.append(filename)
1218 1219 util.setflags(self.wjoin(filename), False, 'x' in flags)
1219 1220
1220 1221 def _copyfile(self, source, dest):
1221 1222 # SVN's copy command pukes if the destination file exists, but
1222 1223 # our copyfile method expects to record a copy that has
1223 1224 # already occurred. Cross the semantic gap.
1224 1225 wdest = self.wjoin(dest)
1225 1226 exists = os.path.lexists(wdest)
1226 1227 if exists:
1227 1228 fd, tempname = tempfile.mkstemp(
1228 1229 prefix='hg-copy-', dir=os.path.dirname(wdest))
1229 1230 os.close(fd)
1230 1231 os.unlink(tempname)
1231 1232 os.rename(wdest, tempname)
1232 1233 try:
1233 1234 self.run0('copy', source, dest)
1234 1235 finally:
1235 1236 self.manifest.add(dest)
1236 1237 if exists:
1237 1238 try:
1238 1239 os.unlink(wdest)
1239 1240 except OSError:
1240 1241 pass
1241 1242 os.rename(tempname, wdest)
1242 1243
1243 1244 def dirs_of(self, files):
1244 1245 dirs = set()
1245 1246 for f in files:
1246 1247 if os.path.isdir(self.wjoin(f)):
1247 1248 dirs.add(f)
1248 1249 i = len(f)
1249 1250 for i in iter(lambda: f.rfind('/', 0, i), -1):
1250 1251 dirs.add(f[:i])
1251 1252 return dirs
1252 1253
1253 1254 def add_dirs(self, files):
1254 1255 add_dirs = [d for d in sorted(self.dirs_of(files))
1255 1256 if d not in self.manifest]
1256 1257 if add_dirs:
1257 1258 self.manifest.update(add_dirs)
1258 1259 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1259 1260 return add_dirs
1260 1261
1261 1262 def add_files(self, files):
1262 1263 files = [f for f in files if f not in self.manifest]
1263 1264 if files:
1264 1265 self.manifest.update(files)
1265 1266 self.xargs(files, 'add', quiet=True)
1266 1267 return files
1267 1268
1268 1269 def addchild(self, parent, child):
1269 1270 self.childmap[parent] = child
1270 1271
1271 1272 def revid(self, rev):
1272 1273 return u"svn:%s@%s" % (self.uuid, rev)
1273 1274
1274 1275 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1275 1276 cleanp2):
1276 1277 for parent in parents:
1277 1278 try:
1278 1279 return self.revid(self.childmap[parent])
1279 1280 except KeyError:
1280 1281 pass
1281 1282
1282 1283 # Apply changes to working copy
1283 1284 for f, v in files:
1284 1285 data, mode = source.getfile(f, v)
1285 1286 if data is None:
1286 1287 self.delete.append(f)
1287 1288 else:
1288 1289 self.putfile(f, mode, data)
1289 1290 if f in copies:
1290 1291 self.copies.append([copies[f], f])
1291 1292 if full:
1292 1293 self.delete.extend(sorted(self.manifest.difference(files)))
1293 1294 files = [f[0] for f in files]
1294 1295
1295 1296 entries = set(self.delete)
1296 1297 files = frozenset(files)
1297 1298 entries.update(self.add_dirs(files.difference(entries)))
1298 1299 if self.copies:
1299 1300 for s, d in self.copies:
1300 1301 self._copyfile(s, d)
1301 1302 self.copies = []
1302 1303 if self.delete:
1303 1304 self.xargs(self.delete, 'delete')
1304 1305 for f in self.delete:
1305 1306 self.manifest.remove(f)
1306 1307 self.delete = []
1307 1308 entries.update(self.add_files(files.difference(entries)))
1308 1309 if self.delexec:
1309 1310 self.xargs(self.delexec, 'propdel', 'svn:executable')
1310 1311 self.delexec = []
1311 1312 if self.setexec:
1312 1313 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1313 1314 self.setexec = []
1314 1315
1315 1316 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1316 1317 fp = os.fdopen(fd, r'wb')
1317 1318 fp.write(util.tonativeeol(commit.desc))
1318 1319 fp.close()
1319 1320 try:
1320 1321 output = self.run0('commit',
1321 1322 username=stringutil.shortuser(commit.author),
1322 1323 file=messagefile,
1323 1324 encoding='utf-8')
1324 1325 try:
1325 1326 rev = self.commit_re.search(output).group(1)
1326 1327 except AttributeError:
1327 1328 if parents and not files:
1328 1329 return parents[0]
1329 1330 self.ui.warn(_('unexpected svn output:\n'))
1330 1331 self.ui.warn(output)
1331 1332 raise error.Abort(_('unable to cope with svn output'))
1332 1333 if commit.rev:
1333 1334 self.run('propset', 'hg:convert-rev', commit.rev,
1334 1335 revprop=True, revision=rev)
1335 1336 if commit.branch and commit.branch != 'default':
1336 1337 self.run('propset', 'hg:convert-branch', commit.branch,
1337 1338 revprop=True, revision=rev)
1338 1339 for parent in parents:
1339 1340 self.addchild(parent, rev)
1340 1341 return self.revid(rev)
1341 1342 finally:
1342 1343 os.unlink(messagefile)
1343 1344
1344 1345 def puttags(self, tags):
1345 1346 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1346 1347 return None, None
1347 1348
1348 1349 def hascommitfrommap(self, rev):
1349 1350 # We trust that revisions referenced in a map are still present
1350 1351 # TODO: implement something better if necessary and feasible
1351 1352 return True
1352 1353
1353 1354 def hascommitforsplicemap(self, rev):
1354 1355 # This is not correct as one can convert to an existing subversion
1355 1356 # repository and childmap would not list all revisions. Too bad.
1356 1357 if rev in self.childmap:
1357 1358 return True
1358 1359 raise error.Abort(_('splice map revision %s not found in subversion '
1359 1360 'child map (revision lookups are not implemented)')
1360 1361 % rev)
@@ -1,421 +1,422
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to allow external programs to compare revisions
9 9
10 10 The extdiff Mercurial extension allows you to use external programs
11 11 to compare revisions, or revision with working directory. The external
12 12 diff programs are called with a configurable set of options and two
13 13 non-option arguments: paths to directories containing snapshots of
14 14 files to compare.
15 15
16 16 The extdiff extension also allows you to configure new diff commands, so
17 17 you do not always need to type :hg:`extdiff -p kdiff3`. ::
18 18
19 19 [extdiff]
20 20 # add new command that runs GNU diff(1) in 'context diff' mode
21 21 cdiff = gdiff -Nprc5
22 22 ## or the old way:
23 23 #cmd.cdiff = gdiff
24 24 #opts.cdiff = -Nprc5
25 25
26 26 # add new command called meld, runs meld (no need to name twice). If
27 27 # the meld executable is not available, the meld tool in [merge-tools]
28 28 # will be used, if available
29 29 meld =
30 30
31 31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
32 32 # (see http://www.vim.org/scripts/script.php?script_id=102) Non-English
33 33 # users should put "let g:DirDiffDynamicDiffText = 1" in
34 34 # your .vimrc
35 35 vimdiff = gvim -f "+next" \\
36 36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
37 37
38 38 Tool arguments can include variables that are expanded at runtime::
39 39
40 40 $parent1, $plabel1 - filename, descriptive label of first parent
41 41 $child, $clabel - filename, descriptive label of child revision
42 42 $parent2, $plabel2 - filename, descriptive label of second parent
43 43 $root - repository root
44 44 $parent is an alias for $parent1.
45 45
46 46 The extdiff extension will look in your [diff-tools] and [merge-tools]
47 47 sections for diff tool arguments, when none are specified in [extdiff].
48 48
49 49 ::
50 50
51 51 [extdiff]
52 52 kdiff3 =
53 53
54 54 [diff-tools]
55 55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
56 56
57 57 You can use -I/-X and a list of file or directory names as with the
58 58 normal :hg:`diff` command. The extdiff extension makes snapshots of only
59 59 needed files, so running the external diff program will actually be
60 60 pretty fast (at least faster than having to compare the entire tree).
61 61 '''
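# A quick worked example (hypothetical tool name): with "[extdiff] meld ="
# configured, the extension registers an "hg meld" command, so
#   hg meld -r 1.5 -r tip src/
# snapshots just the changed files under src/ and hands the two snapshot
# directories to meld.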
62 62
63 63 from __future__ import absolute_import
64 64
65 65 import os
66 66 import re
67 67 import shutil
68 68 import stat
69 69 import tempfile
70 70 from mercurial.i18n import _
71 71 from mercurial.node import (
72 72 nullid,
73 73 short,
74 74 )
75 75 from mercurial import (
76 76 archival,
77 77 cmdutil,
78 78 error,
79 79 filemerge,
80 80 pycompat,
81 81 registrar,
82 82 scmutil,
83 83 util,
84 84 )
85 85 from mercurial.utils import (
86 procutil,
86 87 stringutil,
87 88 )
88 89
89 90 cmdtable = {}
90 91 command = registrar.command(cmdtable)
91 92
92 93 configtable = {}
93 94 configitem = registrar.configitem(configtable)
94 95
95 96 configitem('extdiff', br'opts\..*',
96 97 default='',
97 98 generic=True,
98 99 )
99 100
100 101 configitem('diff-tools', br'.*\.diffargs$',
101 102 default=None,
102 103 generic=True,
103 104 )
104 105
105 106 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
106 107 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
107 108 # be specifying the version(s) of Mercurial they are tested with, or
108 109 # leave the attribute unspecified.
109 110 testedwith = 'ships-with-hg-core'
110 111
111 112 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
112 113 '''snapshot files as of some revision
113 114 without snapshots, -I/-X would not work and the recursive diff
114 115 in tools like kdiff3 and meld would display too many files.'''
115 116 dirname = os.path.basename(repo.root)
116 117 if dirname == "":
117 118 dirname = "root"
118 119 if node is not None:
119 120 dirname = '%s.%s' % (dirname, short(node))
120 121 base = os.path.join(tmproot, dirname)
121 122 os.mkdir(base)
122 123 fnsandstat = []
123 124
124 125 if node is not None:
125 126 ui.note(_('making snapshot of %d files from rev %s\n') %
126 127 (len(files), short(node)))
127 128 else:
128 129 ui.note(_('making snapshot of %d files from working directory\n') %
129 130 (len(files)))
130 131
131 132 if files:
132 133 repo.ui.setconfig("ui", "archivemeta", False)
133 134
134 135 archival.archive(repo, base, node, 'files',
135 136 matchfn=scmutil.matchfiles(repo, files),
136 137 subrepos=listsubrepos)
137 138
138 139 for fn in sorted(files):
139 140 wfn = util.pconvert(fn)
140 141 ui.note(' %s\n' % wfn)
141 142
142 143 if node is None:
143 144 dest = os.path.join(base, wfn)
144 145
145 146 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
146 147 return dirname, fnsandstat
147 148
148 149 def dodiff(ui, repo, cmdline, pats, opts):
149 150 '''Do the actual diff:
150 151
151 152 - copy to a temp structure if diffing 2 internal revisions
152 153 - copy to a temp structure if diffing working revision with
153 154 another one and more than 1 file is changed
154 155 - just invoke the diff for a single file in the working dir
155 156 '''
156 157
157 158 revs = opts.get('rev')
158 159 change = opts.get('change')
159 160 do3way = '$parent2' in cmdline
160 161
161 162 if revs and change:
162 163 msg = _('cannot specify --rev and --change at the same time')
163 164 raise error.Abort(msg)
164 165 elif change:
165 166 node2 = scmutil.revsingle(repo, change, None).node()
166 167 node1a, node1b = repo.changelog.parents(node2)
167 168 else:
168 169 node1a, node2 = scmutil.revpair(repo, revs)
169 170 if not revs:
170 171 node1b = repo.dirstate.p2()
171 172 else:
172 173 node1b = nullid
173 174
174 175 # Disable 3-way merge if there is only one parent
175 176 if do3way:
176 177 if node1b == nullid:
177 178 do3way = False
178 179
179 180 subrepos = opts.get('subrepos')
180 181
181 182 matcher = scmutil.match(repo[node2], pats, opts)
182 183
183 184 if opts.get('patch'):
184 185 if subrepos:
185 186 raise error.Abort(_('--patch cannot be used with --subrepos'))
186 187 if node2 is None:
187 188 raise error.Abort(_('--patch requires two revisions'))
188 189 else:
189 190 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
190 191 listsubrepos=subrepos)[:3])
191 192 if do3way:
192 193 mod_b, add_b, rem_b = map(set,
193 194 repo.status(node1b, node2, matcher,
194 195 listsubrepos=subrepos)[:3])
195 196 else:
196 197 mod_b, add_b, rem_b = set(), set(), set()
197 198 modadd = mod_a | add_a | mod_b | add_b
198 199 common = modadd | rem_a | rem_b
199 200 if not common:
200 201 return 0
201 202
202 203 tmproot = tempfile.mkdtemp(prefix='extdiff.')
203 204 try:
204 205 if not opts.get('patch'):
205 206 # Always make a copy of node1a (and node1b, if applicable)
206 207 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
207 208 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
208 209 subrepos)[0]
209 210 rev1a = '@%d' % repo[node1a].rev()
210 211 if do3way:
211 212 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
212 213 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
213 214 subrepos)[0]
214 215 rev1b = '@%d' % repo[node1b].rev()
215 216 else:
216 217 dir1b = None
217 218 rev1b = ''
218 219
219 220 fnsandstat = []
220 221
221 222 # If node2 is not the wc or there is >1 change, copy it
222 223 dir2root = ''
223 224 rev2 = ''
224 225 if node2:
225 226 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
226 227 rev2 = '@%d' % repo[node2].rev()
227 228 elif len(common) > 1:
228 229 # we only actually need to get the files to copy back to
229 230 # the working dir in this case (because the other cases
230 231 # are: diffing 2 revisions or single file -- in which case
231 232 # the file is already directly passed to the diff tool).
232 233 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
233 234 subrepos)
234 235 else:
235 236 # This lets the diff tool open the changed file directly
236 237 dir2 = ''
237 238 dir2root = repo.root
238 239
239 240 label1a = rev1a
240 241 label1b = rev1b
241 242 label2 = rev2
242 243
243 244 # If only one change, diff the files instead of the directories
244 245 # Handle bogus modifies correctly by checking if the files exist
245 246 if len(common) == 1:
246 247 common_file = util.localpath(common.pop())
247 248 dir1a = os.path.join(tmproot, dir1a, common_file)
248 249 label1a = common_file + rev1a
249 250 if not os.path.isfile(dir1a):
250 251 dir1a = os.devnull
251 252 if do3way:
252 253 dir1b = os.path.join(tmproot, dir1b, common_file)
253 254 label1b = common_file + rev1b
254 255 if not os.path.isfile(dir1b):
255 256 dir1b = os.devnull
256 257 dir2 = os.path.join(dir2root, dir2, common_file)
257 258 label2 = common_file + rev2
258 259 else:
259 260 template = 'hg-%h.patch'
260 261 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
261 262 fntemplate=repo.vfs.reljoin(tmproot, template),
262 263 match=matcher)
263 264 label1a = cmdutil.makefilename(repo[node1a], template)
264 265 label2 = cmdutil.makefilename(repo[node2], template)
265 266 dir1a = repo.vfs.reljoin(tmproot, label1a)
266 267 dir2 = repo.vfs.reljoin(tmproot, label2)
267 268 dir1b = None
268 269 label1b = None
269 270 fnsandstat = []
270 271
271 272 # Function to quote file/dir names in the argument string.
272 273 # When not operating in 3-way mode, an empty string is
273 274 # returned for parent2
274 275 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
275 276 'plabel1': label1a, 'plabel2': label1b,
276 277 'clabel': label2, 'child': dir2,
277 278 'root': repo.root}
278 279 def quote(match):
279 280 pre = match.group(2)
280 281 key = match.group(3)
281 282 if not do3way and key == 'parent2':
282 283 return pre
283 return pre + util.shellquote(replace[key])
284 return pre + procutil.shellquote(replace[key])
284 285
285 286 # Match parent2 first, so 'parent1?' will match both parent1 and parent
286 287 regex = (br'''(['"]?)([^\s'"$]*)'''
287 288 br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
288 289 if not do3way and not re.search(regex, cmdline):
289 290 cmdline += ' $parent1 $child'
290 291 cmdline = re.sub(regex, quote, cmdline)
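# For example, a configured cmdline of just "meld" contains no
# $-variables, so ' $parent1 $child' is appended above and the
# substitution expands it to something like (run from tmproot):
#   meld 'repo.a1b2c3d4e5f6' 'repo'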
291 292
292 293 ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot))
293 294 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
294 295
295 296 for copy_fn, working_fn, st in fnsandstat:
296 297 cpstat = os.lstat(copy_fn)
297 298 # Some tools copy the file and attributes, so mtime may not detect
298 299 # all changes. A size check will detect more cases, but not all.
299 300 # The only certain way to detect every case is to diff all files,
300 301 # which could be expensive.
301 302 # copyfile() carries over the permission, so the mode check could
302 303 # be in an 'elif' branch, but for the case where the file has
303 304 # changed without affecting mtime or size.
304 305 if (cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
305 306 or cpstat.st_size != st.st_size
306 307 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
307 308 ui.debug('file changed while diffing. '
308 309 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
309 310 util.copyfile(copy_fn, working_fn)
310 311
311 312 return 1
312 313 finally:
313 314 ui.note(_('cleaning up temp directory\n'))
314 315 shutil.rmtree(tmproot)
315 316
316 317 extdiffopts = [
317 318 ('o', 'option', [],
318 319 _('pass option to comparison program'), _('OPT')),
319 320 ('r', 'rev', [], _('revision'), _('REV')),
320 321 ('c', 'change', '', _('change made by revision'), _('REV')),
321 322 ('', 'patch', None, _('compare patches for two revisions'))
322 323 ] + cmdutil.walkopts + cmdutil.subrepoopts
323 324
324 325 @command('extdiff',
325 326 [('p', 'program', '', _('comparison program to run'), _('CMD')),
326 327 ] + extdiffopts,
327 328 _('hg extdiff [OPT]... [FILE]...'),
328 329 inferrepo=True)
329 330 def extdiff(ui, repo, *pats, **opts):
330 331 '''use external program to diff repository (or selected files)
331 332
332 333 Show differences between revisions for the specified files, using
333 334 an external program. The default program used is diff, with
334 335 default options "-Npru".
335 336
336 337 To select a different program, use the -p/--program option. The
337 338 program will be passed the names of two directories to compare. To
338 339 pass additional options to the program, use -o/--option. These
339 340 will be passed before the names of the directories to compare.
340 341
341 342 When two revision arguments are given, then changes are shown
342 343 between those revisions. If only one revision is specified then
343 344 that revision is compared to the working directory, and, when no
344 345 revisions are specified, the working directory files are compared
345 346 to its parent.'''
346 347 opts = pycompat.byteskwargs(opts)
347 348 program = opts.get('program')
348 349 option = opts.get('option')
349 350 if not program:
350 351 program = 'diff'
351 352 option = option or ['-Npru']
352 cmdline = ' '.join(map(util.shellquote, [program] + option))
353 cmdline = ' '.join(map(procutil.shellquote, [program] + option))
353 354 return dodiff(ui, repo, cmdline, pats, opts)
354 355
355 356 class savedcmd(object):
356 357 """use external program to diff repository (or selected files)
357 358
358 359 Show differences between revisions for the specified files, using
359 360 the following program::
360 361
361 362 %(path)s
362 363
363 364 When two revision arguments are given, then changes are shown
364 365 between those revisions. If only one revision is specified then
365 366 that revision is compared to the working directory, and, when no
366 367 revisions are specified, the working directory files are compared
367 368 to its parent.
368 369 """
369 370
370 371 def __init__(self, path, cmdline):
371 372 # We can't pass non-ASCII through docstrings (and path is
372 373 # in an unknown encoding anyway)
373 374 docpath = stringutil.escapestr(path)
374 375 self.__doc__ %= {r'path': pycompat.sysstr(stringutil.uirepr(docpath))}
375 376 self._cmdline = cmdline
376 377
377 378 def __call__(self, ui, repo, *pats, **opts):
378 379 opts = pycompat.byteskwargs(opts)
379 options = ' '.join(map(util.shellquote, opts['option']))
380 options = ' '.join(map(procutil.shellquote, opts['option']))
380 381 if options:
381 382 options = ' ' + options
382 383 return dodiff(ui, repo, self._cmdline + options, pats, opts)
383 384
384 385 def uisetup(ui):
385 386 for cmd, path in ui.configitems('extdiff'):
386 387 path = util.expandpath(path)
387 388 if cmd.startswith('cmd.'):
388 389 cmd = cmd[4:]
389 390 if not path:
390 path = util.findexe(cmd)
391 path = procutil.findexe(cmd)
391 392 if path is None:
392 393 path = filemerge.findexternaltool(ui, cmd) or cmd
393 394 diffopts = ui.config('extdiff', 'opts.' + cmd)
394 cmdline = util.shellquote(path)
395 cmdline = procutil.shellquote(path)
395 396 if diffopts:
396 397 cmdline += ' ' + diffopts
397 398 elif cmd.startswith('opts.'):
398 399 continue
399 400 else:
400 401 if path:
401 402 # case "cmd = path opts"
402 403 cmdline = path
403 404 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
404 405 else:
405 406 # case "cmd ="
406 path = util.findexe(cmd)
407 path = procutil.findexe(cmd)
407 408 if path is None:
408 409 path = filemerge.findexternaltool(ui, cmd) or cmd
409 cmdline = util.shellquote(path)
410 cmdline = procutil.shellquote(path)
410 411 diffopts = False
411 412 # look for diff arguments in [diff-tools] then [merge-tools]
412 413 if not diffopts:
413 414 args = ui.config('diff-tools', cmd+'.diffargs') or \
414 415 ui.config('merge-tools', cmd+'.diffargs')
415 416 if args:
416 417 cmdline += ' ' + args
417 418 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
418 419 inferrepo=True)(savedcmd(path, cmdline))
419 420
420 421 # tell hggettext to extract docstrings from these functions:
421 422 i18nfunctions = [savedcmd]
@@ -1,1076 +1,1078
1 1 # githelp.py - Try to map Git commands to Mercurial equivalents.
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """try mapping git commands to Mercurial commands
8 8
9 9 Tries to map a given git command to a Mercurial command:
10 10
11 11 $ hg githelp -- git checkout master
12 12 hg update master
13 13
14 14 If an unknown command or parameter combination is detected, an error is
15 15 produced.
16 16 """
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import getopt
21 21 import re
22 22
23 23 from mercurial.i18n import _
24 24 from mercurial import (
25 25 encoding,
26 26 error,
27 27 fancyopts,
28 28 registrar,
29 util,
29 )
30 from mercurial.utils import (
31 procutil,
30 32 )
31 33
32 34 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
33 35 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
34 36 # be specifying the version(s) of Mercurial they are tested with, or
35 37 # leave the attribute unspecified.
36 38 testedwith = 'ships-with-hg-core'
37 39
38 40 cmdtable = {}
39 41 command = registrar.command(cmdtable)
40 42
41 43 def convert(s):
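# A few worked examples of this mapping (sketch):
#   convert('origin/master') -> 'master'
#   convert('HEAD~')         -> '.~1'
#   convert('HEAD^')         -> '.^'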
42 44 if s.startswith("origin/"):
43 45 return s[7:]
44 46 if 'HEAD' in s:
45 47 s = s.replace('HEAD', '.')
46 48 # HEAD~ in git is .~1 in mercurial
47 49 s = re.sub('~$', '~1', s)
48 50 return s
49 51
50 52 @command('^githelp|git', [
51 53 ], _('hg githelp'))
52 54 def githelp(ui, repo, *args, **kwargs):
53 55 '''suggests the Mercurial equivalent of the given git command
54 56
55 57 Usage: hg githelp -- <git command>
56 58 '''
57 59
58 60 if len(args) == 0 or (len(args) == 1 and args[0] == 'git'):
59 61 raise error.Abort(_('missing git command - '
60 62 'usage: hg githelp -- <git command>'))
61 63
62 64 if args[0] == 'git':
63 65 args = args[1:]
64 66
65 67 cmd = args[0]
66 68 if cmd not in gitcommands:
67 69 raise error.Abort("error: unknown git command %s" % (cmd))
68 70
69 71 ui.pager('githelp')
70 72 args = args[1:]
71 73 return gitcommands[cmd](ui, repo, *args, **kwargs)
72 74
73 75 def parseoptions(ui, cmdoptions, args):
74 76 cmdoptions = list(cmdoptions)
75 77 opts = {}
76 78 args = list(args)
77 79 while True:
78 80 try:
79 81 args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
80 82 break
81 83 except getopt.GetoptError as ex:
82 84 flag = None
83 85 if "requires argument" in ex.msg:
84 86 raise
85 87 if ('--' + ex.opt) in ex.msg:
86 88 flag = '--' + ex.opt
87 89 elif ('-' + ex.opt) in ex.msg:
88 90 flag = '-' + ex.opt
89 91 else:
90 92 raise error.Abort("unknown option %s" % ex.opt)
91 93 try:
92 94 args.remove(flag)
93 95 except Exception:
94 96 raise error.Abort(
95 97 "unknown option {0} packed with other options\n"
96 98 "Please try passing the option as it's own flag: -{0}" \
97 99 .format(ex.opt))
98 100
99 101 ui.warn(_("ignoring unknown option %s\n") % flag)
100 102
101 103 args = list([convert(x) for x in args])
102 104 opts = dict([(k, convert(v)) if isinstance(v, str) else (k, v)
103 105 for k, v in opts.iteritems()])
104 106
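# For example, parsing ['--force', '--unknown', 'master'] against a
# cmdoptions table that only defines --force warns "ignoring unknown
# option --unknown" and returns roughly (['master'], {'force': True}).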
105 107 return args, opts
106 108
107 109 class Command(object):
108 110 def __init__(self, name):
109 111 self.name = name
110 112 self.args = []
111 113 self.opts = {}
112 114
113 115 def __bytes__(self):
114 116 cmd = "hg " + self.name
115 117 if self.opts:
116 118 for k, values in sorted(self.opts.iteritems()):
117 119 for v in values:
118 120 if v:
119 121 cmd += " %s %s" % (k, v)
120 122 else:
121 123 cmd += " %s" % (k,)
122 124 if self.args:
123 125 cmd += " "
124 126 cmd += " ".join(self.args)
125 127 return cmd
126 128
127 129 __str__ = encoding.strmethod(__bytes__)
128 130
129 131 def append(self, value):
130 132 self.args.append(value)
131 133
132 134 def extend(self, values):
133 135 self.args.extend(values)
134 136
135 137 def __setitem__(self, key, value):
136 138 values = self.opts.setdefault(key, [])
137 139 values.append(value)
138 140
139 141 def __and__(self, other):
140 142 return AndCommand(self, other)
141 143
142 144 class AndCommand(object):
143 145 def __init__(self, left, right):
144 146 self.left = left
145 147 self.right = right
146 148
147 149 def __str__(self):
148 150 return "%s && %s" % (self.left, self.right)
149 151
150 152 def __and__(self, other):
151 153 return AndCommand(self, other)
152 154
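# Worked examples of the two helpers above: a Command('pull') with
# cmd['-r'] = 'tip' renders as "hg pull -r tip", and
# Command('pull') & Command('update') renders as "hg pull && hg update".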
153 155 def add(ui, repo, *args, **kwargs):
154 156 cmdoptions = [
155 157 ('A', 'all', None, ''),
156 158 ('p', 'patch', None, ''),
157 159 ]
158 160 args, opts = parseoptions(ui, cmdoptions, args)
159 161
160 162 if (opts.get('patch')):
161 163 ui.status(_("note: Mercurial will commit when complete, "
162 164 "as there is no staging area in Mercurial\n\n"))
163 165 cmd = Command('commit --interactive')
164 166 else:
165 167 cmd = Command("add")
166 168
167 169 if not opts.get('all'):
168 170 cmd.extend(args)
169 171 else:
170 172 ui.status(_("note: use hg addremove to remove files that have "
171 173 "been deleted.\n\n"))
172 174
173 175 ui.status((bytes(cmd)), "\n")
174 176
175 177 def am(ui, repo, *args, **kwargs):
176 178 cmdoptions=[
177 179 ]
178 180 args, opts = parseoptions(ui, cmdoptions, args)
179 181 cmd = Command('import')
180 182 ui.status(bytes(cmd), "\n")
181 183
182 184 def apply(ui, repo, *args, **kwargs):
183 185 cmdoptions = [
184 186 ('p', 'p', int, ''),
185 187 ]
186 188 args, opts = parseoptions(ui, cmdoptions, args)
187 189
188 190 cmd = Command('import --no-commit')
189 191 if (opts.get('p')):
190 192 cmd['-p'] = opts.get('p')
191 193 cmd.extend(args)
192 194
193 195 ui.status((bytes(cmd)), "\n")
194 196
195 197 def bisect(ui, repo, *args, **kwargs):
196 198 ui.status(_("See 'hg help bisect' for how to use bisect.\n\n"))
197 199
198 200 def blame(ui, repo, *args, **kwargs):
199 201 cmdoptions = [
200 202 ]
201 203 args, opts = parseoptions(ui, cmdoptions, args)
202 204 cmd = Command('annotate -udl')
203 205 cmd.extend([convert(v) for v in args])
204 206 ui.status((bytes(cmd)), "\n")
205 207
206 208 def branch(ui, repo, *args, **kwargs):
207 209 cmdoptions = [
208 210 ('', 'set-upstream', None, ''),
209 211 ('', 'set-upstream-to', '', ''),
210 212 ('d', 'delete', None, ''),
211 213 ('D', 'delete', None, ''),
212 214 ('m', 'move', None, ''),
213 215 ('M', 'move', None, ''),
214 216 ]
215 217 args, opts = parseoptions(ui, cmdoptions, args)
216 218
217 219 cmd = Command("bookmark")
218 220
219 221 if opts.get('set_upstream') or opts.get('set_upstream_to'):
220 222 ui.status(_("Mercurial has no concept of upstream branches\n"))
221 223 return
222 224 elif opts.get('delete'):
223 225 cmd = Command("strip")
224 226 for branch in args:
225 227 cmd['-B'] = branch
226 228 else:
227 229 cmd['-B'] = None
228 230 elif opts.get('move'):
229 231 if len(args) > 0:
230 232 if len(args) > 1:
231 233 old = args.pop(0)
232 234 else:
233 235 # shell command to output the active bookmark for the active
234 236 # revision
235 237 old = '`hg log -T"{activebookmark}" -r .`'
236 238 new = args[0]
237 239 cmd['-m'] = old
238 240 cmd.append(new)
239 241 else:
240 242 if len(args) > 1:
241 243 cmd['-r'] = args[1]
242 244 cmd.append(args[0])
243 245 elif len(args) == 1:
244 246 cmd.append(args[0])
245 247 ui.status((bytes(cmd)), "\n")
246 248
247 249 def ispath(repo, string):
248 250 """
249 251 The first argument to git checkout can either be a revision or a path. Let's
250 252 generally assume it's a revision, unless it's obviously a path. There are
251 253 too many ways to spell revisions in git for us to reasonably catch all of
252 254 them, so let's be conservative.
253 255 """
254 256 if string in repo:
255 257 # if it's definitely a revision let's not even check if a file of the
256 258 # same name exists.
257 259 return False
258 260
259 261 cwd = repo.getcwd()
260 262 if cwd == '':
261 263 repopath = string
262 264 else:
263 265 repopath = cwd + '/' + string
264 266
265 267 exists = repo.wvfs.exists(repopath)
266 268 if exists:
267 269 return True
268 270
269 271 manifest = repo['.'].manifest()
270 272
271 273 didexist = (repopath in manifest) or manifest.hasdir(repopath)
272 274
273 275 return didexist
274 276
275 277 def checkout(ui, repo, *args, **kwargs):
276 278 cmdoptions = [
277 279 ('b', 'branch', '', ''),
278 280 ('B', 'branch', '', ''),
279 281 ('f', 'force', None, ''),
280 282 ('p', 'patch', None, ''),
281 283 ]
282 284 paths = []
283 285 if '--' in args:
284 286 sepindex = args.index('--')
285 287 paths.extend(args[sepindex + 1:])
286 288 args = args[:sepindex]
287 289
288 290 args, opts = parseoptions(ui, cmdoptions, args)
289 291
290 292 rev = None
291 293 if args and ispath(repo, args[0]):
292 294 paths = args + paths
293 295 elif args:
294 296 rev = args[0]
295 297 paths = args[1:] + paths
296 298
297 299 cmd = Command('update')
298 300
299 301 if opts.get('force'):
300 302 if paths or rev:
301 303 cmd['-C'] = None
302 304
303 305 if opts.get('patch'):
304 306 cmd = Command('revert')
305 307 cmd['-i'] = None
306 308
307 309 if opts.get('branch'):
308 310 if len(args) == 0:
309 311 cmd = Command('bookmark')
310 312 cmd.append(opts.get('branch'))
311 313 else:
312 314 cmd.append(args[0])
313 315 bookcmd = Command('bookmark')
314 316 bookcmd.append(opts.get('branch'))
315 317 cmd = cmd & bookcmd
316 318 # if there is any path argument supplied, use revert instead of update
317 319 elif len(paths) > 0:
318 320 ui.status(_("note: use --no-backup to avoid creating .orig files\n\n"))
319 321 cmd = Command('revert')
320 322 if opts.get('patch'):
321 323 cmd['-i'] = None
322 324 if rev:
323 325 cmd['-r'] = rev
324 326 cmd.extend(paths)
325 327 elif rev:
326 328 if opts.get('patch'):
327 329 cmd['-r'] = rev
328 330 else:
329 331 cmd.append(rev)
330 332 elif opts.get('force'):
331 333 cmd = Command('revert')
332 334 cmd['--all'] = None
333 335 else:
334 336 raise error.Abort("a commit must be specified")
335 337
336 338 ui.status((bytes(cmd)), "\n")
337 339
338 340 def cherrypick(ui, repo, *args, **kwargs):
339 341 cmdoptions = [
340 342 ('', 'continue', None, ''),
341 343 ('', 'abort', None, ''),
342 344 ('e', 'edit', None, ''),
343 345 ]
344 346 args, opts = parseoptions(ui, cmdoptions, args)
345 347
346 348 cmd = Command('graft')
347 349
348 350 if opts.get('edit'):
349 351 cmd['--edit'] = None
350 352 if opts.get('continue'):
351 353 cmd['--continue'] = None
352 354 elif opts.get('abort'):
353 355 ui.status(_("note: hg graft does not have --abort.\n\n"))
354 356 return
355 357 else:
356 358 cmd.extend(args)
357 359
358 360 ui.status((bytes(cmd)), "\n")
359 361
360 362 def clean(ui, repo, *args, **kwargs):
361 363 cmdoptions = [
362 364 ('d', 'd', None, ''),
363 365 ('f', 'force', None, ''),
364 366 ('x', 'x', None, ''),
365 367 ]
366 368 args, opts = parseoptions(ui, cmdoptions, args)
367 369
368 370 cmd = Command('purge')
369 371 if opts.get('x'):
370 372 cmd['--all'] = None
371 373 cmd.extend(args)
372 374
373 375 ui.status((bytes(cmd)), "\n")
374 376
375 377 def clone(ui, repo, *args, **kwargs):
376 378 cmdoptions = [
377 379 ('', 'bare', None, ''),
378 380 ('n', 'no-checkout', None, ''),
379 381 ('b', 'branch', '', ''),
380 382 ]
381 383 args, opts = parseoptions(ui, cmdoptions, args)
382 384
383 385 if len(args) == 0:
384 386 raise error.Abort("a repository to clone must be specified")
385 387
386 388 cmd = Command('clone')
387 389 cmd.append(args[0])
388 390 if len(args) > 1:
389 391 cmd.append(args[1])
390 392
391 393 if opts.get('bare'):
392 394 cmd['-U'] = None
393 395 ui.status(_("note: Mercurial does not have bare clones. " +
394 396 "-U will clone the repo without checking out a commit\n\n"))
395 397 elif opts.get('no_checkout'):
396 398 cmd['-U'] = None
397 399
398 400 if opts.get('branch'):
399 401 cocmd = Command("update")
400 402 cocmd.append(opts.get('branch'))
401 403 cmd = cmd & cocmd
402 404
403 405 ui.status((bytes(cmd)), "\n")
404 406
405 407 def commit(ui, repo, *args, **kwargs):
406 408 cmdoptions = [
407 409 ('a', 'all', None, ''),
408 410 ('m', 'message', '', ''),
409 411 ('p', 'patch', None, ''),
410 412 ('C', 'reuse-message', '', ''),
411 413 ('F', 'file', '', ''),
412 414 ('', 'author', '', ''),
413 415 ('', 'date', '', ''),
414 416 ('', 'amend', None, ''),
415 417 ('', 'no-edit', None, ''),
416 418 ]
417 419 args, opts = parseoptions(ui, cmdoptions, args)
418 420
419 421 cmd = Command('commit')
420 422 if opts.get('patch'):
421 423 cmd = Command('commit --interactive')
422 424
423 425 if opts.get('amend'):
424 426 if opts.get('no_edit'):
425 427 cmd = Command('amend')
426 428 else:
427 429 cmd['--amend'] = None
428 430
429 431 if opts.get('reuse_message'):
430 432 cmd['-M'] = opts.get('reuse_message')
431 433
432 434 if opts.get('message'):
433 435 cmd['-m'] = "'%s'" % (opts.get('message'),)
434 436
435 437 if opts.get('all'):
436 438 ui.status(_("note: Mercurial doesn't have a staging area, " +
437 439 "so there is no --all. -A will add and remove files " +
438 440 "for you though.\n\n"))
439 441
440 442 if opts.get('file'):
441 443 cmd['-l'] = opts.get('file')
442 444
443 445 if opts.get('author'):
444 446 cmd['-u'] = opts.get('author')
445 447
446 448 if opts.get('date'):
447 449 cmd['-d'] = opts.get('date')
448 450
449 451 cmd.extend(args)
450 452
451 453 ui.status((bytes(cmd)), "\n")
452 454
453 455 def deprecated(ui, repo, *args, **kwargs):
454 456 ui.warn(_('This command has been deprecated in the git project, ' +
455 457 'and thus isn\'t supported by this tool.\n\n'))
456 458
457 459 def diff(ui, repo, *args, **kwargs):
458 460 cmdoptions = [
459 461 ('a', 'all', None, ''),
460 462 ('', 'cached', None, ''),
461 463 ('R', 'reverse', None, ''),
462 464 ]
463 465 args, opts = parseoptions(ui, cmdoptions, args)
464 466
465 467 cmd = Command('diff')
466 468
467 469 if opts.get('cached'):
468 470 ui.status(_('note: Mercurial has no concept of a staging area, ' +
469 471 'so --cached does nothing.\n\n'))
470 472
471 473 if opts.get('reverse'):
472 474 cmd['--reverse'] = None
473 475
474 476 for a in list(args):
475 477 args.remove(a)
476 478 try:
477 479 repo.revs(a)
478 480 cmd['-r'] = a
479 481 except Exception:
480 482 cmd.append(a)
481 483
482 484 ui.status((bytes(cmd)), "\n")
483 485
484 486 def difftool(ui, repo, *args, **kwargs):
485 487 ui.status(_('Mercurial does not enable external difftool by default. You '
486 488 'need to enable the extdiff extension in your .hgrc file by adding\n'
487 489 'extdiff =\n'
488 490 'to the [extensions] section and then running\n\n'
489 491 'hg extdiff -p <program>\n\n'
490 492 'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
491 493 'information.\n'))
492 494
493 495 def fetch(ui, repo, *args, **kwargs):
494 496 cmdoptions = [
495 497 ('', 'all', None, ''),
496 498 ('f', 'force', None, ''),
497 499 ]
498 500 args, opts = parseoptions(ui, cmdoptions, args)
499 501
500 502 cmd = Command('pull')
501 503
502 504 if len(args) > 0:
503 505 cmd.append(args[0])
504 506 if len(args) > 1:
505 507 ui.status(_("note: Mercurial doesn't have refspecs. " +
506 508 "-r can be used to specify which commits you want to pull. " +
507 509 "-B can be used to specify which bookmark you want to pull." +
508 510 "\n\n"))
509 511 for v in args[1:]:
510 512 if v in repo._bookmarks:
511 513 cmd['-B'] = v
512 514 else:
513 515 cmd['-r'] = v
514 516
515 517 ui.status((bytes(cmd)), "\n")
516 518
517 519 def grep(ui, repo, *args, **kwargs):
518 520 cmdoptions = [
519 521 ]
520 522 args, opts = parseoptions(ui, cmdoptions, args)
521 523
522 524 cmd = Command('grep')
523 525
524 526 # For basic usage, git grep and hg grep are the same. They both have the
525 527 # pattern first, followed by paths.
526 528 cmd.extend(args)
527 529
528 530 ui.status((bytes(cmd)), "\n")
529 531
530 532 def init(ui, repo, *args, **kwargs):
531 533 cmdoptions = [
532 534 ]
533 535 args, opts = parseoptions(ui, cmdoptions, args)
534 536
535 537 cmd = Command('init')
536 538
537 539 if len(args) > 0:
538 540 cmd.append(args[0])
539 541
540 542 ui.status((bytes(cmd)), "\n")
541 543
542 544 def log(ui, repo, *args, **kwargs):
543 545 cmdoptions = [
544 546 ('', 'follow', None, ''),
545 547 ('', 'decorate', None, ''),
546 548 ('n', 'number', '', ''),
547 549 ('1', '1', None, ''),
548 550 ('', 'pretty', '', ''),
549 551 ('', 'format', '', ''),
550 552 ('', 'oneline', None, ''),
551 553 ('', 'stat', None, ''),
552 554 ('', 'graph', None, ''),
553 555 ('p', 'patch', None, ''),
554 556 ]
555 557 args, opts = parseoptions(ui, cmdoptions, args)
556 558 ui.status(_('note: -v prints the entire commit message like Git does. To ' +
557 559 'print just the first line, drop the -v.\n\n'))
558 560 ui.status(_("note: see hg help revset for information on how to filter " +
559 561 "log output.\n\n"))
560 562
561 563 cmd = Command('log')
562 564 cmd['-v'] = None
563 565
564 566 if opts.get('number'):
565 567 cmd['-l'] = opts.get('number')
566 568 if opts.get('1'):
567 569 cmd['-l'] = '1'
568 570 if opts.get('stat'):
569 571 cmd['--stat'] = None
570 572 if opts.get('graph'):
571 573 cmd['-G'] = None
572 574 if opts.get('patch'):
573 575 cmd['-p'] = None
574 576
575 577 if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
576 578 format = opts.get('format', '')
577 579 if 'format:' in format:
578 580 ui.status(_("note: --format format:??? equates to Mercurial's " +
579 581 "--template. See hg help templates for more info.\n\n"))
580 582 cmd['--template'] = '???'
581 583 else:
582 584 ui.status(_("note: --pretty/format/oneline equate to Mercurial's " +
583 585 "--style or --template. See hg help templates for more info." +
584 586 "\n\n"))
585 587 cmd['--style'] = '???'
586 588
587 589 if len(args) > 0:
588 590 if '..' in args[0]:
589 591 since, until = args[0].split('..')
590 592 cmd['-r'] = "'%s::%s'" % (since, until)
591 593 del args[0]
592 594 cmd.extend(args)
593 595
594 596 ui.status((bytes(cmd)), "\n")
595 597
596 598 def lsfiles(ui, repo, *args, **kwargs):
597 599 cmdoptions = [
598 600 ('c', 'cached', None, ''),
599 601 ('d', 'deleted', None, ''),
600 602 ('m', 'modified', None, ''),
601 603 ('o', 'others', None, ''),
602 604 ('i', 'ignored', None, ''),
603 605 ('s', 'stage', None, ''),
604 606 ('z', '_zero', None, ''),
605 607 ]
606 608 args, opts = parseoptions(ui, cmdoptions, args)
607 609
608 610 if (opts.get('modified') or opts.get('deleted')
609 611 or opts.get('others') or opts.get('ignored')):
610 612 cmd = Command('status')
611 613 if opts.get('deleted'):
612 614 cmd['-d'] = None
613 615 if opts.get('modified'):
614 616 cmd['-m'] = None
615 617 if opts.get('others'):
616 618 cmd['-o'] = None
617 619 if opts.get('ignored'):
618 620 cmd['-i'] = None
619 621 else:
620 622 cmd = Command('files')
621 623 if opts.get('stage'):
622 624 ui.status(_("note: Mercurial doesn't have a staging area, ignoring "
623 625 "--stage\n"))
624 626 if opts.get('_zero'):
625 627 cmd['-0'] = None
626 628 cmd.append('.')
627 629 for include in args:
628 cmd['-I'] = util.shellquote(include)
630 cmd['-I'] = procutil.shellquote(include)
629 631
630 632 ui.status((bytes(cmd)), "\n")
631 633
632 634 def merge(ui, repo, *args, **kwargs):
633 635 cmdoptions = [
634 636 ]
635 637 args, opts = parseoptions(ui, cmdoptions, args)
636 638
637 639 cmd = Command('merge')
638 640
639 641 if len(args) > 0:
640 642 cmd.append(args[len(args) - 1])
641 643
642 644 ui.status((bytes(cmd)), "\n")
643 645
644 646 def mergebase(ui, repo, *args, **kwargs):
645 647 cmdoptions = []
646 648 args, opts = parseoptions(ui, cmdoptions, args)
647 649
648 650 if len(args) != 2:
649 651 args = ['A', 'B']
650 652
651 653 cmd = Command("log -T '{node}\\n' -r 'ancestor(%s,%s)'"
652 654 % (args[0], args[1]))
653 655
654 656 ui.status(_('NOTE: ancestor() is part of the revset language.\n'),
655 657 _("Learn more about revsets with 'hg help revsets'\n\n"))
656 658 ui.status((bytes(cmd)), "\n")
657 659
658 660 def mergetool(ui, repo, *args, **kwargs):
659 661 cmdoptions = []
660 662 args, opts = parseoptions(ui, cmdoptions, args)
661 663
662 664 cmd = Command("resolve")
663 665
664 666 if len(args) == 0:
665 667 cmd['--all'] = None
666 668 cmd.extend(args)
667 669 ui.status((bytes(cmd)), "\n")
668 670
669 671 def mv(ui, repo, *args, **kwargs):
670 672 cmdoptions = [
671 673 ('f', 'force', None, ''),
672 674 ]
673 675 args, opts = parseoptions(ui, cmdoptions, args)
674 676
675 677 cmd = Command('mv')
676 678 cmd.extend(args)
677 679
678 680 if opts.get('force'):
679 681 cmd['-f'] = None
680 682
681 683 ui.status((bytes(cmd)), "\n")
682 684
683 685 def pull(ui, repo, *args, **kwargs):
684 686 cmdoptions = [
685 687 ('', 'all', None, ''),
686 688 ('f', 'force', None, ''),
687 689 ('r', 'rebase', None, ''),
688 690 ]
689 691 args, opts = parseoptions(ui, cmdoptions, args)
690 692
691 693 cmd = Command('pull')
692 694 cmd['--rebase'] = None
693 695
694 696 if len(args) > 0:
695 697 cmd.append(args[0])
696 698 if len(args) > 1:
697 699 ui.status(_("note: Mercurial doesn't have refspecs. " +
698 700 "-r can be used to specify which commits you want to pull. " +
699 701 "-B can be used to specify which bookmark you want to pull." +
700 702 "\n\n"))
701 703 for v in args[1:]:
702 704 if v in repo._bookmarks:
703 705 cmd['-B'] = v
704 706 else:
705 707 cmd['-r'] = v
706 708
707 709 ui.status((bytes(cmd)), "\n")
708 710
709 711 def push(ui, repo, *args, **kwargs):
710 712 cmdoptions = [
711 713 ('', 'all', None, ''),
712 714 ('f', 'force', None, ''),
713 715 ]
714 716 args, opts = parseoptions(ui, cmdoptions, args)
715 717
716 718 cmd = Command('push')
717 719
718 720 if len(args) > 0:
719 721 cmd.append(args[0])
720 722 if len(args) > 1:
721 723 ui.status(_("note: Mercurial doesn't have refspecs. " +
722 724 "-r can be used to specify which commits you want to push. " +
723 725 "-B can be used to specify which bookmark you want to push." +
724 726 "\n\n"))
725 727 for v in args[1:]:
726 728 if v in repo._bookmarks:
727 729 cmd['-B'] = v
728 730 else:
729 731 cmd['-r'] = v
730 732
731 733 if opts.get('force'):
732 734 cmd['-f'] = None
733 735
734 736 ui.status((bytes(cmd)), "\n")
735 737
736 738 def rebase(ui, repo, *args, **kwargs):
737 739 cmdoptions = [
738 740 ('', 'all', None, ''),
739 741 ('i', 'interactive', None, ''),
740 742 ('', 'onto', '', ''),
741 743 ('', 'abort', None, ''),
742 744 ('', 'continue', None, ''),
743 745 ('', 'skip', None, ''),
744 746 ]
745 747 args, opts = parseoptions(ui, cmdoptions, args)
746 748
747 749 if opts.get('interactive'):
748 750 ui.status(_("note: hg histedit does not perform a rebase. " +
749 751 "It just edits history.\n\n"))
750 752 cmd = Command('histedit')
751 753 if len(args) > 0:
752 754 ui.status(_("also note: 'hg histedit' will automatically detect"
753 755 " your stack, so no second argument is necessary.\n\n"))
754 756 ui.status((bytes(cmd)), "\n")
755 757 return
756 758
757 759 if opts.get('skip'):
758 760 cmd = Command('revert --all -r .')
759 761 ui.status((bytes(cmd)), "\n")
760 762
761 763 cmd = Command('rebase')
762 764
763 765 if opts.get('continue') or opts.get('skip'):
764 766 cmd['--continue'] = None
765 767 if opts.get('abort'):
766 768 cmd['--abort'] = None
767 769
768 770 if opts.get('onto'):
769 771 ui.status(_("note: if you're trying to lift a commit off one branch, " +
770 772 "try hg rebase -d <destination commit> -s <commit to be lifted>" +
771 773 "\n\n"))
772 774 cmd['-d'] = convert(opts.get('onto'))
773 775 if len(args) < 2:
774 776 raise error.Abort("Expected format: git rebase --onto X Y Z")
775 777 cmd['-s'] = "'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
776 778 else:
777 779 if len(args) == 1:
778 780 cmd['-d'] = convert(args[0])
779 781 elif len(args) == 2:
780 782 cmd['-d'] = convert(args[0])
781 783 cmd['-b'] = convert(args[1])
782 784
783 785 ui.status((bytes(cmd)), "\n")
784 786
785 787 def reflog(ui, repo, *args, **kwargs):
786 788 cmdoptions = [
787 789 ('', 'all', None, ''),
788 790 ]
789 791 args, opts = parseoptions(ui, cmdoptions, args)
790 792
791 793 cmd = Command('journal')
792 794 if opts.get('all'):
793 795 cmd['--all'] = None
794 796 if len(args) > 0:
795 797 cmd.append(args[0])
796 798
797 799 ui.status(bytes(cmd), "\n\n")
798 800 ui.status(_("note: in hg commits can be deleted from repo but we always"
799 801 " have backups.\n"))
800 802
801 803 def reset(ui, repo, *args, **kwargs):
802 804 cmdoptions = [
803 805 ('', 'soft', None, ''),
804 806 ('', 'hard', None, ''),
805 807 ('', 'mixed', None, ''),
806 808 ]
807 809 args, opts = parseoptions(ui, cmdoptions, args)
808 810
809 811 commit = convert(args[0] if len(args) > 0 else '.')
810 812 hard = opts.get('hard')
811 813
812 814 if opts.get('mixed'):
813 815 ui.status(_('NOTE: --mixed has no meaning since Mercurial has no '
814 816 'staging area\n\n'))
815 817 if opts.get('soft'):
816 818 ui.status(_('NOTE: --soft has no meaning since Mercurial has no '
817 819 'staging area\n\n'))
818 820
819 821 cmd = Command('update')
820 822 if hard:
821 823 cmd.append('--clean')
822 824
823 825 cmd.append(commit)
824 826
825 827 ui.status((bytes(cmd)), "\n")
826 828
827 829 def revert(ui, repo, *args, **kwargs):
828 830 cmdoptions = [
829 831 ]
830 832 args, opts = parseoptions(ui, cmdoptions, args)
831 833
832 834 if len(args) > 1:
833 835 ui.status(_("note: hg backout doesn't support multiple commits at " +
834 836 "once\n\n"))
835 837
836 838 cmd = Command('backout')
837 839 if args:
838 840 cmd.append(args[0])
839 841
840 842 ui.status((bytes(cmd)), "\n")
841 843
842 844 def revparse(ui, repo, *args, **kwargs):
843 845 cmdoptions = [
844 846 ('', 'show-cdup', None, ''),
845 847 ('', 'show-toplevel', None, ''),
846 848 ]
847 849 args, opts = parseoptions(ui, cmdoptions, args)
848 850
849 851 if opts.get('show_cdup') or opts.get('show_toplevel'):
850 852 cmd = Command('root')
851 853 if opts.get('show_cdup'):
852 854 ui.status(_("note: hg root prints the root of the repository\n\n"))
853 855 ui.status((bytes(cmd)), "\n")
854 856 else:
855 857 ui.status(_("note: see hg help revset for how to refer to commits\n"))
856 858
857 859 def rm(ui, repo, *args, **kwargs):
858 860 cmdoptions = [
859 861 ('f', 'force', None, ''),
860 862 ('n', 'dry-run', None, ''),
861 863 ]
862 864 args, opts = parseoptions(ui, cmdoptions, args)
863 865
864 866 cmd = Command('rm')
865 867 cmd.extend(args)
866 868
867 869 if opts.get('force'):
868 870 cmd['-f'] = None
869 871 if opts.get('dry_run'):
870 872 cmd['-n'] = None
871 873
872 874 ui.status((bytes(cmd)), "\n")
873 875
874 876 def show(ui, repo, *args, **kwargs):
875 877 cmdoptions = [
876 878 ('', 'name-status', None, ''),
877 879 ('', 'pretty', '', ''),
878 880 ('U', 'unified', int, ''),
879 881 ]
880 882 args, opts = parseoptions(ui, cmdoptions, args)
881 883
882 884 if opts.get('name_status'):
883 885 if opts.get('pretty') == 'format:':
884 886 cmd = Command('status')
885 887 cmd['--change'] = '.'
886 888 else:
887 889 cmd = Command('log')
888 890 cmd.append('--style status')
889 891 cmd.append('-r .')
890 892 elif len(args) > 0:
891 893 if ispath(repo, args[0]):
892 894 cmd = Command('cat')
893 895 else:
894 896 cmd = Command('export')
895 897 cmd.extend(args)
896 898 if opts.get('unified'):
897 899 cmd.append('--config diff.unified=%d' % (opts['unified'],))
898 900 elif opts.get('unified'):
899 901 cmd = Command('export')
900 902 cmd.append('--config diff.unified=%d' % (opts['unified'],))
901 903 else:
902 904 cmd = Command('export')
903 905
904 906 ui.status((bytes(cmd)), "\n")
905 907
906 908 def stash(ui, repo, *args, **kwargs):
907 909 cmdoptions = [
908 910 ]
909 911 args, opts = parseoptions(ui, cmdoptions, args)
910 912
911 913 cmd = Command('shelve')
912 914 action = args[0] if len(args) > 0 else None
913 915
914 916 if action == 'list':
915 917 cmd['-l'] = None
916 918 elif action == 'drop':
917 919 cmd['-d'] = None
918 920 if len(args) > 1:
919 921 cmd.append(args[1])
920 922 else:
921 923 cmd.append('<shelve name>')
922 924 elif action == 'pop' or action == 'apply':
923 925 cmd = Command('unshelve')
924 926 if len(args) > 1:
925 927 cmd.append(args[1])
926 928 if action == 'apply':
927 929 cmd['--keep'] = None
928 930 elif (action == 'branch' or action == 'show' or action == 'clear'
929 931 or action == 'create'):
930 932 ui.status(_("note: Mercurial doesn't have equivalents to the " +
931 933 "git stash branch, show, clear, or create actions.\n\n"))
932 934 return
933 935 else:
934 936 if len(args) > 0:
935 937 if args[0] != 'save':
936 938 cmd['--name'] = args[0]
937 939 elif len(args) > 1:
938 940 cmd['--name'] = args[1]
939 941
940 942 ui.status((bytes(cmd)), "\n")
941 943
942 944 def status(ui, repo, *args, **kwargs):
943 945 cmdoptions = [
944 946 ('', 'ignored', None, ''),
945 947 ]
946 948 args, opts = parseoptions(ui, cmdoptions, args)
947 949
948 950 cmd = Command('status')
949 951 cmd.extend(args)
950 952
951 953 if opts.get('ignored'):
952 954 cmd['-i'] = None
953 955
954 956 ui.status((bytes(cmd)), "\n")
955 957
956 958 def svn(ui, repo, *args, **kwargs):
957 959 svncmd = args[0]
958 960 if svncmd not in gitsvncommands:
959 961 ui.warn(_("error: unknown git svn command %s\n") % (svncmd))
960 962
961 963 args = args[1:]
962 964 return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
963 965
964 966 def svndcommit(ui, repo, *args, **kwargs):
965 967 cmdoptions = [
966 968 ]
967 969 args, opts = parseoptions(ui, cmdoptions, args)
968 970
969 971 cmd = Command('push')
970 972
971 973 ui.status((bytes(cmd)), "\n")
972 974
973 975 def svnfetch(ui, repo, *args, **kwargs):
974 976 cmdoptions = [
975 977 ]
976 978 args, opts = parseoptions(ui, cmdoptions, args)
977 979
978 980 cmd = Command('pull')
979 981 cmd.append('default-push')
980 982
981 983 ui.status((bytes(cmd)), "\n")
982 984
983 985 def svnfindrev(ui, repo, *args, **kwargs):
984 986 cmdoptions = [
985 987 ]
986 988 args, opts = parseoptions(ui, cmdoptions, args)
987 989
988 990 cmd = Command('log')
989 991 cmd['-r'] = args[0]
990 992
991 993 ui.status((bytes(cmd)), "\n")
992 994
993 995 def svnrebase(ui, repo, *args, **kwargs):
994 996 cmdoptions = [
995 997 ('l', 'local', None, ''),
996 998 ]
997 999 args, opts = parseoptions(ui, cmdoptions, args)
998 1000
999 1001 pullcmd = Command('pull')
1000 1002 pullcmd.append('default-push')
1001 1003 rebasecmd = Command('rebase')
1002 1004 rebasecmd.append('tip')
1003 1005
1004 1006 cmd = pullcmd & rebasecmd
1005 1007
1006 1008 ui.status((bytes(cmd)), "\n")
1007 1009
1008 1010 def tag(ui, repo, *args, **kwargs):
1009 1011 cmdoptions = [
1010 1012 ('f', 'force', None, ''),
1011 1013 ('l', 'list', None, ''),
1012 1014 ('d', 'delete', None, ''),
1013 1015 ]
1014 1016 args, opts = parseoptions(ui, cmdoptions, args)
1015 1017
1016 1018 if opts.get('list'):
1017 1019 cmd = Command('tags')
1018 1020 else:
1019 1021 cmd = Command('tag')
1020 1022 cmd.append(args[0])
1021 1023 if len(args) > 1:
1022 1024 cmd['-r'] = args[1]
1023 1025
1024 1026 if opts.get('delete'):
1025 1027 cmd['--remove'] = None
1026 1028
1027 1029 if opts.get('force'):
1028 1030 cmd['-f'] = None
1029 1031
1030 1032 ui.status((bytes(cmd)), "\n")
1031 1033
1032 1034 gitcommands = {
1033 1035 'add': add,
1034 1036 'am': am,
1035 1037 'apply': apply,
1036 1038 'bisect': bisect,
1037 1039 'blame': blame,
1038 1040 'branch': branch,
1039 1041 'checkout': checkout,
1040 1042 'cherry-pick': cherrypick,
1041 1043 'clean': clean,
1042 1044 'clone': clone,
1043 1045 'commit': commit,
1044 1046 'diff': diff,
1045 1047 'difftool': difftool,
1046 1048 'fetch': fetch,
1047 1049 'grep': grep,
1048 1050 'init': init,
1049 1051 'log': log,
1050 1052 'ls-files': lsfiles,
1051 1053 'merge': merge,
1052 1054 'merge-base': mergebase,
1053 1055 'mergetool': mergetool,
1054 1056 'mv': mv,
1055 1057 'pull': pull,
1056 1058 'push': push,
1057 1059 'rebase': rebase,
1058 1060 'reflog': reflog,
1059 1061 'reset': reset,
1060 1062 'revert': revert,
1061 1063 'rev-parse': revparse,
1062 1064 'rm': rm,
1063 1065 'show': show,
1064 1066 'stash': stash,
1065 1067 'status': status,
1066 1068 'svn': svn,
1067 1069 'tag': tag,
1068 1070 'whatchanged': deprecated,
1069 1071 }
1070 1072
1071 1073 gitsvncommands = {
1072 1074 'dcommit': svndcommit,
1073 1075 'fetch': svnfetch,
1074 1076 'find-rev': svnfindrev,
1075 1077 'rebase': svnrebase,
1076 1078 }
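
The two tables above are the extension's core data structure: each git command name maps to a handler with the uniform signature (ui, repo, *args, **kwargs) that prints the closest hg equivalent. A standalone sketch of the same table-driven lookup, with a toy handler standing in for the real ones:

    # Toy handler; the real ones take (ui, repo, *args, **kwargs)
    # and build a Command object instead of returning a string.
    def stash_handler(*args):
        return "hg shelve"

    commands = {"stash": stash_handler}

    def suggest(name, *args):
        handler = commands.get(name)
        if handler is None:
            raise SystemExit("error: unknown git command %s" % name)
        return handler(*args)

    print(suggest("stash"))  # -> hg shelve
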
@@ -1,328 +1,330
1 1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 2 #
3 3 # This software may be used and distributed according to the terms of the
4 4 # GNU General Public License version 2 or any later version.
5 5
6 6 '''commands to sign and verify changesets'''
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import os
12 12 import tempfile
13 13
14 14 from mercurial.i18n import _
15 15 from mercurial import (
16 16 cmdutil,
17 17 error,
18 18 match,
19 19 node as hgnode,
20 20 pycompat,
21 21 registrar,
22 util,
23 22 )
24 from mercurial.utils import dateutil
23 from mercurial.utils import (
24 dateutil,
25 procutil,
26 )
25 27
26 28 cmdtable = {}
27 29 command = registrar.command(cmdtable)
28 30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 32 # be specifying the version(s) of Mercurial they are tested with, or
31 33 # leave the attribute unspecified.
32 34 testedwith = 'ships-with-hg-core'
33 35
34 36 configtable = {}
35 37 configitem = registrar.configitem(configtable)
36 38
37 39 configitem('gpg', 'cmd',
38 40 default='gpg',
39 41 )
40 42 configitem('gpg', 'key',
41 43 default=None,
42 44 )
43 45 configitem('gpg', '.*',
44 46 default=None,
45 47 generic=True,
46 48 )
47 49
48 50 class gpg(object):
49 51 def __init__(self, path, key=None):
50 52 self.path = path
51 53 self.key = (key and " --local-user \"%s\"" % key) or ""
52 54
53 55 def sign(self, data):
54 56 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
55 return util.filter(data, gpgcmd)
57 return procutil.filter(data, gpgcmd)
56 58
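
procutil.filter runs the given shell command with the data on standard input and returns whatever the command writes to standard output, which is how both sign() above and verify() below drive gpg. A rough standalone equivalent using subprocess (a sketch of the behavior, not Mercurial's implementation):

    import subprocess

    def pipefilter(data, cmd):
        # Feed data to cmd's stdin via the shell, return its stdout.
        proc = subprocess.Popen(cmd, shell=True,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        out, _ = proc.communicate(data)
        return out
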
57 59 def verify(self, data, sig):
58 60 """ returns of the good and bad signatures"""
59 61 sigfile = datafile = None
60 62 try:
61 63 # create temporary files
62 64 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
63 65 fp = os.fdopen(fd, r'wb')
64 66 fp.write(sig)
65 67 fp.close()
66 68 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
67 69 fp = os.fdopen(fd, r'wb')
68 70 fp.write(data)
69 71 fp.close()
70 72 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
71 73 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
72 ret = util.filter("", gpgcmd)
74 ret = procutil.filter("", gpgcmd)
73 75 finally:
74 76 for f in (sigfile, datafile):
75 77 try:
76 78 if f:
77 79 os.unlink(f)
78 80 except OSError:
79 81 pass
80 82 keys = []
81 83 key, fingerprint = None, None
82 84 for l in ret.splitlines():
83 85 # see DETAILS in the gnupg documentation
84 86 # filter the logger output
85 87 if not l.startswith("[GNUPG:]"):
86 88 continue
87 89 l = l[9:]
88 90 if l.startswith("VALIDSIG"):
89 91 # fingerprint of the primary key
90 92 fingerprint = l.split()[10]
91 93 elif l.startswith("ERRSIG"):
92 94 key = l.split(" ", 3)[:2]
93 95 key.append("")
94 96 fingerprint = None
95 97 elif (l.startswith("GOODSIG") or
96 98 l.startswith("EXPSIG") or
97 99 l.startswith("EXPKEYSIG") or
98 100 l.startswith("BADSIG")):
99 101 if key is not None:
100 102 keys.append(key + [fingerprint])
101 103 key = l.split(" ", 2)
102 104 fingerprint = None
103 105 if key is not None:
104 106 keys.append(key + [fingerprint])
105 107 return keys
106 108
107 109 def newgpg(ui, **opts):
108 110 """create a new gpg instance"""
109 111 gpgpath = ui.config("gpg", "cmd")
110 112 gpgkey = opts.get(r'key')
111 113 if not gpgkey:
112 114 gpgkey = ui.config("gpg", "key")
113 115 return gpg(gpgpath, gpgkey)
114 116
115 117 def sigwalk(repo):
116 118 """
117 119 walk over every signature, yielding a pair
118 120 ((node, version, sig), (filename, linenumber))
119 121 """
120 122 def parsefile(fileiter, context):
121 123 ln = 1
122 124 for l in fileiter:
123 125 if not l:
124 126 continue
125 127 yield (l.split(" ", 2), (context, ln))
126 128 ln += 1
127 129
128 130 # read the heads
129 131 fl = repo.file(".hgsigs")
130 132 for r in reversed(fl.heads()):
131 133 fn = ".hgsigs|%s" % hgnode.short(r)
132 134 for item in parsefile(fl.read(r).splitlines(), fn):
133 135 yield item
134 136 try:
135 137 # read local signatures
136 138 fn = "localsigs"
137 139 for item in parsefile(repo.vfs(fn), fn):
138 140 yield item
139 141 except IOError:
140 142 pass
141 143
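
Each line that sigwalk() yields comes from .hgsigs or localsigs and holds three space-separated fields: the hex node, the signature scheme version, and the base64-encoded detached signature. A sketch with fabricated values:

    # "<node-hex> <version> <base64 signature>", split on the first
    # two spaces exactly as parsefile() does; values are made up.
    sample = "deadbeef" * 5 + " 0 " + "iQEcBAABCgAGBQJa0u1xAAo="
    node_hex, version, sig = sample.split(" ", 2)
    assert len(node_hex) == 40 and version == "0"
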
142 144 def getkeys(ui, repo, mygpg, sigdata, context):
143 145 """get the keys who signed a data"""
144 146 fn, ln = context
145 147 node, version, sig = sigdata
146 148 prefix = "%s:%d" % (fn, ln)
147 149 node = hgnode.bin(node)
148 150
149 151 data = node2txt(repo, node, version)
150 152 sig = binascii.a2b_base64(sig)
151 153 keys = mygpg.verify(data, sig)
152 154
153 155 validkeys = []
154 156 # warn for expired key and/or sigs
155 157 for key in keys:
156 158 if key[0] == "ERRSIG":
157 159 ui.write(_("%s Unknown key ID \"%s\"\n") % (prefix, key[1]))
158 160 continue
159 161 if key[0] == "BADSIG":
160 162 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
161 163 continue
162 164 if key[0] == "EXPSIG":
163 165 ui.write(_("%s Note: Signature has expired"
164 166 " (signed by: \"%s\")\n") % (prefix, key[2]))
165 167 elif key[0] == "EXPKEYSIG":
166 168 ui.write(_("%s Note: This key has expired"
167 169 " (signed by: \"%s\")\n") % (prefix, key[2]))
168 170 validkeys.append((key[1], key[2], key[3]))
169 171 return validkeys
170 172
171 173 @command("sigs", [], _('hg sigs'))
172 174 def sigs(ui, repo):
173 175 """list signed changesets"""
174 176 mygpg = newgpg(ui)
175 177 revs = {}
176 178
177 179 for data, context in sigwalk(repo):
178 180 node, version, sig = data
179 181 fn, ln = context
180 182 try:
181 183 n = repo.lookup(node)
182 184 except KeyError:
183 185 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
184 186 continue
185 187 r = repo.changelog.rev(n)
186 188 keys = getkeys(ui, repo, mygpg, data, context)
187 189 if not keys:
188 190 continue
189 191 revs.setdefault(r, [])
190 192 revs[r].extend(keys)
191 193 for rev in sorted(revs, reverse=True):
192 194 for k in revs[rev]:
193 195 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
194 196 ui.write("%-30s %s\n" % (keystr(ui, k), r))
195 197
196 198 @command("sigcheck", [], _('hg sigcheck REV'))
197 199 def sigcheck(ui, repo, rev):
198 200 """verify all the signatures there may be for a particular revision"""
199 201 mygpg = newgpg(ui)
200 202 rev = repo.lookup(rev)
201 203 hexrev = hgnode.hex(rev)
202 204 keys = []
203 205
204 206 for data, context in sigwalk(repo):
205 207 node, version, sig = data
206 208 if node == hexrev:
207 209 k = getkeys(ui, repo, mygpg, data, context)
208 210 if k:
209 211 keys.extend(k)
210 212
211 213 if not keys:
212 214 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
213 215 return
214 216
215 217 # print summary
216 218 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
217 219 for key in keys:
218 220 ui.write(" %s\n" % keystr(ui, key))
219 221
220 222 def keystr(ui, key):
221 223 """associate a string to a key (username, comment)"""
222 224 keyid, user, fingerprint = key
223 225 comment = ui.config("gpg", fingerprint)
224 226 if comment:
225 227 return "%s (%s)" % (user, comment)
226 228 else:
227 229 return user
228 230
229 231 @command("sign",
230 232 [('l', 'local', None, _('make the signature local')),
231 233 ('f', 'force', None, _('sign even if the sigfile is modified')),
232 234 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
233 235 ('k', 'key', '',
234 236 _('the key id to sign with'), _('ID')),
235 237 ('m', 'message', '',
236 238 _('use text as commit message'), _('TEXT')),
237 239 ('e', 'edit', False, _('invoke editor on commit messages')),
238 240 ] + cmdutil.commitopts2,
239 241 _('hg sign [OPTION]... [REV]...'))
240 242 def sign(ui, repo, *revs, **opts):
241 243 """add a signature for the current or given revision
242 244
243 245 If no revision is given, the parent of the working directory is used,
244 246 or tip if no revision is checked out.
245 247
246 248 The ``gpg.cmd`` config setting can be used to specify the command
247 249 to run. A default key can be specified with ``gpg.key``.
248 250
249 251 See :hg:`help dates` for a list of formats valid for -d/--date.
250 252 """
251 253 with repo.wlock():
252 254 return _dosign(ui, repo, *revs, **opts)
253 255
254 256 def _dosign(ui, repo, *revs, **opts):
255 257 mygpg = newgpg(ui, **opts)
256 258 opts = pycompat.byteskwargs(opts)
257 259 sigver = "0"
258 260 sigmessage = ""
259 261
260 262 date = opts.get('date')
261 263 if date:
262 264 opts['date'] = dateutil.parsedate(date)
263 265
264 266 if revs:
265 267 nodes = [repo.lookup(n) for n in revs]
266 268 else:
267 269 nodes = [node for node in repo.dirstate.parents()
268 270 if node != hgnode.nullid]
269 271 if len(nodes) > 1:
270 272 raise error.Abort(_('uncommitted merge - please provide a '
271 273 'specific revision'))
272 274 if not nodes:
273 275 nodes = [repo.changelog.tip()]
274 276
275 277 for n in nodes:
276 278 hexnode = hgnode.hex(n)
277 279 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
278 280 hgnode.short(n)))
279 281 # build data
280 282 data = node2txt(repo, n, sigver)
281 283 sig = mygpg.sign(data)
282 284 if not sig:
283 285 raise error.Abort(_("error while signing"))
284 286 sig = binascii.b2a_base64(sig)
285 287 sig = sig.replace("\n", "")
286 288 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
287 289
288 290 # write it
289 291 if opts['local']:
290 292 repo.vfs.append("localsigs", sigmessage)
291 293 return
292 294
293 295 if not opts["force"]:
294 296 msigs = match.exact(repo.root, '', ['.hgsigs'])
295 297 if any(repo.status(match=msigs, unknown=True, ignored=True)):
296 298 raise error.Abort(_("working copy of .hgsigs is changed "),
297 299 hint=_("please commit .hgsigs manually"))
298 300
299 301 sigsfile = repo.wvfs(".hgsigs", "ab")
300 302 sigsfile.write(sigmessage)
301 303 sigsfile.close()
302 304
303 305 if '.hgsigs' not in repo.dirstate:
304 306 repo[None].add([".hgsigs"])
305 307
306 308 if opts["no_commit"]:
307 309 return
308 310
309 311 message = opts['message']
310 312 if not message:
311 313 # we don't translate commit messages
312 314 message = "\n".join(["Added signature for changeset %s"
313 315 % hgnode.short(n)
314 316 for n in nodes])
315 317 try:
316 318 editor = cmdutil.getcommiteditor(editform='gpg.sign',
317 319 **pycompat.strkwargs(opts))
318 320 repo.commit(message, opts['user'], opts['date'], match=msigs,
319 321 editor=editor)
320 322 except ValueError as inst:
321 323 raise error.Abort(pycompat.bytestr(inst))
322 324
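
The configuration this extension reads lives in the [gpg] section: gpg.cmd and gpg.key feed newgpg(), and any other key is treated as a fingerprint-to-comment mapping by keystr(). A sample hgrc fragment (the key id and fingerprint below are made-up values):

    [gpg]
    cmd = gpg
    # hypothetical default signing key id:
    key = 6D5BEF9A
    # map a key fingerprint to a display comment (used by keystr):
    ABCDEF0123456789ABCDEF0123456789ABCDEF01 = release signing key
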
323 325 def node2txt(repo, node, ver):
324 326 """map a manifest into some text"""
325 327 if ver == "0":
326 328 return "%s\n" % hgnode.hex(node)
327 329 else:
328 330 raise error.Abort(_("unknown signature version"))
@@ -1,523 +1,524
1 1 # journal.py
2 2 #
3 3 # Copyright 2014-2016 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """track previous positions of bookmarks (EXPERIMENTAL)
8 8
9 9 This extension adds a new command: `hg journal`, which shows you where
10 10 bookmarks were previously located.
11 11
12 12 """
13 13
14 14 from __future__ import absolute_import
15 15
16 16 import collections
17 17 import errno
18 18 import os
19 19 import weakref
20 20
21 21 from mercurial.i18n import _
22 22
23 23 from mercurial import (
24 24 bookmarks,
25 25 cmdutil,
26 26 dispatch,
27 27 encoding,
28 28 error,
29 29 extensions,
30 30 hg,
31 31 localrepo,
32 32 lock,
33 33 logcmdutil,
34 34 node,
35 35 pycompat,
36 36 registrar,
37 37 util,
38 38 )
39 39 from mercurial.utils import (
40 40 dateutil,
41 procutil,
41 42 stringutil,
42 43 )
43 44
44 45 cmdtable = {}
45 46 command = registrar.command(cmdtable)
46 47
47 48 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
48 49 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
49 50 # be specifying the version(s) of Mercurial they are tested with, or
50 51 # leave the attribute unspecified.
51 52 testedwith = 'ships-with-hg-core'
52 53
53 54 # storage format version; increment when the format changes
54 55 storageversion = 0
55 56
56 57 # namespaces
57 58 bookmarktype = 'bookmark'
58 59 wdirparenttype = 'wdirparent'
59 60 # In a shared repository, what shared feature name is used
60 61 # to indicate this namespace is shared with the source?
61 62 sharednamespaces = {
62 63 bookmarktype: hg.sharedbookmarks,
63 64 }
64 65
65 66 # Journal recording, register hooks and storage object
66 67 def extsetup(ui):
67 68 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
68 69 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
69 70 extensions.wrapfilecache(
70 71 localrepo.localrepository, 'dirstate', wrapdirstate)
71 72 extensions.wrapfunction(hg, 'postshare', wrappostshare)
72 73 extensions.wrapfunction(hg, 'copystore', unsharejournal)
73 74
74 75 def reposetup(ui, repo):
75 76 if repo.local():
76 77 repo.journal = journalstorage(repo)
77 78 repo._wlockfreeprefix.add('namejournal')
78 79
79 80 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
80 81 if cached:
81 82 # already instantiated dirstate isn't yet marked as
82 83 # "journal"-ing, even though repo.dirstate() was already
83 84 # wrapped by own wrapdirstate()
84 85 _setupdirstate(repo, dirstate)
85 86
86 87 def runcommand(orig, lui, repo, cmd, fullargs, *args):
87 88 """Track the command line options for recording in the journal"""
88 89 journalstorage.recordcommand(*fullargs)
89 90 return orig(lui, repo, cmd, fullargs, *args)
90 91
91 92 def _setupdirstate(repo, dirstate):
92 93 dirstate.journalstorage = repo.journal
93 94 dirstate.addparentchangecallback('journal', recorddirstateparents)
94 95
95 96 # hooks to record dirstate changes
96 97 def wrapdirstate(orig, repo):
97 98 """Make journal storage available to the dirstate object"""
98 99 dirstate = orig(repo)
99 100 if util.safehasattr(repo, 'journal'):
100 101 _setupdirstate(repo, dirstate)
101 102 return dirstate
102 103
103 104 def recorddirstateparents(dirstate, old, new):
104 105 """Records all dirstate parent changes in the journal."""
105 106 old = list(old)
106 107 new = list(new)
107 108 if util.safehasattr(dirstate, 'journalstorage'):
108 109 # only record two hashes if there was a merge
109 110 oldhashes = old[:1] if old[1] == node.nullid else old
110 111 newhashes = new[:1] if new[1] == node.nullid else new
111 112 dirstate.journalstorage.record(
112 113 wdirparenttype, '.', oldhashes, newhashes)
113 114
114 115 # hooks to record bookmark changes (both local and remote)
115 116 def recordbookmarks(orig, store, fp):
116 117 """Records all bookmark changes in the journal."""
117 118 repo = store._repo
118 119 if util.safehasattr(repo, 'journal'):
119 120 oldmarks = bookmarks.bmstore(repo)
120 121 for mark, value in store.iteritems():
121 122 oldvalue = oldmarks.get(mark, node.nullid)
122 123 if value != oldvalue:
123 124 repo.journal.record(bookmarktype, mark, oldvalue, value)
124 125 return orig(store, fp)
125 126
126 127 # shared repository support
127 128 def _readsharedfeatures(repo):
128 129 """A set of shared features for this repository"""
129 130 try:
130 131 return set(repo.vfs.read('shared').splitlines())
131 132 except IOError as inst:
132 133 if inst.errno != errno.ENOENT:
133 134 raise
134 135 return set()
135 136
136 137 def _mergeentriesiter(*iterables, **kwargs):
137 138 """Given a set of sorted iterables, yield the next entry in merged order
138 139
139 140 Note that by default entries go from most recent to oldest.
140 141 """
141 142 order = kwargs.pop(r'order', max)
142 143 iterables = [iter(it) for it in iterables]
143 144 # this tracks still active iterables; iterables are deleted as they are
144 145 # exhausted, which is why this is a dictionary and why each entry also
145 146 # stores the key. Entries are mutable so we can store the next value each
146 147 # time.
147 148 iterable_map = {}
148 149 for key, it in enumerate(iterables):
149 150 try:
150 151 iterable_map[key] = [next(it), key, it]
151 152 except StopIteration:
152 153 # empty entry, can be ignored
153 154 pass
154 155
155 156 while iterable_map:
156 157 value, key, it = order(iterable_map.itervalues())
157 158 yield value
158 159 try:
159 160 iterable_map[key][0] = next(it)
160 161 except StopIteration:
161 162 # this iterable is empty, remove it from consideration
162 163 del iterable_map[key]
163 164
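
The helper above is an n-way merge over already-sorted iterators, parameterized by an order function so the same code serves newest-first (max) and oldest-first (min) traversal. A self-contained sketch of the same idea on plain integers:

    def merge_sorted(iterables, order=max):
        # Yield entries from several sorted iterators in merged order:
        # newest first with order=max, oldest first with order=min.
        active = {}
        for key, it in enumerate(map(iter, iterables)):
            try:
                active[key] = [next(it), key, it]
            except StopIteration:
                pass
        while active:
            value, key, it = order(active.values())
            yield value
            try:
                active[key][0] = next(it)
            except StopIteration:
                del active[key]

    print(list(merge_sorted([[9, 5, 1], [8, 6, 2]])))  # [9, 8, 6, 5, 2, 1]
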
164 165 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
165 166 """Mark this shared working copy as sharing journal information"""
166 167 with destrepo.wlock():
167 168 orig(sourcerepo, destrepo, **kwargs)
168 169 with destrepo.vfs('shared', 'a') as fp:
169 170 fp.write('journal\n')
170 171
171 172 def unsharejournal(orig, ui, repo, repopath):
172 173 """Copy shared journal entries into this repo when unsharing"""
173 174 if (repo.path == repopath and repo.shared() and
174 175 util.safehasattr(repo, 'journal')):
175 176 sharedrepo = hg.sharedreposource(repo)
176 177 sharedfeatures = _readsharedfeatures(repo)
177 178 if sharedrepo and sharedfeatures > {'journal'}:
178 179 # there is a shared repository and there are shared journal entries
179 180 # to copy. move shared data over from source to destination, but
180 181 # move the local file first
181 182 if repo.vfs.exists('namejournal'):
182 183 journalpath = repo.vfs.join('namejournal')
183 184 util.rename(journalpath, journalpath + '.bak')
184 185 storage = repo.journal
185 186 local = storage._open(
186 187 repo.vfs, filename='namejournal.bak', _newestfirst=False)
187 188 shared = (
188 189 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
189 190 if sharednamespaces.get(e.namespace) in sharedfeatures)
190 191 for entry in _mergeentriesiter(local, shared, order=min):
191 192 storage._write(repo.vfs, entry)
192 193
193 194 return orig(ui, repo, repopath)
194 195
195 196 class journalentry(collections.namedtuple(
196 197 u'journalentry',
197 198 u'timestamp user command namespace name oldhashes newhashes')):
198 199 """Individual journal entry
199 200
200 201 * timestamp: a mercurial (time, timezone) tuple
201 202 * user: the username that ran the command
202 203 * command: the hg command that triggered this record
203 204 * namespace: the entry namespace, an opaque string
204 205 * name: the name of the changed item, opaque string with meaning in the
205 206 namespace
206 207 * oldhashes: a tuple of one or more binary hashes for the old location
207 208 * newhashes: a tuple of one or more binary hashes for the new location
208 209
209 210 Handles serialisation from and to the storage format. Fields are
210 211 separated by newlines, hashes are written out in hex separated by commas,
211 212 timestamp and timezone are separated by a space.
212 213
213 214 """
214 215 @classmethod
215 216 def fromstorage(cls, line):
216 217 (time, user, command, namespace, name,
217 218 oldhashes, newhashes) = line.split('\n')
218 219 timestamp, tz = time.split()
219 220 timestamp, tz = float(timestamp), int(tz)
220 221 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
221 222 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
222 223 return cls(
223 224 (timestamp, tz), user, command, namespace, name,
224 225 oldhashes, newhashes)
225 226
226 227 def __bytes__(self):
227 228 """bytes representation for storage"""
228 229 time = ' '.join(map(str, self.timestamp))
229 230 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
230 231 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
231 232 return '\n'.join((
232 233 time, self.user, self.command, self.namespace, self.name,
233 234 oldhashes, newhashes))
234 235
235 236 __str__ = encoding.strmethod(__bytes__)
236 237
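
The storage line format described in the docstring can be round-tripped by hand: seven newline-separated fields, with the timestamp and timezone space-separated and each hash field a comma-separated run of hex digests. A sketch with fabricated values, independent of Mercurial's node helpers:

    sample = "\n".join([
        "1514764800.0 0",    # timestamp and timezone
        "alice",             # user
        "hg commit -m x",    # command
        "bookmark",          # namespace
        "feature",           # name
        "deadbeef" * 5,      # old hash(es), comma-separated hex
        "cafebabe" * 5,      # new hash(es)
    ])
    time, user, command, namespace, name, old, new = sample.split("\n")
    timestamp, tz = float(time.split()[0]), int(time.split()[1])
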
237 238 class journalstorage(object):
238 239 """Storage for journal entries
239 240
240 241 Entries are divided over two files; one with entries that pertain to the
241 242 local working copy *only*, and one with entries that are shared across
242 243 multiple working copies when shared using the share extension.
243 244
244 245 Entries are stored with NUL bytes as separators. See the journalentry
245 246 class for the per-entry structure.
246 247
247 248 The file format starts with an integer version, delimited by a NUL.
248 249
249 250 This storage uses a dedicated lock; this makes it easier to avoid issues
250 251 with entries that are added while the regular wlock is unlocked (e.g.
251 252 by the dirstate).
252 253
253 254 """
254 255 _currentcommand = ()
255 256 _lockref = None
256 257
257 258 def __init__(self, repo):
258 self.user = util.getuser()
259 self.user = procutil.getuser()
259 260 self.ui = repo.ui
260 261 self.vfs = repo.vfs
261 262
262 263 # is this working copy using a shared storage?
263 264 self.sharedfeatures = self.sharedvfs = None
264 265 if repo.shared():
265 266 features = _readsharedfeatures(repo)
266 267 sharedrepo = hg.sharedreposource(repo)
267 268 if sharedrepo is not None and 'journal' in features:
268 269 self.sharedvfs = sharedrepo.vfs
269 270 self.sharedfeatures = features
270 271
271 272 # track the current command for recording in journal entries
272 273 @property
273 274 def command(self):
274 275 commandstr = ' '.join(
275 map(util.shellquote, journalstorage._currentcommand))
276 map(procutil.shellquote, journalstorage._currentcommand))
276 277 if '\n' in commandstr:
277 278 # truncate multi-line commands
278 279 commandstr = commandstr.partition('\n')[0] + ' ...'
279 280 return commandstr
280 281
281 282 @classmethod
282 283 def recordcommand(cls, *fullargs):
283 284 """Set the current hg arguments, stored with recorded entries"""
284 285 # Set the current command on the class because we may have started
285 286 # with a non-local repo (cloning for example).
286 287 cls._currentcommand = fullargs
287 288
288 289 def _currentlock(self, lockref):
289 290 """Returns the lock if it's held, or None if it's not.
290 291
291 292 (This is copied from the localrepo class)
292 293 """
293 294 if lockref is None:
294 295 return None
295 296 l = lockref()
296 297 if l is None or not l.held:
297 298 return None
298 299 return l
299 300
300 301 def jlock(self, vfs):
301 302 """Create a lock for the journal file"""
302 303 if self._currentlock(self._lockref) is not None:
303 304 raise error.Abort(_('journal lock does not support nesting'))
304 305 desc = _('journal of %s') % vfs.base
305 306 try:
306 307 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
307 308 except error.LockHeld as inst:
308 309 self.ui.warn(
309 310 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
310 311 # default to 600 seconds timeout
311 312 l = lock.lock(
312 313 vfs, 'namejournal.lock',
313 314 self.ui.configint("ui", "timeout"), desc=desc)
314 315 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
315 316 self._lockref = weakref.ref(l)
316 317 return l
317 318
318 319 def record(self, namespace, name, oldhashes, newhashes):
319 320 """Record a new journal entry
320 321
321 322 * namespace: an opaque string; this can be used to filter on the type
322 323 of recorded entries.
323 324 * name: the name defining this entry; for bookmarks, this is the
324 325 bookmark name. Can be filtered on when retrieving entries.
325 326 * oldhashes and newhashes: each a single binary hash, or a list of
326 327 binary hashes. These represent the old and new position of the named
327 328 item.
328 329
329 330 """
330 331 if not isinstance(oldhashes, list):
331 332 oldhashes = [oldhashes]
332 333 if not isinstance(newhashes, list):
333 334 newhashes = [newhashes]
334 335
335 336 entry = journalentry(
336 337 dateutil.makedate(), self.user, self.command, namespace, name,
337 338 oldhashes, newhashes)
338 339
339 340 vfs = self.vfs
340 341 if self.sharedvfs is not None:
341 342 # write to the shared repository if this feature is being
342 343 # shared between working copies.
343 344 if sharednamespaces.get(namespace) in self.sharedfeatures:
344 345 vfs = self.sharedvfs
345 346
346 347 self._write(vfs, entry)
347 348
348 349 def _write(self, vfs, entry):
349 350 with self.jlock(vfs):
350 351 version = None
351 352 # open file in append mode to ensure it is created if missing
352 353 with vfs('namejournal', mode='a+b') as f:
353 354 f.seek(0, os.SEEK_SET)
354 355 # Read just enough bytes to get a version number (up to 2
355 356 # digits plus separator)
356 357 version = f.read(3).partition('\0')[0]
357 358 if version and version != "%d" % storageversion:
358 359 # different version of the storage. Exit early (and not
359 360 # write anything) if this is not a version we can handle or
360 361 # the file is corrupt. In future, perhaps rotate the file
361 362 # instead?
362 363 self.ui.warn(
363 364 _("unsupported journal file version '%s'\n") % version)
364 365 return
365 366 if not version:
366 367 # empty file, write version first
367 368 f.write(("%d" % storageversion) + '\0')
368 369 f.seek(0, os.SEEK_END)
369 370 f.write(bytes(entry) + '\0')
370 371
371 372 def filtered(self, namespace=None, name=None):
372 373 """Yield all journal entries with the given namespace or name
373 374
374 375 Both the namespace and the name are optional; if neither is given all
375 376 entries in the journal are produced.
376 377
377 378 Matching supports regular expressions by using the `re:` prefix
378 379 (use `literal:` to match names or namespaces that start with `re:`)
379 380
380 381 """
381 382 if namespace is not None:
382 383 namespace = stringutil.stringmatcher(namespace)[-1]
383 384 if name is not None:
384 385 name = stringutil.stringmatcher(name)[-1]
385 386 for entry in self:
386 387 if namespace is not None and not namespace(entry.namespace):
387 388 continue
388 389 if name is not None and not name(entry.name):
389 390 continue
390 391 yield entry
391 392
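
For example, assuming a repo object with this extension loaded, the previous positions of all bookmarks whose names start with "feat" could be listed like this (inside Mercurial these values are bytes; shown as plain strings for readability):

    for entry in repo.journal.filtered(namespace='bookmark',
                                       name='re:feat.*'):
        print(entry.name, entry.command)
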
392 393 def __iter__(self):
393 394 """Iterate over the storage
394 395
395 396 Yields journalentry instances for each contained journal record.
396 397
397 398 """
398 399 local = self._open(self.vfs)
399 400
400 401 if self.sharedvfs is None:
401 402 return local
402 403
403 404 # iterate over both local and shared entries, but only those
404 405 # shared entries that are among the currently shared features
405 406 shared = (
406 407 e for e in self._open(self.sharedvfs)
407 408 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
408 409 return _mergeentriesiter(local, shared)
409 410
410 411 def _open(self, vfs, filename='namejournal', _newestfirst=True):
411 412 if not vfs.exists(filename):
412 413 return
413 414
414 415 with vfs(filename) as f:
415 416 raw = f.read()
416 417
417 418 lines = raw.split('\0')
418 419 version = lines and lines[0]
419 420 if version != "%d" % storageversion:
420 421 version = version or _('not available')
421 422 raise error.Abort(_("unknown journal file version '%s'") % version)
422 423
423 424 # Skip the first line, it's a version number. Normally we iterate over
424 425 # these in reverse order to list newest first; only when copying across
425 426 # a shared storage do we forgo reversing.
426 427 lines = lines[1:]
427 428 if _newestfirst:
428 429 lines = reversed(lines)
429 430 for line in lines:
430 431 if not line:
431 432 continue
432 433 yield journalentry.fromstorage(line)
433 434
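
The on-disk framing that _write() and _open() agree on is simply "<version>\0<entry>\0<entry>\0...", appended oldest-to-newest. Reading it by hand (a sketch assuming a repository whose journal uses storage version 0):

    with open(".hg/namejournal", "rb") as f:
        raw = f.read()
    records = raw.split(b"\0")
    version = records[0]                      # b"0" for this format
    entries = [r for r in records[1:] if r]   # newest entry is last
    assert version == b"0"
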
434 435 # journal reading
435 436 # log options that don't make sense for journal
436 437 _ignoreopts = ('no-merges', 'graph')
437 438 @command(
438 439 'journal', [
439 440 ('', 'all', None, 'show history for all names'),
440 441 ('c', 'commits', None, 'show commit metadata'),
441 442 ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
442 443 '[OPTION]... [BOOKMARKNAME]')
443 444 def journal(ui, repo, *args, **opts):
444 445 """show the previous position of bookmarks and the working copy
445 446
446 447 The journal is used to see the previous commits that bookmarks and the
447 448 working copy pointed to. By default it shows the previous locations of the
448 449 working copy. Passing a bookmark name will show all the previous positions of
449 450 that bookmark. Use the --all switch to show previous locations for all
450 451 bookmarks and the working copy; each line will then include the bookmark
451 452 name, or '.' for the working copy, as well.
452 453
453 454 If `name` starts with `re:`, the remainder of the name is treated as
454 455 a regular expression. To match a name that actually starts with `re:`,
455 456 use the prefix `literal:`.
456 457
457 458 By default hg journal only shows the commit hash and the command that was
458 459 running at that time. -v/--verbose will show the prior hash, the user, and
459 460 the time at which it happened.
460 461
461 462 Use -c/--commits to output log information on each commit hash; at this
462 463 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
463 464 switches to alter the log output for these.
464 465
465 466 `hg journal -T json` can be used to produce machine readable output.
466 467
467 468 """
468 469 opts = pycompat.byteskwargs(opts)
469 470 name = '.'
470 471 if opts.get('all'):
471 472 if args:
472 473 raise error.Abort(
473 474 _("You can't combine --all and filtering on a name"))
474 475 name = None
475 476 if args:
476 477 name = args[0]
477 478
478 479 fm = ui.formatter('journal', opts)
479 480
480 481 if opts.get("template") != "json":
481 482 if name is None:
482 483 displayname = _('the working copy and bookmarks')
483 484 else:
484 485 displayname = "'%s'" % name
485 486 ui.status(_("previous locations of %s:\n") % displayname)
486 487
487 488 limit = logcmdutil.getlimit(opts)
488 489 entry = None
489 490 ui.pager('journal')
490 491 for count, entry in enumerate(repo.journal.filtered(name=name)):
491 492 if count == limit:
492 493 break
493 494 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
494 495 name='node', sep=',')
495 496 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
496 497 name='node', sep=',')
497 498
498 499 fm.startitem()
499 500 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
500 501 fm.write('newhashes', '%s', newhashesstr)
501 502 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
502 503 fm.condwrite(
503 504 opts.get('all') or name.startswith('re:'),
504 505 'name', ' %-8s', entry.name)
505 506
506 507 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
507 508 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
508 509 fm.write('command', ' %s\n', entry.command)
509 510
510 511 if opts.get("commits"):
511 512 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
512 513 for hash in entry.newhashes:
513 514 try:
514 515 ctx = repo[hash]
515 516 displayer.show(ctx)
516 517 except error.RepoLookupError as e:
517 518 fm.write('repolookuperror', "%s\n\n", pycompat.bytestr(e))
518 519 displayer.close()
519 520
520 521 fm.end()
521 522
522 523 if entry is None:
523 524 ui.status(_("no recorded locations\n"))
@@ -1,764 +1,766
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to transplant changesets from another branch
9 9
10 10 This extension allows you to transplant changes to another parent revision,
11 11 possibly in another repository. The transplant is done using 'diff' patches.
12 12
13 13 Transplanted patches are recorded in .hg/transplant/transplants, as a
14 14 map from a changeset hash to its hash in the source repository.
15 15 '''
16 16 from __future__ import absolute_import
17 17
18 18 import os
19 19 import tempfile
20 20 from mercurial.i18n import _
21 21 from mercurial import (
22 22 bundlerepo,
23 23 cmdutil,
24 24 error,
25 25 exchange,
26 26 hg,
27 27 logcmdutil,
28 28 match,
29 29 merge,
30 30 node as nodemod,
31 31 patch,
32 32 pycompat,
33 33 registrar,
34 34 revlog,
35 35 revset,
36 36 scmutil,
37 37 smartset,
38 38 util,
39 39 vfs as vfsmod,
40 40 )
41 41 from mercurial.utils import (
42 procutil,
42 43 stringutil,
43 44 )
44 45
45 46 class TransplantError(error.Abort):
46 47 pass
47 48
48 49 cmdtable = {}
49 50 command = registrar.command(cmdtable)
50 51 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
51 52 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
52 53 # be specifying the version(s) of Mercurial they are tested with, or
53 54 # leave the attribute unspecified.
54 55 testedwith = 'ships-with-hg-core'
55 56
56 57 configtable = {}
57 58 configitem = registrar.configitem(configtable)
58 59
59 60 configitem('transplant', 'filter',
60 61 default=None,
61 62 )
62 63 configitem('transplant', 'log',
63 64 default=None,
64 65 )
65 66
66 67 class transplantentry(object):
67 68 def __init__(self, lnode, rnode):
68 69 self.lnode = lnode
69 70 self.rnode = rnode
70 71
71 72 class transplants(object):
72 73 def __init__(self, path=None, transplantfile=None, opener=None):
73 74 self.path = path
74 75 self.transplantfile = transplantfile
75 76 self.opener = opener
76 77
77 78 if not opener:
78 79 self.opener = vfsmod.vfs(self.path)
79 80 self.transplants = {}
80 81 self.dirty = False
81 82 self.read()
82 83
83 84 def read(self):
84 85 abspath = os.path.join(self.path, self.transplantfile)
85 86 if self.transplantfile and os.path.exists(abspath):
86 87 for line in self.opener.read(self.transplantfile).splitlines():
87 88 lnode, rnode = map(revlog.bin, line.split(':'))
88 89 list = self.transplants.setdefault(rnode, [])
89 90 list.append(transplantentry(lnode, rnode))
90 91
91 92 def write(self):
92 93 if self.dirty and self.transplantfile:
93 94 if not os.path.isdir(self.path):
94 95 os.mkdir(self.path)
95 96 fp = self.opener(self.transplantfile, 'w')
96 97 for list in self.transplants.itervalues():
97 98 for t in list:
98 99 l, r = map(nodemod.hex, (t.lnode, t.rnode))
99 100 fp.write(l + ':' + r + '\n')
100 101 fp.close()
101 102 self.dirty = False
102 103
103 104 def get(self, rnode):
104 105 return self.transplants.get(rnode) or []
105 106
106 107 def set(self, lnode, rnode):
107 108 list = self.transplants.setdefault(rnode, [])
108 109 list.append(transplantentry(lnode, rnode))
109 110 self.dirty = True
110 111
111 112 def remove(self, transplant):
112 113 list = self.transplants.get(transplant.rnode)
113 114 if list:
114 115 del list[list.index(transplant)]
115 116 self.dirty = True
116 117
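
The transplants file parsed by read() and written by write() holds one "lnode:rnode" pair of 40-character hex hashes per line, mapping the local (transplanted) node to the node in the source repository. A sketch with fabricated hashes:

    sample = ("aa" * 20) + ":" + ("bb" * 20)
    lnode_hex, rnode_hex = sample.split(":")   # local, then remote
    assert len(lnode_hex) == len(rnode_hex) == 40
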
117 118 class transplanter(object):
118 119 def __init__(self, ui, repo, opts):
119 120 self.ui = ui
120 121 self.path = repo.vfs.join('transplant')
121 122 self.opener = vfsmod.vfs(self.path)
122 123 self.transplants = transplants(self.path, 'transplants',
123 124 opener=self.opener)
124 125 def getcommiteditor():
125 126 editform = cmdutil.mergeeditform(repo[None], 'transplant')
126 127 return cmdutil.getcommiteditor(editform=editform,
127 128 **pycompat.strkwargs(opts))
128 129 self.getcommiteditor = getcommiteditor
129 130
130 131 def applied(self, repo, node, parent):
131 132 '''returns True if a node is already an ancestor of parent
132 133 or is the parent itself, or has already been transplanted'''
133 134 if hasnode(repo, parent):
134 135 parentrev = repo.changelog.rev(parent)
135 136 if hasnode(repo, node):
136 137 rev = repo.changelog.rev(node)
137 138 reachable = repo.changelog.ancestors([parentrev], rev,
138 139 inclusive=True)
139 140 if rev in reachable:
140 141 return True
141 142 for t in self.transplants.get(node):
142 143 # it might have been stripped
143 144 if not hasnode(repo, t.lnode):
144 145 self.transplants.remove(t)
145 146 return False
146 147 lnoderev = repo.changelog.rev(t.lnode)
147 148 if lnoderev in repo.changelog.ancestors([parentrev], lnoderev,
148 149 inclusive=True):
149 150 return True
150 151 return False
151 152
152 153 def apply(self, repo, source, revmap, merges, opts=None):
153 154 '''apply the revisions in revmap one by one in revision order'''
154 155 if opts is None:
155 156 opts = {}
156 157 revs = sorted(revmap)
157 158 p1, p2 = repo.dirstate.parents()
158 159 pulls = []
159 160 diffopts = patch.difffeatureopts(self.ui, opts)
160 161 diffopts.git = True
161 162
162 163 lock = tr = None
163 164 try:
164 165 lock = repo.lock()
165 166 tr = repo.transaction('transplant')
166 167 for rev in revs:
167 168 node = revmap[rev]
168 169 revstr = '%d:%s' % (rev, nodemod.short(node))
169 170
170 171 if self.applied(repo, node, p1):
171 172 self.ui.warn(_('skipping already applied revision %s\n') %
172 173 revstr)
173 174 continue
174 175
175 176 parents = source.changelog.parents(node)
176 177 if not (opts.get('filter') or opts.get('log')):
177 178 # If the changeset parent is the same as the
178 179 # wdir's parent, just pull it.
179 180 if parents[0] == p1:
180 181 pulls.append(node)
181 182 p1 = node
182 183 continue
183 184 if pulls:
184 185 if source != repo:
185 186 exchange.pull(repo, source.peer(), heads=pulls)
186 187 merge.update(repo, pulls[-1], False, False)
187 188 p1, p2 = repo.dirstate.parents()
188 189 pulls = []
189 190
190 191 domerge = False
191 192 if node in merges:
192 193 # pulling all the merge revs at once would mean we
193 194 # couldn't transplant after the latest even if
194 195 # transplants before them fail.
195 196 domerge = True
196 197 if not hasnode(repo, node):
197 198 exchange.pull(repo, source.peer(), heads=[node])
198 199
199 200 skipmerge = False
200 201 if parents[1] != revlog.nullid:
201 202 if not opts.get('parent'):
202 203 self.ui.note(_('skipping merge changeset %d:%s\n')
203 204 % (rev, nodemod.short(node)))
204 205 skipmerge = True
205 206 else:
206 207 parent = source.lookup(opts['parent'])
207 208 if parent not in parents:
208 209 raise error.Abort(_('%s is not a parent of %s') %
209 210 (nodemod.short(parent),
210 211 nodemod.short(node)))
211 212 else:
212 213 parent = parents[0]
213 214
214 215 if skipmerge:
215 216 patchfile = None
216 217 else:
217 218 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
218 219 fp = os.fdopen(fd, r'wb')
219 220 gen = patch.diff(source, parent, node, opts=diffopts)
220 221 for chunk in gen:
221 222 fp.write(chunk)
222 223 fp.close()
223 224
224 225 del revmap[rev]
225 226 if patchfile or domerge:
226 227 try:
227 228 try:
228 229 n = self.applyone(repo, node,
229 230 source.changelog.read(node),
230 231 patchfile, merge=domerge,
231 232 log=opts.get('log'),
232 233 filter=opts.get('filter'))
233 234 except TransplantError:
234 235 # Do not rollback, it is up to the user to
235 236 # fix the merge or cancel everything
236 237 tr.close()
237 238 raise
238 239 if n and domerge:
239 240 self.ui.status(_('%s merged at %s\n') % (revstr,
240 241 nodemod.short(n)))
241 242 elif n:
242 243 self.ui.status(_('%s transplanted to %s\n')
243 244 % (nodemod.short(node),
244 245 nodemod.short(n)))
245 246 finally:
246 247 if patchfile:
247 248 os.unlink(patchfile)
248 249 tr.close()
249 250 if pulls:
250 251 exchange.pull(repo, source.peer(), heads=pulls)
251 252 merge.update(repo, pulls[-1], False, False)
252 253 finally:
253 254 self.saveseries(revmap, merges)
254 255 self.transplants.write()
255 256 if tr:
256 257 tr.release()
257 258 if lock:
258 259 lock.release()
259 260
260 261 def filter(self, filter, node, changelog, patchfile):
261 262 '''arbitrarily rewrite changeset before applying it'''
262 263
263 264 self.ui.status(_('filtering %s\n') % patchfile)
264 265 user, date, msg = (changelog[1], changelog[2], changelog[4])
265 266 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
266 267 fp = os.fdopen(fd, r'wb')
267 268 fp.write("# HG changeset patch\n")
268 269 fp.write("# User %s\n" % user)
269 270 fp.write("# Date %d %d\n" % date)
270 271 fp.write(msg + '\n')
271 272 fp.close()
272 273
273 274 try:
274 self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile),
275 util.shellquote(patchfile)),
275 self.ui.system('%s %s %s' % (filter,
276 procutil.shellquote(headerfile),
277 procutil.shellquote(patchfile)),
276 278 environ={'HGUSER': changelog[1],
277 279 'HGREVISION': nodemod.hex(node),
278 280 },
279 281 onerr=error.Abort, errprefix=_('filter failed'),
280 282 blockedtag='transplant_filter')
281 283 user, date, msg = self.parselog(open(headerfile, 'rb'))[1:4]
282 284 finally:
283 285 os.unlink(headerfile)
284 286
285 287 return (user, date, msg)
286 288
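
A --filter command receives the header file as $1 and the patch file as $2, with HGUSER and HGREVISION in the environment; whatever it leaves in the header file is re-read by parselog(). A minimal hypothetical filter script (the file name and marker text are invented for illustration):

    #!/usr/bin/env python
    # filtermsg.py <headerfile> <patchfile>: append a marker line to
    # the changeset message; HGREVISION is set by transplant.
    import os
    import sys

    headerfile = sys.argv[1]
    with open(headerfile, "r") as f:
        header = f.read()
    header += "\n(filtered during transplant of %s)\n" % (
        os.environ.get("HGREVISION", "unknown"))
    with open(headerfile, "w") as f:
        f.write(header)

It could then be invoked as, e.g., hg transplant --filter ./filtermsg.py REV.
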
287 289 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
288 290 filter=None):
289 291 '''apply the patch in patchfile to the repository as a transplant'''
290 292 (manifest, user, (time, timezone), files, message) = cl[:5]
291 293 date = "%d %d" % (time, timezone)
292 294 extra = {'transplant_source': node}
293 295 if filter:
294 296 (user, date, message) = self.filter(filter, node, cl, patchfile)
295 297
296 298 if log:
297 299 # we don't translate messages inserted into commits
298 300 message += '\n(transplanted from %s)' % nodemod.hex(node)
299 301
300 302 self.ui.status(_('applying %s\n') % nodemod.short(node))
301 303 self.ui.note('%s %s\n%s\n' % (user, date, message))
302 304
303 305 if not patchfile and not merge:
304 306 raise error.Abort(_('can only omit patchfile if merging'))
305 307 if patchfile:
306 308 try:
307 309 files = set()
308 310 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
309 311 files = list(files)
310 312 except Exception as inst:
311 313 seriespath = os.path.join(self.path, 'series')
312 314 if os.path.exists(seriespath):
313 315 os.unlink(seriespath)
314 316 p1 = repo.dirstate.p1()
315 317 p2 = node
316 318 self.log(user, date, message, p1, p2, merge=merge)
317 319 self.ui.write(stringutil.forcebytestr(inst) + '\n')
318 320 raise TransplantError(_('fix up the working directory and run '
319 321 'hg transplant --continue'))
320 322 else:
321 323 files = None
322 324 if merge:
323 325 p1, p2 = repo.dirstate.parents()
324 326 repo.setparents(p1, node)
325 327 m = match.always(repo.root, '')
326 328 else:
327 329 m = match.exact(repo.root, '', files)
328 330
329 331 n = repo.commit(message, user, date, extra=extra, match=m,
330 332 editor=self.getcommiteditor())
331 333 if not n:
332 334 self.ui.warn(_('skipping emptied changeset %s\n') %
333 335 nodemod.short(node))
334 336 return None
335 337 if not merge:
336 338 self.transplants.set(n, node)
337 339
338 340 return n
339 341
340 342 def canresume(self):
341 343 return os.path.exists(os.path.join(self.path, 'journal'))
342 344
343 345 def resume(self, repo, source, opts):
344 346 '''recover last transaction and apply remaining changesets'''
345 347 if os.path.exists(os.path.join(self.path, 'journal')):
346 348 n, node = self.recover(repo, source, opts)
347 349 if n:
348 350 self.ui.status(_('%s transplanted as %s\n') %
349 351 (nodemod.short(node),
350 352 nodemod.short(n)))
351 353 else:
352 354 self.ui.status(_('%s skipped due to empty diff\n')
353 355 % (nodemod.short(node),))
354 356 seriespath = os.path.join(self.path, 'series')
355 357 if not os.path.exists(seriespath):
356 358 self.transplants.write()
357 359 return
358 360 nodes, merges = self.readseries()
359 361 revmap = {}
360 362 for n in nodes:
361 363 revmap[source.changelog.rev(n)] = n
362 364 os.unlink(seriespath)
363 365
364 366 self.apply(repo, source, revmap, merges, opts)
365 367
366 368 def recover(self, repo, source, opts):
367 369 '''commit working directory using journal metadata'''
368 370 node, user, date, message, parents = self.readlog()
369 371 merge = False
370 372
371 373 if not user or not date or not message or not parents[0]:
372 374 raise error.Abort(_('transplant log file is corrupt'))
373 375
374 376 parent = parents[0]
375 377 if len(parents) > 1:
376 378 if opts.get('parent'):
377 379 parent = source.lookup(opts['parent'])
378 380 if parent not in parents:
379 381 raise error.Abort(_('%s is not a parent of %s') %
380 382 (nodemod.short(parent),
381 383 nodemod.short(node)))
382 384 else:
383 385 merge = True
384 386
385 387 extra = {'transplant_source': node}
386 388 try:
387 389 p1, p2 = repo.dirstate.parents()
388 390 if p1 != parent:
389 391 raise error.Abort(_('working directory not at transplant '
390 392 'parent %s') % nodemod.hex(parent))
391 393 if merge:
392 394 repo.setparents(p1, parents[1])
393 395 modified, added, removed, deleted = repo.status()[:4]
394 396 if merge or modified or added or removed or deleted:
395 397 n = repo.commit(message, user, date, extra=extra,
396 398 editor=self.getcommiteditor())
397 399 if not n:
398 400 raise error.Abort(_('commit failed'))
399 401 if not merge:
400 402 self.transplants.set(n, node)
401 403 else:
402 404 n = None
403 405 self.unlog()
404 406
405 407 return n, node
406 408 finally:
407 409 # TODO: get rid of this meaningless try/finally enclosing.
408 410 # this is kept only to reduce changes in a patch.
409 411 pass
410 412
411 413 def readseries(self):
412 414 nodes = []
413 415 merges = []
414 416 cur = nodes
415 417 for line in self.opener.read('series').splitlines():
416 418 if line.startswith('# Merges'):
417 419 cur = merges
418 420 continue
419 421 cur.append(revlog.bin(line))
420 422
421 423 return (nodes, merges)
422 424
423 425 def saveseries(self, revmap, merges):
424 426 if not revmap:
425 427 return
426 428
427 429 if not os.path.isdir(self.path):
428 430 os.mkdir(self.path)
429 431 series = self.opener('series', 'w')
430 432 for rev in sorted(revmap):
431 433 series.write(nodemod.hex(revmap[rev]) + '\n')
432 434 if merges:
433 435 series.write('# Merges\n')
434 436 for m in merges:
435 437 series.write(nodemod.hex(m) + '\n')
436 438 series.close()
437 439
438 440 def parselog(self, fp):
439 441 parents = []
440 442 message = []
441 443 node = revlog.nullid
442 444 inmsg = False
443 445 user = None
444 446 date = None
445 447 for line in fp.read().splitlines():
446 448 if inmsg:
447 449 message.append(line)
448 450 elif line.startswith('# User '):
449 451 user = line[7:]
450 452 elif line.startswith('# Date '):
451 453 date = line[7:]
452 454 elif line.startswith('# Node ID '):
453 455 node = revlog.bin(line[10:])
454 456 elif line.startswith('# Parent '):
455 457 parents.append(revlog.bin(line[9:]))
456 458 elif not line.startswith('# '):
457 459 inmsg = True
458 460 message.append(line)
459 461 if None in (user, date):
460 462 raise error.Abort(_("filter corrupted changeset (no user or date)"))
461 463 return (node, user, date, '\n'.join(message), parents)
462 464
463 465 def log(self, user, date, message, p1, p2, merge=False):
464 466 '''journal changelog metadata for later recover'''
465 467
466 468 if not os.path.isdir(self.path):
467 469 os.mkdir(self.path)
468 470 fp = self.opener('journal', 'w')
469 471 fp.write('# User %s\n' % user)
470 472 fp.write('# Date %s\n' % date)
471 473 fp.write('# Node ID %s\n' % nodemod.hex(p2))
472 474 fp.write('# Parent ' + nodemod.hex(p1) + '\n')
473 475 if merge:
474 476 fp.write('# Parent ' + nodemod.hex(p2) + '\n')
475 477 fp.write(message.rstrip() + '\n')
476 478 fp.close()
477 479
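
Taken together, log() writes and parselog()/readlog() read back a small header-style journal; a representative (fabricated) .hg/transplant/journal file looks like:

    # User alice <alice@example.com>
    # Date 1514764800 0
    # Node ID cafebabecafebabecafebabecafebabecafebabe
    # Parent deadbeefdeadbeefdeadbeefdeadbeefdeadbeef
    original commit message
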
478 480 def readlog(self):
479 481 return self.parselog(self.opener('journal'))
480 482
481 483 def unlog(self):
482 484 '''remove changelog journal'''
483 485 absdst = os.path.join(self.path, 'journal')
484 486 if os.path.exists(absdst):
485 487 os.unlink(absdst)
486 488
487 489 def transplantfilter(self, repo, source, root):
488 490 def matchfn(node):
489 491 if self.applied(repo, node, root):
490 492 return False
491 493 if source.changelog.parents(node)[1] != revlog.nullid:
492 494 return False
493 495 extra = source.changelog.read(node)[5]
494 496 cnode = extra.get('transplant_source')
495 497 if cnode and self.applied(repo, cnode, root):
496 498 return False
497 499 return True
498 500
499 501 return matchfn
500 502
501 503 def hasnode(repo, node):
502 504 try:
503 505 return repo.changelog.rev(node) is not None
504 506 except error.RevlogError:
505 507 return False
506 508
507 509 def browserevs(ui, repo, nodes, opts):
508 510 '''interactively transplant changesets'''
509 511 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
510 512 transplants = []
511 513 merges = []
512 514 prompt = _('apply changeset? [ynmpcq?]:'
513 515 '$$ &yes, transplant this changeset'
514 516 '$$ &no, skip this changeset'
515 517 '$$ &merge at this changeset'
516 518 '$$ show &patch'
517 519 '$$ &commit selected changesets'
518 520 '$$ &quit and cancel transplant'
519 521 '$$ &? (show this help)')
520 522 for node in nodes:
521 523 displayer.show(repo[node])
522 524 action = None
523 525 while not action:
524 526 action = 'ynmpcq?'[ui.promptchoice(prompt)]
525 527 if action == '?':
526 528 for c, t in ui.extractchoices(prompt)[1]:
527 529 ui.write('%s: %s\n' % (c, t))
528 530 action = None
529 531 elif action == 'p':
530 532 parent = repo.changelog.parents(node)[0]
531 533 for chunk in patch.diff(repo, parent, node):
532 534 ui.write(chunk)
533 535 action = None
534 536 if action == 'y':
535 537 transplants.append(node)
536 538 elif action == 'm':
537 539 merges.append(node)
538 540 elif action == 'c':
539 541 break
540 542 elif action == 'q':
541 543 transplants = ()
542 544 merges = ()
543 545 break
544 546 displayer.close()
545 547 return (transplants, merges)
546 548
547 549 @command('transplant',
548 550 [('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
549 551 ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
550 552 ('a', 'all', None, _('pull all changesets up to the --branch revisions')),
551 553 ('p', 'prune', [], _('skip over REV'), _('REV')),
552 554 ('m', 'merge', [], _('merge at REV'), _('REV')),
553 555 ('', 'parent', '',
554 556 _('parent to choose when transplanting merge'), _('REV')),
555 557 ('e', 'edit', False, _('invoke editor on commit messages')),
556 558 ('', 'log', None, _('append transplant info to log message')),
557 559 ('c', 'continue', None, _('continue last transplant session '
558 560 'after fixing conflicts')),
559 561 ('', 'filter', '',
560 562 _('filter changesets through command'), _('CMD'))],
561 563 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
562 564 '[-m REV] [REV]...'))
563 565 def transplant(ui, repo, *revs, **opts):
564 566 '''transplant changesets from another branch
565 567
566 568 Selected changesets will be applied on top of the current working
567 569 directory with the log of the original changeset. The changesets
568 570 are copied and will thus appear twice in the history with different
569 571 identities.
570 572
571 573 Consider using the graft command if everything is inside the same
572 574 repository - it will use merges and will usually give a better result.
573 575 Use the rebase extension if the changesets are unpublished and you want
574 576 to move them instead of copying them.
575 577
576 578 If --log is specified, log messages will have a comment appended
577 579 of the form::
578 580
579 581 (transplanted from CHANGESETHASH)
580 582
581 583 You can rewrite the changelog message with the --filter option.
582 584 Its argument will be invoked with the current changelog message as
583 585 $1 and the patch as $2.
584 586
585 587 --source/-s specifies another repository to use for selecting changesets,
586 588 just as if it temporarily had been pulled.
587 589 If --branch/-b is specified, these revisions will be used as
588 590 heads when deciding which changesets to transplant, just as if only
589 591 these revisions had been pulled.
590 592 If --all/-a is specified, all the revisions up to the heads specified
591 593 with --branch will be transplanted.
592 594
593 595 Example:
594 596
595 597 - transplant all changes up to REV on top of your current revision::
596 598
597 599 hg transplant --branch REV --all
598 600
599 601 You can optionally mark selected transplanted changesets as merge
600 602 changesets. You will not be prompted to transplant any ancestors
601 603 of a merged transplant, and you can merge descendants of them
602 604 normally instead of transplanting them.
603 605
604 606 Merge changesets may be transplanted directly by specifying the
605 607 proper parent changeset by calling :hg:`transplant --parent`.
606 608
607 609 If no merges or revisions are provided, :hg:`transplant` will
608 610 start an interactive changeset browser.
609 611
610 612 If a changeset application fails, you can fix the merge by hand
611 613 and then resume where you left off by calling :hg:`transplant
612 614 --continue/-c`.
613 615 '''
614 616 with repo.wlock():
615 617 return _dotransplant(ui, repo, *revs, **opts)
616 618
617 619 def _dotransplant(ui, repo, *revs, **opts):
618 620 def incwalk(repo, csets, match=util.always):
619 621 for node in csets:
620 622 if match(node):
621 623 yield node
622 624
623 625 def transplantwalk(repo, dest, heads, match=util.always):
624 626 '''Yield all nodes that are ancestors of a head but not ancestors
625 627 of dest.
626 628 If no heads are specified, the heads of repo will be used.'''
627 629 if not heads:
628 630 heads = repo.heads()
629 631 ancestors = []
630 632 ctx = repo[dest]
631 633 for head in heads:
632 634 ancestors.append(ctx.ancestor(repo[head]).node())
633 635 for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
634 636 if match(node):
635 637 yield node
636 638
637 639 def checkopts(opts, revs):
638 640 if opts.get('continue'):
639 641 if opts.get('branch') or opts.get('all') or opts.get('merge'):
640 642 raise error.Abort(_('--continue is incompatible with '
641 643 '--branch, --all and --merge'))
642 644 return
643 645 if not (opts.get('source') or revs or
644 646 opts.get('merge') or opts.get('branch')):
645 647 raise error.Abort(_('no source URL, branch revision, or revision '
646 648 'list provided'))
647 649 if opts.get('all'):
648 650 if not opts.get('branch'):
649 651 raise error.Abort(_('--all requires a branch revision'))
650 652 if revs:
651 653 raise error.Abort(_('--all is incompatible with a '
652 654 'revision list'))
653 655
654 656 opts = pycompat.byteskwargs(opts)
655 657 checkopts(opts, revs)
656 658
657 659 if not opts.get('log'):
658 660 # deprecated config: transplant.log
659 661 opts['log'] = ui.config('transplant', 'log')
660 662 if not opts.get('filter'):
661 663 # deprecated config: transplant.filter
662 664 opts['filter'] = ui.config('transplant', 'filter')
663 665
664 666 tp = transplanter(ui, repo, opts)
665 667
666 668 p1, p2 = repo.dirstate.parents()
667 669 if len(repo) > 0 and p1 == revlog.nullid:
668 670 raise error.Abort(_('no revision checked out'))
669 671 if opts.get('continue'):
670 672 if not tp.canresume():
671 673 raise error.Abort(_('no transplant to continue'))
672 674 else:
673 675 cmdutil.checkunfinished(repo)
674 676 if p2 != revlog.nullid:
675 677 raise error.Abort(_('outstanding uncommitted merges'))
676 678 m, a, r, d = repo.status()[:4]
677 679 if m or a or r or d:
678 680 raise error.Abort(_('outstanding local changes'))
679 681
680 682 sourcerepo = opts.get('source')
681 683 if sourcerepo:
682 684 peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
683 685 heads = map(peer.lookup, opts.get('branch', ()))
684 686 target = set(heads)
685 687 for r in revs:
686 688 try:
687 689 target.add(peer.lookup(r))
688 690 except error.RepoError:
689 691 pass
690 692 source, csets, cleanupfn = bundlerepo.getremotechanges(ui, repo, peer,
691 693 onlyheads=sorted(target), force=True)
692 694 else:
693 695 source = repo
694 696 heads = map(source.lookup, opts.get('branch', ()))
695 697 cleanupfn = None
696 698
697 699 try:
698 700 if opts.get('continue'):
699 701 tp.resume(repo, source, opts)
700 702 return
701 703
702 704 tf = tp.transplantfilter(repo, source, p1)
703 705 if opts.get('prune'):
704 706 prune = set(source.lookup(r)
705 707 for r in scmutil.revrange(source, opts.get('prune')))
706 708 matchfn = lambda x: tf(x) and x not in prune
707 709 else:
708 710 matchfn = tf
709 711 merges = map(source.lookup, opts.get('merge', ()))
710 712 revmap = {}
711 713 if revs:
712 714 for r in scmutil.revrange(source, revs):
713 715 revmap[int(r)] = source.lookup(r)
714 716 elif opts.get('all') or not merges:
715 717 if source != repo:
716 718 alltransplants = incwalk(source, csets, match=matchfn)
717 719 else:
718 720 alltransplants = transplantwalk(source, p1, heads,
719 721 match=matchfn)
720 722 if opts.get('all'):
721 723 revs = alltransplants
722 724 else:
723 725 revs, newmerges = browserevs(ui, source, alltransplants, opts)
724 726 merges.extend(newmerges)
725 727 for r in revs:
726 728 revmap[source.changelog.rev(r)] = r
727 729 for r in merges:
728 730 revmap[source.changelog.rev(r)] = r
729 731
730 732 tp.apply(repo, source, revmap, merges, opts)
731 733 finally:
732 734 if cleanupfn:
733 735 cleanupfn()
734 736
735 737 revsetpredicate = registrar.revsetpredicate()
736 738
737 739 @revsetpredicate('transplanted([set])')
738 740 def revsettransplanted(repo, subset, x):
739 741 """Transplanted changesets in set, or all transplanted changesets.
740 742 """
741 743 if x:
742 744 s = revset.getset(repo, subset, x)
743 745 else:
744 746 s = subset
745 747 return smartset.baseset([r for r in s if
746 748 repo[r].extra().get('transplant_source')])
747 749
748 750 templatekeyword = registrar.templatekeyword()
749 751
750 752 @templatekeyword('transplanted', requires={'ctx'})
751 753 def kwtransplanted(context, mapping):
752 754 """String. The node identifier of the transplanted
753 755 changeset if any."""
754 756 ctx = context.resource(mapping, 'ctx')
755 757 n = ctx.extra().get('transplant_source')
756 758 return n and nodemod.hex(n) or ''
757 759
758 760 def extsetup(ui):
759 761 cmdutil.unfinishedstates.append(
760 762 ['transplant/journal', True, False, _('transplant in progress'),
761 763 _("use 'hg transplant --continue' or 'hg update' to abort")])
762 764
763 765 # tell hggettext to extract docstrings from these functions:
764 766 i18nfunctions = [revsettransplanted, kwtransplanted]
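
Both the transplanted() revset and the transplanted template keyword
defined above key off the transplant_source field that transplant
records in the changeset extras. As a minimal sketch (assuming an open
localrepository named repo; the helper name transplantedrevs is
illustrative), the same lookup can be done directly:

    from mercurial import node as nodemod

    def transplantedrevs(repo):
        # yield (rev, hex source node) for changesets that carry the
        # 'transplant_source' extra written by the transplant extension
        for rev in repo:
            src = repo[rev].extra().get('transplant_source')
            if src:
                yield rev, nodemod.hex(src)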
@@ -1,598 +1,598
1 1 # chgserver.py - command server extension for cHg
2 2 #
3 3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """command server extension for cHg
9 9
10 10 'S' channel (read/write)
11 11 propagate ui.system() request to client
12 12
13 13 'attachio' command
14 14 attach client's stdio passed by sendmsg()
15 15
16 16 'chdir' command
17 17 change current directory
18 18
19 19 'setenv' command
20 20 replace os.environ completely
21 21
22 22 'setumask' command
23 23 set umask
24 24
25 25 'validate' command
26 26 reload the config and check if the server is up to date
27 27
28 28 Config
29 29 ------
30 30
31 31 ::
32 32
33 33 [chgserver]
34 34 # how long (in seconds) an idle chg server should wait before exiting
35 35 idletimeout = 3600
36 36
37 37 # whether to skip config or env change checks
38 38 skiphash = False
39 39 """
40 40
41 41 from __future__ import absolute_import
42 42
43 43 import hashlib
44 44 import inspect
45 45 import os
46 46 import re
47 47 import socket
48 48 import stat
49 49 import struct
50 50 import time
51 51
52 52 from .i18n import _
53 53
54 54 from . import (
55 55 commandserver,
56 56 encoding,
57 57 error,
58 58 extensions,
59 59 node,
60 60 pycompat,
61 61 util,
62 62 )
63 63
64 64 from .utils import (
65 65 procutil,
66 66 )
67 67
68 68 _log = commandserver.log
69 69
70 70 def _hashlist(items):
71 71 """return sha1 hexdigest for a list"""
72 72 return node.hex(hashlib.sha1(str(items)).digest())
73 73
74 74 # sensitive config sections affecting confighash
75 75 _configsections = [
76 76 'alias', # affects global state commands.table
77 77 'eol', # uses setconfig('eol', ...)
78 78 'extdiff', # uisetup will register new commands
79 79 'extensions',
80 80 ]
81 81
82 82 _configsectionitems = [
83 83 ('commands', 'show.aliasprefix'), # show.py reads it in extsetup
84 84 ]
85 85
86 86 # sensitive environment variables affecting confighash
87 87 _envre = re.compile(r'''\A(?:
88 88 CHGHG
89 89 |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
90 90 |HG(?:ENCODING|PLAIN).*
91 91 |LANG(?:UAGE)?
92 92 |LC_.*
93 93 |LD_.*
94 94 |PATH
95 95 |PYTHON.*
96 96 |TERM(?:INFO)?
97 97 |TZ
98 98 )\Z''', re.X)
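# Illustration (not part of the original file): names like HGPLAIN,
# HGENCODING, LC_ALL, LD_LIBRARY_PATH, PATH and TZ match _envre and so
# feed into confighash, while e.g. HOME or EDITOR do not.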
99 99
100 100 def _confighash(ui):
101 101 """return a quick hash for detecting config/env changes
102 102
103 103 confighash is the hash of sensitive config items and environment variables.
104 104
105 105 for chgserver, the design is that once confighash changes, the server is
106 106 no longer qualified to serve its client and should redirect the client to
107 107 a new server. unlike mtimehash, a confighash change will not mark the
108 108 server outdated and make it exit, since the user can have different
109 109 configs at the same time.
110 110 """
111 111 sectionitems = []
112 112 for section in _configsections:
113 113 sectionitems.append(ui.configitems(section))
114 114 for section, item in _configsectionitems:
115 115 sectionitems.append(ui.config(section, item))
116 116 sectionhash = _hashlist(sectionitems)
117 117 # If $CHGHG is set, the change to $HG should not trigger a new chg server
118 118 if 'CHGHG' in encoding.environ:
119 119 ignored = {'HG'}
120 120 else:
121 121 ignored = set()
122 122 envitems = [(k, v) for k, v in encoding.environ.iteritems()
123 123 if _envre.match(k) and k not in ignored]
124 124 envhash = _hashlist(sorted(envitems))
125 125 return sectionhash[:6] + envhash[:6]
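# Illustration (hypothetical values, not part of the original file):
# with sectionhash 'a94a8fe5cc...' and envhash 'de9f2c7fd2...', the
# confighash is 'a94a8f' + 'de9f2c' == 'a94a8fde9f2c', a 12-character
# id that changes whenever a sensitive config item or environment
# variable changes.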
126 126
127 127 def _getmtimepaths(ui):
128 128 """get a list of paths that should be checked to detect change
129 129
130 130 The list will include:
131 131 - extensions (will not cover all files for complex extensions)
132 132 - mercurial/__version__.py
133 133 - python binary
134 134 """
135 135 modules = [m for n, m in extensions.extensions(ui)]
136 136 try:
137 137 from . import __version__
138 138 modules.append(__version__)
139 139 except ImportError:
140 140 pass
141 141 files = [pycompat.sysexecutable]
142 142 for m in modules:
143 143 try:
144 144 files.append(inspect.getabsfile(m))
145 145 except TypeError:
146 146 pass
147 147 return sorted(set(files))
148 148
149 149 def _mtimehash(paths):
150 150 """return a quick hash for detecting file changes
151 151
152 152 mtimehash calls stat on the given paths and calculates a hash based on the
153 153 size and mtime of each file. mtimehash does not read file content because
154 154 reading is expensive. therefore it's not 100% reliable for detecting
155 155 content changes. it's possible to return different hashes for the same
156 156 file contents. it's also possible to return the same hash for different
157 157 file contents in some carefully crafted situations.
158 158
159 159 for chgserver, the design is that once mtimehash changes, the server is
160 160 considered outdated immediately and should no longer provide service.
161 161
162 162 mtimehash is not included in confighash because we only know the paths of
163 163 extensions after importing them (there is imp.find_module but that faces
164 164 race conditions). We need to calculate confighash without importing.
165 165 """
166 166 def trystat(path):
167 167 try:
168 168 st = os.stat(path)
169 169 return (st[stat.ST_MTIME], st.st_size)
170 170 except OSError:
171 171 # could be ENOENT, EPERM etc. not fatal in any case
172 172 pass
173 173 return _hashlist(map(trystat, paths))[:12]
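# Illustration (not part of the original file): each path contributes
# a (mtime, size) tuple, or None when stat fails, so touching an
# extension file or replacing the python binary changes the 12-char
# mtimehash and thereby invalidates the server.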
174 174
175 175 class hashstate(object):
176 176 """a structure storing confighash, mtimehash, paths used for mtimehash"""
177 177 def __init__(self, confighash, mtimehash, mtimepaths):
178 178 self.confighash = confighash
179 179 self.mtimehash = mtimehash
180 180 self.mtimepaths = mtimepaths
181 181
182 182 @staticmethod
183 183 def fromui(ui, mtimepaths=None):
184 184 if mtimepaths is None:
185 185 mtimepaths = _getmtimepaths(ui)
186 186 confighash = _confighash(ui)
187 187 mtimehash = _mtimehash(mtimepaths)
188 188 _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash))
189 189 return hashstate(confighash, mtimehash, mtimepaths)
190 190
191 191 def _newchgui(srcui, csystem, attachio):
192 192 class chgui(srcui.__class__):
193 193 def __init__(self, src=None):
194 194 super(chgui, self).__init__(src)
195 195 if src:
196 196 self._csystem = getattr(src, '_csystem', csystem)
197 197 else:
198 198 self._csystem = csystem
199 199
200 200 def _runsystem(self, cmd, environ, cwd, out):
201 201 # fall back to the original system method if the output needs to be
202 202 # captured (to self._buffers), or the output stream is not stdout
203 203 # (e.g. stderr, cStringIO), because the chg client is not aware of
204 204 # these situations and will behave differently (write to stdout).
205 205 if (out is not self.fout
206 206 or not util.safehasattr(self.fout, 'fileno')
207 207 or self.fout.fileno() != procutil.stdout.fileno()):
208 return util.system(cmd, environ=environ, cwd=cwd, out=out)
208 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
209 209 self.flush()
210 return self._csystem(cmd, util.shellenviron(environ), cwd)
210 return self._csystem(cmd, procutil.shellenviron(environ), cwd)
211 211
212 212 def _runpager(self, cmd, env=None):
213 self._csystem(cmd, util.shellenviron(env), type='pager',
213 self._csystem(cmd, procutil.shellenviron(env), type='pager',
214 214 cmdtable={'attachio': attachio})
215 215 return True
216 216
217 217 return chgui(srcui)
218 218
219 219 def _loadnewui(srcui, args):
220 220 from . import dispatch # avoid cycle
221 221
222 222 newui = srcui.__class__.load()
223 223 for a in ['fin', 'fout', 'ferr', 'environ']:
224 224 setattr(newui, a, getattr(srcui, a))
225 225 if util.safehasattr(srcui, '_csystem'):
226 226 newui._csystem = srcui._csystem
227 227
228 228 # command line args
229 229 options = dispatch._earlyparseopts(newui, args)
230 230 dispatch._parseconfig(newui, options['config'])
231 231
232 232 # stolen from tortoisehg.util.copydynamicconfig()
233 233 for section, name, value in srcui.walkconfig():
234 234 source = srcui.configsource(section, name)
235 235 if ':' in source or source == '--config' or source.startswith('$'):
236 236 # path:line or command line, or environ
237 237 continue
238 238 newui.setconfig(section, name, value, source)
239 239
240 240 # load wd and repo config, copied from dispatch.py
241 241 cwd = options['cwd']
242 242 cwd = cwd and os.path.realpath(cwd) or None
243 243 rpath = options['repository']
244 244 path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)
245 245
246 246 return (newui, newlui)
247 247
248 248 class channeledsystem(object):
249 249 """Propagate ui.system() request in the following format:
250 250
251 251 payload length (unsigned int),
252 252 type, '\0',
253 253 cmd, '\0',
254 254 cwd, '\0',
255 255 envkey, '=', val, '\0',
256 256 ...
257 257 envkey, '=', val
258 258
259 259 if type == 'system', waits for:
260 260
261 261 exitcode length (unsigned int),
262 262 exitcode (int)
263 263
264 264 if type == 'pager', repeatedly waits for a command name ending with '\n'
265 265 and executes the matching command defined by cmdtable, or exits the loop
266 266 if the command name is empty.
267 267 """
268 268 def __init__(self, in_, out, channel):
269 269 self.in_ = in_
270 270 self.out = out
271 271 self.channel = channel
272 272
273 273 def __call__(self, cmd, environ, cwd=None, type='system', cmdtable=None):
274 args = [type, util.quotecommand(cmd), os.path.abspath(cwd or '.')]
274 args = [type, procutil.quotecommand(cmd), os.path.abspath(cwd or '.')]
275 275 args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
276 276 data = '\0'.join(args)
277 277 self.out.write(struct.pack('>cI', self.channel, len(data)))
278 278 self.out.write(data)
279 279 self.out.flush()
280 280
281 281 if type == 'system':
282 282 length = self.in_.read(4)
283 283 length, = struct.unpack('>I', length)
284 284 if length != 4:
285 285 raise error.Abort(_('invalid response'))
286 286 rc, = struct.unpack('>i', self.in_.read(4))
287 287 return rc
288 288 elif type == 'pager':
289 289 while True:
290 290 cmd = self.in_.readline()[:-1]
291 291 if not cmd:
292 292 break
293 293 if cmdtable and cmd in cmdtable:
294 294 _log('pager subcommand: %s' % cmd)
295 295 cmdtable[cmd]()
296 296 else:
297 297 raise error.Abort(_('unexpected command: %s') % cmd)
298 298 else:
299 299 raise error.ProgrammingError('invalid S channel type: %s' % type)
300 300
301 301 _iochannels = [
302 302 # server.ch, ui.fp, mode
303 303 ('cin', 'fin', r'rb'),
304 304 ('cout', 'fout', r'wb'),
305 305 ('cerr', 'ferr', r'wb'),
306 306 ]
307 307
308 308 class chgcmdserver(commandserver.server):
309 309 def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress):
310 310 super(chgcmdserver, self).__init__(
311 311 _newchgui(ui, channeledsystem(fin, fout, 'S'), self.attachio),
312 312 repo, fin, fout)
313 313 self.clientsock = sock
314 314 self._oldios = [] # original (self.ch, ui.fp, fd) before "attachio"
315 315 self.hashstate = hashstate
316 316 self.baseaddress = baseaddress
317 317 if hashstate is not None:
318 318 self.capabilities = self.capabilities.copy()
319 319 self.capabilities['validate'] = chgcmdserver.validate
320 320
321 321 def cleanup(self):
322 322 super(chgcmdserver, self).cleanup()
323 323 # dispatch._runcatch() does not flush outputs if exception is not
324 324 # handled by dispatch._dispatch()
325 325 self.ui.flush()
326 326 self._restoreio()
327 327
328 328 def attachio(self):
329 329 """Attach to client's stdio passed via unix domain socket; all
330 330 channels except cresult will no longer be used
331 331 """
332 332 # tell the client to sendmsg() with a 1-byte payload, which
333 333 # distinguishes it from the "attachio\n" command consumed by client.read()
334 334 self.clientsock.sendall(struct.pack('>cI', 'I', 1))
335 335 clientfds = util.recvfds(self.clientsock.fileno())
336 336 _log('received fds: %r\n' % clientfds)
337 337
338 338 ui = self.ui
339 339 ui.flush()
340 340 first = self._saveio()
341 341 for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
342 342 assert fd > 0
343 343 fp = getattr(ui, fn)
344 344 os.dup2(fd, fp.fileno())
345 345 os.close(fd)
346 346 if not first:
347 347 continue
348 348 # reset buffering mode when the client is first attached. as we want
349 349 # to see output immediately on the pager, the mode stays unchanged
350 350 # when the client re-attaches. ferr is unchanged because it should
351 351 # be unbuffered whether or not it is a tty.
352 352 if fn == 'ferr':
353 353 newfp = fp
354 354 else:
355 355 # make it line buffered explicitly because the default is
356 356 # decided on first write(), where fout could be a pager.
357 357 if fp.isatty():
358 358 bufsize = 1 # line buffered
359 359 else:
360 360 bufsize = -1 # system default
361 361 newfp = os.fdopen(fp.fileno(), mode, bufsize)
362 362 setattr(ui, fn, newfp)
363 363 setattr(self, cn, newfp)
364 364
365 365 self.cresult.write(struct.pack('>i', len(clientfds)))
366 366
367 367 def _saveio(self):
368 368 if self._oldios:
369 369 return False
370 370 ui = self.ui
371 371 for cn, fn, _mode in _iochannels:
372 372 ch = getattr(self, cn)
373 373 fp = getattr(ui, fn)
374 374 fd = os.dup(fp.fileno())
375 375 self._oldios.append((ch, fp, fd))
376 376 return True
377 377
378 378 def _restoreio(self):
379 379 ui = self.ui
380 380 for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
381 381 newfp = getattr(ui, fn)
382 382 # close newfp while it's associated with client; otherwise it
383 383 # would be closed when newfp is deleted
384 384 if newfp is not fp:
385 385 newfp.close()
386 386 # restore original fd: fp is open again
387 387 os.dup2(fd, fp.fileno())
388 388 os.close(fd)
389 389 setattr(self, cn, ch)
390 390 setattr(ui, fn, fp)
391 391 del self._oldios[:]
392 392
393 393 def validate(self):
394 394 """Reload the config and check if the server is up to date
395 395
396 396 Read a list of '\0' separated arguments.
397 397 Write a non-empty list of '\0' separated instruction strings or '\0'
398 398 if the list is empty.
399 399 An instruction string could be one of:
400 400 - "unlink $path", the client should unlink the path to stop the
401 401 outdated server.
402 402 - "redirect $path", the client should attempt to connect to $path
403 403 first. If it does not work, start a new server. It implies
404 404 "reconnect".
405 405 - "exit $n", the client should exit directly with code n.
406 406 This may happen if we cannot parse the config.
407 407 - "reconnect", the client should close the connection and
408 408 reconnect.
409 409 If neither "reconnect" nor "redirect" is included in the instruction
410 410 list, the client can continue with this server after completing all
411 411 the instructions.
412 412 """
413 413 from . import dispatch # avoid cycle
414 414
415 415 args = self._readlist()
416 416 try:
417 417 self.ui, lui = _loadnewui(self.ui, args)
418 418 except error.ParseError as inst:
419 419 dispatch._formatparse(self.ui.warn, inst)
420 420 self.ui.flush()
421 421 self.cresult.write('exit 255')
422 422 return
423 423 newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
424 424 insts = []
425 425 if newhash.mtimehash != self.hashstate.mtimehash:
426 426 addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
427 427 insts.append('unlink %s' % addr)
428 428 # mtimehash is empty if one or more extensions fail to load.
429 429 # to be compatible with hg, still serve the client this time.
430 430 if self.hashstate.mtimehash:
431 431 insts.append('reconnect')
432 432 if newhash.confighash != self.hashstate.confighash:
433 433 addr = _hashaddress(self.baseaddress, newhash.confighash)
434 434 insts.append('redirect %s' % addr)
435 435 _log('validate: %s\n' % insts)
436 436 self.cresult.write('\0'.join(insts) or '\0')
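# Illustration (hypothetical paths, not part of the original file):
# when both hashes changed, insts could be
#   ['unlink /tmp/chg-sock-0ddba1', 'redirect /tmp/chg-sock-a94a8f']
# which is sent joined by '\0'; an empty list is sent as a single
# '\0' so the response is never empty.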
437 437
438 438 def chdir(self):
439 439 """Change current directory
440 440
441 441 Note that the behavior of the --cwd option is a bit different from this.
442 442 It does not affect the --config parameter.
443 443 """
444 444 path = self._readstr()
445 445 if not path:
446 446 return
447 447 _log('chdir to %r\n' % path)
448 448 os.chdir(path)
449 449
450 450 def setumask(self):
451 451 """Change umask"""
452 452 mask = struct.unpack('>I', self._read(4))[0]
453 453 _log('setumask %r\n' % mask)
454 454 os.umask(mask)
455 455
456 456 def runcommand(self):
457 457 return super(chgcmdserver, self).runcommand()
458 458
459 459 def setenv(self):
460 460 """Clear and update os.environ
461 461
462 462 Note that not all variables can take effect on the running process.
463 463 """
464 464 l = self._readlist()
465 465 try:
466 466 newenv = dict(s.split('=', 1) for s in l)
467 467 except ValueError:
468 468 raise ValueError('unexpected value in setenv request')
469 469 _log('setenv: %r\n' % sorted(newenv.keys()))
470 470 encoding.environ.clear()
471 471 encoding.environ.update(newenv)
472 472
473 473 capabilities = commandserver.server.capabilities.copy()
474 474 capabilities.update({'attachio': attachio,
475 475 'chdir': chdir,
476 476 'runcommand': runcommand,
477 477 'setenv': setenv,
478 478 'setumask': setumask})
479 479
480 if util.safehasattr(util, 'setprocname'):
480 if util.safehasattr(procutil, 'setprocname'):
481 481 def setprocname(self):
482 482 """Change process title"""
483 483 name = self._readstr()
484 484 _log('setprocname: %r\n' % name)
485 util.setprocname(name)
485 procutil.setprocname(name)
486 486 capabilities['setprocname'] = setprocname
487 487
488 488 def _tempaddress(address):
489 489 return '%s.%d.tmp' % (address, os.getpid())
490 490
491 491 def _hashaddress(address, hashstr):
492 492 # if the basename of address contains '.', use only the left part. this
493 493 # makes it possible for the client to pass 'server.tmp$PID' and follow with
494 494 # an atomic rename to avoid locking when spawning new servers.
495 495 dirname, basename = os.path.split(address)
496 496 basename = basename.split('.', 1)[0]
497 497 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
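# Illustration (hypothetical values, not part of the original file):
#   _hashaddress('/tmp/chg/server.tmp1234', 'a94a8fde9f2c')
# drops the '.tmp1234' suffix and returns
#   '/tmp/chg/server-a94a8fde9f2c'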
498 498
499 499 class chgunixservicehandler(object):
500 500 """Set of operations for chg services"""
501 501
502 502 pollinterval = 1 # [sec]
503 503
504 504 def __init__(self, ui):
505 505 self.ui = ui
506 506 self._idletimeout = ui.configint('chgserver', 'idletimeout')
507 507 self._lastactive = time.time()
508 508
509 509 def bindsocket(self, sock, address):
510 510 self._inithashstate(address)
511 511 self._checkextensions()
512 512 self._bind(sock)
513 513 self._createsymlink()
514 514 # no "listening at" message should be printed to simulate hg behavior
515 515
516 516 def _inithashstate(self, address):
517 517 self._baseaddress = address
518 518 if self.ui.configbool('chgserver', 'skiphash'):
519 519 self._hashstate = None
520 520 self._realaddress = address
521 521 return
522 522 self._hashstate = hashstate.fromui(self.ui)
523 523 self._realaddress = _hashaddress(address, self._hashstate.confighash)
524 524
525 525 def _checkextensions(self):
526 526 if not self._hashstate:
527 527 return
528 528 if extensions.notloaded():
529 529 # one or more extensions failed to load. mtimehash becomes
530 530 # meaningless because we do not know the paths of those extensions.
531 531 # set mtimehash to an illegal hash value to invalidate the server.
532 532 self._hashstate.mtimehash = ''
533 533
534 534 def _bind(self, sock):
535 535 # use a unique temp address so we can stat the file and do an
536 536 # ownership check later
537 537 tempaddress = _tempaddress(self._realaddress)
538 538 util.bindunixsocket(sock, tempaddress)
539 539 self._socketstat = os.stat(tempaddress)
540 540 sock.listen(socket.SOMAXCONN)
541 541 # rename will atomically replace the old socket file if it exists. the
542 542 # old server will detect the ownership change and exit.
543 543 util.rename(tempaddress, self._realaddress)
544 544
545 545 def _createsymlink(self):
546 546 if self._baseaddress == self._realaddress:
547 547 return
548 548 tempaddress = _tempaddress(self._baseaddress)
549 549 os.symlink(os.path.basename(self._realaddress), tempaddress)
550 550 util.rename(tempaddress, self._baseaddress)
551 551
552 552 def _issocketowner(self):
553 553 try:
554 554 st = os.stat(self._realaddress)
555 555 return (st.st_ino == self._socketstat.st_ino and
556 556 st[stat.ST_MTIME] == self._socketstat[stat.ST_MTIME])
557 557 except OSError:
558 558 return False
559 559
560 560 def unlinksocket(self, address):
561 561 if not self._issocketowner():
562 562 return
563 563 # it is possible to have a race condition here in which we may
564 564 # remove another server's socket file. but that's okay
565 565 # since that server will detect and exit automatically and
566 566 # the client will start a new server on demand.
567 567 util.tryunlink(self._realaddress)
568 568
569 569 def shouldexit(self):
570 570 if not self._issocketowner():
571 571 self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
572 572 return True
573 573 if time.time() - self._lastactive > self._idletimeout:
574 574 self.ui.debug('being idle too long. exiting.\n')
575 575 return True
576 576 return False
577 577
578 578 def newconnection(self):
579 579 self._lastactive = time.time()
580 580
581 581 def createcmdserver(self, repo, conn, fin, fout):
582 582 return chgcmdserver(self.ui, repo, fin, fout, conn,
583 583 self._hashstate, self._baseaddress)
584 584
585 585 def chgunixservice(ui, repo, opts):
586 586 # CHGINTERNALMARK is set by the chg client. It is an indication that
587 587 # things are started by chg so other code can act accordingly, like
588 588 # disabling demandimport or detecting a chg client started by another chg
589 589 # client. When executed here, CHGINTERNALMARK is no longer useful and
590 590 # hence dropped to make environ cleaner.
591 591 if 'CHGINTERNALMARK' in encoding.environ:
592 592 del encoding.environ['CHGINTERNALMARK']
593 593
594 594 if repo:
595 595 # one chgserver can serve multiple repos. drop repo information
596 596 ui.setconfig('bundle', 'mainreporoot', '', 'repo')
597 597 h = chgunixservicehandler(ui)
598 598 return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
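
For reference, a standalone sketch of the 'S' channel framing that
channeledsystem writes: one channel byte plus a big-endian payload
length, then '\0'-separated fields (type, cmd, cwd, env pairs). The
function names are illustrative, and strings are assumed to be Python 2
bytestrings as in the surrounding code:

    import struct

    def encoderequest(channel, type, cmd, cwd, environ):
        # channel is a single byte such as 'S'
        args = [type, cmd, cwd]
        args.extend('%s=%s' % (k, v) for k, v in environ.items())
        data = '\0'.join(args)
        return struct.pack('>cI', channel, len(data)) + data

    def decoderequest(blob):
        # the header is 5 bytes: '>cI' == channel byte + payload length
        channel, length = struct.unpack('>cI', blob[:5])
        fields = blob[5:5 + length].split('\0')
        type, cmd, cwd = fields[:3]
        environ = dict(f.split('=', 1) for f in fields[3:])
        return channel, type, cmd, cwd, environ

A 'system' request is then answered with a 4-byte length (always 4)
followed by a big-endian int exit code, exactly as the __call__
implementation above reads it.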
The requested commit or file is too big and content was truncated.