##// END OF EJS Templates
revset: make match function initiate query from full set by default...
Yuya Nishihara -
r24114:fafd9a12 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,6323 +1,6323 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _
11 11 import os, re, difflib, time, tempfile, errno, shlex
12 12 import sys, socket
13 13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 14 import patch, help, encoding, templatekw, discovery
15 15 import archival, changegroup, cmdutil, hbisect
16 16 import sshserver, hgweb, commandserver
17 17 import extensions
18 18 from hgweb import server as hgweb_server
19 19 import merge as mergemod
20 20 import minirst, revset, fileset
21 21 import dagparser, context, simplemerge, graphmod, copies
22 22 import random
23 23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
24 24 import phases, obsolete, exchange, bundle2
25 25 import ui as uimod
26 26
# Command table: maps command names/aliases to (function, options, synopsis)
# entries; populated by the @command decorator below.
table = {}

command = cmdutil.command(table)

# Space delimited list of commands that don't require local repositories.
# This should be populated by passing norepo=True into the @command decorator.
norepo = ''
# Space delimited list of commands that optionally require local repositories.
# This should be populated by passing optionalrepo=True into the @command
# decorator.
optionalrepo = ''
# Space delimited list of commands that will examine arguments looking for
# a repository. This should be populated by passing inferrepo=True into the
# @command decorator.
inferrepo = ''

# common command options
#
# Each option tuple is (short name, long name, default, help text[, value
# label]). These lists are concatenated into per-command option tables in
# the @command decorators below.

# options accepted by every command
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# options for commands that talk to a remote repository
remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

# include/exclude pattern options for commands that walk the working dir
walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (DEPRECATED)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

# options shared by log-like commands
logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

# whitespace-handling options shared by diff-producing commands
diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

# Commands start here, listed alphabetically
168 168
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository.

    .. container:: verbose

       An example showing how new (unknown) files are added
       automatically by :hg:`add`::

         $ ls
         foo.c
         $ hg status
         ? foo.c
         $ hg add
         adding foo.c
         $ hg status
         A foo.c

    Returns 0 if all files are successfully added.
    """

    # Build a matcher over the working directory context and let cmdutil
    # do the actual scheduling; exit with 1 if any file was rejected.
    matcher = scmutil.match(repo[None], pats, opts)
    bad = cmdutil.add(ui, repo, matcher, "", False, **opts)
    if bad:
        return 1
    return 0
204 204
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    ``.hgignore``. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    Returns 0 if all files are successfully added.
    """
    # --similarity is given as a percentage; default to 100 (exact
    # renames only) when unset.
    rawsim = opts.get('similarity') or 100
    try:
        sim = float(rawsim)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim > 100 or sim < 0:
        raise util.Abort(_('similarity must be between 0 and 100'))
    m = scmutil.match(repo[None], pats, opts)
    # scmutil.addremove expects the similarity as a 0..1 ratio
    return scmutil.addremove(repo, m, "", opts, similarity=sim / 100.0)
239 239
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    fm = ui.formatter('annotate', opts)
    datefunc = ui.quiet and util.shortdate or util.datestr
    hexfn = fm.hexfunc

    # Each entry is (option name, column separator, value extractor,
    # plain-output formatter); the extractor receives an annotation pair
    # of (filectx, line number).
    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), str),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), str),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    # option names mapped to template/formatter field names where they differ
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    # with no explicit column selection, show the revision number
    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    # when a real (non-plain) formatter is active, emit raw values and let
    # the formatter render them; otherwise pre-format to strings here
    if fm:
        def makefunc(get, fmt):
            return get
    else:
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        # abort on any bad file pattern instead of just warning
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        formats = []
        pieces = []

        # collect one column of values per selected field; for plain output,
        # right-align each column to its widest entry
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                if fm:
                    formats.append(['%s' for x in l])
                else:
                    sizes = [encoding.colwidth(x) for x in l]
                    ml = max(sizes)
                    formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
                pieces.append(l)

        # emit one formatter item per annotated line
        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        # make sure output ends with a newline even if the file does not
        if lines and not lines[-1][1].endswith('\n'):
            fm.plain('\n')

    fm.end()
352 352
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    # resolve the revision to archive; default is the working dir parent
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(repo, dest, node)
    if repo.root == os.path.realpath(dest):
        raise util.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # streaming to stdout: a plain directory makes no sense, and the
        # default prefix is derived from the repository name
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        prefix = prefix or (os.path.basename(repo.root) + '-%h')

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    decode = not opts.get('no_decode')
    archival.archive(repo, dest, node, kind, decode, matchfn, prefix,
                     subrepos=opts.get('subrepos'))
424 424
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None, _('commit if no conflicts were encountered')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, commit=False, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::

      backout cannot be used to fix either an unwanted or
      incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # the revision may be passed either positionally or via -r; accept
    # exactly one of the two
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise util.Abort(_('cannot backout change that is not an ancestor'))

    # backing out a merge requires --parent to disambiguate which parent
    # to revert towards; --parent is invalid otherwise
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    wlock = repo.wlock()
    try:
        branch = repo.dirstate.branch()
        bheads = repo.branchheads(branch)
        rctx = scmutil.revsingle(repo, hex(parent))
        if not opts.get('merge') and op1 != node:
            # non-merge case when REV is not the working dir parent:
            # merge the reverse change into the working directory without
            # moving the dirstate parents
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                repo.dirstate.beginparentchange()
                stats = mergemod.update(repo, parent, True, True, False,
                                        node, False)
                repo.setparents(op1, op2)
                repo.dirstate.endparentchange()
                hg._showstats(repo, stats)
                if stats[3]:
                    # stats[3] counts unresolved files after the merge
                    repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                     "file merges\n"))
                    return 1
                elif not commit:
                    msg = _("changeset %s backed out, "
                            "don't forget to commit.\n")
                    ui.status(msg % short(node))
                    return 0
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
        else:
            # simple case: revert the working directory to the state of
            # the backed-out revision's parent
            hg.clean(repo, node, show_stats=False)
            repo.dirstate.setbranch(branch)
            cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())


        def commitfunc(ui, repo, message, match, opts):
            # commit callback used by cmdutil.commit below; supplies a
            # default message (and forces the editor) when none was given
            editform = 'backout'
            e = cmdutil.getcommiteditor(editform=editform, **opts)
            if not message:
                # we don't translate commit messages
                message = "Backed out changeset %s" % short(node)
                e = cmdutil.getcommiteditor(edit=True, editform=editform)
            return repo.commit(message, opts.get('user'), opts.get('date'),
                               match, editor=e)
        newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
        if not newnode:
            ui.status(_("nothing changed\n"))
            return 1
        cmdutil.commitstatus(repo, newnode, branch, bheads)

        def nice(node):
            # "rev:shorthash" display form
            return '%d:%s' % (repo.changelog.rev(node), short(node))
        ui.status(_('changeset %s backs out changeset %s\n') %
                  (nice(repo.changelog.tip()), nice(node)))
        if opts.get('merge') and op1 != node:
            # --merge: update back to the old dirstate parent and merge
            # the backout changeset into it
            hg.clean(repo, op1, show_stats=False)
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                return hg.merge(repo, hex(repo.changelog.tip()))
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
    finally:
        wlock.release()
    return 0
568 568
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
               reset=None, good=None, bad=None, skip=None, extend=None,
               noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revsets` for more about the `bisect()` keyword.

    Returns 0 on success.
    """
    def extendbisectrange(nodes, good):
        # bisect is incomplete when it ends on a merge node and
        # one of the parent was not checked.
        parents = repo[nodes[0]].parents()
        if len(parents) > 1:
            # pick the side opposite to the final verdict and see whether
            # exactly one parent was already classified there
            side = good and state['bad'] or state['good']
            num = len(set(i.node() for i in parents) & set(side))
            if num == 1:
                return parents[0].ancestor(parents[1])
        return None

    def print_result(nodes, good):
        # report the outcome of a finished (or narrowed) bisection
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_('Not all ancestors of this changeset have been'
                           ' checked.\nUse bisect --extend to continue the '
                           'bisection from\nthe common ancestor, %s.\n')
                         % extendnode)
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                        "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                        "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])
        displayer.close()

    def check_state(state, interactive=True):
        # a bisection needs at least one good and one bad revision; in
        # interactive use a state being built up is tolerated
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    cmdutil.checkunfinished(repo)

    if reset:
        # --reset: simply drop the on-disk bisect state file
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        # automatic mode: repeatedly run the command, classify the tested
        # revision from its exit status, and move to the next candidate
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise util.Abort(_('current bisect revision is unknown - '
                                   'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('current bisect revision is a merge'))
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)})
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = scmutil.revsingle(repo, rev, node)
                rev = None # clear for future iterations
                state[transition].append(ctx.node())
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                if not noupdate:
                    cmdutil.bailifchanged(repo)
                    hg.clean(repo, node, show_stats=False)
        finally:
            # always persist the last tested revision, even on abort
            state['current'] = [node]
            hbisect.save_state(repo, state)
        print_result(nodes, bgood)
        return

    # update state

    # interactive mode: record the user's verdict for the given (or
    # current) revisions
    if rev:
        nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
    else:
        nodes = [repo.lookup('.')]

    if good or bad or skip:
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                if noupdate:
                    return
                cmdutil.bailifchanged(repo)
                return hg.clean(repo, extendnode.node())
        raise util.Abort(_("nothing to extend"))

    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        if not noupdate:
            cmdutil.bailifchanged(repo)
            return hg.clean(repo, node)
833 833
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    def checkformat(mark):
        # Normalize and validate a bookmark name before recording it;
        # aborts on empty/whitespace-only names or invalid labels.
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    def checkconflict(repo, mark, cur, force=False, target=None):
        # Refuse to clobber an existing bookmark or shadow a branch name
        # unless force is set. When a target revision is given, this also
        # prunes divergent 'name@path' bookmarks made obsolete by the move.
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                # all bookmarks sharing this name's base (before any '@path')
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    # reject option combinations that have no sensible meaning
    if delete and rename:
        raise util.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise util.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise util.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise util.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        # any mutation of the bookmark store needs the working-dir lock
        wlock = repo.wlock()
        try:
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                for mark in names:
                    if mark not in marks:
                        raise util.Abort(_("bookmark '%s' does not exist") %
                                         mark)
                    if mark == repo._bookmarkcurrent:
                        bookmarks.unsetcurrent(repo)
                    del marks[mark]
                marks.write()

            elif rename:
                if not names:
                    raise util.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise util.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise util.Abort(_("bookmark '%s' does not exist") % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                # keep the renamed bookmark active if the old name was
                if repo._bookmarkcurrent == rename and not inactive:
                    bookmarks.setcurrent(repo, mark)
                del marks[rename]
                marks.write()

            elif names:
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    if newact is None:
                        # first listed name is the candidate to activate
                        newact = mark
                    if inactive and mark == repo._bookmarkcurrent:
                        bookmarks.unsetcurrent(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.setcurrent(repo, newact)
                elif cur != tgt and newact == repo._bookmarkcurrent:
                    bookmarks.unsetcurrent(repo)
                marks.write()

            # Same message whether trying to deactivate a non-existent
            # bookmark (not found in repo.marks) or a bookmark that is
            # not active (not found in repo._bookmarkcurrent)
            elif inactive:
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._bookmarkcurrent:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.unsetcurrent(repo)
        finally:
            wlock.release()
    else: # show bookmarks
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            current = repo._bookmarkcurrent
            if bmark == current:
                # '*' marks the active bookmark in plain output
                prefix, label = '*', 'bookmarks.current'
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            # pad so the rev:node column lines up across bookmarks
            pad = " " * (25 - encoding.colwidth(bmark))
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
                         repo.changelog.rev(n), hexfn(n), label=label)
            fm.data(active=(bmark == current))
            fm.plain('\n')
        fm.end()
1020 1020
@command('branch',
    [('f', 'force', None,
      _('set branch name even if it shadows an existing branch')),
     ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch as closed.

    Returns 0 on success.
    """
    if label:
        label = label.strip()

    clean = opts.get('clean')
    if not clean and not label:
        # query mode: no new name and no --clean, just print the branch
        ui.write("%s\n" % repo.dirstate.branch())
        return

    wlock = repo.wlock()
    try:
        if clean:
            # revert to the branch of the working directory's first parent
            newname = repo[None].p1().branch()
            repo.dirstate.setbranch(newname)
            ui.status(_('reset working directory to branch %s\n') % newname)
        elif label:
            if not opts.get('force') and label in repo.branchmap():
                # an existing branch may only be reused when it is a
                # parent's branch (e.g. continuing work after an update)
                if label not in [p.branch() for p in repo.parents()]:
                    raise util.Abort(_('a branch of the same name already'
                                       ' exists'),
                                     # i18n: "it" refers to an existing branch
                                     hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)
            ui.status(_('(branches are permanent and global, '
                        'did you want a bookmark?)\n'))
    finally:
        wlock.release()
1080 1080
@command('branches',
    [('a', 'active', False,
      _('show only branches that have unmerged heads (DEPRECATED)')),
     ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-ac]'))
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    # a branch is active when it still owns at least one repository head
    repoheads = set(repo.heads())
    entries = []
    for name, heads, tip, isclosed in repo.branchmap().iterbranches():
        hasheads = not isclosed and bool(set(heads) & repoheads)
        entries.append((name, repo[tip], hasheads, not isclosed))
    # active branches first, then by descending tip revision
    entries.sort(key=lambda e: (e[2], e[1].rev(), e[0], e[3]),
                 reverse=True)

    for name, ctx, isactive, isopen in entries:
        if active and not isactive:
            continue
        if isactive:
            label = 'branches.active'
            notice = ''
        elif not isopen:
            if not closed:
                continue
            label = 'branches.closed'
            notice = _(' (closed)')
        else:
            label = 'branches.inactive'
            notice = _(' (inactive)')
        iscurrent = (name == repo.dirstate.branch())
        if iscurrent:
            label = 'branches.current'

        fm.startitem()
        fm.write('branch', '%s', name, label=label)
        rev = ctx.rev()
        # align the rev:node column regardless of branch-name width
        padsize = max(31 - len(str(rev)) - encoding.colwidth(name), 0)
        fmt = ' ' * padsize + ' %d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
                     label='log.changeset changeset.%s' % ctx.phasestr())
        fm.data(active=isactive, closed=not isopen, current=iscurrent)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain('\n')
    fm.end()
1140 1140
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
     ('r', 'rev', [], _('a changeset intended to be added to the destination'),
      _('REV')),
     ('b', 'branch', [], _('a specific branch you would like to bundle'),
      _('BRANCH')),
     ('', 'base', [],
      _('a base changeset assumed to be available at the destination'),
      _('REV')),
     ('a', 'all', None, _('bundle all changesets in the repository')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    revs = None
    if 'rev' in opts:
        revs = scmutil.revrange(repo, opts['rev'])

    # map user-facing compression names to internal bundle identifiers
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG2Y'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    if opts.get('all'):
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    # TODO: get desired bundlecaps from command line.
    bundlecaps = None
    if base:
        # --base given: the caller asserts which revisions the destination
        # already has, so skip remote discovery entirely
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        common = [repo.lookup(rev) for rev in base]
        # empty revs means "all heads"; otherwise resolve to nodes
        heads = revs and map(repo.lookup, revs) or revs
        cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
                                        common=common, bundlecaps=bundlecaps)
        outgoing = None
    else:
        # no --base: contact the destination and compute outgoing changesets
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)
        cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
                                             bundlecaps)
    if not cg:
        # nothing to bundle; report why and exit with status 1
        scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
        return 1

    changegroup.writebundle(ui, cg, fname, bundletype)
1224 1224
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository

    Returns 0 on success.
    """
    # resolve the requested revision (working-dir parent when -r is absent)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    # build a matcher over all file patterns given on the command line
    allpats = (file1,) + pats
    matcher = scmutil.match(ctx, allpats, opts)
    return cmdutil.cat(ui, repo, ctx, matcher, '', **opts)
1258 1258
@command('^clone',
    [('U', 'noupdate', None,
     _('the clone will include an empty working copy (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories. Note that specifying a
    tag will include the tagged changeset but not the changeset
    containing the tag.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone http://selenic.com/hg

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone http://selenic.com/hg#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    noupdate = opts.get('noupdate')
    updaterev = opts.get('updaterev')
    if noupdate and updaterev:
        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

    # check out a working copy unless -U was given; -u REV selects what
    # to check out, otherwise hg.clone picks its default
    checkout = updaterev or not noupdate
    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=checkout,
                      branch=opts.get('branch'))

    # hg.clone returns None on success; map that to shell exit status 0
    return result is None
1377 1377
@command('^commit|ci',
    [('A', 'addremove', None,
      _('mark new/missing files as added/removed before committing')),
     ('', 'close-branch', None,
      _('mark a branch as closed, hiding it from the branch list')),
     ('', 'amend', None, _('amend the parent of the working dir')),
     ('s', 'secret', None, _('use the secret phase for committing')),
     ('e', 'edit', None, _('invoke editor on commit messages')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.
    """
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise util.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    # abort early if another multi-step operation (graft, rebase, ...)
    # is in progress
    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        # only branch heads (or, when amending, a parent on this branch)
        # may be closed
        if not bheads:
            raise util.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo.parents()[0].p1().branch() != branch and \
                    repo.parents()[0].p2().branch() != branch:
                raise util.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        if not old.mutable():
            raise util.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot amend while merging'))
        allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
        if not allowunstable and old.children():
            raise util.Abort(_('cannot amend changeset with children'))

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            # fall back to the amended changeset's user/date when the
            # options do not override them
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        current = repo._bookmarkcurrent
        marks = old.bookmarks()
        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
        elif marks:
            # move every bookmark from the amended changeset onto its
            # replacement, keeping the active one active
            ui.debug('moving bookmarks %r from %s to %s\n' %
                     (marks, old.hex(), hex(node)))
            newmarks = repo._bookmarks
            for bm in marks:
                newmarks[bm] = node
                if bm == current:
                    bookmarks.setcurrent(repo, bm)
            newmarks.write()
    else:
        def commitfunc(ui, repo, message, match, opts):
            # temporarily force the 'secret' phase for -s/--secret,
            # restoring the previous configuration afterwards
            backup = ui.backupconfig('phases', 'new-commit')
            baseui = repo.baseui
            basebackup = baseui.backupconfig('phases', 'new-commit')
            try:
                if opts.get('secret'):
                    ui.setconfig('phases', 'new-commit', 'secret', 'commit')
                    # Propagate to subrepos
                    baseui.setconfig('phases', 'new-commit', 'secret', 'commit')

                editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
                editor = cmdutil.getcommiteditor(editform=editform, **opts)
                return repo.commit(message, opts.get('user'), opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)
            finally:
                ui.restoreconfig(backup)
                repo.baseui.restoreconfig(basebackup)


        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # nothing committed; distinguish "missing files" from a true
            # no-op for a more helpful message (stat[3] is the deleted list)
            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1517 1517
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))],
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise util.Abort(_("can't use --local and --global together"))

        # pick the config file set matching the requested scope
        if opts.get('local'):
            if not repo:
                raise util.Abort(_("can't use --local outside a repository"))
            paths = [repo.join('hgrc')]
        elif opts.get('global'):
            paths = scmutil.systemrcpath()
        else:
            paths = scmutil.userrcpath()

        # edit the first existing config file; when none exists yet, seed
        # the preferred location with a commented sample configuration
        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            fp = open(f, "w")
            try:
                # ensure the handle is closed even if the write fails, so a
                # partially-created file is never left open behind us
                fp.write(samplehgrc)
            finally:
                fp.close()

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=util.Abort, errprefix=_("edit failed"))
        return

    for f in scmutil.rcpath():
        ui.debug('read config from: %s\n' % f)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # a "section.name" item query is exclusive: at most one item, and
        # it cannot be combined with bare section names
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise util.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        value = str(value).replace('\n', '\\n')
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # section match: print "section.name=value" lines
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                    matched = True
                elif v == sectname:
                    # exact item match: print only the value
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
                    matched = True
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
            matched = True
    if matched:
        return 0
    return 1
1615 1615
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
     ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # the actual work happens in cmdutil.copy; we only hold the
    # working-directory lock around it (non-blocking acquire)
    wlock = repo.wlock(False)
    try:
        result = cmdutil.copy(ui, repo, pats, opts)
    finally:
        wlock.release()
    return result
1642 1642
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit index file was given: open it outside any repository
        index, rev1, rev2 = args
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
        lookup = r.lookup
    elif nargs == 2:
        # no index file: fall back to the current repository's changelog
        if not repo:
            raise util.Abort(_("there is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1661 1661
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # refuse to run in a repo that already has history: revision numbers
    # produced below assume the DAG starts at rev 0
    cl = repo.changelog
    if len(cl) > 0:
        raise util.Abort(_('repository is not empty'))

    # first pass over the DAG text: count node events ('n') so the
    # progress bar below has a meaningful total
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    lock = tr = None
    try:
        lock = repo.lock()
        tr = repo.transaction("builddag")

        # second pass: actually create one commit per node event
        at = -1                 # rev id of the most recently created node
        atbranch = 'default'    # branch applied to subsequently created nodes
        nodeids = []            # maps DAG id -> changelog node
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    # keep a single file "mf" whose content merges cleanly
                    # across the generated history
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag the line belonging to this rev so every rev
                    # modifies a distinct line of the file
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # single file "of" rewritten wholesale by every rev
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # brand-new file "nf<id>" per rev; merges also carry
                    # over the other parent's nf* files
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                # translate DAG parent ids into changelog nodes; negative
                # or missing parents mean a root commit
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # label event: record a local tag for an earlier node
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # annotation event: switch the branch for later nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock)
1812 1812
@command('debugbundle',
    [('a', 'all', None, _('show all details'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = hg.openpath(ui, bundlepath)
    try:
        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files get their own lister; everything below handles
        # only changegroup-style (HG10*) bundles
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        if all:
            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))

            def showchunks(named):
                # dump every delta chunk of the group 'named'; 'chain'
                # tracks the previous node for delta-chain decoding
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            # a changegroup stream is: changelog, manifest, then one
            # group per touched file
            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            # terse mode: list only changelog node ids.  (bundle2 input was
            # already dispatched to _debugbundle2 above, so the old
            # re-check/abort here was unreachable and has been dropped.)
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()
1869 1869
def _debugbundle2(ui, gen, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise util.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type != 'b2x:changegroup':
            continue
        # decode the embedded changegroup and list its changelog nodes
        version = part.params.get('version', '01')
        cg = changegroup.packermap[version][1](part, 'UN')
        chunkdata = cg.changelogheader()
        prevnode = None
        while True:
            chunkdata = cg.deltachunk(prevnode)
            if not chunkdata:
                break
            prevnode = chunkdata['node']
            ui.write("    %s\n" % hex(prevnode))
1889 1889
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # pass 1: every dirstate entry must be consistent with the manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if f not in m1 and state in "nr":
            errors += 1
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
        if f in m1 and state in "a":
            errors += 1
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
        if state in "m" and f not in m1 and f not in m2:
            errors += 1
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
    # pass 2: every file in the first manifest must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            errors += 1
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
    if errors:
        # use a local name that does not shadow the imported 'error' module
        msg = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(msg)
1917 1917
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for name, entry in sorted(table.iteritems()):
        # strip aliases ("cmd|alias") and the "^" marker of primary commands
        name = name.split('|')[0].strip('^')
        optnames = ', '.join(o[1] for o in entry[1])
        ui.write('%s: %s\n' % (name, optnames))
1925 1925
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # with -o, list option flags (global + command-specific) instead
        # of command names
        flags = []
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        for opttable in opttables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    matches = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        # verbose mode shows every alias of each matching command
        names = [' '.join(c[0]) for c in matches.values()]
    else:
        names = matches
    ui.write("%s\n" % "\n".join(sorted(names)))
1953 1953
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling any revs that
        # were explicitly listed on the command line
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            # generator of dagparser events: 'n' = node with its parents,
            # 'l' = label attached to a rev
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # precompute rev -> [tag, ...] so labels can be emitted inline
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the branch
                    # recorded in the changelog entry changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    # serialize the event stream back into compact dagparser text
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
2015 2015
@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    usecl = opts.get('changelog')
    useman = opts.get('manifest')
    if usecl or useman:
        # with -c/-m the sole positional argument is the revision
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
2031 2031
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is a (unixtime, tzoffset) pair
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
2047 2047
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    # run one discovery round and report the common heads; 'remote' is
    # captured as a default argument so the legacy-peer rebinding below
    # stays local to each call
    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # reduce the raw common set to its heads, as the new-style
                # discovery would report it
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # NOTE(review): 'serverlog' is read from opts but no such option is
    # declared in the table above, so it is presumably only reachable via
    # an extension or manual opts injection -- TODO confirm
    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in server log files; fields are
        # semicolon-separated, with head lists as space-separated hex nodes
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
2117 2117
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # in verbose mode, show the parse tree before evaluating
        tree = fileset.parse(expr)[0]
        ui.note(tree, "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
2130 2130
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # probe file used for the case-sensitivity check; removed afterwards
    util.writefile('.debugfsinfo', '')
    def yesno(flag):
        if flag:
            return 'yes'
        return 'no'
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    ui.write(('case-sensitive: %s\n') % yesno(util.checkcase('.debugfsinfo')))
    os.unlink('.debugfsinfo')
2141 2141
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle('debug', **kwargs)

    # map user-visible compression names to on-disk bundle headers
    bundletype = opts.get('type', 'bzip2').lower()
    knowntypes = {'none': 'HG10UN',
                  'bzip2': 'HG10BZ',
                  'gzip': 'HG10GZ',
                  'bundle2': 'HG2Y'}
    bundletype = knowntypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(ui, bundle, bundlepath, bundletype)
2175 2175
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    matcher = repo.dirstate._ignore
    # the combined pattern only exists once the ignore matcher has been
    # built from actual pattern files
    pattern = getattr(matcher, 'includepat', None)
    if pattern is None:
        raise util.Abort(_("no ignore patterns found"))
    ui.write("%s\n" % pattern)
2185 2185
@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    # generaldelta revlogs store an explicit delta parent; plain revlogs
    # only have a chain base, so the column header differs
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    # --debug shows full 40-char hashes, otherwise short 12-char ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure one rendered node id to size the header columns
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n" % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2243 2243
@command('debugindexdot', [], _('FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    rlog = None
    if repo:
        # prefer the repository's own filelog when it has any revisions
        flog = repo.file(file_)
        if len(flog):
            rlog = flog
    if rlog is None:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        parents = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write("}\n")
2262 2262
@command('debuginstall', [], '', norepo=True)
def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path
        # NOTE(review): this helper appears unused in this function --
        # presumably left over from removed checks; TODO confirm
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # count of detected problems; doubles as the command's return value
    problems = 0

    # encoding
    ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # Python
    ui.status(_("checking Python executable (%s)\n") % sys.executable)
    ui.status(_("checking Python version (%s)\n")
              % ("%s.%s.%s" % sys.version_info[:3]))
    ui.status(_("checking Python lib (%s)...\n")
              % os.path.dirname(os.__file__))

    # compiled modules
    ui.status(_("checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    import templater
    p = templater.templatepaths()
    ui.status(_("checking templates (%s)...\n") % ' '.join(p))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            try:
                templater.templater(m)
            except Exception, inst:
                ui.write(" %s\n" % inst)
                p = None
        else:
            ui.write(_(" template 'default' not found\n"))
            p = None
    else:
        ui.write(_(" no template directories found\n"))
    if not p:
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("checking commit editor...\n"))
    editor = ui.geteditor()
    # only the executable part of the configured editor command is checked
    cmdpath = util.findexe(shlex.split(editor)[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
            problems += 1

    # check username
    ui.status(_("checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("no problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
2360 2360
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise util.Abort("known() not supported by target repository")
    # one round-trip for all ids; the peer answers with booleans
    flags = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join(f and "1" or "0" for f in flags))
2373 2373
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept for old completion scripts; all the work is done by
    # the newer debugnamecomplete command
    debugnamecomplete(ui, repo, *args)
2378 2378
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # the branches namespace lists every branch; we historically completed
    # only the open ones, so collect branches separately below
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
2398 2398
2399 2399 @command('debuglocks',
2400 2400 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2401 2401 ('W', 'force-wlock', None,
2402 2402 _('free the working state lock (DANGEROUS)'))],
2403 2403 _('[OPTION]...'))
2404 2404 def debuglocks(ui, repo, **opts):
2405 2405 """show or modify state of locks
2406 2406
2407 2407 By default, this command will show which locks are held. This
2408 2408 includes the user and process holding the lock, the amount of time
2409 2409 the lock has been held, and the machine name where the process is
2410 2410 running if it's not local.
2411 2411
2412 2412 Locks protect the integrity of Mercurial's data, so should be
2413 2413 treated with care. System crashes or other interruptions may cause
2414 2414 locks to not be properly released, though Mercurial will usually
2415 2415 detect and remove such stale locks automatically.
2416 2416
2417 2417 However, detecting stale locks may not always be possible (for
2418 2418 instance, on a shared filesystem). Removing locks may also be
2419 2419 blocked by filesystem permissions.
2420 2420
2421 2421 Returns 0 if no locks are held.
2422 2422
2423 2423 """
2424 2424
2425 2425 if opts.get('force_lock'):
2426 2426 repo.svfs.unlink('lock')
2427 2427 if opts.get('force_wlock'):
2428 2428 repo.vfs.unlink('wlock')
2429 2429 if opts.get('force_lock') or opts.get('force_lock'):
2430 2430 return 0
2431 2431
2432 2432 now = time.time()
2433 2433 held = 0
2434 2434
2435 2435 def report(vfs, name, method):
2436 2436 # this causes stale locks to get reaped for more accurate reporting
2437 2437 try:
2438 2438 l = method(False)
2439 2439 except error.LockHeld:
2440 2440 l = None
2441 2441
2442 2442 if l:
2443 2443 l.release()
2444 2444 else:
2445 2445 try:
2446 2446 stat = repo.svfs.lstat(name)
2447 2447 age = now - stat.st_mtime
2448 2448 user = util.username(stat.st_uid)
2449 2449 locker = vfs.readlock(name)
2450 2450 if ":" in locker:
2451 2451 host, pid = locker.split(':')
2452 2452 if host == socket.gethostname():
2453 2453 locker = 'user %s, process %s' % (user, pid)
2454 2454 else:
2455 2455 locker = 'user %s, process %s, host %s' \
2456 2456 % (user, pid, host)
2457 2457 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2458 2458 return 1
2459 2459 except OSError, e:
2460 2460 if e.errno != errno.ENOENT:
2461 2461 raise
2462 2462
2463 2463 ui.write("%-6s free\n" % (name + ":"))
2464 2464 return 0
2465 2465
2466 2466 held += report(repo.svfs, "lock", repo.lock)
2467 2467 held += report(repo.vfs, "wlock", repo.wlock)
2468 2468
2469 2469 return held
2470 2470
2471 2471 @command('debugobsolete',
2472 2472 [('', 'flags', 0, _('markers flag')),
2473 2473 ('', 'record-parents', False,
2474 2474 _('record parent information for the precursor')),
2475 2475 ('r', 'rev', [], _('display markers relevant to REV')),
2476 2476 ] + commitopts2,
2477 2477 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2478 2478 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2479 2479 """create arbitrary obsolete marker
2480 2480
2481 2481 With no arguments, displays the list of obsolescence markers."""
2482 2482
2483 2483 def parsenodeid(s):
2484 2484 try:
2485 2485 # We do not use revsingle/revrange functions here to accept
2486 2486 # arbitrary node identifiers, possibly not present in the
2487 2487 # local repository.
2488 2488 n = bin(s)
2489 2489 if len(n) != len(nullid):
2490 2490 raise TypeError()
2491 2491 return n
2492 2492 except TypeError:
2493 2493 raise util.Abort('changeset references must be full hexadecimal '
2494 2494 'node identifiers')
2495 2495
2496 2496 if precursor is not None:
2497 2497 if opts['rev']:
2498 2498 raise util.Abort('cannot select revision when creating marker')
2499 2499 metadata = {}
2500 2500 metadata['user'] = opts['user'] or ui.username()
2501 2501 succs = tuple(parsenodeid(succ) for succ in successors)
2502 2502 l = repo.lock()
2503 2503 try:
2504 2504 tr = repo.transaction('debugobsolete')
2505 2505 try:
2506 2506 try:
2507 2507 date = opts.get('date')
2508 2508 if date:
2509 2509 date = util.parsedate(date)
2510 2510 else:
2511 2511 date = None
2512 2512 prec = parsenodeid(precursor)
2513 2513 parents = None
2514 2514 if opts['record_parents']:
2515 2515 if prec not in repo.unfiltered():
2516 2516 raise util.Abort('cannot used --record-parents on '
2517 2517 'unknown changesets')
2518 2518 parents = repo.unfiltered()[prec].parents()
2519 2519 parents = tuple(p.node() for p in parents)
2520 2520 repo.obsstore.create(tr, prec, succs, opts['flags'],
2521 2521 parents=parents, date=date,
2522 2522 metadata=metadata)
2523 2523 tr.close()
2524 2524 except ValueError, exc:
2525 2525 raise util.Abort(_('bad obsmarker input: %s') % exc)
2526 2526 finally:
2527 2527 tr.release()
2528 2528 finally:
2529 2529 l.release()
2530 2530 else:
2531 2531 if opts['rev']:
2532 2532 revs = scmutil.revrange(repo, opts['rev'])
2533 2533 nodes = [repo[r].node() for r in revs]
2534 2534 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2535 2535 markers.sort(key=lambda x: x._data)
2536 2536 else:
2537 2537 markers = obsolete.getmarkers(repo)
2538 2538
2539 2539 for m in markers:
2540 2540 cmdutil.showmarker(ui, m)
2541 2541
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # return (files, dirs) from the dirstate matching one query path
        dirstate = repo.dirstate
        abspath = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        if abspath != repo.root and not abspath.startswith(rootdir):
            # query points outside the repository
            return [], []
        if os.path.isdir(abspath):
            abspath += '/'
        # make the prefix repo-relative and '/'-separated, like dirstate keys
        prefix = abspath[len(rootdir):]
        fixpaths = os.sep != '/'
        if fixpaths:
            prefix = prefix.replace(os.sep, '/')
        prefixlen = len(prefix)
        wantfull = opts['full']
        matchedfiles = set()
        matcheddirs = set()
        for fn, entry in dirstate.iteritems():
            if not fn.startswith(prefix) or entry[0] not in acceptable:
                continue
            if fixpaths:
                fn = fn.replace('/', os.sep)
            if wantfull:
                matchedfiles.add(fn)
                continue
            # otherwise complete only up to the next path separator
            sep = fn.find(os.sep, prefixlen)
            if sep < 0:
                matchedfiles.add(fn)
            else:
                matcheddirs.add(fn[:sep])
        return matchedfiles, matcheddirs

    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
2606 2606
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                   v.encode('string-escape')))
        return
    # update mode: conditional set, exit status reflects success
    key, old, new = keyinfo
    r = target.pushkey(namespace, key, old, new)
    ui.status(str(r) + '\n')
    return not r
2627 2627
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (compressed ancestry vectors) of two revisions and
    # print their depths, hamming distance and relationship:
    # '=' equal, '>'/'<' ancestor ordering, '|' incomparable.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): if none of the four comparisons above matches, 'rel'
    # stays unbound and the final ui.write raises NameError -- presumably
    # the cases are exhaustive for pvecs; confirm against mercurial/pvec.py
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2648 2648
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    # rebuilding must happen under the working-directory lock
    wlock = repo.wlock()
    try:
        node = ctx.node()
        manifest = ctx.manifest()
        repo.dirstate.rebuild(node, manifest)
    finally:
        wlock.release()
2670 2670
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        relpath = matcher.rel(path)
        # filelog().renamed() yields (source path, source node) or False
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
2687 2687
2688 2688 @command('debugrevlog',
2689 2689 [('c', 'changelog', False, _('open changelog')),
2690 2690 ('m', 'manifest', False, _('open manifest')),
2691 2691 ('d', 'dump', False, _('dump index data'))],
2692 2692 _('-c|-m|FILE'),
2693 2693 optionalrepo=True)
2694 2694 def debugrevlog(ui, repo, file_=None, **opts):
2695 2695 """show data and statistics about a revlog"""
2696 2696 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2697 2697
2698 2698 if opts.get("dump"):
2699 2699 numrevs = len(r)
2700 2700 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2701 2701 " rawsize totalsize compression heads chainlen\n")
2702 2702 ts = 0
2703 2703 heads = set()
2704 2704
2705 2705 for rev in xrange(numrevs):
2706 2706 dbase = r.deltaparent(rev)
2707 2707 if dbase == -1:
2708 2708 dbase = rev
2709 2709 cbase = r.chainbase(rev)
2710 2710 clen = r.chainlen(rev)
2711 2711 p1, p2 = r.parentrevs(rev)
2712 2712 rs = r.rawsize(rev)
2713 2713 ts = ts + rs
2714 2714 heads -= set(r.parentrevs(rev))
2715 2715 heads.add(rev)
2716 2716 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2717 2717 "%11d %5d %8d\n" %
2718 2718 (rev, p1, p2, r.start(rev), r.end(rev),
2719 2719 r.start(dbase), r.start(cbase),
2720 2720 r.start(p1), r.start(p2),
2721 2721 rs, ts, ts / r.end(rev), len(heads), clen))
2722 2722 return 0
2723 2723
2724 2724 v = r.version
2725 2725 format = v & 0xFFFF
2726 2726 flags = []
2727 2727 gdelta = False
2728 2728 if v & revlog.REVLOGNGINLINEDATA:
2729 2729 flags.append('inline')
2730 2730 if v & revlog.REVLOGGENERALDELTA:
2731 2731 gdelta = True
2732 2732 flags.append('generaldelta')
2733 2733 if not flags:
2734 2734 flags = ['(none)']
2735 2735
2736 2736 nummerges = 0
2737 2737 numfull = 0
2738 2738 numprev = 0
2739 2739 nump1 = 0
2740 2740 nump2 = 0
2741 2741 numother = 0
2742 2742 nump1prev = 0
2743 2743 nump2prev = 0
2744 2744 chainlengths = []
2745 2745
2746 2746 datasize = [None, 0, 0L]
2747 2747 fullsize = [None, 0, 0L]
2748 2748 deltasize = [None, 0, 0L]
2749 2749
2750 2750 def addsize(size, l):
2751 2751 if l[0] is None or size < l[0]:
2752 2752 l[0] = size
2753 2753 if size > l[1]:
2754 2754 l[1] = size
2755 2755 l[2] += size
2756 2756
2757 2757 numrevs = len(r)
2758 2758 for rev in xrange(numrevs):
2759 2759 p1, p2 = r.parentrevs(rev)
2760 2760 delta = r.deltaparent(rev)
2761 2761 if format > 0:
2762 2762 addsize(r.rawsize(rev), datasize)
2763 2763 if p2 != nullrev:
2764 2764 nummerges += 1
2765 2765 size = r.length(rev)
2766 2766 if delta == nullrev:
2767 2767 chainlengths.append(0)
2768 2768 numfull += 1
2769 2769 addsize(size, fullsize)
2770 2770 else:
2771 2771 chainlengths.append(chainlengths[delta] + 1)
2772 2772 addsize(size, deltasize)
2773 2773 if delta == rev - 1:
2774 2774 numprev += 1
2775 2775 if delta == p1:
2776 2776 nump1prev += 1
2777 2777 elif delta == p2:
2778 2778 nump2prev += 1
2779 2779 elif delta == p1:
2780 2780 nump1 += 1
2781 2781 elif delta == p2:
2782 2782 nump2 += 1
2783 2783 elif delta != nullrev:
2784 2784 numother += 1
2785 2785
2786 2786 # Adjust size min value for empty cases
2787 2787 for size in (datasize, fullsize, deltasize):
2788 2788 if size[0] is None:
2789 2789 size[0] = 0
2790 2790
2791 2791 numdeltas = numrevs - numfull
2792 2792 numoprev = numprev - nump1prev - nump2prev
2793 2793 totalrawsize = datasize[2]
2794 2794 datasize[2] /= numrevs
2795 2795 fulltotal = fullsize[2]
2796 2796 fullsize[2] /= numfull
2797 2797 deltatotal = deltasize[2]
2798 2798 if numrevs - numfull > 0:
2799 2799 deltasize[2] /= numrevs - numfull
2800 2800 totalsize = fulltotal + deltatotal
2801 2801 avgchainlen = sum(chainlengths) / numrevs
2802 2802 compratio = totalrawsize / totalsize
2803 2803
2804 2804 basedfmtstr = '%%%dd\n'
2805 2805 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2806 2806
2807 2807 def dfmtstr(max):
2808 2808 return basedfmtstr % len(str(max))
2809 2809 def pcfmtstr(max, padding=0):
2810 2810 return basepcfmtstr % (len(str(max)), ' ' * padding)
2811 2811
2812 2812 def pcfmt(value, total):
2813 2813 return (value, 100 * float(value) / total)
2814 2814
2815 2815 ui.write(('format : %d\n') % format)
2816 2816 ui.write(('flags : %s\n') % ', '.join(flags))
2817 2817
2818 2818 ui.write('\n')
2819 2819 fmt = pcfmtstr(totalsize)
2820 2820 fmt2 = dfmtstr(totalsize)
2821 2821 ui.write(('revisions : ') + fmt2 % numrevs)
2822 2822 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2823 2823 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2824 2824 ui.write(('revisions : ') + fmt2 % numrevs)
2825 2825 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2826 2826 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2827 2827 ui.write(('revision size : ') + fmt2 % totalsize)
2828 2828 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2829 2829 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2830 2830
2831 2831 ui.write('\n')
2832 2832 fmt = dfmtstr(max(avgchainlen, compratio))
2833 2833 ui.write(('avg chain length : ') + fmt % avgchainlen)
2834 2834 ui.write(('compression ratio : ') + fmt % compratio)
2835 2835
2836 2836 if format > 0:
2837 2837 ui.write('\n')
2838 2838 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2839 2839 % tuple(datasize))
2840 2840 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2841 2841 % tuple(fullsize))
2842 2842 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2843 2843 % tuple(deltasize))
2844 2844
2845 2845 if numdeltas > 0:
2846 2846 ui.write('\n')
2847 2847 fmt = pcfmtstr(numdeltas)
2848 2848 fmt2 = pcfmtstr(numdeltas, 4)
2849 2849 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2850 2850 if numprev > 0:
2851 2851 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2852 2852 numprev))
2853 2853 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2854 2854 numprev))
2855 2855 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2856 2856 numprev))
2857 2857 if gdelta:
2858 2858 ui.write(('deltas against p1 : ')
2859 2859 + fmt % pcfmt(nump1, numdeltas))
2860 2860 ui.write(('deltas against p2 : ')
2861 2861 + fmt % pcfmt(nump2, numdeltas))
2862 2862 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2863 2863 numdeltas))
2864 2864
@command('debugrevspec',
    [('', 'optimize', None, _('print parsed tree after optimizing'))],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after aliases
    expansion.
    """
    if ui.verbose:
        # show each transformation stage of the parse tree: raw parse,
        # alias expansion, concat folding and (optionally) optimization
        tree = revset.parse(expr)[0]
        ui.note(revset.prettyformat(tree), "\n")
        newtree = revset.findaliases(ui, tree)
        if newtree != tree:
            ui.note(revset.prettyformat(newtree), "\n")
            tree = newtree
        newtree = revset.foldconcat(tree)
        if newtree != tree:
            ui.note(revset.prettyformat(newtree), "\n")
        if opts["optimize"]:
            weight, optimizedtree = revset.optimize(newtree, True)
            ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
    func = revset.match(ui, expr)
    # revset.match() initiates the query from the full set by default, so
    # no explicit revset.spanset(repo) needs to be passed in here
    for c in func(repo):
        ui.write("%s\n" % c)
2890 2890
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.

    Returns 0 on success.
    """

    # a missing second revision defaults to the null revision
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        # bracket the parent change so the dirstate can validate it
        repo.dirstate.beginparentchange()
        repo.setparents(node1, node2)
        repo.dirstate.endparentchange()
    finally:
        wlock.release()
2911 2911
2912 2912 @command('debugdirstate|debugstate',
2913 2913 [('', 'nodates', None, _('do not display the saved mtime')),
2914 2914 ('', 'datesort', None, _('sort by saved mtime'))],
2915 2915 _('[OPTION]...'))
2916 2916 def debugstate(ui, repo, nodates=None, datesort=None):
2917 2917 """show the contents of the current dirstate"""
2918 2918 timestr = ""
2919 2919 if datesort:
2920 2920 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2921 2921 else:
2922 2922 keyfunc = None # sort by filename
2923 2923 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2924 2924 if ent[3] == -1:
2925 2925 timestr = 'unset '
2926 2926 elif nodates:
2927 2927 timestr = 'set '
2928 2928 else:
2929 2929 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2930 2930 time.localtime(ent[3]))
2931 2931 if ent[1] & 020000:
2932 2932 mode = 'lnk'
2933 2933 else:
2934 2934 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2935 2935 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2936 2936 for f in repo.dirstate.copies():
2937 2937 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2938 2938
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # dump the subrepo state (path, source URL, pinned revision) of a
    # changeset, sorted by subrepo path
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source   %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2949 2949
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # in debug mode, print full hashes instead of abbreviated forms
    if ui.debug():
        ctx2str = lambda c: c.hex()
        node2str = hex
    else:
        ctx2str = str
        node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            # one indented line per successors set (blank when pruned)
            line = ''.join(' ' + node2str(n) for n in succsset)
            ui.write(line + '\n')
3003 3003
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, opts)
    matched = list(repo.walk(matcher))
    if not matched:
        return
    # honor ui.slash by normalizing displayed relative paths
    if ui.configbool('ui', 'slash') and os.sep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    abswidth = max(len(p) for p in matched)
    relwidth = max(len(matcher.rel(p)) for p in matched)
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (abswidth, relwidth)
    for p in matched:
        flag = matcher.exact(p) and 'exact' or ''
        line = fmt % (p, display(matcher.rel(p)), flag)
        ui.write("%s\n" % line.rstrip())
3020 3020
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # exercise wire-protocol argument passing against a peer
    peer = hg.peer(ui, opts, repopath)
    # strip the connection-related options; only command args remain
    for opt in remoteopts:
        del opts[opt[1]]
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
3042 3042
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    # -r and -c are mutually exclusive ways of choosing the two endpoints
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        # -c REV: diff REV against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'))
3128 3128
@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # positional arguments and -r/--rev values are merged into one list
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        # default to the working directory's parent
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise util.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
3209 3209
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts,
    _('[OPTION]... [PATTERN]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision whose names match the given patterns (excluding
    removed files).

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working copy.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    rev = ctx.rev()
    ret = 1  # assume no match until one is printed

    # -0/--print0 switches the record terminator for xargs consumption
    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fm = ui.formatter('files', opts)
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ds = repo.dirstate
    for f in ctx.matches(m):
        # in the working directory (rev is None), skip files marked removed
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    fm.end()

    return ret
3285 3285
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    matcher = scmutil.match(repo[None], pats, opts)
    # cmdutil.forget returns (rejected, forgotten); exit 1 if any rejected
    rejected = cmdutil.forget(ui, repo, matcher, prefix="",
                              explicitonly=False)[0]
    if rejected:
        return 1
    return 0
3320 3320
3321 3321 @command(
3322 3322 'graft',
3323 3323 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3324 3324 ('c', 'continue', False, _('resume interrupted graft')),
3325 3325 ('e', 'edit', False, _('invoke editor on commit messages')),
3326 3326 ('', 'log', None, _('append graft info to log message')),
3327 3327 ('f', 'force', False, _('force graft')),
3328 3328 ('D', 'currentdate', False,
3329 3329 _('record the current date as commit date')),
3330 3330 ('U', 'currentuser', False,
3331 3331 _('record the current user as committer'), _('DATE'))]
3332 3332 + commitopts2 + mergetoolopts + dryrunopts,
3333 3333 _('[OPTION]... [-r] REV...'))
3334 3334 def graft(ui, repo, *revs, **opts):
3335 3335 '''copy changes from other branches onto the current branch
3336 3336
3337 3337 This command uses Mercurial's merge logic to copy individual
3338 3338 changes from other branches without merging branches in the
3339 3339 history graph. This is sometimes known as 'backporting' or
3340 3340 'cherry-picking'. By default, graft will copy user, date, and
3341 3341 description from the source changesets.
3342 3342
3343 3343 Changesets that are ancestors of the current revision, that have
3344 3344 already been grafted, or that are merges will be skipped.
3345 3345
3346 3346 If --log is specified, log messages will have a comment appended
3347 3347 of the form::
3348 3348
3349 3349 (grafted from CHANGESETHASH)
3350 3350
3351 3351 If --force is specified, revisions will be grafted even if they
3352 3352 are already ancestors of or have been grafted to the destination.
3353 3353 This is useful when the revisions have since been backed out.
3354 3354
3355 3355 If a graft merge results in conflicts, the graft process is
3356 3356 interrupted so that the current merge can be manually resolved.
3357 3357 Once all conflicts are addressed, the graft process can be
3358 3358 continued with the -c/--continue option.
3359 3359
3360 3360 .. note::
3361 3361
3362 3362 The -c/--continue option does not reapply earlier options, except
3363 3363 for --force.
3364 3364
3365 3365 .. container:: verbose
3366 3366
3367 3367 Examples:
3368 3368
3369 3369 - copy a single change to the stable branch and edit its description::
3370 3370
3371 3371 hg update stable
3372 3372 hg graft --edit 9393
3373 3373
3374 3374 - graft a range of changesets with one exception, updating dates::
3375 3375
3376 3376 hg graft -D "2085::2093 and not 2091"
3377 3377
3378 3378 - continue a graft after resolving conflicts::
3379 3379
3380 3380 hg graft -c
3381 3381
3382 3382 - show the source of a grafted changeset::
3383 3383
3384 3384 hg log --debug -r .
3385 3385
3386 3386 See :hg:`help revisions` and :hg:`help revsets` for more about
3387 3387 specifying revisions.
3388 3388
3389 3389 Returns 0 on successful completion.
3390 3390 '''
3391 3391
3392 3392 revs = list(revs)
3393 3393 revs.extend(opts['rev'])
3394 3394
3395 3395 if not opts.get('user') and opts.get('currentuser'):
3396 3396 opts['user'] = ui.username()
3397 3397 if not opts.get('date') and opts.get('currentdate'):
3398 3398 opts['date'] = "%d %d" % util.makedate()
3399 3399
3400 3400 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3401 3401
3402 3402 cont = False
3403 3403 if opts['continue']:
3404 3404 cont = True
3405 3405 if revs:
3406 3406 raise util.Abort(_("can't specify --continue and revisions"))
3407 3407 # read in unfinished revisions
3408 3408 try:
3409 3409 nodes = repo.vfs.read('graftstate').splitlines()
3410 3410 revs = [repo[node].rev() for node in nodes]
3411 3411 except IOError, inst:
3412 3412 if inst.errno != errno.ENOENT:
3413 3413 raise
3414 3414 raise util.Abort(_("no graft state found, can't continue"))
3415 3415 else:
3416 3416 cmdutil.checkunfinished(repo)
3417 3417 cmdutil.bailifchanged(repo)
3418 3418 if not revs:
3419 3419 raise util.Abort(_('no revisions specified'))
3420 3420 revs = scmutil.revrange(repo, revs)
3421 3421
3422 3422 skipped = set()
3423 3423 # check for merges
3424 3424 for rev in repo.revs('%ld and merge()', revs):
3425 3425 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3426 3426 skipped.add(rev)
3427 3427 revs = [r for r in revs if r not in skipped]
3428 3428 if not revs:
3429 3429 return -1
3430 3430
3431 3431 # Don't check in the --continue case, in effect retaining --force across
3432 3432 # --continues. That's because without --force, any revisions we decided to
3433 3433 # skip would have been filtered out here, so they wouldn't have made their
3434 3434 # way to the graftstate. With --force, any revisions we would have otherwise
3435 3435 # skipped would not have been filtered out, and if they hadn't been applied
3436 3436 # already, they'd have been in the graftstate.
3437 3437 if not (cont or opts.get('force')):
3438 3438 # check for ancestors of dest branch
3439 3439 crev = repo['.'].rev()
3440 3440 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3441 3441 # Cannot use x.remove(y) on smart set, this has to be a list.
3442 3442 # XXX make this lazy in the future
3443 3443 revs = list(revs)
3444 3444 # don't mutate while iterating, create a copy
3445 3445 for rev in list(revs):
3446 3446 if rev in ancestors:
3447 3447 ui.warn(_('skipping ancestor revision %d:%s\n') %
3448 3448 (rev, repo[rev]))
3449 3449 # XXX remove on list is slow
3450 3450 revs.remove(rev)
3451 3451 if not revs:
3452 3452 return -1
3453 3453
3454 3454 # analyze revs for earlier grafts
3455 3455 ids = {}
3456 3456 for ctx in repo.set("%ld", revs):
3457 3457 ids[ctx.hex()] = ctx.rev()
3458 3458 n = ctx.extra().get('source')
3459 3459 if n:
3460 3460 ids[n] = ctx.rev()
3461 3461
3462 3462 # check ancestors for earlier grafts
3463 3463 ui.debug('scanning for duplicate grafts\n')
3464 3464
3465 3465 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3466 3466 ctx = repo[rev]
3467 3467 n = ctx.extra().get('source')
3468 3468 if n in ids:
3469 3469 try:
3470 3470 r = repo[n].rev()
3471 3471 except error.RepoLookupError:
3472 3472 r = None
3473 3473 if r in revs:
3474 3474 ui.warn(_('skipping revision %d:%s '
3475 3475 '(already grafted to %d:%s)\n')
3476 3476 % (r, repo[r], rev, ctx))
3477 3477 revs.remove(r)
3478 3478 elif ids[n] in revs:
3479 3479 if r is None:
3480 3480 ui.warn(_('skipping already grafted revision %d:%s '
3481 3481 '(%d:%s also has unknown origin %s)\n')
3482 3482 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
3483 3483 else:
3484 3484 ui.warn(_('skipping already grafted revision %d:%s '
3485 3485 '(%d:%s also has origin %d:%s)\n')
3486 3486 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
3487 3487 revs.remove(ids[n])
3488 3488 elif ctx.hex() in ids:
3489 3489 r = ids[ctx.hex()]
3490 3490 ui.warn(_('skipping already grafted revision %d:%s '
3491 3491 '(was grafted from %d:%s)\n') %
3492 3492 (r, repo[r], rev, ctx))
3493 3493 revs.remove(r)
3494 3494 if not revs:
3495 3495 return -1
3496 3496
3497 3497 wlock = repo.wlock()
3498 3498 try:
3499 3499 for pos, ctx in enumerate(repo.set("%ld", revs)):
3500 3500 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
3501 3501 ctx.description().split('\n', 1)[0])
3502 3502 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3503 3503 if names:
3504 3504 desc += ' (%s)' % ' '.join(names)
3505 3505 ui.status(_('grafting %s\n') % desc)
3506 3506 if opts.get('dry_run'):
3507 3507 continue
3508 3508
3509 3509 source = ctx.extra().get('source')
3510 3510 if not source:
3511 3511 source = ctx.hex()
3512 3512 extra = {'source': source}
3513 3513 user = ctx.user()
3514 3514 if opts.get('user'):
3515 3515 user = opts['user']
3516 3516 date = ctx.date()
3517 3517 if opts.get('date'):
3518 3518 date = opts['date']
3519 3519 message = ctx.description()
3520 3520 if opts.get('log'):
3521 3521 message += '\n(grafted from %s)' % ctx.hex()
3522 3522
3523 3523 # we don't merge the first commit when continuing
3524 3524 if not cont:
3525 3525 # perform the graft merge with p1(rev) as 'ancestor'
3526 3526 try:
3527 3527 # ui.forcemerge is an internal variable, do not document
3528 3528 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3529 3529 'graft')
3530 3530 stats = mergemod.graft(repo, ctx, ctx.p1(),
3531 3531 ['local', 'graft'])
3532 3532 finally:
3533 3533 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3534 3534 # report any conflicts
3535 3535 if stats and stats[3] > 0:
3536 3536 # write out state for --continue
3537 3537 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3538 3538 repo.vfs.write('graftstate', ''.join(nodelines))
3539 3539 raise util.Abort(
3540 3540 _("unresolved conflicts, can't continue"),
3541 3541 hint=_('use hg resolve and hg graft --continue'))
3542 3542 else:
3543 3543 cont = False
3544 3544
3545 3545 # commit
3546 3546 node = repo.commit(text=message, user=user,
3547 3547 date=date, extra=extra, editor=editor)
3548 3548 if node is None:
3549 3549 ui.warn(
3550 3550 _('note: graft of %d:%s created no changes to commit\n') %
3551 3551 (ctx.rev(), ctx))
3552 3552 finally:
3553 3553 wlock.release()
3554 3554
3555 3555 # remove state when we complete successfully
3556 3556 if not opts.get('dry_run'):
3557 3557 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3558 3558
3559 3559 return 0
3560 3560
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    # compile the pattern once up front; re.M keeps searches over a whole
    # file body line-oriented, re.I is added for --ignore-case
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    # output field/record separators; both become NUL with --print0
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # cache filelog lookups; the same files are revisited for many revisions
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        """Yield (linenum, colstart, colend, linetext) per matching line.

        After a hit, the scan resumes on the following line, so each
        source line is reported at most once here; linestate.__iter__
        re-scans the line text to highlight additional matches on it.
        """
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        """One matching line: its text, line number and first-match span.

        Equality deliberately compares only the line text, so
        difflinestates() can diff two revisions' match lists by content.
        """
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def __iter__(self):
            # yield (text, label) segments, labeling every match on the
            # line with 'grep.match' for colorized output
            yield (self.line[:self.colstart], '')
            yield (self.line[self.colstart:self.colend], 'grep.match')
            rest = self.line[self.colend:]
            while rest != '':
                match = regexp.search(rest)
                if not match:
                    yield (rest, '')
                    break
                mstart, mend = match.span()
                yield (rest[:mstart], '')
                yield (rest[mstart:mend], 'grep.match')
                rest = rest[mend:]

    # matches: rev -> filename -> [linestate, ...]
    # copies:  rev -> filename -> copy source (when following renames)
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record every matching line of fn@rev as linestate objects
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        """Yield ('+'/'-', linestate) pairs: how matches changed from a to b.

        '-' marks lines whose match went away, '+' lines that newly match;
        used by --all to show match-status transitions.
        """
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        """Print the matches for fn at ctx; return True if anything printed.

        pstates are the parent revision's matches (used by --all),
        states those of ctx itself.
        """
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        # NOTE: 'iter' shadows the builtin, but only within this helper
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            # assemble the labeled output columns requested by the options
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if not opts.get('files_with_matches'):
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(" Binary file matches")
                else:
                    for s, label in l:
                        ui.write(s, label=label)
            ui.write(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one line per file is enough
                break
        return found

    # skip: files (and their copy sources) already reported without --all
    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # walkchangerevs callback: pre-populate matches/copies/revfiles for
        # ctx and its first parent so the display loop below can diff them
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # under --follow, compare against the rename source in the parent
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # without --all, stop reporting this file (and its
                    # rename source) after the first matching revision
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # free per-revision state as soon as the revision is handled
        del matches[rev]
        del revfiles[rev]

    return not found
3780 3780
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    # resolve the optional STARTREV; heads are then restricted to its
    # descendants by repo.heads()/repo.branchheads()
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        # topological heads: changesets without children, branches ignored
        nodes = repo.heads(start)
    else:
        # per-named-branch heads, optionally including closed ones
        nodes = []
        for branch in repo.branchmap():
            nodes.extend(repo.branchheads(branch, start, opts.get('closed')))
    heads = [repo[n] for n in nodes]

    if branchrevs:
        # keep only heads on the branches the given revisions belong to
        wantedbranches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in wantedbranches]

    if opts.get('active') and branchrevs:
        # DEPRECATED -a: additionally require the head to be a DAG head
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        # warn about requested branches that yielded no heads
        foundbranches = set(h.branch() for h in heads)
        missing = wantedbranches - foundbranches
        if missing:
            headless = ', '.join(b for b in missing)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    # display newest first
    heads.sort(key=lambda c: c.rev(), reverse=True)
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()
3851 3851
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands')),
     ('k', 'keyword', '', _('show topics matching keyword')),
    ],
    _('[-ec] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    textwidth = min(ui.termwidth(), 80) - 2

    # keywords selecting which conditional help sections survive formatting
    keep = ['verbose'] if ui.verbose else []
    plat = sys.platform
    if plat.startswith('win'):
        keep.append('windows')
    elif plat == 'OpenVMS':
        keep.append('vms')
    elif plat == 'plan9':
        keep.append('plan9')
    else:
        keep.append('unix')
        keep.append(plat.lower())

    # "topic.section" addresses one section within a topic
    section = None
    if name and '.' in name:
        name, section = name.split('.', 1)

    text = help.help_(ui, name, **opts)

    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    if section and not formatted:
        raise util.Abort(_("help section not found"))

    # re-render with a marker for whether verbose-only text was pruned,
    # so the output can mention (or omit) the "use -v" hint
    keep.append('omitted' if 'verbose' in pruned else 'notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    ui.write(formatted)
3903 3903
3904 3904
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working copy or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    # full-length hashes with --debug, abbreviated ones otherwise
    hexfunc = ui.debugflag and hex or short
    # no display flag given: print the default multi-part summary
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        # operate on another repository/bundle (possibly remote)
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    if not repo:
        # remote-only peer: we can resolve an id/bookmarks over the wire,
        # but not local revision numbers, branches or tags
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # return the sorted remote bookmarks pointing at remoterev
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            # no REV: describe the working directory (one or two parents,
            # with a '+' suffix when it or a subrepo is dirty)
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                if (util.any(repo.status())
                    or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            # explicit REV: describe exactly that changeset
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            # default summary: branch (if not 'default'), tags, bookmarks
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            # explicit flags: emit only what was asked for
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
4042 4042
4043 4043 @command('import|patch',
4044 4044 [('p', 'strip', 1,
4045 4045 _('directory strip option for patch. This has the same '
4046 4046 'meaning as the corresponding patch option'), _('NUM')),
4047 4047 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4048 4048 ('e', 'edit', False, _('invoke editor on commit messages')),
4049 4049 ('f', 'force', None,
4050 4050 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4051 4051 ('', 'no-commit', None,
4052 4052 _("don't commit, just update the working directory")),
4053 4053 ('', 'bypass', None,
4054 4054 _("apply patch without touching the working directory")),
4055 4055 ('', 'partial', None,
4056 4056 _('commit even if some hunks fail')),
4057 4057 ('', 'exact', None,
4058 4058 _('apply patch to the nodes from which it was generated')),
4059 4059 ('', 'import-branch', None,
4060 4060 _('use any branch information in patch (implied by --exact)'))] +
4061 4061 commitopts + commitopts2 + similarityopts,
4062 4062 _('[OPTION]... PATCH...'))
4063 4063 def import_(ui, repo, patch1=None, *patches, **opts):
4064 4064 """import an ordered set of patches
4065 4065
4066 4066 Import a list of patches and commit them individually (unless
4067 4067 --no-commit is specified).
4068 4068
4069 4069 Because import first applies changes to the working directory,
4070 4070 import will abort if there are outstanding changes.
4071 4071
4072 4072 You can import a patch straight from a mail message. Even patches
4073 4073 as attachments work (to use the body part, it must have type
4074 4074 text/plain or text/x-patch). From and Subject headers of email
4075 4075 message are used as default committer and commit message. All
4076 4076 text/plain body parts before first diff are added to commit
4077 4077 message.
4078 4078
4079 4079 If the imported patch was generated by :hg:`export`, user and
4080 4080 description from patch override values from message headers and
4081 4081 body. Values given on command line with -m/--message and -u/--user
4082 4082 override these.
4083 4083
4084 4084 If --exact is specified, import will set the working directory to
4085 4085 the parent of each patch before applying it, and will abort if the
4086 4086 resulting changeset has a different ID than the one recorded in
4087 4087 the patch. This may happen due to character set problems or other
4088 4088 deficiencies in the text patch format.
4089 4089
4090 4090 Use --bypass to apply and commit patches directly to the
4091 4091 repository, not touching the working directory. Without --exact,
4092 4092 patches will be applied on top of the working directory parent
4093 4093 revision.
4094 4094
4095 4095 With -s/--similarity, hg will attempt to discover renames and
4096 4096 copies in the patch in the same way as :hg:`addremove`.
4097 4097
4098 4098 Use --partial to ensure a changeset will be created from the patch
4099 4099 even if some hunks fail to apply. Hunks that fail to apply will be
4100 4100 written to a <target-file>.rej file. Conflicts can then be resolved
4101 4101 by hand before :hg:`commit --amend` is run to update the created
4102 4102 changeset. This flag exists to let people import patches that
4103 4103 partially apply without losing the associated metadata (author,
4104 4104 date, description, ...). Note that when none of the hunk applies
4105 4105 cleanly, :hg:`import --partial` will create an empty changeset,
4106 4106 importing only the patch metadata.
4107 4107
4108 4108 To read a patch from standard input, use "-" as the patch name. If
4109 4109 a URL is specified, the patch will be downloaded from it.
4110 4110 See :hg:`help dates` for a list of formats valid for -d/--date.
4111 4111
4112 4112 .. container:: verbose
4113 4113
4114 4114 Examples:
4115 4115
4116 4116 - import a traditional patch from a website and detect renames::
4117 4117
4118 4118 hg import -s 80 http://example.com/bugfix.patch
4119 4119
4120 4120 - import a changeset from an hgweb server::
4121 4121
4122 4122 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
4123 4123
4124 4124 - import all the patches in an Unix-style mbox::
4125 4125
4126 4126 hg import incoming-patches.mbox
4127 4127
4128 4128 - attempt to exactly restore an exported changeset (not always
4129 4129 possible)::
4130 4130
4131 4131 hg import --exact proposed-fix.patch
4132 4132
4133 4133 Returns 0 on success, 1 on partial success (see --partial).
4134 4134 """
4135 4135
4136 4136 if not patch1:
4137 4137 raise util.Abort(_('need at least one patch to import'))
4138 4138
4139 4139 patches = (patch1,) + patches
4140 4140
4141 4141 date = opts.get('date')
4142 4142 if date:
4143 4143 opts['date'] = util.parsedate(date)
4144 4144
4145 4145 update = not opts.get('bypass')
4146 4146 if not update and opts.get('no_commit'):
4147 4147 raise util.Abort(_('cannot use --no-commit with --bypass'))
4148 4148 try:
4149 4149 sim = float(opts.get('similarity') or 0)
4150 4150 except ValueError:
4151 4151 raise util.Abort(_('similarity must be a number'))
4152 4152 if sim < 0 or sim > 100:
4153 4153 raise util.Abort(_('similarity must be between 0 and 100'))
4154 4154 if sim and not update:
4155 4155 raise util.Abort(_('cannot use --similarity with --bypass'))
4156 4156 if opts.get('exact') and opts.get('edit'):
4157 4157 raise util.Abort(_('cannot use --exact with --edit'))
4158 4158
4159 4159 if update:
4160 4160 cmdutil.checkunfinished(repo)
4161 4161 if (opts.get('exact') or not opts.get('force')) and update:
4162 4162 cmdutil.bailifchanged(repo)
4163 4163
4164 4164 base = opts["base"]
4165 4165 wlock = lock = tr = None
4166 4166 msgs = []
4167 4167 ret = 0
4168 4168
4169 4169
4170 4170 try:
4171 4171 try:
4172 4172 wlock = repo.wlock()
4173 4173 repo.dirstate.beginparentchange()
4174 4174 if not opts.get('no_commit'):
4175 4175 lock = repo.lock()
4176 4176 tr = repo.transaction('import')
4177 4177 parents = repo.parents()
4178 4178 for patchurl in patches:
4179 4179 if patchurl == '-':
4180 4180 ui.status(_('applying patch from stdin\n'))
4181 4181 patchfile = ui.fin
4182 4182 patchurl = 'stdin' # for error message
4183 4183 else:
4184 4184 patchurl = os.path.join(base, patchurl)
4185 4185 ui.status(_('applying %s\n') % patchurl)
4186 4186 patchfile = hg.openpath(ui, patchurl)
4187 4187
4188 4188 haspatch = False
4189 4189 for hunk in patch.split(patchfile):
4190 4190 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4191 4191 parents, opts,
4192 4192 msgs, hg.clean)
4193 4193 if msg:
4194 4194 haspatch = True
4195 4195 ui.note(msg + '\n')
4196 4196 if update or opts.get('exact'):
4197 4197 parents = repo.parents()
4198 4198 else:
4199 4199 parents = [repo[node]]
4200 4200 if rej:
4201 4201 ui.write_err(_("patch applied partially\n"))
4202 4202 ui.write_err(_("(fix the .rej files and run "
4203 4203 "`hg commit --amend`)\n"))
4204 4204 ret = 1
4205 4205 break
4206 4206
4207 4207 if not haspatch:
4208 4208 raise util.Abort(_('%s: no diffs found') % patchurl)
4209 4209
4210 4210 if tr:
4211 4211 tr.close()
4212 4212 if msgs:
4213 4213 repo.savecommitmessage('\n* * *\n'.join(msgs))
4214 4214 repo.dirstate.endparentchange()
4215 4215 return ret
4216 4216 except: # re-raises
4217 4217 # wlock.release() indirectly calls dirstate.write(): since
4218 4218 # we're crashing, we do not want to change the working dir
4219 4219 # parent after all, so make sure it writes nothing
4220 4220 repo.dirstate.invalidate()
4221 4221 raise
4222 4222 finally:
4223 4223 if tr:
4224 4224 tr.release()
4225 4225 release(lock, wlock)
4226 4226
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    For remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.

    .. container:: verbose

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    # --graph: render the incoming changesets as an ASCII DAG instead of
    # a flat list.  hg._incoming drives the discovery and calls back into
    # our local display() with the remote peer and the incoming revisions.
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        def display(other, chlist, displayer):
            # Build the revision DAG from the remote's changelog and draw
            # it, marking the working directory parents.
            revdag = cmdutil.graphrevs(other, chlist, opts)
            showparents = [ctx.node() for ctx in repo[None].parents()]
            cmdutil.displaygraph(ui, revdag, displayer, showparents,
                                 graphmod.asciiedges)

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    # --bookmarks: compare bookmark state with the remote instead of
    # listing changesets; requires the remote to expose the 'bookmarks'
    # pushkey namespace.
    if opts.get('bookmarks'):
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.diff(ui, repo, other)

    # Record the top-level source path so that subrepo operations
    # triggered by -S/--subrepos can resolve their own source relative
    # to it; always cleaned up afterwards.
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
4299 4299
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
         norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Resolve path aliases first, then ask hg.peer to create the
    # repository at the resulting location (local path or ssh:// URL).
    target = ui.expandpath(dest)
    hg.peer(ui, opts, target, create=True)
4317 4317
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    # --print0 terminates each name with NUL so output is safe for
    # "xargs -0"; otherwise one name per line.
    end = '\0' if opts.get('print0') else '\n'
    node = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ctx = repo[node]
    m = scmutil.match(ctx, pats, opts, default='relglob')
    # Silence "no such file" complaints for names missing in this revision.
    m.bad = lambda f, msg: False

    found = False
    for abs in ctx.matches(m):
        if opts.get('fullpath'):
            line = repo.wjoin(abs)
        elif pats:
            # Relative form when patterns were given; fall back to the
            # repo-relative name if no relative form is available.
            line = m.rel(abs) or abs
        else:
            line = abs
        ui.write(line, end)
        found = True

    return 0 if found else 1
4362 4362
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.

    .. note::

       log -p/--patch may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       for performance reasons, log FILE may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    # --graph delegates the whole job to the graph-log implementation.
    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, *pats, **opts)

    # Resolve patterns/options into the revisions to show, the revset
    # expression used, and a per-revision file matcher factory.
    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    # With -C/--copies, build a rename-lookup function.  When an explicit
    # -r range is given, limit the lookup to just past its maximum
    # revision so rename detection does not scan beyond what is shown.
    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        # Collect (destination, source) rename pairs for this changeset;
        # rev 0 is skipped because it can have no copy sources.
        copies = None
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = filematcher and filematcher(ctx.rev()) or None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # Only revisions actually emitted by the (buffered) displayer
        # count toward the -l/--limit quota.
        if displayer.flush(rev):
            count += 1

    displayer.close()
4512 4512
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """

    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        # --all lists every file ever tracked by scanning the store's
        # filelog names ("data/<path>.i") rather than any one manifest,
        # so it is mutually exclusive with selecting a revision.
        if rev or node:
            raise util.Abort(_("can't specify a revision with --all"))

        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        # Take the store lock so the datafiles listing is stable while
        # we scan it.
        lock = repo.lock()
        try:
            for fn, b, size in repo.store.datafiles():
                # Strip "data/" and ".i" to recover the tracked path.
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        finally:
            lock.release()
        for f in res:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # Positional NODE and -r REV are interchangeable ways to pick one
    # revision.
    if not node:
        node = rev

    # Display glyphs and permission strings keyed by the file's flag:
    # 'l' = symlink, 'x' = executable, '' = regular file.
    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        # Hashes only with --debug; mode/type column only with -v.
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()
4575 4575
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
     ] + mergetoolopts,
    _('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    # The revision may come either positionally or via -r, not both.
    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    # No explicit revision and a bookmark is active: merge with the one
    # other head carrying the same bookmark, if it is unambiguous.
    if not node and repo._bookmarkcurrent:
        bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
        curhead = repo[repo._bookmarkcurrent].node()
        if len(bmheads) == 2:
            # Pick whichever bookmark head is not the current one.
            if curhead == bmheads[0]:
                node = bmheads[1]
            else:
                node = bmheads[0]
        elif len(bmheads) > 2:
            raise util.Abort(_("multiple matching bookmarks to merge - "
                "please merge with an explicit rev or bookmark"),
                hint=_("run 'hg heads' to see all heads"))
        elif len(bmheads) <= 1:
            raise util.Abort(_("no matching bookmark to merge - "
                "please merge with an explicit rev or bookmark"),
                hint=_("run 'hg heads' to see all heads"))

    # No explicit revision and no active bookmark: merge with the single
    # other non-bookmarked head of the current branch, if unambiguous.
    if not node and not repo._bookmarkcurrent:
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        # Heads carrying bookmarks are excluded from automatic selection.
        nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]

        if len(nbhs) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev")
                             % (branch, len(bheads)),
                             hint=_("run 'hg heads .' to see heads"))

        parent = repo.dirstate.p1()
        if len(nbhs) <= 1:
            # Not enough candidate heads; diagnose why and abort with a
            # message matching the situation.
            if len(bheads) > 1:
                raise util.Abort(_("heads are bookmarked - "
                                   "please merge with an explicit rev"),
                                 hint=_("run 'hg heads' to see all heads"))
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev")
                                 % branch,
                                 hint=_("run 'hg heads' to see all heads"))
            msg, hint = _('nothing to merge'), None
            if parent != repo.lookup(branch):
                hint = _("use 'hg update' instead")
            raise util.Abort(msg, hint=hint)

        if parent not in bheads:
            raise util.Abort(_('working directory not at a head revision'),
                             hint=_("use 'hg update' or merge with an "
                                    "explicit revision"))
        # Merge with whichever candidate head is not the working
        # directory parent.
        if parent == nbhs[0]:
            node = nbhs[-1]
        else:
            node = nbhs[0]

    # -P/--preview: show what would be merged in, without merging.
    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        # Always clear the internal tool override, even when the merge
        # raises.
        ui.setconfig('ui', 'forcemerge', '', 'merge')
4692 4692
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # --graph: draw the outgoing changesets as an ASCII DAG.  The
    # outgoing hooks run in both the empty and non-empty cases.
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        showparents = [ctx.node() for ctx in repo[None].parents()]
        cmdutil.displaygraph(ui, revdag, displayer, showparents,
                             graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    # --bookmarks: compare bookmark state with the destination instead
    # of listing changesets.  Note the (ui, other, repo) argument order:
    # the remote is the "local" side of the diff for outgoing.
    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.diff(ui, other, repo)

    # Record the top-level destination so subrepo operations triggered
    # by -S/--subrepos can resolve their own destination against it.
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
4744 4744
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if not file_:
        # No file argument: report the changeset parents directly.
        nodes = [cp.node() for cp in ctx.parents()]
    else:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # Gather the file revisions present in each parent, skipping
        # parents that do not contain the file.
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # Map each file revision back to the changeset that introduced it.
        nodes = [repo.filectx(file_, fileid=fn).node() for fn in filenodes]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
4793 4793
@command('paths', [], _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    pathitems = ui.configitems("paths")

    if search:
        # Exact alias lookup: print the (password-masked) target on a
        # hit; on a miss, complain unless -q/--quiet and signal failure.
        for name, path in pathitems:
            if name == search:
                ui.status("%s\n" % util.hidepassword(path))
                return
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1

    # No NAME given: list every definition (names only under --quiet).
    for name, path in pathitems:
        if ui.quiet:
            ui.write("%s\n" % name)
        else:
            ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4837 4837
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] REV...'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of specified revisions.

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if no phases were changed or some could not
    be changed.
    """
    # search for a unique phase argument
    # The flag names mirror phases.phasenames, so the matching index is
    # the numeric target phase; more than one flag is an error.
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise util.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    # Positional REVs and -r REVs are combined into a single revset.
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        raise util.Abort(_('no revisions specified'))

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        # Phase changes are done under the store lock, inside a
        # transaction that is rolled back on any failure.
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise util.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            # Snapshot every revision's phase before the move so we can
            # report how many actually changed afterwards.
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                # --force additionally allows moving the boundary
                # backward (toward a higher/less-public phase).
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # Re-read phases after the transaction to compute the outcome.
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        # Requested nodes still below the target phase could not be
        # advanced without --force.
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            # On partial failure the summary is shown prominently;
            # otherwise only at -v/--verbose.
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
            ret = 1
    return ret
4927 4927
def postincoming(ui, repo, modheads, optupdate, checkout):
    """Print post-pull/unbundle advice and optionally update.

    modheads is the number of heads modified by the incoming changes;
    optupdate is true when the caller wants an automatic working-copy
    update; checkout is the revision to update to (may be None).
    Returns the update's result when one is performed, 0 on an aborted
    update, and None otherwise.
    """
    if modheads == 0:
        return
    if optupdate:
        # Let the bookmark machinery refine the update target and tell
        # us whether the current bookmark should move.
        checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
        try:
            ret = hg.update(repo, checkout)
        except util.Abort, inst:
            # An aborted update is reported but not fatal to the pull.
            ui.warn(_("not updating: %s\n") % str(inst))
            if inst.hint:
                ui.warn(_("(%s)\n") % inst.hint)
            return 0
        if not ret and not checkout:
            # Clean update with no explicit target: advance the current
            # bookmark to the new working directory parent if applicable.
            if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
        return ret
    # No automatic update: hint at the appropriate next command,
    # depending on whether new heads were added.
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
4955 4955
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    try:
        ui.status(_('pulling from %s\n') % util.hidepassword(source))
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))

        remotebookmarks = other.listkeys('bookmarks')

        if opts.get('bookmark'):
            # resolve each -B name on the remote and pull it like a -r rev
            if not revs:
                revs = []
            for b in opts['bookmark']:
                if b not in remotebookmarks:
                    raise util.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            # turn symbolic revs into remote node ids; old servers may not
            # support lookup at all
            try:
                revs = [other.lookup(rev) for rev in revs]
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ())).cgresult
        if checkout:
            # map the branch checkout target to a local rev now that the
            # changesets have arrived
            checkout = str(repo.changelog.rev(other.lookup(checkout)))
        # _subtoppath tells subrepo operations which source we pulled from
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret
5026 5026
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

      Extra care should be taken with the -f/--force option,
      which will push all new heads on all branches, an action which will
      almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    try:
        other = hg.peer(repo, opts, dest)
    except error.RepoError:
        if dest == "default-push":
            raise util.Abort(_("default repository not configured!"),
                             hint=_('see the "path" section in "hg help config"'))
        else:
            raise

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]

    # _subtoppath tells subrepo pushes which destination we are pushing to
    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                # a zero result from a subrepo push aborts the whole push
                # with exit status 1 (`not 0` == True)
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()))

    # cgresult truthy == changesets pushed; fold into 0 (ok) / 1 (nothing)
    result = not pushop.cgresult

    if pushop.bkresult is not None:
        # NOTE(review): bkresult codes come from exchange.push; 2 appears to
        # signal a bookmark push failure -- confirm against exchange module
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result
5126 5126
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # Nothing to recover: report failure immediately; otherwise the
    # verify pass determines the exit code.
    if not repo.recover():
        return 1
    return hg.verify(repo)
5142 5142
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('remove (and delete) file even if added or modified')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      Note that remove never deletes files in Added [A] state from the
      working directory, not even if option --force is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    after = opts.get('after')
    force = opts.get('force')
    # with neither file patterns nor --after there is nothing to act on
    if not (pats or after):
        raise util.Abort(_('no files specified'))

    matcher = scmutil.match(repo[None], pats, opts)
    return cmdutil.remove(ui, repo, matcher, "", after, force,
                          opts.get('subrepos'))
5194 5194
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # rename is a copy that additionally schedules the source for removal;
    # the working-directory lock guards the dirstate update
    wlock = repo.wlock(False)
    try:
        result = cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
    return result
5221 5221
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # unpack the mutually-constraining mode flags
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified'),
                         hint=('use --all to remerge all files'))

    wlock = repo.wlock()
    try:
        ms = mergemod.mergestate(repo)

        # listing (-l) is allowed outside a merge; everything else requires
        # an active merge state or a second working-directory parent
        if not (ms.active() or repo.dirstate.p2() != nullid) and not show:
            raise util.Abort(
                _('resolve command not applicable when not merging'))

        m = scmutil.match(repo[None], pats, opts)
        ret = 0
        didwork = False

        for f in ms:
            if not m(f):
                continue

            didwork = True

            if show:
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # default mode: actually re-run the merge for this file
                wctx = repo[None]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    if ms.resolve(f, wctx):
                        # non-zero: this file still has conflicts
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    # persist per-file progress even if a later file fails
                    ms.commit()

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

        # flush any pending mark/unmark changes
        ms.commit()

        if not didwork and pats:
            ui.warn(_("arguments do not match paths that need resolving\n"))

    finally:
        wlock.release()

    # Nudge users into finishing an unfinished operation. We don't print
    # this with the list/show operation because we want list/show to remain
    # machine readable.
    if not list(ms.unresolved()) and not show:
        ui.status(_('(no more unresolved files)\n'))

    return ret
5344 5344
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    if opts.get("date"):
        # --date is resolved to a revision up front; combining both is
        # ambiguous and rejected
        if opts.get("rev"):
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise util.Abort(_('uncommitted merge with no revision specified'),
                         hint=_('use "hg update" or see "hg help revert"'))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if not pats and not opts.get('all'):
        # no targets given: refuse, but pick the most helpful hint for the
        # user's current situation (merge / dirty / target revision)
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise util.Abort(msg, hint=hint)
        dirty = util.any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise util.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5418 5418
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

    To avoid permanent data loss, rollback will refuse to rollback a
    commit transaction if it isn't checked out. Use --force to
    override this protection.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # delegate entirely to the repository's transaction machinery
    dryrun = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dryrun, force=force)
5463 5463
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # one line of output: the repository root followed by a newline
    ui.write("%s\n" % repo.root)
5473 5473
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # three mutually exclusive modes: --stdio (ssh protocol over
    # stdin/stdout), --cmdserver, and the default HTTP server
    if opts["stdio"] and opts["cmdserver"]:
        raise util.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    if opts["cmdserver"]:
        service = commandserver.createservice(ui, repo, opts)
        return cmdutil.service(opts, initfn=service.init, runfn=service.run)

    # this way we can check if something was given in the command-line
    if opts.get('port'):
        opts['port'] = util.getport(opts.get('port'))

    baseui = repo and repo.baseui or ui
    # copy recognized command-line options into the [web] config section so
    # hgweb picks them up
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for o in optlist.split():
        val = opts.get(o, '')
        if val in (None, ''): # should check against default options instead
            continue
        baseui.setconfig("web", o, val, 'serve')
        if repo and repo.ui != baseui:
            repo.ui.setconfig("web", o, val, 'serve')

    # hgweb serves either a multi-repo config file or the single current repo
    o = opts.get('web_conf') or opts.get('webdir_conf')
    if not o:
        if not repo:
            raise error.RepoError(_("there is no Mercurial repository"
                                    " here (.hg not found)"))
        o = repo

    app = hgweb.hgweb(o, baseui=baseui)
    service = httpservice(ui, app, opts)
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
5565 5565
class httpservice(object):
    """Run an hgweb WSGI application under cmdutil.service.

    Exposes the init()/run() pair that cmdutil.service expects:
    init() creates the HTTP server and prints the listening address,
    run() serves requests forever.
    """
    def __init__(self, ui, app, opts):
        self.ui = ui
        self.app = app      # the hgweb WSGI application to serve
        self.opts = opts    # parsed command-line options of `hg serve`

    def init(self):
        # Create the server, then print a human-readable banner describing
        # where it is listening.
        util.setsignalhandler()
        self.httpd = hgweb_server.create_server(self.ui, self.app)

        # when an explicit --port was given, stay quiet unless verbose
        if self.opts['port'] and not self.ui.verbose:
            return

        if self.httpd.prefix:
            prefix = self.httpd.prefix.strip('/') + '/'
        else:
            prefix = ''

        # omit the port suffix for the default HTTP port
        port = ':%d' % self.httpd.port
        if port == ':80':
            port = ''

        bindaddr = self.httpd.addr
        if bindaddr == '0.0.0.0':
            bindaddr = '*'
        elif ':' in bindaddr: # IPv6
            bindaddr = '[%s]' % bindaddr

        fqaddr = self.httpd.fqaddr
        if ':' in fqaddr:
            fqaddr = '[%s]' % fqaddr
        if self.opts['port']:
            write = self.ui.status
        else:
            # auto-chosen port: always print it so callers can find the server
            write = self.ui.write
        write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
              (fqaddr, port, prefix, bindaddr, self.httpd.port))
        self.ui.flush() # avoid buffering of status message

    def run(self):
        # block serving requests until interrupted
        self.httpd.serve_forever()
5607 5607
5608 5608
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       status may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        # --change REV diffs REV against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    # use real conditional expressions instead of the fragile
    # `cond and a or b` idiom (which breaks whenever `a` is falsy)
    # paths are printed relative to the cwd only when patterns were given
    cwd = repo.getcwd() if pats else ''
    end = '\0' if opts.get('print0') else '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += (states[:4] + ['clean']) if ui.quiet else states
    if not show:
        # defaults: -mardu, or -mard in quiet mode
        show = states[:4] if ui.quiet else states[:5]

    stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    # pair each state name with its one-character status code and file list
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2])

    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    # copy source is printed on its own (indented) line
                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()
5727 5727
@command('^summary|sum',
         [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    # working directory context; its parents are what we summarize against
    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    # one "parent:" line per working directory parent (two while merging)
    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
                 label='log.changeset changeset.%s' % p.phasestr())
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        ui.write('\n')
        if p.description():
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _('branch: %s\n') % branch
    if branch != 'default':
        ui.write(m, label='log.branch')
    else:
        # the default branch is only worth mentioning in non-quiet output
        ui.status(m, label='log.branch')

    if marks:
        current = repo._bookmarkcurrent
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if current is not None:
            if current in marks:
                # active bookmark points at a wd parent: flag it with '*'
                ui.write(' *' + current, label='bookmarks.current')
                marks.remove(current)
            else:
                # active bookmark exists but is not at a wd parent
                ui.write(' [%s]' % current, label='bookmarks.current')
        for m in marks:
            ui.write(' ' + m, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    status = repo.status(unknown=True)

    # pair dirstate copy records with removals to separate renames
    # (copy + removal of the source) from plain copies
    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in status.removed:
            status.removed.remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in status.added:
            status.added.remove(d)

    ms = mergemod.mergestate(repo)
    unresolved = [f for f in ms if ms[f] == 'u']

    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]

    # (labelled format string, file list) pairs feeding the "commit:" line;
    # only non-empty lists are shown
    labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
              (ui.label(_('%d added'), 'status.added'), status.added),
              (ui.label(_('%d removed'), 'status.removed'), status.removed),
              (ui.label(_('%d renamed'), 'status.copied'), renamed),
              (ui.label(_('%d copied'), 'status.copied'), copied),
              (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
              (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
              (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
              (ui.label(_('%d subrepos'), 'status.modified'), subs)]
    t = []
    for l, s in labels:
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    # append one parenthesized note describing the working dir situation
    if repo.vfs.exists('updatestate'):
        t += _(' (interrupted update)')
    elif len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (status.modified or status.added or status.removed or renamed or
              copied or subs):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if cleanworkdir:
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
                                         bheads))

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    cmdutil.summaryhooks(ui, repo)

    # ask registered extensions whether they need incoming/outgoing data;
    # --remote forces both, and we return early if nobody needs either
    if opts.get('remote'):
        needsincoming, needsoutgoing = True, True
    else:
        needsincoming, needsoutgoing = False, False
        for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
            if i:
                needsincoming = True
            if o:
                needsoutgoing = True
        if not needsincoming and not needsoutgoing:
            return

    def getincoming():
        # locate the default pull source and run incoming discovery;
        # returns (source, branch, peer, commoninc, incoming-heads)
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        try:
            other = hg.peer(repo, {}, source)
        except error.RepoError:
            if opts.get('remote'):
                raise
            return source, sbranch, None, None, None
        revs, checkout = hg.addbranchrevs(repo, other, branches, None)
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        # suppress discovery chatter; only the summary lines should show
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        repo.ui.popbuffer()
        return source, sbranch, other, commoninc, commoninc[1]

    if needsincoming:
        source, sbranch, sother, commoninc, incoming = getincoming()
    else:
        source = sbranch = sother = commoninc = incoming = None

    def getoutgoing():
        # locate the default push target and run outgoing discovery;
        # returns (dest, branch, peer, outgoing)
        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            try:
                dother = hg.peer(repo, {}, dest)
            except error.RepoError:
                if opts.get('remote'):
                    raise
                return dest, dbranch, None, None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        elif sother is None:
            # there is no explicit destination peer, but source one is invalid
            return dest, dbranch, None, None
        else:
            dother = sother
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            common = None
        else:
            # same peer and branch: reuse the incoming discovery result
            common = commoninc
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
                                                commoninc=common)
        repo.ui.popbuffer()
        return dest, dbranch, dother, outgoing

    if needsoutgoing:
        dest, dbranch, dother, outgoing = getoutgoing()
    else:
        dest = dbranch = dother = outgoing = None

    if opts.get('remote'):
        t = []
        if incoming:
            t.append(_('1 or more incoming'))
        o = outgoing.missing
        if o:
            t.append(_('%d outgoing') % len(o))
        other = dother or sother
        if 'bookmarks' in other.listkeys('namespaces'):
            lmarks = repo.listkeys('bookmarks')
            rmarks = other.listkeys('bookmarks')
            diff = set(rmarks) - set(lmarks)
            if len(diff) > 0:
                t.append(_('%d incoming bookmarks') % len(diff))
            diff = set(lmarks) - set(rmarks)
            if len(diff) > 0:
                t.append(_('%d outgoing bookmarks') % len(diff))

        if t:
            # i18n: column positioning for "hg summary"
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('remote: (synced)\n'))

    # let extensions append their own remote summary lines
    cmdutil.summaryremotehooks(ui, repo, opts,
                               ((source, sbranch, sother, commoninc),
                                (dest, dbranch, dother, outgoing)))
5961 5961
@command('tag',
         [('f', 'force', None, _('force tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag'), _('REV')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('e', 'edit', None, _('invoke editor on commit messages')),
          ('m', 'message', '', _('use text as commit message'), _('TEXT')),
         ] + commitopts2,
         _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    wlock = lock = None
    try:
        # hold both working-dir and store locks for the whole operation
        wlock = repo.wlock()
        lock = repo.lock()
        # default: tag the working directory parent
        rev_ = "."
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise util.Abort(_('tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, 'tag')
            if not n:
                raise util.Abort(_('tag names cannot consist entirely of '
                                   'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise util.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            rev_ = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # removal works by re-tagging the name to the null revision;
            # the tag must already exist with the matching local/global kind
            expectedtype = opts.get('local') and 'local' or 'global'
            for n in names:
                if not repo.tagtype(n):
                    raise util.Abort(_("tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == 'global':
                        raise util.Abort(_("tag '%s' is not a global tag") % n)
                    else:
                        raise util.Abort(_("tag '%s' is not a local tag") % n)
            rev_ = nullid
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for n in names:
                if n in repo.tags():
                    raise util.Abort(_("tag '%s' already exists "
                                       "(use -f to force)") % n)
        if not opts.get('local'):
            # global tags create a commit, so the usual commit
            # preconditions apply (no pending merge, at a branch head)
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise util.Abort(_('not at a branch head (use -f to force)'))
        r = scmutil.revsingle(repo, rev_).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(r)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        if opts.get('remove'):
            editform = 'tag.remove'
        else:
            editform = 'tag.add'
        editor = cmdutil.getcommiteditor(editform=editform, **opts)

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, rev_).rev() == nullrev):
            raise util.Abort(_("cannot tag null revision"))

        repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
                 editor=editor)
    finally:
        release(lock, wlock)
6074 6074
@command('tags', formatteropts, '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.

    Returns 0 on success.
    """

    fm = ui.formatter('tags', opts)
    hexfunc = fm.hexfunc

    # newest tags first
    for name, node in reversed(repo.tagslist()):
        islocal = repo.tagtype(name) == 'local'
        lbl = islocal and 'tags.local' or 'tags.normal'
        kind = islocal and 'local' or ''

        fm.startitem()
        fm.write('tag', '%s', name, label=lbl)
        # pad the tag name to a 30-column field before "rev:node"
        pad = " " * (30 - encoding.colwidth(name)) + ' %5d:%s'
        fm.condwrite(not ui.quiet, 'rev node', pad,
                     repo.changelog.rev(node), hexfunc(node), label=lbl)
        fm.condwrite(ui.verbose and kind, 'type', ' %s',
                     kind, label=lbl)
        fm.plain('\n')
    fm.end()
6106 6106
@command('tip',
         [('p', 'patch', None, _('show patch')),
          ('g', 'git', None, _('use git extended diff format')),
         ] + templateopts,
         _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    # render the single "tip" changeset with the standard log display
    shower = cmdutil.show_changeset(ui, repo, opts)
    shower.show(repo['tip'])
    shower.close()
6131 6131
@command('unbundle',
         [('u', 'update', None,
           _('update to new branch head if changesets were unbundled'))],
         _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    lock = repo.lock()
    try:
        for fname in fnames:
            f = hg.openpath(ui, fname)
            gen = exchange.readbundle(ui, f, fname)
            if isinstance(gen, bundle2.unbundle20):
                # new-style bundle2: apply all parts inside one transaction
                tr = repo.transaction('unbundle')
                try:
                    op = bundle2.processbundle(repo, gen, lambda: tr)
                    tr.close()
                finally:
                    if tr:
                        tr.release()
                # fold the per-changegroup results recorded by the bundle
                # processor into a single modified-heads value
                changes = [r.get('result', 0)
                           for r in op.records['changegroup']]
                modheads = changegroup.combineresults(changes)
            else:
                # legacy changegroup stream
                modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
                                                      'bundle:' + fname)
    finally:
        lock.release()

    return postincoming(ui, repo, modheads, opts.get('update'), None)
6169 6169
@command('^update|up|checkout|co',
         [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
          ('c', 'check', None,
           _('update across branches if no uncommitted changes')),
          ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
          ('r', 'rev', '', _('revision'), _('REV'))
         ] + mergetoolopts,
         _('[-c] [-C] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
           tool=None):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the current bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent, the update is aborted. With the -c/--check
    option, the working directory is checked for uncommitted changes; if
    none are found, the working directory is updated to the specified
    changeset.

    .. container:: verbose

      The following rules apply when the working directory contains
      uncommitted changes:

      1. If neither -c/--check nor -C/--clean is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      3. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`update --clean .`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # the positional NODE argument and -r/--rev are interchangeable
    if rev is None or rev == '':
        rev = node

    cmdutil.clearunfinished(repo)

    # with no argument, we also move the current bookmark, if any
    rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)

    # if we defined a bookmark, we have to remember the original bookmark name
    brev = rev
    rev = scmutil.revsingle(repo, rev, rev).rev()

    if check and clean:
        raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))

    if date:
        if rev is not None:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if check:
        # -c/--check refuses to proceed with local modifications
        c = repo[None]
        if c.dirty(merge=False, branch=False, missing=True):
            raise util.Abort(_("uncommitted changes"))
        if rev is None:
            rev = repo[repo[None].branch()].rev()

    repo.ui.setconfig('ui', 'forcemerge', tool, 'update')

    if clean:
        ret = hg.clean(repo, rev)
    else:
        ret = hg.update(repo, rev)

    # keep bookmark state in sync with the new working directory parent:
    # move the active bookmark, activate a bookmark named by the user, or
    # deactivate the current one when updating away from it
    if not ret and movemarkfrom:
        if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
            ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
    elif brev in repo._bookmarks:
        bookmarks.setcurrent(repo, brev)
        ui.status(_("(activating bookmark %s)\n") % brev)
    elif brev:
        if repo._bookmarkcurrent:
            ui.status(_("(leaving bookmark %s)\n") %
                      repo._bookmarkcurrent)
        bookmarks.unsetcurrent(repo)

    return ret
6279 6279
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    # all real work happens in hg.verify; forward its exit status
    return hg.verify(repo)
6298 6298
@command('version', [], norepo=True)
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "(see http://mercurial.selenic.com for more information)\n"
        "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

    ui.note(_("\nEnabled extensions:\n\n"))
    if ui.verbose:
        # format names and versions into columns
        exts = [(name, extensions.moduleversion(module))
                for name, module in extensions.extensions()]
        if exts:
            width = max(len(name) for name, vers in exts)
            for name, vers in exts:
                ui.write(" %-*s %s\n" % (width, name, vers))
@@ -1,1356 +1,1356 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os, mimetypes, re, cgi, copy
9 9 import webutil
10 10 from mercurial import error, encoding, archival, templater, templatefilters
11 11 from mercurial.node import short, hex
12 12 from mercurial import util
13 13 from common import paritygen, staticfile, get_contact, ErrorResponse
14 14 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
15 15 from mercurial import graphmod, patch
16 16 from mercurial import scmutil
17 17 from mercurial.i18n import _
18 18 from mercurial.error import ParseError, RepoLookupError, Abort
19 19 from mercurial import revset
20 20
21 21 __all__ = []
22 22 commands = {}
23 23
class webcommand(object):
    """Decorator used to register a web command handler.

    The decorator takes as its positional arguments the name/path the
    command should be accessible under.

    Usage:

    @webcommand('mycommand')
    def mycommand(web, req, tmpl):
        pass
    """

    def __init__(self, name):
        # URL path component under which the handler is reachable
        self.name = name

    def __call__(self, func):
        # export the handler and record it in the module command table
        name = self.name
        __all__.append(name)
        commands[name] = func
        return func
44 44
@webcommand('log')
def log(web, req, tmpl):
    """
    /log[/{revision}[/{path}]]
    --------------------------

    Show repository or file history.

    For URLs of the form ``/log/{revision}``, a list of changesets starting at
    the specified changeset identifier is shown. If ``{revision}`` is not
    defined, the default is ``tip``. This form is equivalent to the
    ``changelog`` handler.

    For URLs of the form ``/log/{revision}/{file}``, the history for a specific
    file will be shown. This form is equivalent to the ``filelog`` handler.
    """

    # dispatch on the presence of a non-empty 'file' request parameter
    if req.form.get('file', [''])[0]:
        return filelog(web, req, tmpl)
    return changelog(web, req, tmpl)
66 66
@webcommand('rawfile')
def rawfile(web, req, tmpl):
    # Serve the raw contents of a file at a given revision.
    #
    # With no path (repository root), fall back to the manifest view.
    # The response MIME type is guessed from the file name only when the
    # web.guessmime option is enabled; otherwise everything is served as
    # application/binary.
    guessmime = web.configbool('web', 'guessmime', False)

    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    if not path:
        content = manifest(web, req, tmpl)
        req.respond(HTTP_OK, web.ctype)
        return content

    try:
        fctx = webutil.filectx(web.repo, req)
    except error.LookupError, inst:
        # the path may actually name a directory: try the manifest view,
        # and re-raise the original lookup error if that fails too
        try:
            content = manifest(web, req, tmpl)
            req.respond(HTTP_OK, web.ctype)
            return content
        except ErrorResponse:
            raise inst

    path = fctx.path()
    text = fctx.data()
    mt = 'application/binary'
    if guessmime:
        mt = mimetypes.guess_type(path)[0]
        if mt is None:
            # no extension match: sniff the payload for binary content
            mt = util.binary(text) and 'application/binary' or 'text/plain'
    if mt.startswith('text/'):
        mt += '; charset="%s"' % encoding.encoding

    req.respond(HTTP_OK, mt, path, body=text)
    return []
99 99
def _filerevision(web, tmpl, fctx):
    # Render one revision of a file through the "filerevision" template.
    fname = fctx.path()
    data = fctx.data()
    parity = paritygen(web.stripecount)

    if util.binary(data):
        # binary payloads are not rendered line by line; substitute a
        # short placeholder naming the guessed MIME type
        mimetype = mimetypes.guess_type(fname)[0] or 'application/octet-stream'
        data = '(binary:%s)' % mimetype

    def lines():
        # lazily produce one template mapping per line, keeping newlines
        for idx, chunk in enumerate(data.splitlines(True)):
            yield {"line": chunk,
                   "lineid": "l%d" % (idx + 1),
                   "linenumber": "% 6d" % (idx + 1),
                   "parity": parity.next()}

    return tmpl("filerevision",
                file=fname,
                path=webutil.up(fname),
                text=lines(),
                rev=fctx.rev(),
                node=fctx.hex(),
                author=fctx.user(),
                date=fctx.date(),
                desc=fctx.description(),
                extra=fctx.extra(),
                branch=webutil.nodebranchnodefault(fctx),
                parent=webutil.parents(fctx),
                child=webutil.children(fctx),
                rename=webutil.renamelink(fctx),
                permissions=fctx.manifest().flags(fname))
131 131
132 132 @webcommand('file')
133 133 def file(web, req, tmpl):
134 134 """
135 135 /file/{revision}[/{path}]
136 136 -------------------------
137 137
138 138 Show information about a directory or file in the repository.
139 139
140 140 Info about the ``path`` given as a URL parameter will be rendered.
141 141
142 142 If ``path`` is a directory, information about the entries in that
143 143 directory will be rendered. This form is equivalent to the ``manifest``
144 144 handler.
145 145
146 146 If ``path`` is a file, information about that file will be shown via
147 147 the ``filerevision`` template.
148 148
149 149 If ``path`` is not defined, information about the root directory will
150 150 be rendered.
151 151 """
152 152 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
153 153 if not path:
154 154 return manifest(web, req, tmpl)
155 155 try:
156 156 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
157 157 except error.LookupError, inst:
158 158 try:
159 159 return manifest(web, req, tmpl)
160 160 except ErrorResponse:
161 161 raise inst
162 162
163 163 def _search(web, req, tmpl):
164 164 MODE_REVISION = 'rev'
165 165 MODE_KEYWORD = 'keyword'
166 166 MODE_REVSET = 'revset'
167 167
168 168 def revsearch(ctx):
169 169 yield ctx
170 170
171 171 def keywordsearch(query):
172 172 lower = encoding.lower
173 173 qw = lower(query).split()
174 174
175 175 def revgen():
176 176 cl = web.repo.changelog
177 177 for i in xrange(len(web.repo) - 1, 0, -100):
178 178 l = []
179 179 for j in cl.revs(max(0, i - 99), i):
180 180 ctx = web.repo[j]
181 181 l.append(ctx)
182 182 l.reverse()
183 183 for e in l:
184 184 yield e
185 185
186 186 for ctx in revgen():
187 187 miss = 0
188 188 for q in qw:
189 189 if not (q in lower(ctx.user()) or
190 190 q in lower(ctx.description()) or
191 191 q in lower(" ".join(ctx.files()))):
192 192 miss = 1
193 193 break
194 194 if miss:
195 195 continue
196 196
197 197 yield ctx
198 198
199 199 def revsetsearch(revs):
200 200 for r in revs:
201 201 yield web.repo[r]
202 202
203 203 searchfuncs = {
204 204 MODE_REVISION: (revsearch, 'exact revision search'),
205 205 MODE_KEYWORD: (keywordsearch, 'literal keyword search'),
206 206 MODE_REVSET: (revsetsearch, 'revset expression search'),
207 207 }
208 208
209 209 def getsearchmode(query):
210 210 try:
211 211 ctx = web.repo[query]
212 212 except (error.RepoError, error.LookupError):
213 213 # query is not an exact revision pointer, need to
214 214 # decide if it's a revset expression or keywords
215 215 pass
216 216 else:
217 217 return MODE_REVISION, ctx
218 218
219 219 revdef = 'reverse(%s)' % query
220 220 try:
221 221 tree, pos = revset.parse(revdef)
222 222 except ParseError:
223 223 # can't parse to a revset tree
224 224 return MODE_KEYWORD, query
225 225
226 226 if revset.depth(tree) <= 2:
227 227 # no revset syntax used
228 228 return MODE_KEYWORD, query
229 229
230 230 if util.any((token, (value or '')[:3]) == ('string', 're:')
231 231 for token, value, pos in revset.tokenize(revdef)):
232 232 return MODE_KEYWORD, query
233 233
234 234 funcsused = revset.funcsused(tree)
235 235 if not funcsused.issubset(revset.safesymbols):
236 236 return MODE_KEYWORD, query
237 237
238 238 mfunc = revset.match(web.repo.ui, revdef)
239 239 try:
240 revs = mfunc(web.repo, revset.spanset(web.repo))
240 revs = mfunc(web.repo)
241 241 return MODE_REVSET, revs
242 242 # ParseError: wrongly placed tokens, wrongs arguments, etc
243 243 # RepoLookupError: no such revision, e.g. in 'revision:'
244 244 # Abort: bookmark/tag not exists
245 245 # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
246 246 except (ParseError, RepoLookupError, Abort, LookupError):
247 247 return MODE_KEYWORD, query
248 248
    def changelist(**map):
        """Generate 'searchentry' template mappings for matching csets."""
        count = 0

        for ctx in searchfunc[0](funcarg):
            count += 1
            n = ctx.node()
            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
            files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)

            yield tmpl('searchentry',
                       parity=parity.next(),
                       author=ctx.user(),
                       parent=webutil.parents(ctx),
                       child=webutil.children(ctx),
                       changelogtag=showtags,
                       desc=ctx.description(),
                       extra=ctx.extra(),
                       date=ctx.date(),
                       files=files,
                       rev=ctx.rev(),
                       node=hex(n),
                       tags=webutil.nodetagsdict(web.repo, n),
                       bookmarks=webutil.nodebookmarksdict(web.repo, n),
                       inbranch=webutil.nodeinbranch(web.repo, ctx),
                       branches=webutil.nodebranchdict(web.repo, ctx))

            # stop after 'revcount' results have been emitted
            if count >= revcount:
                break
277 277
278 278 query = req.form['rev'][0]
279 279 revcount = web.maxchanges
280 280 if 'revcount' in req.form:
281 281 try:
282 282 revcount = int(req.form.get('revcount', [revcount])[0])
283 283 revcount = max(revcount, 1)
284 284 tmpl.defaults['sessionvars']['revcount'] = revcount
285 285 except ValueError:
286 286 pass
287 287
288 288 lessvars = copy.copy(tmpl.defaults['sessionvars'])
289 289 lessvars['revcount'] = max(revcount / 2, 1)
290 290 lessvars['rev'] = query
291 291 morevars = copy.copy(tmpl.defaults['sessionvars'])
292 292 morevars['revcount'] = revcount * 2
293 293 morevars['rev'] = query
294 294
295 295 mode, funcarg = getsearchmode(query)
296 296
297 297 if 'forcekw' in req.form:
298 298 showforcekw = ''
299 299 showunforcekw = searchfuncs[mode][1]
300 300 mode = MODE_KEYWORD
301 301 funcarg = query
302 302 else:
303 303 if mode != MODE_KEYWORD:
304 304 showforcekw = searchfuncs[MODE_KEYWORD][1]
305 305 else:
306 306 showforcekw = ''
307 307 showunforcekw = ''
308 308
309 309 searchfunc = searchfuncs[mode]
310 310
311 311 tip = web.repo['tip']
312 312 parity = paritygen(web.stripecount)
313 313
314 314 return tmpl('search', query=query, node=tip.hex(),
315 315 entries=changelist, archives=web.archivelist("tip"),
316 316 morevars=morevars, lessvars=lessvars,
317 317 modedesc=searchfunc[1],
318 318 showforcekw=showforcekw, showunforcekw=showunforcekw)
319 319
@webcommand('changelog')
def changelog(web, req, tmpl, shortlog=False):
    """
    /changelog[/{revision}]
    -----------------------

    Show information about multiple changesets.

    If the optional ``revision`` URL argument is absent, information about
    all changesets starting at ``tip`` will be rendered. If the ``revision``
    argument is present, changesets will be shown starting from the specified
    revision.

    If ``revision`` is absent, the ``rev`` query string argument may be
    defined. This will perform a search for changesets.

    The argument for ``rev`` can be a single revision, a revision set,
    or a literal keyword to search for in changeset data (equivalent to
    :hg:`log -k`).

    The ``revcount`` query string argument defines the maximum numbers of
    changesets to render.

    For non-searches, the ``changelog`` template will be rendered.
    """

    query = ''
    if 'node' in req.form:
        ctx = webutil.changectx(web.repo, req)
    elif 'rev' in req.form:
        # a 'rev' query string argument means this is a search request
        return _search(web, req, tmpl)
    else:
        ctx = web.repo['tip']

    def changelist():
        # Walk revisions from 'pos' down to 0, emitting one template
        # mapping per changeset.
        revs = []
        if pos != -1:
            revs = web.repo.changelog.revs(pos, 0)
        curcount = 0
        for rev in revs:
            curcount += 1
            # fetch one entry more than requested so the caller can tell
            # whether a next page exists
            if curcount > revcount + 1:
                break

            entry = webutil.changelistentry(web, web.repo[rev], tmpl)
            entry['parity'] = parity.next()
            yield entry

    # The previous 'shortlog and a or b' idiom silently misbehaves when
    # the first alternative is falsy; a conditional expression is exact.
    revcount = web.maxshortchanges if shortlog else web.maxchanges
    if 'revcount' in req.form:
        try:
            revcount = int(req.form.get('revcount', [revcount])[0])
            revcount = max(revcount, 1)
            tmpl.defaults['sessionvars']['revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = max(revcount / 2, 1)
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2

    count = len(web.repo)
    pos = ctx.rev()
    parity = paritygen(web.stripecount)

    changenav = webutil.revnav(web.repo).gen(pos, revcount, count)

    entries = list(changelist())
    latestentry = entries[:1]
    if len(entries) > revcount:
        # the extra entry fetched above becomes the 'next page' anchor
        nextentry = entries[-1:]
        entries = entries[:-1]
    else:
        nextentry = []

    return tmpl('shortlog' if shortlog else 'changelog', changenav=changenav,
                node=ctx.hex(), rev=pos, changesets=count,
                entries=entries,
                latestentry=latestentry, nextentry=nextentry,
                archives=web.archivelist("tip"), revcount=revcount,
                morevars=morevars, lessvars=lessvars, query=query)
402 402
@webcommand('shortlog')
def shortlog(web, req, tmpl):
    """
    /shortlog
    ---------

    Show basic information about a set of changesets.

    This accepts the same parameters as the ``changelog`` handler. The only
    difference is the ``shortlog`` template will be rendered instead of the
    ``changelog`` template.
    """
    # Delegate to the changelog handler in short-log mode.
    return changelog(web, req, tmpl, True)
416 416
@webcommand('changeset')
def changeset(web, req, tmpl):
    """
    /changeset[/{revision}]
    -----------------------

    Show information about a single changeset.

    A URL path argument is the changeset identifier to show. See ``hg help
    revisions`` for possible values. If not defined, the ``tip`` changeset
    will be shown.

    The ``changeset`` template is rendered. Contents of the ``changesettag``,
    ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many
    templates related to diffs may all be used to produce the output.
    """
    ctx = webutil.changectx(web.repo, req)
    # diff against an explicitly requested base revision if one was given,
    # otherwise against the first parent
    basectx = webutil.basechangectx(web.repo, req)
    if basectx is None:
        basectx = ctx.p1()
    showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
    showbookmarks = webutil.showbookmark(web.repo, tmpl, 'changesetbookmark',
                                         ctx.node())
    showbranch = webutil.nodebranchnodefault(ctx)

    files = []
    parity = paritygen(web.stripecount)
    for blockno, f in enumerate(ctx.files()):
        # files absent from the manifest (removed) get the no-hyperlink
        # 'filenolink' template
        template = f in ctx and 'filenodelink' or 'filenolink'
        files.append(tmpl(template,
                          node=ctx.hex(), file=f, blockno=blockno + 1,
                          parity=parity.next()))

    style = web.config('web', 'style', 'paper')
    if 'style' in req.form:
        style = req.form['style'][0]

    parity = paritygen(web.stripecount)
    diffs = webutil.diffs(web.repo, tmpl, ctx, basectx, None, parity, style)

    parity = paritygen(web.stripecount)
    diffstatgen = webutil.diffstatgen(ctx, basectx)
    diffstat = webutil.diffstat(tmpl, ctx, diffstatgen, parity)

    return tmpl('changeset',
                diff=diffs,
                rev=ctx.rev(),
                node=ctx.hex(),
                parent=tuple(webutil.parents(ctx)),
                child=webutil.children(ctx),
                basenode=basectx.hex(),
                changesettag=showtags,
                changesetbookmark=showbookmarks,
                changesetbranch=showbranch,
                author=ctx.user(),
                desc=ctx.description(),
                extra=ctx.extra(),
                date=ctx.date(),
                files=files,
                diffsummary=lambda **x: webutil.diffsummary(diffstatgen),
                diffstat=diffstat,
                archives=web.archivelist(ctx.hex()),
                tags=webutil.nodetagsdict(web.repo, ctx.node()),
                bookmarks=webutil.nodebookmarksdict(web.repo, ctx.node()),
                branch=webutil.nodebranchnodefault(ctx),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx))

# the 'rev' web command is an alias for 'changeset'
rev = webcommand('rev')(changeset)
486 486
def decodepath(path):
    """Hook mapping a repository path to a working-copy path.

    The default implementation is the identity mapping. Extensions
    (e.g., largefiles) can override this to remap files in the virtual
    file system presented by the manifest command below.
    """
    return path
494 494
@webcommand('manifest')
def manifest(web, req, tmpl):
    """
    /manifest[/{revision}[/{path}]]
    -------------------------------

    Show information about a directory.

    If the URL path arguments are omitted, information about the root
    directory for the ``tip`` changeset will be shown.

    Because this handler can only show information for directories, it
    is recommended to use the ``file`` handler instead, as it can handle both
    directories and files.

    The ``manifest`` template will be rendered for this handler.
    """
    ctx = webutil.changectx(web.repo, req)
    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    mf = ctx.manifest()
    node = ctx.node()

    files = {}
    dirs = {}
    parity = paritygen(web.stripecount)

    # normalize to a directory prefix ending in '/'
    if path and path[-1] != "/":
        path += "/"
    l = len(path)
    abspath = "/" + path

    for full, n in mf.iteritems():
        # the virtual path (working copy path) used for the full
        # (repository) path
        f = decodepath(full)

        if f[:l] != path:
            continue
        remain = f[l:]
        elements = remain.split('/')
        if len(elements) == 1:
            # direct child of the requested directory: a file
            files[remain] = full
        else:
            # build a nested dict tree for subdirectories; stop descending
            # once a directory is known to contain more than one entry
            h = dirs # need to retain ref to dirs (root)
            for elem in elements[0:-1]:
                if elem not in h:
                    h[elem] = {}
                h = h[elem]
                if len(h) > 1:
                    break
            h[None] = None # denotes files present

    if mf and not files and not dirs:
        raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)

    def filelist(**map):
        # template generator for files directly in this directory
        for f in sorted(files):
            full = files[f]

            fctx = ctx.filectx(full)
            yield {"file": full,
                   "parity": parity.next(),
                   "basename": f,
                   "date": fctx.date(),
                   "size": fctx.size(),
                   "permissions": mf.flags(full)}

    def dirlist(**map):
        # template generator for subdirectories; chains of single-entry
        # directories are collapsed into one displayed path
        for d in sorted(dirs):

            emptydirs = []
            h = dirs[d]
            while isinstance(h, dict) and len(h) == 1:
                k, v = h.items()[0]
                if v:
                    emptydirs.append(k)
                h = v

            path = "%s%s" % (abspath, d)
            yield {"parity": parity.next(),
                   "path": path,
                   "emptydirs": "/".join(emptydirs),
                   "basename": d}

    return tmpl("manifest",
                rev=ctx.rev(),
                node=hex(node),
                path=abspath,
                up=webutil.up(abspath),
                upparity=parity.next(),
                fentries=filelist,
                dentries=dirlist,
                archives=web.archivelist(hex(node)),
                tags=webutil.nodetagsdict(web.repo, node),
                bookmarks=webutil.nodebookmarksdict(web.repo, node),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx))
592 592
@webcommand('tags')
def tags(web, req, tmpl):
    """
    /tags
    -----

    Show information about tags.

    No arguments are accepted.

    The ``tags`` template is rendered.
    """
    alltags = list(reversed(web.repo.tagslist()))
    parity = paritygen(web.stripecount)

    def entries(notip, latestonly, **map):
        # optionally drop the synthetic "tip" tag, optionally keep just
        # the first (most recent) entry
        if notip:
            selected = [(name, node) for name, node in alltags
                        if name != "tip"]
        else:
            selected = alltags
        if latestonly:
            selected = selected[:1]
        for name, node in selected:
            yield {"parity": parity.next(),
                   "tag": name,
                   "date": web.repo[node].date(),
                   "node": hex(node)}

    return tmpl("tags",
                node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(False, False, **x),
                entriesnotip=lambda **x: entries(True, False, **x),
                latestentry=lambda **x: entries(True, True, **x))
625 625
@webcommand('bookmarks')
def bookmarks(web, req, tmpl):
    """
    /bookmarks
    ----------

    Show information about bookmarks.

    No arguments are accepted.

    The ``bookmarks`` template is rendered.
    """
    # only consider bookmarks whose target revision exists in this repo
    i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
    parity = paritygen(web.stripecount)

    def entries(latestonly, **map):
        # Sorting up front also fixes a crash: the previous code used
        # min(i) for the latest entry, which raises ValueError when the
        # repository has no bookmarks. sorted(i)[:1] selects the same
        # element when bookmarks exist and is simply empty otherwise.
        t = sorted(i)
        if latestonly:
            t = t[:1]
        for k, n in t:
            yield {"parity": parity.next(),
                   "bookmark": k,
                   "date": web.repo[n].date(),
                   "node": hex(n)}

    return tmpl("bookmarks",
                node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(latestonly=False, **x),
                latestentry=lambda **x: entries(latestonly=True, **x))
656 656
@webcommand('branches')
def branches(web, req, tmpl):
    """
    /branches
    ---------

    Show information about branches.

    All known branches are contained in the output, even closed branches.

    No arguments are accepted.

    The ``branches`` template is rendered.
    """
    tips = []
    heads = web.repo.heads()
    parity = paritygen(web.stripecount)
    # sort open branches before closed ones, then by descending revision
    sortkey = lambda item: (not item[1], item[0].rev())

    def entries(limit, **map):
        # limit == 0 means unlimited; 'tips' is filled lazily on the first
        # call and shared between the 'entries' and 'latestentry' keywords
        count = 0
        if not tips:
            for tag, hs, tip, closed in web.repo.branchmap().iterbranches():
                tips.append((web.repo[tip], closed))
        for ctx, closed in sorted(tips, key=sortkey, reverse=True):
            if limit > 0 and count >= limit:
                return
            count += 1
            if closed:
                status = 'closed'
            elif ctx.node() not in heads:
                # the branch tip is not a repository head: inactive
                status = 'inactive'
            else:
                status = 'open'
            yield {'parity': parity.next(),
                   'branch': ctx.branch(),
                   'status': status,
                   'node': ctx.hex(),
                   'date': ctx.date()}

    return tmpl('branches', node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(0, **x),
                latestentry=lambda **x: entries(1, **x))
700 700
@webcommand('summary')
def summary(web, req, tmpl):
    """
    /summary
    --------

    Show a summary of repository state.

    Information about the latest changesets, bookmarks, tags, and branches
    is captured by this handler.

    The ``summary`` template is rendered.
    """
    i = reversed(web.repo.tagslist())

    def tagentries(**map):
        # up to 10 most recent tags, skipping the synthetic "tip" tag
        parity = paritygen(web.stripecount)
        count = 0
        for k, n in i:
            if k == "tip": # skip tip
                continue

            count += 1
            if count > 10: # limit to 10 tags
                break

            yield tmpl("tagentry",
                       parity=parity.next(),
                       tag=k,
                       node=hex(n),
                       date=web.repo[n].date())

    def bookmarks(**map):
        # bookmarks pointing at revisions present in this repo
        parity = paritygen(web.stripecount)
        marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
        for k, n in sorted(marks)[:10]: # limit to 10 bookmarks
            yield {'parity': parity.next(),
                   'bookmark': k,
                   'date': web.repo[n].date(),
                   'node': hex(n)}

    def branches(**map):
        parity = paritygen(web.stripecount)

        b = web.repo.branchmap()
        # negative rev as sort key gives descending tip-revision order
        l = [(-web.repo.changelog.rev(tip), tip, tag)
             for tag, heads, tip, closed in b.iterbranches()]
        for r, n, t in sorted(l):
            yield {'parity': parity.next(),
                   'branch': t,
                   'node': hex(n),
                   'date': web.repo[n].date()}

    def changelist(**map):
        parity = paritygen(web.stripecount, offset=start - end)
        l = [] # build a list in forward order for efficiency
        revs = []
        if start < end:
            revs = web.repo.changelog.revs(start, end - 1)
        for i in revs:
            ctx = web.repo[i]
            n = ctx.node()
            hn = hex(n)

            l.append(tmpl(
                'shortlogentry',
                parity=parity.next(),
                author=ctx.user(),
                desc=ctx.description(),
                extra=ctx.extra(),
                date=ctx.date(),
                rev=i,
                node=hn,
                tags=webutil.nodetagsdict(web.repo, n),
                bookmarks=webutil.nodebookmarksdict(web.repo, n),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx)))

        # newest changeset first in the rendered output
        l.reverse()
        yield l

    tip = web.repo['tip']
    count = len(web.repo)
    # [start, end) covers the last maxchanges revisions of the repo
    start = max(0, count - web.maxchanges)
    end = min(count, start + web.maxchanges)

    return tmpl("summary",
                desc=web.config("web", "description", "unknown"),
                owner=get_contact(web.config) or "unknown",
                lastchange=tip.date(),
                tags=tagentries,
                bookmarks=bookmarks,
                branches=branches,
                shortlog=changelist,
                node=tip.hex(),
                archives=web.archivelist("tip"))
797 797
@webcommand('filediff')
def filediff(web, req, tmpl):
    """
    /diff/{revision}/{path}
    -----------------------

    Show how a file changed in a particular commit.

    The ``filediff`` template is rendered.

    This handler is registered under both the ``/diff`` and ``/filediff``
    paths. ``/diff`` is used in modern code.
    """
    fctx, ctx = None, None
    try:
        fctx = webutil.filectx(web.repo, req)
    except LookupError:
        # the file has no filectx at this revision (e.g. it was removed);
        # fall back to the changectx, but only if this changeset really
        # touched the file
        ctx = webutil.changectx(web.repo, req)
        path = webutil.cleanpath(web.repo, req.form['file'][0])
        if path not in ctx.files():
            raise

    if fctx is not None:
        n = fctx.node()
        path = fctx.path()
        ctx = fctx.changectx()
    else:
        n = ctx.node()
        # path already defined in except clause

    parity = paritygen(web.stripecount)
    style = web.config('web', 'style', 'paper')
    if 'style' in req.form:
        style = req.form['style'][0]

    diffs = webutil.diffs(web.repo, tmpl, ctx, None, [path], parity, style)
    # replaced the fragile 'x and y or z' idiom with explicit branching:
    # with a filectx available, link rename info and render against it
    if fctx is not None:
        rename = webutil.renamelink(fctx)
        ctx = fctx
    else:
        rename = []
    return tmpl("filediff",
                file=path,
                node=hex(n),
                rev=ctx.rev(),
                date=ctx.date(),
                desc=ctx.description(),
                extra=ctx.extra(),
                author=ctx.user(),
                rename=rename,
                branch=webutil.nodebranchnodefault(ctx),
                parent=webutil.parents(ctx),
                child=webutil.children(ctx),
                diff=diffs)

# the 'diff' web command is an alias for 'filediff'
diff = webcommand('diff')(filediff)
851 851
@webcommand('comparison')
def comparison(web, req, tmpl):
    """
    /comparison/{revision}/{path}
    -----------------------------

    Show a comparison between the old and new versions of a file from changes
    made on a particular revision.

    This is similar to the ``diff`` handler. However, this form features
    a split or side-by-side diff rather than a unified diff.

    The ``context`` query string argument can be used to control the lines of
    context in the diff.

    The ``filecomparison`` template is rendered.
    """
    ctx = webutil.changectx(web.repo, req)
    if 'file' not in req.form:
        raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
    path = webutil.cleanpath(web.repo, req.form['file'][0])

    # rename info only exists when the file is present at this revision
    if path in ctx:
        rename = webutil.renamelink(ctx[path])
    else:
        rename = []

    def parsecontext(v):
        # 'full' requests unlimited context; -1 is the sentinel for it.
        # (replaces a lambda built on the fragile 'and/or' idiom)
        return -1 if v == 'full' else int(v)

    if 'context' in req.form:
        context = parsecontext(req.form['context'][0])
    else:
        context = parsecontext(web.config('web', 'comparisoncontext', '5'))

    def filelines(f):
        # binary files cannot be compared line by line; show a placeholder
        if util.binary(f.data()):
            mt = mimetypes.guess_type(f.path())[0]
            if not mt:
                mt = 'application/octet-stream'
            return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
        return f.data().splitlines()

    parent = ctx.p1()
    leftrev = parent.rev()
    leftnode = parent.node()
    rightrev = ctx.rev()
    rightnode = ctx.node()
    if path in ctx:
        fctx = ctx[path]
        rightlines = filelines(fctx)
        if path not in parent:
            # file was added by this changeset: nothing on the left
            leftlines = ()
        else:
            pfctx = parent[path]
            leftlines = filelines(pfctx)
    else:
        # file was removed by this changeset: take content from the parent
        # (use the 'parent' computed above instead of ctx.parents()[0])
        rightlines = ()
        fctx = parent[path]
        leftlines = filelines(fctx)

    comparison = webutil.compare(tmpl, context, leftlines, rightlines)
    return tmpl('filecomparison',
                file=path,
                node=hex(ctx.node()),
                rev=ctx.rev(),
                date=ctx.date(),
                desc=ctx.description(),
                extra=ctx.extra(),
                author=ctx.user(),
                rename=rename,
                branch=webutil.nodebranchnodefault(ctx),
                parent=webutil.parents(fctx),
                child=webutil.children(fctx),
                leftrev=leftrev,
                leftnode=hex(leftnode),
                rightrev=rightrev,
                rightnode=hex(rightnode),
                comparison=comparison)
925 925
@webcommand('annotate')
def annotate(web, req, tmpl):
    """
    /annotate/{revision}/{path}
    ---------------------------

    Show changeset information for each line in a file.

    The ``fileannotate`` template is rendered.
    """
    fctx = webutil.filectx(web.repo, req)
    f = fctx.path()
    parity = paritygen(web.stripecount)
    diffopts = patch.difffeatureopts(web.repo.ui, untrusted=True,
                                     section='annotate', whitespace=True)

    def annotate(**map):
        last = None
        if util.binary(fctx.data()):
            # binary files cannot be annotated line by line; emit a single
            # placeholder line attributed to the file's own revision
            mt = (mimetypes.guess_type(fctx.path())[0]
                  or 'application/octet-stream')
            lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
                                '(binary:%s)' % mt)])
        else:
            lines = enumerate(fctx.annotate(follow=True, linenumber=True,
                                            diffopts=diffopts))
        for lineno, ((f, targetline), l) in lines:
            fnode = f.filenode()

            # 'last' tracks the origin filenode of the previous line; it
            # is updated here but not otherwise used in this function
            if last != fnode:
                last = fnode

            yield {"parity": parity.next(),
                   "node": f.hex(),
                   "rev": f.rev(),
                   "author": f.user(),
                   "desc": f.description(),
                   "extra": f.extra(),
                   "file": f.path(),
                   "targetline": targetline,
                   "line": l,
                   "lineid": "l%d" % (lineno + 1),
                   "linenumber": "% 6d" % (lineno + 1),
                   "revdate": f.date()}

    return tmpl("fileannotate",
                file=f,
                annotate=annotate,
                path=webutil.up(f),
                rev=fctx.rev(),
                node=fctx.hex(),
                author=fctx.user(),
                date=fctx.date(),
                desc=fctx.description(),
                extra=fctx.extra(),
                rename=webutil.renamelink(fctx),
                branch=webutil.nodebranchnodefault(fctx),
                parent=webutil.parents(fctx),
                child=webutil.children(fctx),
                permissions=fctx.manifest().flags(f))
986 986
@webcommand('filelog')
def filelog(web, req, tmpl):
    """
    /filelog/{revision}/{path}
    --------------------------

    Show information about the history of a file in the repository.

    The ``revcount`` query string argument can be defined to control the
    maximum number of entries to show.

    The ``filelog`` template will be rendered.
    """

    try:
        fctx = webutil.filectx(web.repo, req)
        f = fctx.path()
        fl = fctx.filelog()
    except error.LookupError:
        # the file is not present at the requested revision; fall back to
        # the newest file revision linked at or before that changeset
        f = webutil.cleanpath(web.repo, req.form['file'][0])
        fl = web.repo.file(f)
        numrevs = len(fl)
        if not numrevs: # file doesn't exist at all
            raise
        rev = webutil.changectx(web.repo, req).rev()
        first = fl.linkrev(0)
        if rev < first: # current rev is from before file existed
            raise
        frev = numrevs - 1
        while fl.linkrev(frev) > rev:
            frev -= 1
        fctx = web.repo.filectx(f, fl.linkrev(frev))

    revcount = web.maxshortchanges
    if 'revcount' in req.form:
        try:
            revcount = int(req.form.get('revcount', [revcount])[0])
            revcount = max(revcount, 1)
            tmpl.defaults['sessionvars']['revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = max(revcount / 2, 1)
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2

    count = fctx.filerev() + 1
    start = max(0, fctx.filerev() - revcount + 1) # first rev on this page
    end = min(count, start + revcount) # last rev on this page
    parity = paritygen(web.stripecount, offset=start - end)

    def entries():
        # build entries in forward order, then yield them reversed so the
        # newest file revision is listed first
        l = []

        repo = web.repo
        revs = fctx.filelog().revs(start, end - 1)
        for i in revs:
            iterfctx = fctx.filectx(i)

            l.append({"parity": parity.next(),
                      "filerev": i,
                      "file": f,
                      "node": iterfctx.hex(),
                      "author": iterfctx.user(),
                      "date": iterfctx.date(),
                      "rename": webutil.renamelink(iterfctx),
                      "parent": webutil.parents(iterfctx),
                      "child": webutil.children(iterfctx),
                      "desc": iterfctx.description(),
                      "extra": iterfctx.extra(),
                      "tags": webutil.nodetagsdict(repo, iterfctx.node()),
                      "bookmarks": webutil.nodebookmarksdict(
                          repo, iterfctx.node()),
                      "branch": webutil.nodebranchnodefault(iterfctx),
                      "inbranch": webutil.nodeinbranch(repo, iterfctx),
                      "branches": webutil.nodebranchdict(repo, iterfctx)})
        for e in reversed(l):
            yield e

    entries = list(entries())
    latestentry = entries[:1]

    revnav = webutil.filerevnav(web.repo, fctx.path())
    nav = revnav.gen(end - 1, revcount, count)
    return tmpl("filelog", file=f, node=fctx.hex(), nav=nav,
                entries=entries,
                latestentry=latestentry,
                revcount=revcount, morevars=morevars, lessvars=lessvars)
1076 1076
@webcommand('archive')
def archive(web, req, tmpl):
    """
    /archive/{revision}.{format}[/{path}]
    -------------------------------------

    Obtain an archive of repository content.

    The content and type of the archive is defined by a URL path parameter.
    ``format`` is the file extension of the archive type to be generated. e.g.
    ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your
    server configuration.

    The optional ``path`` URL parameter controls content to include in the
    archive. If omitted, every file in the specified revision is present in the
    archive. If included, only the specified file or contents of the specified
    directory will be included in the archive.

    No template is used for this handler. Raw, binary content is generated.
    """

    type_ = req.form.get('type', [None])[0]
    allowed = web.configlist("web", "allow_archive")
    key = req.form['node'][0]

    if type_ not in web.archives:
        msg = 'Unsupported archive type: %s' % type_
        raise ErrorResponse(HTTP_NOT_FOUND, msg)

    if not ((type_ in allowed or
             web.configbool("web", "allow" + type_, False))):
        msg = 'Archive type not allowed: %s' % type_
        raise ErrorResponse(HTTP_FORBIDDEN, msg)

    reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
    cnode = web.repo.lookup(key)
    arch_version = key
    if cnode == key or key == 'tip':
        # use the short node hash in the archive name when the request
        # did not carry a symbolic version of its own
        arch_version = short(cnode)
    name = "%s-%s" % (reponame, arch_version)

    ctx = webutil.changectx(web.repo, req)
    pats = []
    matchfn = scmutil.match(ctx, [])
    # renamed from 'file', which shadowed the Python 2 builtin
    reqfile = req.form.get('file', None)
    if reqfile:
        pats = ['path:' + reqfile[0]]
        matchfn = scmutil.match(ctx, pats, default='path')
    if pats:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
        if not files:
            raise ErrorResponse(HTTP_NOT_FOUND,
                                'file(s) not found: %s' % reqfile[0])

    mimetype, artype, extension, encoding = web.archive_specs[type_]
    headers = [
        ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
    ]
    if encoding:
        headers.append(('Content-Encoding', encoding))
    req.headers.extend(headers)
    req.respond(HTTP_OK, mimetype)

    archival.archive(web.repo, req, cnode, artype, prefix=name,
                     matchfn=matchfn,
                     subrepos=web.configbool("web", "archivesubrepos"))
    return []
1144 1144
1145 1145
@webcommand('static')
def static(web, req, tmpl):
    """Serve a static file (stylesheet, icon, ...) to the client.

    A repo owner may set ``web.static`` in ``.hg/hgrc`` to expose any
    directory readable by the user running the CGI script; otherwise the
    ``static`` subdirectory of each template path is searched.
    """
    fname = req.form['file'][0]
    staticdirs = web.config("web", "static", None, untrusted=False)
    if not staticdirs:
        paths = web.templatepath or templater.templatepaths()
        if isinstance(paths, str):
            paths = [paths]
        staticdirs = [os.path.join(p, 'static') for p in paths]
    staticfile(staticdirs, fname, req)
    return []
1159 1159
@webcommand('graph')
def graph(web, req, tmpl):
    """
    /graph[/{revision}]
    -------------------

    Show information about the graphical topology of the repository.

    Information rendered by this handler can be used to create visual
    representations of repository topology.

    The ``revision`` URL parameter controls the starting changeset.

    The ``revcount`` query string argument can define the number of changesets
    to show information for.

    This handler will render the ``graph`` template.
    """

    # Resolve the starting changeset from the request (tip if unspecified).
    ctx = webutil.changectx(web.repo, req)
    rev = ctx.rev()

    # Pixel height of one graph row in the rendered canvas.
    bg_height = 39
    revcount = web.maxshortchanges
    if 'revcount' in req.form:
        try:
            revcount = int(req.form.get('revcount', [revcount])[0])
            revcount = max(revcount, 1)
            # remember the user's choice across navigation links
            tmpl.defaults['sessionvars']['revcount'] = revcount
        except ValueError:
            # non-numeric revcount in the query string: keep the default
            pass

    # "less"/"more" links halve/double the number of displayed rows.
    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = max(revcount / 2, 1)
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2

    count = len(web.repo)
    pos = rev

    # Clamp the up/down navigation targets to the repository bounds.
    uprev = min(max(0, count - 1), rev + revcount)
    downrev = max(0, rev - revcount)
    changenav = webutil.revnav(web.repo).gen(pos, revcount, count)

    tree = []
    if pos != -1:
        # Walk backwards from the starting revision, taking at most
        # revcount revisions.
        allrevs = web.repo.changelog.revs(pos, 0)
        revs = []
        for i in allrevs:
            revs.append(i)
            if len(revs) >= revcount:
                break

        # We have to feed a baseset to dagwalker as it is expecting smartset
        # object. This does not have a big impact on hgweb performance itself
        # since hgweb graphing code is not itself lazy yet.
        dag = graphmod.dagwalker(web.repo, revset.baseset(revs))
        # As we said one line above... not lazy.
        tree = list(graphmod.colored(dag, web.repo))

    def getcolumns(tree):
        # Widest column index touched by any changeset row or edge,
        # used to size the canvas.
        cols = 0
        for (id, type, ctx, vtx, edges) in tree:
            if type != graphmod.CHANGESET:
                continue
            cols = max(cols, max([edge[0] for edge in edges] or [0]),
                       max([edge[1] for edge in edges] or [0]))
        return cols

    def graphdata(usetuples, **map):
        # Produce per-changeset rendering data; as compact tuples for the
        # JavaScript renderer (usetuples=True) or as dicts for templates.
        data = []

        row = 0
        for (id, type, ctx, vtx, edges) in tree:
            if type != graphmod.CHANGESET:
                continue
            node = str(ctx)
            age = templatefilters.age(ctx.date())
            desc = templatefilters.firstline(ctx.description())
            desc = cgi.escape(templatefilters.nonempty(desc))
            user = cgi.escape(templatefilters.person(ctx.user()))
            branch = cgi.escape(ctx.branch())
            try:
                branchnode = web.repo.branchtip(branch)
            except error.RepoLookupError:
                branchnode = None
            # (branchname, is-branch-tip) pair
            branch = branch, branchnode == ctx.node()

            if usetuples:
                data.append((node, vtx, edges, desc, user, age, branch,
                             [cgi.escape(x) for x in ctx.tags()],
                             [cgi.escape(x) for x in ctx.bookmarks()]))
            else:
                # edge colors cycle through 6 palette slots
                edgedata = [{'col': edge[0], 'nextcol': edge[1],
                             'color': (edge[2] - 1) % 6 + 1,
                             'width': edge[3], 'bcolor': edge[4]}
                            for edge in edges]

                data.append(
                    {'node': node,
                     'col': vtx[0],
                     'color': (vtx[1] - 1) % 6 + 1,
                     'edges': edgedata,
                     'row': row,
                     'nextrow': row + 1,
                     'desc': desc,
                     'user': user,
                     'age': age,
                     'bookmarks': webutil.nodebookmarksdict(
                         web.repo, ctx.node()),
                     'branches': webutil.nodebranchdict(web.repo, ctx),
                     'inbranch': webutil.nodeinbranch(web.repo, ctx),
                     'tags': webutil.nodetagsdict(web.repo, ctx.node())})

            row += 1

        return data

    cols = getcolumns(tree)
    rows = len(tree)
    # magic constants match the graph template's fixed margins
    canvasheight = (rows + 1) * bg_height - 27

    return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
                lessvars=lessvars, morevars=morevars, downrev=downrev,
                cols=cols, rows=rows,
                canvaswidth=(cols + 1) * bg_height,
                truecanvasheight=rows * bg_height,
                canvasheight=canvasheight, bg_height=bg_height,
                jsdata=lambda **x: graphdata(True, **x),
                nodes=lambda **x: graphdata(False, **x),
                node=ctx.hex(), changenav=changenav)
1291 1291
def _getdoc(e):
    """Return the translated first help line for a command table entry.

    *e* is a command table value whose first element is the command
    function; its docstring supplies the help text.
    """
    text = e[0].__doc__
    if text:
        return _(text).split('\n')[0]
    return _('(no help text available)')
1299 1299
@webcommand('help')
def help(web, req, tmpl):
    """
    /help[/{topic}]
    ---------------

    Render help documentation.

    This web command is roughly equivalent to :hg:`help`. If a ``topic``
    is defined, that help topic will be rendered. If not, an index of
    available help topics will be rendered.

    The ``help`` template will be rendered when requesting help for a topic.
    ``helptopics`` will be rendered for the index of help topics.
    """
    from mercurial import commands # avoid cycle
    from mercurial import help as helpmod # avoid cycle

    topicname = req.form.get('node', [None])[0]
    if not topicname:
        # No topic requested: render the index of topics and commands.
        def topics(**map):
            for entries, summary, _doc in helpmod.helptable:
                yield {'topic': entries[0], 'summary': summary}

        early, other = [], []
        # command table keys look like "name|alias1|alias2"; the primary
        # name is the first component
        primary = lambda s: s.split('|')[0]
        for c, e in commands.table.iteritems():
            doc = _getdoc(e)
            # hide deprecated and debug commands from the index
            if 'DEPRECATED' in doc or c.startswith('debug'):
                continue
            cmd = primary(c)
            # a leading '^' marks commands shown in the short help list
            if cmd.startswith('^'):
                early.append((cmd[1:], doc))
            else:
                other.append((cmd, doc))

        early.sort()
        other.sort()

        def earlycommands(**map):
            for c, doc in early:
                yield {'topic': c, 'summary': doc}

        def othercommands(**map):
            for c, doc in other:
                yield {'topic': c, 'summary': doc}

        return tmpl('helptopics', topics=topics, earlycommands=earlycommands,
                    othercommands=othercommands, title='Index')

    # Topic requested: render the full help text for it.
    u = webutil.wsgiui()
    u.verbose = True
    try:
        doc = helpmod.help_(u, topicname)
    except error.UnknownCommand:
        raise ErrorResponse(HTTP_NOT_FOUND)
    return tmpl('help', topic=topicname, doc=doc)
@@ -1,1855 +1,1855 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from node import hex, nullid, short
8 8 from i18n import _
9 9 import urllib
10 10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 12 import lock as lockmod
13 13 import transaction, store, encoding, exchange, bundle2
14 14 import scmutil, util, extensions, hook, error, revset
15 15 import match as matchmod
16 16 import merge as mergemod
17 17 import tags as tagsmod
18 18 from lock import release
19 19 import weakref, errno, os, time, inspect
20 20 import branchmap, pathutil
21 21 import namespaces
22 22 propertycache = util.propertycache
23 23 filecache = scmutil.filecache
24 24
class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """

    # Each accessor redirects to the unfiltered repository so that the
    # cached value is shared by all filtered views of the same repo.
    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())
35 35
class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # store files live under .hg/store, hence sjoin instead of join
        return obj.sjoin(fname)
40 40
class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # accessed on the unfiltered repo itself: cache normally
            return super(unfilteredpropertycache, self).__get__(unfi)
        # accessed through a filtered view: delegate to the unfiltered
        # repo's (possibly already cached) attribute
        return getattr(unfi, self.name)
49 49
class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # store on the instance the property was accessed through (the
        # filtered view), not on the unfiltered repo
        object.__setattr__(obj, self.name, value)
55 55
56 56
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    unfi = repo.unfiltered()
    return name in vars(unfi)
60 60
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def inner(repo, *args, **kwargs):
        unfi = repo.unfiltered()
        return orig(unfi, *args, **kwargs)
    return inner
66 66
# Wire-protocol capabilities advertised by a modern local peer.
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
                  'unbundle'))
# Legacy peers additionally support the old changegroupsubset command.
legacycaps = moderncaps.union(set(['changegroupsubset']))
70 70
class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=moderncaps):
        peer.peerrepository.__init__(self)
        # peers always see the 'served' filtered view of the repository
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        # a local peer can expose the underlying repository
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  format='HG10', **kwargs):
        cg = exchange.getbundle(self._repo, source, heads=heads,
                                common=common, bundlecaps=bundlecaps, **kwargs)
        if bundlecaps is not None and 'HG2Y' in bundlecaps:
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            cg = bundle2.unbundle20(self.ui, cg)
        return cg

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def unbundle(self, cg, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            cg = exchange.readbundle(self.ui, cg, None)
            ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
            if util.safehasattr(ret, 'getchunks'):
                # This is a bundle20 object, turn it into an unbundler.
                # This little dance should be dropped eventually when the API
                # is finally improved.
                stream = util.chunkbuffer(ret.getchunks())
                ret = bundle2.unbundle20(self.ui, stream)
            return ret
        except error.PushRaced, exc:
            # surface push races the same way a remote peer would
            raise error.ResponseError(_('push failed:'), str(exc))

    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return changegroup.addchangegroup(self._repo, cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)
155 155
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        localpeer.__init__(self, repo, caps=legacycaps)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return changegroup.changegroup(self._repo, basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return changegroup.changegroupsubset(self._repo, bases, heads, source)
174 174
class localrepository(object):
    """A local on-disk Mercurial repository."""

    # repository formats this class can read and write
    supportedformats = set(('revlogv1', 'generaldelta'))
    _basesupported = supportedformats | set(('store', 'fncache', 'shared',
                                             'dotencode'))
    # requirements that influence how the store's revlogs are opened
    openerreqs = set(('revlogv1', 'generaldelta'))
    # requirements written for newly created repositories
    requirements = ['revlogv1']
    # name of the active filter; None means unfiltered
    filtername = None

    # a list of (ui, featureset) functions.
    # only functions defined in module of enabled extensions are invoked
    featuresetupfuncs = set()

    def _baserequirements(self, create):
        # copy so callers can append without mutating the class attribute
        return self.requirements[:]
190 190
    def __init__(self, baseui, path=None, create=False):
        """Open (or, if *create* is true, initialize) the repository at
        *path*, using *baseui* for configuration.

        Raises error.RepoError if the repository is missing (create=False)
        or already exists (create=True).
        """
        # working directory vfs and paths
        self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
        self.wopener = self.wvfs
        self.root = self.wvfs.base
        self.path = self.wvfs.join(".hg")
        self.origroot = path
        self.auditor = pathutil.pathauditor(self.root, self._checknested)
        # .hg/ vfs
        self.vfs = scmutil.vfs(self.path)
        self.opener = self.vfs
        self.baseui = baseui
        self.ui = baseui.copy()
        self.ui.copy = baseui.copy # prevent copying repo configuration
        # A list of callback to shape the phase if no data were found.
        # Callback are in the form: func(repo, roots) --> processed root.
        # This list it to be filled by extension during repo setup
        self._phasedefaults = []
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # missing .hg/hgrc is fine
            pass

        if self.featuresetupfuncs:
            self.supported = set(self._basesupported) # use private copy
            extmods = set(m.__name__ for n, m
                          in extensions.extensions(self.ui))
            for setupfunc in self.featuresetupfuncs:
                # only run hooks registered by extensions that are enabled
                if setupfunc.__module__ in extmods:
                    setupfunc(self.ui, self.supported)
        else:
            self.supported = self._basesupported

        if not self.vfs.isdir():
            if create:
                if not self.wvfs.exists():
                    self.wvfs.makedirs()
                self.vfs.makedir(notindexed=True)
                requirements = self._baserequirements(create)
                if self.ui.configbool('format', 'usestore', True):
                    self.vfs.mkdir("store")
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                # create an invalid changelog
                self.vfs.append(
                    "00changelog.i",
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.vfs, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # no requires file: pre-requirements-era repository
                requirements = set()

        # resolve shared-repository indirection (.hg/sharedpath)
        self.sharedpath = self.path
        try:
            vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
                              realpath=True)
            s = vfs.base
            if not vfs.exists():
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
        self.spath = self.store.path
        self.svfs = self.store.vfs
        self.sopener = self.svfs
        self.sjoin = self.store.join
        self.vfs.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcaches = {}
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

        # hold sets of revision to be filtered
        # should be cleared when something might have changed the filter value:
        # - new changesets,
        # - phase change,
        # - new obsolescence marker,
        # - working directory parent change,
        # - bookmark changes
        self.filteredrevcache = {}

        # generic mapping between names and nodes
        self.names = namespaces.namespaces()
303 303
    def close(self):
        # nothing to release for a plain local repository
        pass

    def _restrictcapabilities(self, caps):
        """Return the subset/extension of *caps* this repo actually serves."""
        # bundle2 is not ready for prime time, drop it unless explicitly
        # required by the tests (or some brave tester)
        if self.ui.configbool('experimental', 'bundle2-exp', False):
            caps = set(caps)
            capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
            caps.add('bundle2-exp=' + urllib.quote(capsblob))
        return caps
315 315
316 316 def _applyrequirements(self, requirements):
317 317 self.requirements = requirements
318 318 self.svfs.options = dict((r, 1) for r in requirements
319 319 if r in self.openerreqs)
320 320 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
321 321 if chunkcachesize is not None:
322 322 self.svfs.options['chunkcachesize'] = chunkcachesize
323 323 maxchainlen = self.ui.configint('format', 'maxchainlen')
324 324 if maxchainlen is not None:
325 325 self.svfs.options['maxchainlen'] = maxchainlen
326 326 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
327 327 if manifestcachesize is not None:
328 328 self.svfs.options['manifestcachesize'] = manifestcachesize
329 329
330 330 def _writerequirements(self):
331 331 reqfile = self.vfs("requires", "w")
332 332 for r in sorted(self.requirements):
333 333 reqfile.write("%s\n" % r)
334 334 reqfile.close()
335 335
    def _checknested(self, path):
        """Determine if path is a legal nested repository.

        *path* is an absolute filesystem path; returns True only when it
        falls under a subrepository recorded in the working directory's
        substate (recursing into nested subrepos).
        """
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            # try successively shorter directory prefixes of subpath
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    # path is itself a subrepo root
                    return True
                else:
                    # path is inside a subrepo; let it decide recursively
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                parts.pop()
        return False
373 373
    def peer(self):
        return localpeer(self) # not cached to avoid reference cycle

    def unfiltered(self):
        """Return unfiltered version of the repository

        Intended to be overwritten by filtered repo."""
        return self

    def filtered(self, name):
        """Return a filtered version of a repository"""
        # build a new class with the mixin and the current class
        # (possibly subclass of the repo)
        class proxycls(repoview.repoview, self.unfiltered().__class__):
            pass
        return proxycls(self, name)
390 390
    # bookmark store, invalidated when .hg/bookmarks changes on disk
    @repofilecache('bookmarks')
    def _bookmarks(self):
        return bookmarks.bmstore(self)

    # name of the active bookmark (from .hg/bookmarks.current), if any
    @repofilecache('bookmarks.current')
    def _bookmarkcurrent(self):
        return bookmarks.readcurrent(self)
398 398
399 399 def bookmarkheads(self, bookmark):
400 400 name = bookmark.split('@', 1)[0]
401 401 heads = []
402 402 for mark, n in self._bookmarks.iteritems():
403 403 if mark.split('@', 1)[0] == name:
404 404 heads.append(n)
405 405 return heads
406 406
    # phase data, invalidated when .hg/store/phaseroots changes
    @storecache('phaseroots')
    def _phasecache(self):
        return phases.phasecache(self, self._phasedefaults)

    @storecache('obsstore')
    def obsstore(self):
        # read default format for new obsstore.
        defaultformat = self.ui.configint('format', 'obsstore-version', None)
        # rely on obsstore class default when possible.
        kwargs = {}
        if defaultformat is not None:
            kwargs['defaultformat'] = defaultformat
        readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
        store = obsolete.obsstore(self.svfs, readonly=readonly,
                                  **kwargs)
        if store and readonly:
            # message is rare enough to not be translated
            msg = 'obsolete feature not enabled but %i markers found!\n'
            self.ui.warn(msg % len(list(store)))
        return store

    @storecache('00changelog.i')
    def changelog(self):
        c = changelog.changelog(self.svfs)
        if 'HG_PENDING' in os.environ:
            # a transaction is in flight in this process tree: also read
            # the pending (not yet committed) changelog data
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c

    @storecache('00manifest.i')
    def manifest(self):
        return manifest.manifest(self.svfs)

    @repofilecache('dirstate')
    def dirstate(self):
        # warn only once about an unknown working directory parent
        warned = [0]
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.vfs, self.ui, self.root, validate)
456 456
    def __getitem__(self, changeid):
        """Return the changectx (or workingctx for None) for *changeid*.

        Slices yield a list of changectxs, skipping filtered revisions.
        """
        if changeid is None:
            return context.workingctx(self)
        if isinstance(changeid, slice):
            return [context.changectx(self, i)
                    for i in xrange(*changeid.indices(len(self)))
                    if i not in self.changelog.filteredrevs]
        return context.changectx(self, changeid)
465 465
    def __contains__(self, changeid):
        # membership means "lookup succeeds", for any changeid form
        try:
            return bool(self.lookup(changeid))
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        # a repository object is always truthy, even when empty
        return True

    def __len__(self):
        return len(self.changelog)

    def __iter__(self):
        # iterate revision numbers in changelog order
        return iter(self.changelog)
480 480
    def revs(self, expr, *args):
        '''Return a list of revisions matching the given revset'''
        expr = revset.formatspec(expr, *args)
        m = revset.match(None, expr)
        # the match function defaults to querying the full repo
        return m(self)
486 486
    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]

    def url(self):
        # local repositories are addressed with the file: scheme
        return 'file:' + self.root
497 497
    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.
        """
        return hook.hook(self.ui, self, name, throw, **args)
506 506
    def _tag(self, names, node, message, local, user, date, extra={},
             editor=False):
        """Low-level tagging: write *names* -> *node* entries.

        Local tags go to .hg/localtags; global tags are appended to
        .hgtags and committed. Returns the tagging commit's node for
        global tags, None for local ones. Fires 'pretag' and 'tag' hooks.
        """
        if isinstance(names, str):
            names = (names,)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            # append tag lines, recording the previous value of a
            # re-assigned tag so its history is preserved
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if (self._tagscache.tagtypes and
                    name in self._tagscache.tagtypes):
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.vfs('localtags', 'r+')
            except IOError:
                fp = self.vfs('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        # commit only the .hgtags change
        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m,
                              editor=editor)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
575 575
    def tag(self, names, node, message, local, user, date, editor=False):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            # refuse to commit on top of uncommitted .hgtags edits
            m = matchmod.exact(self.root, '', ['.hgtags'])
            if util.any(self.status(match=m, unknown=True, ignored=True)):
                raise util.Abort(_('working copy of .hgtags is changed'),
                                 hint=_('please commit .hgtags manually'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date, editor=editor)
605 605
    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # lazily filled by nodetags() and tagslist()
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
628 628
    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            # the cached tags may point to filtered changesets; recompute
            # from scratch for this view instead of trusting the cache
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        for k, v in tags.iteritems():
            try:
                # ignore tags to unknown nodes
                self.changelog.rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t
644 644
    def _findtags(self):
        '''Do the hard work of finding tags.  Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use?  Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type?  Should there
        # be one tagtype for all such "virtual" tags?  Or is the status
        # quo fine?

        alltags = {}                    # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts.  Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                # nullid means the tag was deleted
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
677 677
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        if not self._tagscache.tagslist:
            l = []
            for t, n in self.tags().iteritems():
                # decorate with revision number for sorting
                l.append((self.changelog.rev(n), t, n))
            self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

        return self._tagscache.tagslist

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            # build the reverse (node -> tag names) mapping lazily
            nodetagscache = {}
            for t, n in self._tagscache.tags.iteritems():
                nodetagscache.setdefault(n, []).append(t)
            for tags in nodetagscache.itervalues():
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

    def nodebookmarks(self, node):
        # return the sorted list of bookmark names pointing at node
        marks = []
        for bookmark, n in self._bookmarks.iteritems():
            if n == node:
                marks.append(bookmark)
        return sorted(marks)
716 716
    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        branchmap.updatecache(self)
        # one cache entry per filter level
        return self._branchcaches[self.filtername]

    def branchtip(self, branch, ignoremissing=False):
        '''return the tip node for a given branch

        If ignoremissing is True, then this method will not raise an error.
        This is helpful for callers that only expect None for a missing branch
        (e.g. namespace).

        '''
        try:
            return self.branchmap().branchtip(branch)
        except KeyError:
            if not ignoremissing:
                raise error.RepoLookupError(_("unknown branch '%s'") % branch)
            else:
                # caller asked for None instead of an exception
                pass
738 738
739 739 def lookup(self, key):
740 740 return self[key].node()
741 741
742 742 def lookupbranch(self, key, remote=None):
743 743 repo = remote or self
744 744 if key in repo.branchmap():
745 745 return key
746 746
747 747 repo = (remote and remote.local()) and remote or self
748 748 return repo[key].branch()
749 749
750 750 def known(self, nodes):
751 751 nm = self.changelog.nodemap
752 752 pc = self._phasecache
753 753 result = []
754 754 for n in nodes:
755 755 r = nm.get(n)
756 756 resp = not (r is None or pc.phase(self, r) >= phases.secret)
757 757 result.append(resp)
758 758 return result
759 759
760 760 def local(self):
761 761 return self
762 762
763 763 def cancopy(self):
764 764 # so statichttprepo's override of local() works
765 765 if not self.local():
766 766 return False
767 767 if not self.ui.configbool('phases', 'publish', True):
768 768 return True
769 769 # if publishing we can't copy if there is filtered content
770 770 return not self.filtered('visible').changelog.filteredrevs
771 771
772 772 def shared(self):
773 773 '''the type of shared repository (None if not shared)'''
774 774 if self.sharedpath != self.path:
775 775 return 'store'
776 776 return None
777 777
778 778 def join(self, f, *insidef):
779 779 return self.vfs.join(os.path.join(f, *insidef))
780 780
781 781 def wjoin(self, f, *insidef):
782 782 return self.vfs.reljoin(self.root, f, *insidef)
783 783
784 784 def file(self, f):
785 785 if f[0] == '/':
786 786 f = f[1:]
787 787 return filelog.filelog(self.svfs, f)
788 788
789 789 def changectx(self, changeid):
790 790 return self[changeid]
791 791
792 792 def parents(self, changeid=None):
793 793 '''get list of changectxs for parents of changeid'''
794 794 return self[changeid].parents()
795 795
    def setparents(self, p1, p2=nullid):
        """Set the dirstate parents to p1/p2, fixing up copy records.

        The dirstate cannot maintain copy records across a parent change
        on its own (that requires the parents' manifests), so relevant
        entries are re-recorded here; when dropping back to a single
        parent, copies whose source and destination are both absent from
        p1 are discarded.
        """
        self.dirstate.beginparentchange()
        copies = self.dirstate.setparents(p1, p2)
        pctx = self[p1]
        if copies:
            # Adjust copy records, the dirstate cannot do it, it
            # requires access to parents manifests. Preserve them
            # only for entries added to first parent.
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            # leaving a merge: drop copies not anchored in the remaining
            # parent (sorted for deterministic processing order)
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)
        self.dirstate.endparentchange()
812 812
813 813 def filectx(self, path, changeid=None, fileid=None):
814 814 """changeid can be a changeset revision, node, or tag.
815 815 fileid can be a file revision or node."""
816 816 return context.filectx(self, path, changeid, fileid)
817 817
818 818 def getcwd(self):
819 819 return self.dirstate.getcwd()
820 820
821 821 def pathto(self, f, cwd=None):
822 822 return self.dirstate.pathto(f, cwd)
823 823
824 824 def wfile(self, f, mode='r'):
825 825 return self.wvfs(f, mode)
826 826
827 827 def _link(self, f):
828 828 return self.wvfs.islink(f)
829 829
    def _loadfilter(self, filter):
        """Parse and cache the *filter* config section ('encode'/'decode').

        Each configured pattern becomes a (matcher, function, params)
        triple in self.filterpats[filter]; commands starting with a
        registered data-filter name dispatch to that Python function,
        anything else runs as a shell pipe via util.filter.
        """
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    # '!' disables filtering for this pattern
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # no registered data filter matched: fall back to a
                    # shell command filter
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]
853 853
854 854 def _filter(self, filterpats, filename, data):
855 855 for mf, fn, cmd in filterpats:
856 856 if mf(filename):
857 857 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
858 858 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
859 859 break
860 860
861 861 return data
862 862
863 863 @unfilteredpropertycache
864 864 def _encodefilterpats(self):
865 865 return self._loadfilter('encode')
866 866
867 867 @unfilteredpropertycache
868 868 def _decodefilterpats(self):
869 869 return self._loadfilter('decode')
870 870
871 871 def adddatafilter(self, name, filter):
872 872 self._datafilters[name] = filter
873 873
874 874 def wread(self, filename):
875 875 if self._link(filename):
876 876 data = self.wvfs.readlink(filename)
877 877 else:
878 878 data = self.wvfs.read(filename)
879 879 return self._filter(self._encodefilterpats, filename, data)
880 880
881 881 def wwrite(self, filename, data, flags):
882 882 data = self._filter(self._decodefilterpats, filename, data)
883 883 if 'l' in flags:
884 884 self.wvfs.symlink(data, filename)
885 885 else:
886 886 self.wvfs.write(filename, data)
887 887 if 'x' in flags:
888 888 self.wvfs.setflags(filename, False, True)
889 889
890 890 def wwritedata(self, filename, data):
891 891 return self._filter(self._decodefilterpats, filename, data)
892 892
893 893 def currenttransaction(self):
894 894 """return the current transaction or None if non exists"""
895 895 tr = self._transref and self._transref() or None
896 896 if tr and tr.running():
897 897 return tr
898 898 return None
899 899
    def transaction(self, desc, report=None):
        """Open (or nest into) a store transaction named *desc*.

        *report* overrides the function used to print journal/rollback
        messages; it defaults to ui.warn.  Raises RepoError when an
        abandoned journal from a crashed transaction is found.
        """
        tr = self.currenttransaction()
        if tr is not None:
            return tr.nest()

        # abort here if the journal already exists
        if self.svfs.exists("journal"):
            raise error.RepoError(
                _("abandoned transaction found"),
                hint=_("run 'hg recover' to clean up transaction"))

        # snapshot non-store state, then arrange for journal -> undo renames
        self._writejournal(desc)
        renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
        rp = report and report or self.ui.warn
        vfsmap = {'plain': self.vfs} # root of .hg/
        tr = transaction.transaction(rp, self.svfs, vfsmap,
                                     "journal",
                                     "undo",
                                     aftertrans(renames),
                                     self.store.createmode)
        # note: writing the fncache only during finalize mean that the file is
        # outdated when running hooks. As fncache is used for streaming clone,
        # this is not expected to break anything that happen during the hooks.
        tr.addfinalize('flush-fncache', self.store.write)
        self._transref = weakref.ref(tr)
        return tr
926 926
927 927 def _journalfiles(self):
928 928 return ((self.svfs, 'journal'),
929 929 (self.vfs, 'journal.dirstate'),
930 930 (self.vfs, 'journal.branch'),
931 931 (self.vfs, 'journal.desc'),
932 932 (self.vfs, 'journal.bookmarks'),
933 933 (self.svfs, 'journal.phaseroots'))
934 934
935 935 def undofiles(self):
936 936 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
937 937
938 938 def _writejournal(self, desc):
939 939 self.vfs.write("journal.dirstate",
940 940 self.vfs.tryread("dirstate"))
941 941 self.vfs.write("journal.branch",
942 942 encoding.fromlocal(self.dirstate.branch()))
943 943 self.vfs.write("journal.desc",
944 944 "%d\n%s\n" % (len(self), desc))
945 945 self.vfs.write("journal.bookmarks",
946 946 self.vfs.tryread("bookmarks"))
947 947 self.svfs.write("journal.phaseroots",
948 948 self.svfs.tryread("phaseroots"))
949 949
    def recover(self):
        """Roll back an interrupted transaction's journal, if present.

        Returns True when a journal was found and rolled back, False
        otherwise.  Caches are reloaded via invalidate() afterwards.
        """
        lock = self.lock()
        try:
            if self.svfs.exists("journal"):
                self.ui.status(_("rolling back interrupted transaction\n"))
                vfsmap = {'': self.svfs,
                          'plain': self.vfs,}
                transaction.rollback(self.svfs, vfsmap, "journal",
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
966 966
967 967 def rollback(self, dryrun=False, force=False):
968 968 wlock = lock = None
969 969 try:
970 970 wlock = self.wlock()
971 971 lock = self.lock()
972 972 if self.svfs.exists("undo"):
973 973 return self._rollback(dryrun, force)
974 974 else:
975 975 self.ui.warn(_("no rollback information available\n"))
976 976 return 1
977 977 finally:
978 978 release(lock, wlock)
979 979
    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        """Restore the repository to the state recorded in the undo files.

        Refuses (unless *force*) to roll back a commit while not checked
        out on it, since that could lose data.  Returns 0 on success and
        on dry runs.
        """
        ui = self.ui
        try:
            args = self.vfs.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        self.destroying()
        vfsmap = {'plain': self.vfs, '': self.svfs}
        transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
        if self.vfs.exists('undo.bookmarks'):
            self.vfs.rename('undo.bookmarks', 'bookmarks')
        if self.svfs.exists('undo.phaseroots'):
            self.svfs.rename('undo.phaseroots', 'phaseroots')
        self.invalidate()

        # only restore dirstate/branch when a working-dir parent was
        # stripped by the rollback
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            self.vfs.rename('undo.dirstate', 'dirstate')
            try:
                branch = self.vfs.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

        self.dirstate.invalidate()
        parents = tuple([p.rev() for p in self.parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
1046 1046
1047 1047 def invalidatecaches(self):
1048 1048
1049 1049 if '_tagscache' in vars(self):
1050 1050 # can't use delattr on proxy
1051 1051 del self.__dict__['_tagscache']
1052 1052
1053 1053 self.unfiltered()._branchcaches.clear()
1054 1054 self.invalidatevolatilesets()
1055 1055
1056 1056 def invalidatevolatilesets(self):
1057 1057 self.filteredrevcache.clear()
1058 1058 obsolete.clearobscaches(self)
1059 1059
    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different to dirstate.invalidate() that it doesn't always
        rereads the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        if hasunfilteredcache(self, 'dirstate'):
            # drop the cached-property entries first so nothing holds on
            # to stale file stat data
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            # delete on the unfiltered repo: the cache lives there
            delattr(self.unfiltered(), 'dirstate')
1076 1076
    def invalidate(self):
        """Drop cached store data so the next access rereads from disk.

        The dirstate is deliberately excluded; use invalidatedirstate()
        for that.
        """
        unfiltered = self.unfiltered() # all file caches are stored unfiltered
        for k in self._filecache:
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue

            try:
                delattr(unfiltered, k)
            except AttributeError:
                pass
        self.invalidatecaches()
        self.store.invalidatecaches()
1090 1090
1091 1091 def invalidateall(self):
1092 1092 '''Fully invalidates both store and non-store parts, causing the
1093 1093 subsequent operation to reread any outside changes.'''
1094 1094 # extension should hook this to invalidate its caches
1095 1095 self.invalidate()
1096 1096 self.invalidatedirstate()
1097 1097
    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
        """Acquire *lockname* under *vfs* and return the lock object.

        When the lock is held elsewhere: raise LockHeld immediately if
        *wait* is false, otherwise retry with a timeout taken from the
        ui.timeout config (600 seconds by default).  *acquirefn*, if
        given, runs once the lock is obtained.
        """
        try:
            # first attempt: do not wait at all
            l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lockmod.lock(vfs, lockname,
                             int(self.ui.config("ui", "timeout", "600")),
                             releasefn, desc=desc)
            self.ui.warn(_("got lock after %s seconds\n") % l.delay)
        if acquirefn:
            acquirefn()
        return l
1114 1114
1115 1115 def _afterlock(self, callback):
1116 1116 """add a callback to the current repository lock.
1117 1117
1118 1118 The callback will be executed on lock release."""
1119 1119 l = self._lockref and self._lockref()
1120 1120 if l:
1121 1121 l.postrelease.append(callback)
1122 1122 else:
1123 1123 callback()
1124 1124
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            # re-enter the already-held lock
            l.lock()
            return l

        def unlock():
            # on release, refresh stat data of cached store files so a
            # later access notices modifications made under the lock
            for k, ce in self._filecache.items():
                if k == 'dirstate' or k not in self.__dict__:
                    continue
                ce.refresh()

        l = self._lock(self.svfs, "lock", wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
1144 1144
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            # re-enter the already-held lock
            l.lock()
            return l

        def unlock():
            # write the dirstate on release, unless a parent change is
            # still pending, in which case discard the in-memory state
            if self.dirstate.pendingparentchange():
                self.dirstate.invalidate()
            else:
                self.dirstate.write()

            self._filecache['dirstate'].refresh()

        l = self._lock(self.vfs, "wlock", wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
1167 1167
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the filelog node to record in the manifest: a freshly
        added revision when the file (or its copy metadata) changed,
        otherwise the reused first-parent node.  *fname* is appended to
        *changelist* whenever a change is recorded.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # Here, we used to search backwards through history to try to find
            # where the file copy came from if the source of a copy was not in
            # the parent diretory. However, this doesn't actually make sense to
            # do (what does a copy from something not in your working copy even
            # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
            # the user that copy information was dropped, so if they didn't
            # expect this outcome it can be fixed, but this is the correct
            # behavior in this circumstance.

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent1 == nullid:
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
            if fparent1 in fparentancestors:
                fparent1, fparent2 = fparent2, nullid
            elif fparent2 in fparentancestors:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
        # are just the flags changed during merge?
        elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1248 1248
    @unfilteredmethod
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the node of the new changeset, or None when nothing
        changed and the commit is skipped.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            status = self.status(match=match, clean=force)
            if force:
                status.modified.extend(status.clean) # mq may commit clean files

            # check subrepos
            subs = []
            commitsubs = set()
            newstate = wctx.substate.copy()
            # only manage subrepos and .hgsubstate if .hgsub is present
            if '.hgsub' in wctx:
                # we'll decide whether to track this ourselves, thanks
                for c in status.modified, status.added, status.removed:
                    if '.hgsubstate' in c:
                        c.remove('.hgsubstate')

                # compare current state to last committed state
                # build new substate based on last committed state
                oldstate = wctx.p1().substate
                for s in sorted(newstate.keys()):
                    if not match(s):
                        # ignore working copy, use old state if present
                        if s in oldstate:
                            newstate[s] = oldstate[s]
                            continue
                        if not force:
                            raise util.Abort(
                                _("commit with new subrepo %s excluded") % s)
                    if wctx.sub(s).dirty(True):
                        if not self.ui.configbool('ui', 'commitsubrepos'):
                            raise util.Abort(
                                _("uncommitted changes in subrepo %s") % s,
                                hint=_("use --subrepos for recursive commit"))
                        subs.append(s)
                        commitsubs.add(s)
                    else:
                        bs = wctx.sub(s).basestate()
                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
                        if oldstate.get(s, (None, None, None))[1] != bs:
                            subs.append(s)

                # check for removed subrepos
                for p in wctx.parents():
                    r = [s for s in p.substate if s not in newstate]
                    subs += [s for s in r if match(s)]
                if subs:
                    if (not match('.hgsub') and
                        '.hgsub' in (wctx.modified() + wctx.added())):
                        raise util.Abort(
                            _("can't commit subrepos without .hgsub"))
                    status.modified.insert(0, '.hgsubstate')

            elif '.hgsub' in status.removed:
                # clean up .hgsubstate when .hgsub is removed
                if ('.hgsubstate' in wctx and
                    '.hgsubstate' not in (status.modified + status.added +
                                          status.removed)):
                    status.removed.insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(status.modified + status.added + status.removed)

                for f in match.files():
                    f = self.dirstate.normalize(f)
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in status.deleted:
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            cctx = context.workingcommitctx(self, status,
                                            text, user, date, extra)

            # nothing to commit: bail out early (unless forced, closing a
            # branch head, merging, or changing branch)
            if (not force and not extra.get("close") and not merge
                and not cctx.files()
                and wctx.branch() == wctx.p1().branch()):
                return None

            if merge and cctx.deleted():
                raise util.Abort(_("cannot commit merge with missing files"))

            ms = mergemod.mergestate(self)
            for f in status.modified:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_('unresolved merge conflicts '
                                       '(see "hg help resolve")'))

            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfn = self.savecommitmessage(cctx._text)

            # commit subs and write new state
            if subs:
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepo.writestate(self, newstate)

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1,
                          parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except: # re-raises
                if edited:
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, [p1, p2], ret)
            cctx.markcommitted(ret)
            ms.reset()
        finally:
            wlock.release()

        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
            # hack for command that use a temporary commit (eg: histedit)
            # temporary commit got stripped before hook release
            if node in self:
                self.hook("commit", node=node, parent1=parent1,
                          parent2=parent2)
        self._afterlock(commithook)
        return ret
1421 1421
    @unfilteredmethod
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        Returns the node of the new changeset.  When *error* is true,
        IOErrors while committing files are always fatal (instead of
        ignoring ENOENT).
        """

        tr = None
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest()
                m2 = p2.manifest()
                m = m1.copy()

                # check in files
                added = []
                changed = []
                removed = list(ctx.removed())
                linkrev = len(self)
                self.ui.note(_("committing files:\n"))
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        if fctx is None:
                            removed.append(f)
                        else:
                            added.append(f)
                            m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                                    trp, changed)
                            m.setflag(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                            raise

                # update manifest
                self.ui.note(_("committing manifest\n"))
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m]
                for f in drop:
                    del m[f]
                mn = self.manifest.add(m, trp, linkrev,
                                       p1.manifestnode(), p2.manifestnode(),
                                       added, drop)
                files = changed + removed
            else:
                # no file changes: reuse the first parent's manifest
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.ui.note(_("committing changelog\n"))
            self.changelog.delayupdate(tr)
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            p = lambda: tr.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            # set the new commit is proper phase
            targetphase = subrepo.newcommitphase(self.ui, ctx)
            if targetphase:
                # retract boundary do not alter parent changeset.
                # if a parent have higher the resulting phase will
                # be compliant anyway
                #
                # if minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, tr, targetphase, [n])
            tr.close()
            branchmap.updatecache(self.filtered('served'))
            return n
        finally:
            if tr:
                tr.release()
            lock.release()
1508 1508
    @unfilteredmethod
    def destroying(self):
        '''Inform the repository that nodes are about to be destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done before destroying history.

        This is mostly useful for saving state that is in memory and waiting
        to be flushed when the current lock is released. Because a call to
        destroyed is imminent, the repo will be invalidated causing those
        changes to stay in memory (waiting for the next unlock), or vanish
        completely.
        '''
        # When using the same lock to commit and strip, the phasecache is left
        # dirty after committing. Then when we strip, the repo is invalidated,
        # causing those changes to disappear.
        if '_phasecache' in vars(self):
            # flush pending phase data to disk before it would be lost
            self._phasecache.write()
1526 1526
    @unfilteredmethod
    def destroyed(self):
        '''Inform the repository that nodes have been destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done after destroying history.
        '''
        # When one tries to:
        # 1) destroy nodes thus calling this method (e.g. strip)
        # 2) use phasecache somewhere (e.g. commit)
        #
        # then 2) will fail because the phasecache contains nodes that were
        # removed. We can either remove phasecache from the filecache,
        # causing it to reload next time it is accessed, or simply filter
        # the removed nodes now and write the updated cache.
        self._phasecache.filterunknown(self)
        self._phasecache.write()

        # update the 'served' branch cache to help read only server process
        # Thanks to branchcache collaboration this is done from the nearest
        # filtered subset and it is expected to be fast.
        branchmap.updatecache(self.filtered('served'))

        # Ensure the persistent tag cache is updated. Doing it now
        # means that the tag cache only has to worry about destroyed
        # heads immediately after a strip/rollback. That in turn
        # guarantees that "cachetip == currenttip" (comparing both rev
        # and node) always means no nodes have been added or destroyed.

        # XXX this is suboptimal when qrefresh'ing: we strip the current
        # head, refresh the tag cache, then immediately add a new head.
        # But I think doing it this way is necessary for the "instant
        # tag cache retrieval" case to work.
        self.invalidate()
1560 1560
1561 1561 def walk(self, match, node=None):
1562 1562 '''
1563 1563 walk recursively through the directory tree or a given
1564 1564 changeset, finding all files matched by the match
1565 1565 function
1566 1566 '''
1567 1567 return self[node].walk(match)
1568 1568
1569 1569 def status(self, node1='.', node2=None, match=None,
1570 1570 ignored=False, clean=False, unknown=False,
1571 1571 listsubrepos=False):
1572 1572 '''a convenience method that calls node1.status(node2)'''
1573 1573 return self[node1].status(node2, match, ignored, clean, unknown,
1574 1574 listsubrepos)
1575 1575
1576 1576 def heads(self, start=None):
1577 1577 heads = self.changelog.heads(start)
1578 1578 # sort the output in rev descending order
1579 1579 return sorted(heads, key=self.changelog.rev, reverse=True)
1580 1580
    def branchheads(self, branch=None, start=None, closed=False):
        '''return a (possibly filtered) list of heads for the given branch

        Heads are returned in topological order, from newest to oldest.
        If branch is None, use the dirstate branch.
        If start is not None, return only heads reachable from start.
        If closed is True, return heads that are marked as closed as well.
        '''
        if branch is None:
            branch = self[None].branch()
        branches = self.branchmap()
        if branch not in branches:
            return []
        # the cache returns heads ordered lowest to highest
        bheads = list(reversed(branches.branchheads(branch, closed=closed)))
        if start is not None:
            # filter out the heads that cannot be reached from startrev
            fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
            bheads = [h for h in bheads if h in fbheads]
        return bheads

    def branches(self, nodes):
        # Walk from each requested node (default: tip) backwards along
        # first parents until a merge or the root is hit, producing one
        # (tip, stop, p1, p2) tuple per linear run.
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while True:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    # stop at a merge (two parents) or at the root
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        # For each (top, bottom) pair, collect nodes on the first-parent
        # chain from top towards bottom at exponentially growing
        # distances (1, 2, 4, ...).
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom and n != nullid:
                p = self.changelog.parents(n)[0]
                if i == f:
                    # sample this node and double the sampling interval
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def checkpush(self, pushop):
        """Extensions can override this function if additional checks have
        to be performed before pushing, or call it if they override push
        command.
        """
        pass

    @unfilteredpropertycache
    def prepushoutgoinghooks(self):
        """Return util.hooks consists of "(repo, remote, outgoing)"
        functions, which are called before pushing changesets.
        """
        return util.hooks()
1648 1648
    def stream_in(self, remote, requirements):
        # Receive a raw copy of the remote store over the wire
        # ("streaming clone") and adopt the remote's format requirements.
        lock = self.lock()
        try:
            # Save remote branchmap. We will use it later
            # to speed up branchcache creation
            rbranchmap = None
            if remote.capable("branchmap"):
                rbranchmap = remote.branchmap()

            fp = remote.stream_out()
            l = fp.readline()
            try:
                resp = int(l)
            except ValueError:
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            if resp == 1:
                raise util.Abort(_('operation forbidden by server'))
            elif resp == 2:
                raise util.Abort(_('locking the remote repository failed'))
            elif resp != 0:
                raise util.Abort(_('the server sent an unknown error code'))
            self.ui.status(_('streaming all changes\n'))
            # second line of the stream: "<filecount> <bytecount>"
            l = fp.readline()
            try:
                total_files, total_bytes = map(int, l.split(' ', 1))
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            self.ui.status(_('%d files to transfer, %s of data\n') %
                           (total_files, util.bytecount(total_bytes)))
            handled_bytes = 0
            self.ui.progress(_('clone'), 0, total=total_bytes)
            start = time.time()

            tr = self.transaction(_('clone'))
            try:
                for i in xrange(total_files):
                    # XXX doesn't support '\n' or '\r' in filenames
                    # per-file header: "<name>\0<size>"
                    l = fp.readline()
                    try:
                        name, size = l.split('\0', 1)
                        size = int(size)
                    except (ValueError, TypeError):
                        raise error.ResponseError(
                            _('unexpected response from remote server:'), l)
                    if self.ui.debugflag:
                        self.ui.debug('adding %s (%s)\n' %
                                      (name, util.bytecount(size)))
                    # for backwards compat, name was partially encoded
                    ofp = self.svfs(store.decodedir(name), 'w')
                    for chunk in util.filechunkiter(fp, limit=size):
                        handled_bytes += len(chunk)
                        self.ui.progress(_('clone'), handled_bytes,
                                         total=total_bytes)
                        ofp.write(chunk)
                    ofp.close()
                tr.close()
            finally:
                tr.release()

            # Writing straight to files circumvented the inmemory caches
            self.invalidate()

            elapsed = time.time() - start
            if elapsed <= 0:
                # avoid division by zero in the rate computation below
                elapsed = 0.001
            self.ui.progress(_('clone'), None)
            self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                           (util.bytecount(total_bytes), elapsed,
                            util.bytecount(total_bytes / elapsed)))

            # new requirements = old non-format requirements +
            #                    new format-related
            # requirements from the streamed-in repository
            requirements.update(set(self.requirements) - self.supportedformats)
            self._applyrequirements(requirements)
            self._writerequirements()

            if rbranchmap:
                # seed the local branch cache from the remote branchmap
                rbheads = []
                closed = []
                for bheads in rbranchmap.itervalues():
                    rbheads.extend(bheads)
                    for h in bheads:
                        r = self.changelog.rev(h)
                        b, c = self.changelog.branchinfo(r)
                        if c:
                            closed.append(h)

                if rbheads:
                    rtiprev = max((int(self.changelog.rev(node))
                                   for node in rbheads))
                    cache = branchmap.branchcache(rbranchmap,
                                                  self[rtiprev].node(),
                                                  rtiprev,
                                                  closednodes=closed)
                    # Try to stick it as low as possible
                    # filter above served are unlikely to be fetch from a clone
                    for candidate in ('base', 'immutable', 'served'):
                        rview = self.filtered(candidate)
                        if cache.validfor(rview):
                            self._branchcaches[candidate] = cache
                            cache.write(rview)
                            break
            self.invalidate()
            return len(self.heads()) + 1
        finally:
            lock.release()
1758 1758
1759 1759 def clone(self, remote, heads=[], stream=None):
1760 1760 '''clone remote repository.
1761 1761
1762 1762 keyword arguments:
1763 1763 heads: list of revs to clone (forces use of pull)
1764 1764 stream: use streaming clone if possible'''
1765 1765
1766 1766 # now, all clients that can request uncompressed clones can
1767 1767 # read repo formats supported by all servers that can serve
1768 1768 # them.
1769 1769
1770 1770 # if revlog format changes, client will have to check version
1771 1771 # and format flags on "stream" capability, and use
1772 1772 # uncompressed only if compatible.
1773 1773
1774 1774 if stream is None:
1775 1775 # if the server explicitly prefers to stream (for fast LANs)
1776 1776 stream = remote.capable('stream-preferred')
1777 1777
1778 1778 if stream and not heads:
1779 1779 # 'stream' means remote revlog format is revlogv1 only
1780 1780 if remote.capable('stream'):
1781 1781 self.stream_in(remote, set(('revlogv1',)))
1782 1782 else:
1783 1783 # otherwise, 'streamreqs' contains the remote revlog format
1784 1784 streamreqs = remote.capable('streamreqs')
1785 1785 if streamreqs:
1786 1786 streamreqs = set(streamreqs.split(','))
1787 1787 # if we support it, stream in and adjust our requirements
1788 1788 if not streamreqs - self.supportedformats:
1789 1789 self.stream_in(remote, streamreqs)
1790 1790
1791 1791 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1792 1792 try:
1793 1793 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1794 1794 ret = exchange.pull(self, remote, heads).cgresult
1795 1795 finally:
1796 1796 self.ui.restoreconfig(quiet)
1797 1797 return ret
1798 1798
    def pushkey(self, namespace, key, old, new):
        # Update one entry of a pushkey namespace (bookmarks, phases, ...),
        # wrapped in the 'prepushkey'/'pushkey' hook pair.  Returns the
        # push result, or False when the pre-hook aborted.
        try:
            self.hook('prepushkey', throw=True, namespace=namespace, key=key,
                      old=old, new=new)
        except error.HookAbort, exc:
            self.ui.write_err(_("pushkey-abort: %s\n") % exc)
            if exc.hint:
                self.ui.write_err(_("(%s)\n") % exc.hint)
            return False
        self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
        ret = pushkey.push(self, namespace, key, old, new)
        def runhook():
            self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
                      ret=ret)
        # the post-hook is deferred until the lock is released
        self._afterlock(runhook)
        return ret

    def listkeys(self, namespace):
        # Enumerate all entries of a pushkey namespace, with pre/post hooks.
        self.hook('prelistkeys', throw=True, namespace=namespace)
        self.ui.debug('listing keys for "%s"\n' % namespace)
        values = pushkey.list(self, namespace)
        self.hook('listkeys', namespace=namespace, values=values)
        return values
1822 1822
1823 1823 def debugwireargs(self, one, two, three=None, four=None, five=None):
1824 1824 '''used to test argument passing over the wire'''
1825 1825 return "%s %s %s %s %s" % (one, two, three, four, five)
1826 1826
    def savecommitmessage(self, text):
        # Persist the commit message to .hg/last-message.txt so it can be
        # recovered after an aborted commit; return the saved file's path
        # relative to the repository root.
        fp = self.vfs('last-message.txt', 'wb')
        try:
            fp.write(text)
        finally:
            fp.close()
        return self.pathto(fp.name[len(self.root) + 1:])
1834 1834
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callable that performs every (vfs, src, dest) rename in
    *files*, silently skipping entries whose source does not exist."""
    pending = [tuple(entry) for entry in files]
    def runrenames():
        for vfs, src, dest in pending:
            try:
                vfs.rename(src, dest)
            except OSError:
                # journal file does not yet exist
                pass
    return runrenames
1845 1845
def undoname(fn):
    """Map a journal file path to the corresponding undo file path.

    The basename must start with 'journal'; only that first occurrence
    is rewritten to 'undo'.
    """
    directory, basename = os.path.split(fn)
    assert basename.startswith('journal')
    newname = basename.replace('journal', 'undo', 1)
    return os.path.join(directory, newname)
1850 1850
def instance(ui, path, create):
    # Repository factory used by hg.repository(): open (or create) a
    # local repository at the filesystem path extracted from *path*.
    return localrepository(ui, util.urllocalpath(path), create)
1853 1853
def islocal(path):
    """A local repository is always local, regardless of *path*."""
    return True
@@ -1,3348 +1,3350 b''
1 1 # revset.py - revision set queries for mercurial
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import re
9 9 import parser, util, error, discovery, hbisect, phases
10 10 import node
11 11 import heapq
12 12 import match as matchmod
13 13 from i18n import _
14 14 import encoding
15 15 import obsolete as obsmod
16 16 import pathutil
17 17 import repoview
18 18
def _revancestors(repo, revs, followfirst):
    """Like revlog.ancestors(), but supports followfirst."""
    # cut == 1 limits the parent walk to first parents only
    cut = followfirst and 1 or None
    cl = repo.changelog

    def iterate():
        # Lazily emit ancestors in descending revision order, merging the
        # input revs with discovered parents via a max-heap of negated
        # revision numbers.
        revqueue, revsnode = None, None
        h = []

        revs.sort(reverse=True)
        revqueue = util.deque(revs)
        if revqueue:
            revsnode = revqueue.popleft()
            heapq.heappush(h, -revsnode)

        seen = set()
        while h:
            current = -heapq.heappop(h)
            if current not in seen:
                if revsnode and current == revsnode:
                    # the current input rev was reached; queue the next one
                    if revqueue:
                        revsnode = revqueue.popleft()
                        heapq.heappush(h, -revsnode)
                seen.add(current)
                yield current
                for parent in cl.parentrevs(current)[:cut]:
                    if parent != node.nullrev:
                        heapq.heappush(h, -parent)

    return generatorset(iterate(), iterasc=False)
49 49
def _revdescendants(repo, revs, followfirst):
    """Like revlog.descendants() but supports followfirst."""
    # cut == 1 limits the parent check to first parents only
    cut = followfirst and 1 or None

    def iterate():
        cl = repo.changelog
        first = min(revs)
        nullrev = node.nullrev
        if first == nullrev:
            # Are there nodes with a null first parent and a non-null
            # second one? Maybe. Do we care? Probably not.
            for i in cl:
                yield i
        else:
            # single ascending sweep: a revision is a descendant iff one
            # of its (considered) parents was already marked
            seen = set(revs)
            for i in cl.revs(first + 1):
                for x in cl.parentrevs(i)[:cut]:
                    if x != nullrev and x in seen:
                        seen.add(i)
                        yield i
                        break

    return generatorset(iterate(), iterasc=True)
73 73
def _revsbetween(repo, roots, heads):
    """Return all paths between roots and heads, inclusive of both endpoint
    sets."""
    if not roots:
        return baseset()
    parentrevs = repo.changelog.parentrevs
    visit = list(heads)
    reachable = set()
    seen = {}
    # revisions below the lowest root can never lie on a path
    minroot = min(roots)
    roots = set(roots)
    # open-code the post-order traversal due to the tiny size of
    # sys.getrecursionlimit()
    while visit:
        rev = visit.pop()
        if rev in roots:
            reachable.add(rev)
        parents = parentrevs(rev)
        seen[rev] = parents
        for parent in parents:
            if parent >= minroot and parent not in seen:
                visit.append(parent)
    if not reachable:
        return baseset()
    # sweep upward in revision order: a revision is on a path if one of
    # its parents is
    for rev in sorted(seen):
        for parent in seen[rev]:
            if parent in reachable:
                reachable.add(rev)
    return baseset(sorted(reachable))
103 103
# operator table consumed by the generic parser (parser.py):
# token -> (binding strength, prefix handling, infix handling
#           [, suffix handling])
elements = {
    "(": (21, ("group", 1, ")"), ("func", 1, ")")),
    "##": (20, None, ("_concat", 20)),
    "~": (18, None, ("ancestor", 18)),
    "^": (18, None, ("parent", 18), ("parentpost", 18)),
    "-": (5, ("negate", 19), ("minus", 5)),
    "::": (17, ("dagrangepre", 17), ("dagrange", 17),
           ("dagrangepost", 17)),
    "..": (17, ("dagrangepre", 17), ("dagrange", 17),
           ("dagrangepost", 17)),
    ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
    "not": (10, ("not", 10)),
    "!": (10, ("not", 10)),
    "and": (5, None, ("and", 5)),
    "&": (5, None, ("and", 5)),
    "%": (5, None, ("only", 5), ("onlypost", 5)),
    "or": (4, None, ("or", 4)),
    "|": (4, None, ("or", 4)),
    "+": (4, None, ("or", 4)),
    ",": (2, None, ("list", 2)),
    ")": (0, None, None),
    "symbol": (0, ("symbol",), None),
    "string": (0, ("string",), None),
    "end": (0, None, None),
}

# words that are operators, not symbol names
keywords = set(['and', 'or', 'not'])

# default set of valid characters for the initial letter of symbols
_syminitletters = set(c for c in [chr(i) for i in xrange(256)]
                      if c.isalnum() or c in '._@' or ord(c) > 127)

# default set of valid characters for non-initial letters of symbols
_symletters = set(c for c in [chr(i) for i in xrange(256)]
                  if c.isalnum() or c in '-._/@' or ord(c) > 127)
139 139
def tokenize(program, lookup=None, syminitletters=None, symletters=None):
    '''
    Parse a revset statement into a stream of tokens

    Tokens are ``(type, value, position)`` triples; the stream always
    ends with an ``('end', None, pos)`` token.

    ``syminitletters`` is the set of valid characters for the initial
    letter of symbols.

    By default, character ``c`` is recognized as valid for initial
    letter of symbols, if ``c.isalnum() or c in '._@' or ord(c) > 127``.

    ``symletters`` is the set of valid characters for non-initial
    letters of symbols.

    By default, character ``c`` is recognized as valid for non-initial
    letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``.

    Check that @ is a valid unquoted token character (issue3686):
    >>> list(tokenize("@::"))
    [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]

    '''
    if syminitletters is None:
        syminitletters = _syminitletters
    if symletters is None:
        symletters = _symletters

    pos, l = 0, len(program)
    while pos < l:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
            yield ('::', None, pos)
            pos += 1 # skip ahead
        elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
            yield ('..', None, pos)
            pos += 1 # skip ahead
        elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully
            yield ('##', None, pos)
            pos += 1 # skip ahead
        elif c in "():,-|&+!~^%": # handle simple operators
            yield (c, None, pos)
        elif (c in '"\'' or c == 'r' and
              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
            if c == 'r':
                # raw string: no escape-sequence processing
                pos += 1
                c = program[pos]
                decode = lambda x: x
            else:
                decode = lambda x: x.decode('string-escape')
            pos += 1
            s = pos
            while pos < l: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', decode(program[s:pos]), s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        # gather up a symbol/keyword
        elif c in syminitletters:
            s = pos
            pos += 1
            while pos < l: # find end of symbol
                d = program[pos]
                if d not in symletters:
                    break
                if d == '.' and program[pos - 1] == '.': # special case for ..
                    pos -= 1
                    break
                pos += 1
            sym = program[s:pos]
            if sym in keywords: # operator keywords
                yield (sym, None, s)
            elif '-' in sym:
                # some jerk gave us foo-bar-baz, try to check if it's a symbol
                if lookup and lookup(sym):
                    # looks like a real symbol
                    yield ('symbol', sym, s)
                else:
                    # looks like an expression
                    parts = sym.split('-')
                    for p in parts[:-1]:
                        if p: # possible consecutive -
                            yield ('symbol', p, s)
                        s += len(p)
                        yield ('-', None, pos)
                        s += 1
                    if parts[-1]: # possible trailing -
                        yield ('symbol', parts[-1], s)
            else:
                yield ('symbol', sym, s)
            pos -= 1
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    yield ('end', None, pos)
241 241
def parseerrordetail(inst):
    """Compose an error message from the given ParseError instance."""
    args = inst.args
    if len(args) <= 1:
        return args[0]
    # second argument is the position within the parsed expression
    return _('at %s: %s') % (args[1], args[0])
249 249
250 250 # helpers
251 251
def getstring(x, err):
    """Return the value of a 'string' or 'symbol' parse node; raise
    ParseError(err) for anything else."""
    if not x:
        raise error.ParseError(err)
    kind = x[0]
    if kind == 'string' or kind == 'symbol':
        return x[1]
    raise error.ParseError(err)
256 256
def getlist(x):
    """Flatten a right-leaning 'list' parse tree into a Python list."""
    if not x:
        return []
    if x[0] != 'list':
        return [x]
    # ('list', rest, item): recurse on the left, append the right
    return getlist(x[1]) + [x[2]]
263 263
def getargs(x, min, max, err):
    # Flatten the argument tree and enforce an arity of min..max
    # (max < 0 means unlimited); raise ParseError(err) otherwise.
    l = getlist(x)
    if len(l) < min or (max >= 0 and len(l) > max):
        raise error.ParseError(err)
    return l
269 269
def isvalidsymbol(tree):
    """Return True if *tree* is a well-formed 'symbol' parse-tree node."""
    if tree[0] != 'symbol':
        return False
    return len(tree) > 1
274 274
def getsymbol(tree):
    """Return the name stored in a 'symbol' parse-tree node.

    The caller is expected to have validated *tree* with
    ``isvalidsymbol`` first.
    """
    return tree[1]
281 281
def isvalidfunc(tree):
    """Return True if *tree* is a well-formed 'func' parse-tree node."""
    if tree[0] != 'func':
        return False
    if len(tree) <= 1:
        return False
    # the first child must itself be a valid symbol (the function name)
    return isvalidsymbol(tree[1])
286 286
def getfuncname(tree):
    """Get function name from valid ``func`` in ``tree``

    This assumes that ``tree`` is already examined by ``isvalidfunc``.
    """
    # tree is ('func', ('symbol', name)[, args]); the name is in tree[1]
    return getsymbol(tree[1])
293 293
def getfuncargs(tree):
    """Get list of function arguments from valid ``func`` in ``tree``

    This assumes that ``tree`` is already examined by ``isvalidfunc``.
    """
    # a 2-element 'func' node carries no argument subtree at all
    if len(tree) > 2:
        return getlist(tree[2])
    else:
        return []
303 303
def getset(repo, subset, x):
    # Evaluate parse tree ``x`` against ``subset`` by dispatching on the
    # node type; wrap plain iterables in a baseset so callers always
    # receive a smartset (anything exposing ``isascending``).
    if not x:
        raise error.ParseError(_("missing argument"))
    s = methods[x[0]](repo, subset, *x[1:])
    if util.safehasattr(s, 'isascending'):
        return s
    return baseset(s)
311 311
def _getrevsource(repo, r):
    """Return the revision that ``r`` was grafted/transplanted/rebased
    from, or None when no marker resolves to a known revision."""
    markers = ('source', 'transplant_source', 'rebase_source')
    extra = repo[r].extra()
    for label in markers:
        if label not in extra:
            continue
        try:
            return repo[extra[label]].rev()
        except error.RepoLookupError:
            # stale marker pointing at an unknown node -- keep looking
            pass
    return None
321 321
# operator methods

def stringset(repo, subset, x):
    # resolve a plain revision identifier against the repo
    x = repo[x].rev()
    if x == -1 and len(subset) == len(repo):
        # the null revision is only visible from an unrestricted subset
        return baseset([-1])
    if x in subset:
        return baseset([x])
    return baseset()

def symbolset(repo, subset, x):
    # a bare symbol that collides with a predicate name is an error
    if x in symbols:
        raise error.ParseError(_("can't use %s here") % x)
    return stringset(repo, subset, x)

def rangeset(repo, subset, x, y):
    # 'x:y' -- endpoints resolve against the whole repo, then the span
    # (ascending or descending) is restricted to subset
    m = getset(repo, fullreposet(repo), x)
    n = getset(repo, fullreposet(repo), y)

    if not m or not n:
        return baseset()
    m, n = m.first(), n.last()

    if m < n:
        r = spanset(repo, m, n + 1)
    else:
        # descending range
        r = spanset(repo, m, n - 1)
    return r & subset

def dagrange(repo, subset, x, y):
    # 'x::y' -- every revision on a DAG path between the two sets
    r = spanset(repo)
    xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
    return xs & subset

def andset(repo, subset, x, y):
    # evaluate y within the result of x to form the intersection
    return getset(repo, getset(repo, subset, x), y)

def orset(repo, subset, x, y):
    # evaluate y only over what x did not already match
    xl = getset(repo, subset, x)
    yl = getset(repo, subset - xl, y)
    return xl + yl

def notset(repo, subset, x):
    return subset - getset(repo, subset, x)

def listset(repo, subset, a, b):
    # a bare 'a, b' list is only meaningful as function arguments
    raise error.ParseError(_("can't use a list in this context"))

def func(repo, subset, a, b):
    # dispatch a function call to the matching predicate in ``symbols``
    if a[0] == 'symbol' and a[1] in symbols:
        return symbols[a[1]](repo, subset, b)
    raise error.ParseError(_("not a function: %s") % a[1])
374 374
# functions

def adds(repo, subset, x):
    """``adds(pattern)``
    Changesets that add a file matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "adds" is a keyword
    pat = getstring(x, _("adds requires a pattern"))
    # status field 1 holds the added files
    return checkstatus(repo, subset, pat, 1)

def ancestor(repo, subset, x):
    """``ancestor(*changeset)``
    A greatest common ancestor of the changesets.

    Accepts 0 or more changesets.
    Will return empty list when passed no args.
    Greatest common ancestor of a single changeset is that changeset.
    """
    # i18n: "ancestor" is a keyword
    l = getlist(x)
    rl = spanset(repo)
    anc = None

    # (getset(repo, rl, i) for i in l) generates a list of lists
    for revs in (getset(repo, rl, i) for i in l):
        for r in revs:
            if anc is None:
                anc = repo[r]
            else:
                # fold each revision into the running common ancestor
                anc = anc.ancestor(repo[r])

    if anc is not None and anc.rev() in subset:
        return baseset([anc.rev()])
    return baseset()

def _ancestors(repo, subset, x, followfirst=False):
    # shared implementation of ancestors() and _firstancestors()
    heads = getset(repo, spanset(repo), x)
    if not heads:
        return baseset()
    s = _revancestors(repo, heads, followfirst)
    return subset & s

def ancestors(repo, subset, x):
    """``ancestors(set)``
    Changesets that are ancestors of a changeset in set.
    """
    return _ancestors(repo, subset, x)

def _firstancestors(repo, subset, x):
    # ``_firstancestors(set)``
    # Like ``ancestors(set)`` but follows only the first parents.
    return _ancestors(repo, subset, x, followfirst=True)

def ancestorspec(repo, subset, x, n):
    """``set~n``
    Changesets that are the Nth ancestor (first parents only) of a changeset
    in set.
    """
    try:
        n = int(n[1])
    except (TypeError, ValueError):
        raise error.ParseError(_("~ expects a number"))
    ps = set()
    cl = repo.changelog
    for r in getset(repo, fullreposet(repo), x):
        # follow the first-parent chain exactly n steps back
        for i in range(n):
            r = cl.parentrevs(r)[0]
        ps.add(r)
    return subset & ps

def author(repo, subset, x):
    """``author(string)``
    Alias for ``user(string)``.
    """
    # i18n: "author" is a keyword
    n = encoding.lower(getstring(x, _("author requires a string")))
    kind, pattern, matcher = _substringmatcher(n)
    return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())))

def bisect(repo, subset, x):
    """``bisect(string)``
    Changesets marked in the specified bisect status:

    - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
    - ``goods``, ``bads`` : csets topologically good/bad
    - ``range`` : csets taking part in the bisection
    - ``pruned`` : csets that are goods, bads or skipped
    - ``untested`` : csets whose fate is yet unknown
    - ``ignored`` : csets ignored due to DAG topology
    - ``current`` : the cset currently being bisected
    """
    # i18n: "bisect" is a keyword
    status = getstring(x, _("bisect requires a string")).lower()
    state = set(hbisect.get(repo, status))
    return subset & state

# Backward-compatibility
# - no help entry so that we do not advertise it any more
def bisected(repo, subset, x):
    return bisect(repo, subset, x)
479 479
def bookmark(repo, subset, x):
    """``bookmark([name])``
    The named bookmark or all bookmarks.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a bookmark that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "bookmark" is a keyword
    args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
    if args:
        bm = getstring(args[0],
                       # i18n: "bookmark" is a keyword
                       _('the argument to bookmark must be a string'))
        kind, pattern, matcher = _stringmatcher(bm)
        bms = set()
        if kind == 'literal':
            # an exact bookmark name must exist, otherwise it is an error
            bmrev = repo._bookmarks.get(pattern, None)
            if not bmrev:
                raise error.RepoLookupError(_("bookmark '%s' does not exist")
                                            % bm)
            bms.add(repo[bmrev].rev())
        else:
            matchrevs = set()
            for name, bmrev in repo._bookmarks.iteritems():
                if matcher(name):
                    matchrevs.add(bmrev)
            if not matchrevs:
                raise error.RepoLookupError(_("no bookmarks exist"
                                              " that match '%s'") % pattern)
            for bmrev in matchrevs:
                bms.add(repo[bmrev].rev())
    else:
        # no argument: every bookmarked revision
        bms = set([repo[r].rev()
                   for r in repo._bookmarks.values()])
    bms -= set([node.nullrev])
    return subset & bms

def branch(repo, subset, x):
    """``branch(string or set)``
    All changesets belonging to the given branch or the branches of the given
    changesets.

    If `string` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a branch that actually starts with `re:`,
    use the prefix `literal:`.
    """
    import branchmap
    urepo = repo.unfiltered()
    ucl = urepo.changelog
    # branch names are looked up through the unfiltered revbranchcache
    getbi = branchmap.revbranchcache(urepo).branchinfo

    try:
        b = getstring(x, '')
    except error.ParseError:
        # not a string, but another revspec, e.g. tip()
        pass
    else:
        kind, pattern, matcher = _stringmatcher(b)
        if kind == 'literal':
            # note: falls through to the revspec case if no branch with
            # this name exists
            if pattern in repo.branchmap():
                return subset.filter(lambda r: matcher(getbi(ucl, r)[0]))
        else:
            return subset.filter(lambda r: matcher(getbi(ucl, r)[0]))

    s = getset(repo, spanset(repo), x)
    b = set()
    for r in s:
        b.add(getbi(ucl, r)[0])
    c = s.__contains__
    # members of s, plus everything on the same branches as s
    return subset.filter(lambda r: c(r) or getbi(ucl, r)[0] in b)

def bumped(repo, subset, x):
    """``bumped()``
    Mutable changesets marked as successors of public changesets.

    Only non-public and non-obsolete changesets can be `bumped`.
    """
    # i18n: "bumped" is a keyword
    getargs(x, 0, 0, _("bumped takes no arguments"))
    bumped = obsmod.getrevs(repo, 'bumped')
    return subset & bumped

def bundle(repo, subset, x):
    """``bundle()``
    Changesets in the bundle.

    Bundle must be specified by the -R option."""

    try:
        bundlerevs = repo.changelog.bundlerevs
    except AttributeError:
        raise util.Abort(_("no bundle provided - specify with -R"))
    return subset & bundlerevs
576 576
577 577 def checkstatus(repo, subset, pat, field):
578 578 hasset = matchmod.patkind(pat) == 'set'
579 579
580 580 mcache = [None]
581 581 def matches(x):
582 582 c = repo[x]
583 583 if not mcache[0] or hasset:
584 584 mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
585 585 m = mcache[0]
586 586 fname = None
587 587 if not m.anypats() and len(m.files()) == 1:
588 588 fname = m.files()[0]
589 589 if fname is not None:
590 590 if fname not in c.files():
591 591 return False
592 592 else:
593 593 for f in c.files():
594 594 if m(f):
595 595 break
596 596 else:
597 597 return False
598 598 files = repo.status(c.p1().node(), c.node())[field]
599 599 if fname is not None:
600 600 if fname in files:
601 601 return True
602 602 else:
603 603 for f in files:
604 604 if m(f):
605 605 return True
606 606
607 607 return subset.filter(matches)
608 608
609 609 def _children(repo, narrow, parentset):
610 610 cs = set()
611 611 if not parentset:
612 612 return baseset(cs)
613 613 pr = repo.changelog.parentrevs
614 614 minrev = min(parentset)
615 615 for r in narrow:
616 616 if r <= minrev:
617 617 continue
618 618 for p in pr(r):
619 619 if p in parentset:
620 620 cs.add(r)
621 621 return baseset(cs)
622 622
623 623 def children(repo, subset, x):
624 624 """``children(set)``
625 625 Child changesets of changesets in set.
626 626 """
627 627 s = getset(repo, fullreposet(repo), x)
628 628 cs = _children(repo, subset, s)
629 629 return subset & cs
630 630
631 631 def closed(repo, subset, x):
632 632 """``closed()``
633 633 Changeset is closed.
634 634 """
635 635 # i18n: "closed" is a keyword
636 636 getargs(x, 0, 0, _("closed takes no arguments"))
637 637 return subset.filter(lambda r: repo[r].closesbranch())
638 638
def contains(repo, subset, x):
    """``contains(pattern)``
    The revision's manifest contains a file matching pattern (but might not
    modify it). See :hg:`help patterns` for information about file patterns.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.
    """
    # i18n: "contains" is a keyword
    pat = getstring(x, _("contains requires a pattern"))

    def matches(x):
        if matchmod.patkind(pat):
            # real pattern: scan the whole manifest with a matcher
            c = repo[x]
            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
            for f in c.manifest():
                if m(f):
                    return True
        else:
            # plain path: a direct manifest membership test is enough
            pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
            if pats in repo[x]:
                return True
        return False

    return subset.filter(matches)
665 665
def converted(repo, subset, x):
    """``converted([id])``
    Changesets converted from the given identifier in the old repository if
    present, or all converted changesets if no identifier is specified.
    """
    # The source revision cannot be resolved here, so fall back to a plain
    # string-prefix comparison on the recorded identifier and hope for the
    # best.
    rev = None
    # i18n: "converted" is a keyword
    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
    if l:
        # i18n: "converted" is a keyword
        rev = getstring(l[0], _('converted requires a revision'))

    def _matchvalue(r):
        source = repo[r].extra().get('convert_revision', None)
        if source is None:
            return False
        return rev is None or source.startswith(rev)

    return subset.filter(_matchvalue)
687 687
def date(repo, subset, x):
    """``date(interval)``
    Changesets within the interval, see :hg:`help dates`.
    """
    # i18n: "date" is a keyword
    ds = getstring(x, _("date requires a string"))
    datematch = util.matchdate(ds)

    def indate(r):
        # date()[0] is the unix timestamp of the changeset
        return datematch(repo[r].date()[0])

    return subset.filter(indate)
696 696
def desc(repo, subset, x):
    """``desc(string)``
    Search commit message for string. The match is case-insensitive.
    """
    # i18n: "desc" is a keyword
    needle = encoding.lower(getstring(x, _("desc requires a string")))

    def hasneedle(r):
        # lowercase both sides for a case-insensitive substring match
        return needle in encoding.lower(repo[r].description())

    return subset.filter(hasneedle)
709 709
def _descendants(repo, subset, x, followfirst=False):
    # Shared implementation of descendants() and _firstdescendants():
    # revisions in 'subset' that are in, or descend from, the set matched
    # by x, preserving subset's ordering.
    roots = getset(repo, spanset(repo), x)
    if not roots:
        return baseset()
    s = _revdescendants(repo, roots, followfirst)

    # Both sets need to be ascending in order to lazily return the union
    # in the correct order.
    base = subset & roots
    desc = subset & s
    result = base + desc
    if subset.isascending():
        result.sort()
    elif subset.isdescending():
        result.sort(reverse=True)
    else:
        # unordered subset: intersect again so subset dictates the order
        result = subset & result
    return result
728 728
def descendants(repo, subset, x):
    """``descendants(set)``
    Changesets which are descendants of changesets in set.
    """
    # thin wrapper: the real work happens in _descendants()
    return _descendants(repo, subset, x)
734 734
def _firstdescendants(repo, subset, x):
    # ``_firstdescendants(set)``
    # Like ``descendants(set)`` but follows only the first parents.
    # Internal predicate (leading underscore keeps it out of user help).
    return _descendants(repo, subset, x, followfirst=True)
739 739
def destination(repo, subset, x):
    """``destination([set])``
    Changesets that were created by a graft, transplant or rebase operation,
    with the given revisions specified as the source. Omitting the optional set
    is the same as passing all().
    """
    if x is not None:
        sources = getset(repo, spanset(repo), x)
    else:
        sources = getall(repo, spanset(repo), x)

    # revisions confirmed to (transitively) originate from 'sources'
    dests = set()

    # subset contains all of the possible destinations that can be returned, so
    # iterate over them and see if their source(s) were provided in the arg set.
    # Even if the immediate src of r is not in the arg set, src's source (or
    # further back) may be. Scanning back further than the immediate src allows
    # transitive transplants and rebases to yield the same results as transitive
    # grafts.
    for r in subset:
        src = _getrevsource(repo, r)
        # chain of candidate dests visited while walking back from r
        lineage = None

        while src is not None:
            if lineage is None:
                lineage = list()

            lineage.append(r)

            # The visited lineage is a match if the current source is in the arg
            # set. Since every candidate dest is visited by way of iterating
            # subset, any dests further back in the lineage will be tested by a
            # different iteration over subset. Likewise, if the src was already
            # selected, the current lineage can be selected without going back
            # further.
            if src in sources or src in dests:
                dests.update(lineage)
                break

            r = src
            src = _getrevsource(repo, r)

    return subset.filter(dests.__contains__)
783 783
def divergent(repo, subset, x):
    """``divergent()``
    Final successors of changesets with an alternative set of final successors.
    """
    # i18n: "divergent" is a keyword
    getargs(x, 0, 0, _("divergent takes no arguments"))
    # obsstore keeps a precomputed set of divergent revisions
    return subset & obsmod.getrevs(repo, 'divergent')
792 792
def draft(repo, subset, x):
    """``draft()``
    Changeset in draft phase."""
    # i18n: "draft" is a keyword
    getargs(x, 0, 0, _("draft takes no arguments"))
    getphase = repo._phasecache.phase

    def isdraft(r):
        return getphase(repo, r) == phases.draft

    # cache=False: do not memoize the per-rev filter results
    return subset.filter(isdraft, cache=False)
802 802
def extinct(repo, subset, x):
    """``extinct()``
    Obsolete changesets with obsolete descendants only.
    """
    # i18n: "extinct" is a keyword
    getargs(x, 0, 0, _("extinct takes no arguments"))
    # obsstore keeps a precomputed set of extinct revisions
    return subset & obsmod.getrevs(repo, 'extinct')
811 811
def extra(repo, subset, x):
    """``extra(label, [value])``
    Changesets with the given label in the extra metadata, with the given
    optional value.

    If `value` starts with `re:`, the remainder of the value is treated as
    a regular expression. To match a value that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "extra" is a keyword
    args = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
    # i18n: "extra" is a keyword
    label = getstring(args[0], _('first argument to extra must be a string'))
    value = None
    matcher = None

    if len(args) > 1:
        # i18n: "extra" is a keyword
        value = getstring(args[1], _('second argument to extra must be a string'))
        kind, value, matcher = _stringmatcher(value)

    def hasextra(r):
        extradict = repo[r].extra()
        if label not in extradict:
            return False
        # with no value argument, mere presence of the label is enough
        return value is None or matcher(extradict[label])

    return subset.filter(hasextra)
838 838
def filelog(repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, visits only revisions mentioned in the file-level
    filelog, rather than filtering through all changesets (much faster, but
    doesn't include deletes or duplicate changes). For a slower, more accurate
    result, use ``file()``.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.

    If some linkrev points to revisions filtered by the current repoview, we'll
    work around it to return a non-filtered value.
    """

    # i18n: "filelog" is a keyword
    pat = getstring(x, _("filelog requires a pattern"))
    # changelog revisions to return
    s = set()
    cl = repo.changelog

    if not matchmod.patkind(pat):
        # plain path: a single filelog is enough
        f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
        files = [f]
    else:
        # pattern: collect every working-directory file it matches
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
        files = (f for f in repo[None] if m(f))

    for f in files:
        backrevref = {} # final value for: filerev -> changerev
        lowestchild = {} # lowest known filerev child of a filerev
        delayed = [] # filerev with filtered linkrev, for post-processing
        lowesthead = None # cache for manifest content of all head revisions
        fl = repo.file(f)
        # first pass: record linkrevs, deferring filerevs whose linkrev
        # points to a changeset filtered by the current repoview
        for fr in list(fl):
            rev = fl.linkrev(fr)
            if rev not in cl:
                # changerev pointed in linkrev is filtered
                # record it for post processing.
                delayed.append((fr, rev))
                continue
            for p in fl.parentrevs(fr):
                if 0 <= p and p not in lowestchild:
                    lowestchild[p] = fr
            backrevref[fr] = rev
            s.add(rev)

        # Post-processing of all filerevs we skipped because they were
        # filtered. If such filerevs have known and unfiltered children, this
        # means they have an unfiltered appearance out there. We'll use linkrev
        # adjustment to find one of these appearances. The lowest known child
        # will be used as a starting point because it is the best upper-bound we
        # have.
        #
        # This approach will fail when an unfiltered but linkrev-shadowed
        # appearance exists in a head changeset without unfiltered filerev
        # children anywhere.
        while delayed:
            # must be a descending iteration. To slowly fill lowest child
            # information that is of potential use by the next item.
            fr, rev = delayed.pop()
            lkr = rev

            child = lowestchild.get(fr)

            if child is None:
                # search for existence of this file revision in a head revision.
                # There are three possibilities:
                # - the revision exists in a head and we can find an
                #   introduction from there,
                # - the revision does not exist in a head because it has been
                #   changed since its introduction: we would have found a child
                #   and be in the other 'else' clause,
                # - all versions of the revision are hidden.
                if lowesthead is None:
                    lowesthead = {}
                    for h in repo.heads():
                        fnode = repo[h].manifest().get(f)
                        if fnode is not None:
                            lowesthead[fl.rev(fnode)] = h
                headrev = lowesthead.get(fr)
                if headrev is None:
                    # content is nowhere unfiltered
                    continue
                rev = repo[headrev][f].introrev()
            else:
                # the lowest known child is a good upper bound
                childcrev = backrevref[child]
                # XXX this does not guarantee returning the lowest
                # introduction of this revision, but this gives a
                # result which is a good start and will fit in most
                # cases. We probably need to fix the multiple
                # introductions case properly (report each
                # introduction, even for identical file revisions)
                # once and for all at some point anyway.
                for p in repo[childcrev][f].parents():
                    if p.filerev() == fr:
                        rev = p.rev()
                        break
            if rev == lkr: # no shadowed entry found
                # XXX This should never happen unless some manifest points
                # to biggish file revisions (like a revision that uses a
                # parent that never appears in the manifest ancestors)
                continue

            # Fill the data for the next iteration.
            for p in fl.parentrevs(fr):
                if 0 <= p and p not in lowestchild:
                    lowestchild[p] = fr
            backrevref[fr] = rev
            s.add(rev)

    return subset & s
953 953
def first(repo, subset, x):
    """``first(set, [n])``
    An alias for limit().
    """
    # identical semantics to limit(): first n members of set, default 1
    return limit(repo, subset, x)
959 959
def _follow(repo, subset, x, name, followfirst=False):
    # Shared implementation of follow() and _followfirst(): ancestors of
    # the working directory's first parent, or of the named file when a
    # filename argument is supplied. 'name' is only used in error messages.
    args = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
    wparent = repo['.']
    if not args:
        revs = _revancestors(repo, baseset([wparent.rev()]), followfirst)
    else:
        fname = getstring(args[0], _("%s expected a filename") % name)
        if fname not in wparent:
            return baseset()
        fctx = wparent[fname]
        revs = set(c.rev() for c in fctx.ancestors(followfirst=followfirst))
        # include the revision responsible for the most recent version
        revs.add(fctx.introrev())

    return subset & revs
976 976
def follow(repo, subset, x):
    """``follow([file])``
    An alias for ``::.`` (ancestors of the working copy's first parent).
    If a filename is specified, the history of the given file is followed,
    including copies.
    """
    # thin wrapper around _follow(); the string is used in error messages
    return _follow(repo, subset, x, 'follow')
984 984
def _followfirst(repo, subset, x):
    # ``followfirst([file])``
    # Like ``follow([file])`` but follows only the first parent of
    # every revision or file revision.
    # Internal predicate (leading underscore keeps it out of user help).
    return _follow(repo, subset, x, '_followfirst', followfirst=True)
990 990
def getall(repo, subset, x):
    """``all()``
    All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    # the incoming subset already spans the query space, so return it as is
    return subset
998 998
999 999 def grep(repo, subset, x):
1000 1000 """``grep(regex)``
1001 1001 Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
1002 1002 to ensure special escape characters are handled correctly. Unlike
1003 1003 ``keyword(string)``, the match is case-sensitive.
1004 1004 """
1005 1005 try:
1006 1006 # i18n: "grep" is a keyword
1007 1007 gr = re.compile(getstring(x, _("grep requires a string")))
1008 1008 except re.error, e:
1009 1009 raise error.ParseError(_('invalid match pattern: %s') % e)
1010 1010
1011 1011 def matches(x):
1012 1012 c = repo[x]
1013 1013 for e in c.files() + [c.user(), c.description()]:
1014 1014 if gr.search(e):
1015 1015 return True
1016 1016 return False
1017 1017
1018 1018 return subset.filter(matches)
1019 1019
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    # [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    # i18n: "_matchfiles" is a keyword
    l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
    pats, inc, exc = [], [], []
    rev, default = None, None
    for arg in l:
        # i18n: "_matchfiles" is a keyword
        s = getstring(arg, _("_matchfiles requires string arguments"))
        # the two-character prefix selects how each argument is used
        prefix, value = s[:2], s[2:]
        if prefix == 'p:':
            pats.append(value)
        elif prefix == 'i:':
            inc.append(value)
        elif prefix == 'x:':
            exc.append(value)
        elif prefix == 'r:':
            if rev is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'revision'))
            if value != '': # empty means working directory; leave rev as None
                rev = value
        elif prefix == 'd:':
            if default is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'default mode'))
            default = value
        else:
            # i18n: "_matchfiles" is a keyword
            raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
    if not default:
        default = 'glob'

    # a single match object is built once and reused for every revision
    m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                       exclude=exc, ctx=repo[rev], default=default)

    def matches(x):
        for f in repo[x].files():
            if m(f):
                return True
        return False

    return subset.filter(matches)
1076 1076
def hasfile(repo, subset, x):
    """``file(pattern)``
    Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.

    This predicate uses ``glob:`` as the default kind of pattern.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    # delegate to _matchfiles() with a single 'p:' (plain pattern) argument
    return _matchfiles(repo, subset, ('string', 'p:' + pat))
1089 1089
def head(repo, subset, x):
    """``head()``
    Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    headrevs = set()
    # collect the head revisions of every named branch
    for branch, nodes in repo.branchmap().iteritems():
        for n in nodes:
            headrevs.add(repo[n].rev())
    return baseset(headrevs).filter(subset.__contains__)
1100 1100
def heads(repo, subset, x):
    """``heads(set)``
    Members of set with no children in set.
    """
    members = getset(repo, subset, x)
    # anything that is a parent of a member cannot be a head of the set
    parentset = parents(repo, subset, x)
    return members - parentset
1108 1108
def hidden(repo, subset, x):
    """``hidden()``
    Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    # revisions filtered out of the 'visible' view are the hidden ones
    return subset & repoview.filterrevs(repo, 'visible')
1117 1117
def keyword(repo, subset, x):
    """``keyword(string)``
    Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.
    """
    # i18n: "keyword" is a keyword
    kw = encoding.lower(getstring(x, _("keyword requires a string")))

    def matches(r):
        c = repo[r]
        # case-insensitive substring search over files, user and message
        for t in c.files() + [c.user(), c.description()]:
            if kw in encoding.lower(t):
                return True
        return False

    return subset.filter(matches)
1132 1132
def limit(repo, subset, x):
    """``limit(set, [n])``
    First n members of set, defaulting to 1.
    """
    # i18n: "limit" is a keyword
    l = getargs(x, 1, 2, _("limit requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "limit" is a keyword
            lim = int(getstring(l[1], _("limit requires a number")))
    except (TypeError, ValueError):
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit expects a number"))
    ss = subset
    os = getset(repo, spanset(repo), l[0])
    result = []
    it = iter(os)
    # take at most 'lim' revisions from os, keeping those also in subset;
    # the loop index is named 'i' so it does not shadow the parse tree 'x'
    for i in xrange(lim):
        try:
            y = it.next()
        except StopIteration:
            # os exhausted before reaching the limit
            break
        if y in ss:
            result.append(y)
    return baseset(result)
1159 1159
def last(repo, subset, x):
    """``last(set, [n])``
    Last n members of set, defaulting to 1.
    """
    # i18n: "last" is a keyword
    l = getargs(x, 1, 2, _("last requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "last" is a keyword
            lim = int(getstring(l[1], _("last requires a number")))
    except (TypeError, ValueError):
        # i18n: "last" is a keyword
        raise error.ParseError(_("last expects a number"))
    ss = subset
    os = getset(repo, spanset(repo), l[0])
    # walk from the end of the set so the last members come first
    os.reverse()
    result = []
    it = iter(os)
    # take at most 'lim' revisions from os, keeping those also in subset;
    # the loop index is named 'i' so it does not shadow the parse tree 'x'
    for i in xrange(lim):
        try:
            y = it.next()
        except StopIteration:
            # os exhausted before reaching the limit
            break
        if y in ss:
            result.append(y)
    return baseset(result)
1187 1187
def maxrev(repo, subset, x):
    """``max(set)``
    Changeset with highest revision number in set.
    """
    candidates = getset(repo, spanset(repo), x)
    if candidates:
        top = candidates.max()
        if top in subset:
            return baseset([top])
    # empty argument set, or the maximum is outside subset
    return baseset()
1198 1198
def merge(repo, subset, x):
    """``merge()``
    Changeset is a merge changeset.
    """
    # i18n: "merge" is a keyword
    getargs(x, 0, 0, _("merge takes no arguments"))
    parentrevs = repo.changelog.parentrevs

    def ismerge(r):
        # merges have a real (non -1) second parent
        return parentrevs(r)[1] != -1

    return subset.filter(ismerge)
1207 1207
def branchpoint(repo, subset, x):
    """``branchpoint()``
    Changesets with more than one child.
    """
    # i18n: "branchpoint" is a keyword
    getargs(x, 0, 0, _("branchpoint takes no arguments"))
    cl = repo.changelog
    if not subset:
        return baseset()
    baserev = min(subset)
    # childcount[r - baserev] counts how many revisions have r as a parent
    childcount = [0] * (len(repo) - baserev)
    for r in cl.revs(start=baserev + 1):
        for p in cl.parentrevs(r):
            if p >= baserev:
                childcount[p - baserev] += 1
    return subset.filter(lambda r: childcount[r - baserev] > 1)
1224 1224
def minrev(repo, subset, x):
    """``min(set)``
    Changeset with lowest revision number in set.
    """
    candidates = getset(repo, spanset(repo), x)
    if candidates:
        bottom = candidates.min()
        if bottom in subset:
            return baseset([bottom])
    # empty argument set, or the minimum is outside subset
    return baseset()
1235 1235
def modifies(repo, subset, x):
    """``modifies(pattern)``
    Changesets modifying files matched by pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "modifies" is a keyword
    pat = getstring(x, _("modifies requires a pattern"))
    # field 0 of repo.status() holds the modified files (see checkstatus)
    return checkstatus(repo, subset, pat, 0)
1247 1247
def named(repo, subset, x):
    """``named(namespace)``
    The changesets in a given namespace.

    If `namespace` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a namespace that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "named" is a keyword
    args = getargs(x, 1, 1, _('named requires a namespace argument'))

    ns = getstring(args[0],
                   # i18n: "named" is a keyword
                   _('the argument to named must be a string'))
    kind, pattern, matcher = _stringmatcher(ns)
    # collect the namespace objects selected by the argument
    namespaces = set()
    if kind == 'literal':
        # exact name: it must exist, otherwise abort the query
        if pattern not in repo.names:
            raise error.RepoLookupError(_("namespace '%s' does not exist")
                                        % ns)
        namespaces.add(repo.names[pattern])
    else:
        # pattern: every matching namespace contributes; none is an error
        for name, ns in repo.names.iteritems():
            if matcher(name):
                namespaces.add(ns)
        if not namespaces:
            raise error.RepoLookupError(_("no namespace exists"
                                          " that match '%s'") % pattern)

    # gather every revision bound to any name in the selected namespaces
    names = set()
    for ns in namespaces:
        for name in ns.listnames(repo):
            names.update(repo[n].rev() for n in ns.nodes(repo, name))

    names -= set([node.nullrev])
    return subset & names
1284 1284
def node_(repo, subset, x):
    """``id(string)``
    Revision non-ambiguously specified by the given hex string prefix.
    """
    # i18n: "id" is a keyword
    l = getargs(x, 1, 1, _("id requires one argument"))
    # i18n: "id" is a keyword
    n = getstring(l[0], _("id requires a string"))
    rn = None
    if len(n) == 40:
        # full 40-digit hex node: direct lookup
        rn = repo[n].rev()
    else:
        # prefix: resolved only when unambiguous
        pm = repo.changelog._partialmatch(n)
        if pm is not None:
            rn = repo.changelog.rev(pm)

    if rn is None:
        return baseset()
    return baseset([rn]) & subset
1305 1305
def obsolete(repo, subset, x):
    """``obsolete()``
    Mutable changeset with a newer version."""
    # i18n: "obsolete" is a keyword
    getargs(x, 0, 0, _("obsolete takes no arguments"))
    # obsstore keeps a precomputed set of obsolete revisions
    return subset & obsmod.getrevs(repo, 'obsolete')
1313 1313
def only(repo, subset, x):
    """``only(set, [set])``
    Changesets that are ancestors of the first set that are not ancestors
    of any other head in the repo. If a second set is specified, the result
    is ancestors of the first set that are not ancestors of the second set
    (i.e. ::<set1> - ::<set2>).
    """
    cl = repo.changelog
    # i18n: "only" is a keyword
    args = getargs(x, 1, 2, _('only takes one or two arguments'))
    include = getset(repo, spanset(repo), args[0])
    if len(args) == 1:
        if not include:
            return baseset()

        # exclude heads that are neither in nor descended from the include set
        descendants = set(_revdescendants(repo, include, False))
        exclude = [rev for rev in cl.headrevs()
                   if rev not in descendants and rev not in include]
    else:
        exclude = getset(repo, spanset(repo), args[1])

    # ::include - ::exclude, computed by the changelog itself
    results = set(cl.findmissingrevs(common=exclude, heads=include))
    return subset & results
1337 1337
def origin(repo, subset, x):
    """``origin([set])``
    Changesets that were specified as a source for the grafts, transplants or
    rebases that created the given revisions. Omitting the optional set is the
    same as passing all(). If a changeset created by these operations is itself
    specified as a source for one of these operations, only the source changeset
    for the first operation is selected.
    """
    if x is None:
        dests = getall(repo, spanset(repo), x)
    else:
        dests = getset(repo, spanset(repo), x)

    def _firstsrc(rev):
        # walk the recorded source chain back to its first link
        src = _getrevsource(repo, rev)
        if src is None:
            return None
        prev = _getrevsource(repo, src)
        while prev is not None:
            src = prev
            prev = _getrevsource(repo, src)
        return src

    origins = set()
    for r in dests:
        first = _firstsrc(r)
        if first is not None:
            origins.add(first)
    return subset & origins
1366 1366
def outgoing(repo, subset, x):
    """``outgoing([path])``
    Changesets not found in the specified destination repository, or the
    default push location.
    """
    import hg # avoid start-up nasties
    # i18n: "outgoing" is a keyword
    l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
    # i18n: "outgoing" is a keyword
    dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
    # fall back to the configured default-push/default paths when empty
    dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    # buffer ui output so discovery chatter does not leak into the query
    repo.ui.pushbuffer()
    outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
    repo.ui.popbuffer()
    cl = repo.changelog
    # map the missing nodes back to local revision numbers
    o = set([cl.rev(r) for r in outgoing.missing])
    return subset & o
1389 1389
def p1(repo, subset, x):
    """``p1([set])``
    First parent of changesets in set, or the working directory.
    """
    if x is None:
        # repo[None] is the working directory context
        p = repo[x].p1().rev()
        if p >= 0:
            return subset & baseset([p])
        return baseset()

    cl = repo.changelog
    ps = set(cl.parentrevs(r)[0] for r in getset(repo, spanset(repo), x))
    # root changesets report nullrev as parent; drop it
    ps.discard(node.nullrev)
    return subset & ps
1406 1406
def p2(repo, subset, x):
    """``p2([set])``
    Second parent of changesets in set, or the working directory.
    """
    if x is None:
        # repo[None] is the working directory context
        ps = repo[x].parents()
        if len(ps) > 1:
            p = ps[1].rev()
            if p >= 0:
                return subset & baseset([p])
        return baseset()

    cl = repo.changelog
    ps = set(cl.parentrevs(r)[1] for r in getset(repo, spanset(repo), x))
    # non-merges report nullrev as second parent; drop it
    ps.discard(node.nullrev)
    return subset & ps
1427 1427
def parents(repo, subset, x):
    """``parents([set])``
    The set of all parents for all changesets in set, or the working directory.
    """
    if x is None:
        # repo[None] is the working directory context
        ps = set(p.rev() for p in repo[x].parents())
    else:
        cl = repo.changelog
        ps = set()
        for r in getset(repo, spanset(repo), x):
            ps.update(cl.parentrevs(r))
    # drop the nullrev placeholder parent
    ps.discard(node.nullrev)
    return subset & ps
1441 1441
def parentspec(repo, subset, x, n):
    """``set^0``
    The set.
    ``set^1`` (or ``set^``), ``set^2``
    First or second parent, respectively, of all changesets in set.
    """
    try:
        n = int(n[1])
        if n not in (0, 1, 2):
            raise ValueError
    except (TypeError, ValueError):
        raise error.ParseError(_("^ expects a number 0, 1, or 2"))
    ps = set()
    cl = repo.changelog
    revs = getset(repo, fullreposet(repo), x)
    if n == 0:
        # ^0 is the set itself
        ps.update(revs)
    elif n == 1:
        for r in revs:
            ps.add(cl.parentrevs(r)[0])
    else:
        for r in revs:
            parents = cl.parentrevs(r)
            if len(parents) > 1:
                ps.add(parents[1])
    return subset & ps
1466 1466
def present(repo, subset, x):
    """``present(set)``
    An empty set, if any revision in set isn't found; otherwise,
    all revisions in set.

    If any of specified revisions is not present in the local repository,
    the query is normally aborted. But this predicate allows the query
    to continue even in such cases.
    """
    try:
        return getset(repo, subset, x)
    except error.RepoLookupError:
        # swallow the lookup failure and yield an empty result instead
        return baseset()
1480 1480
def public(repo, subset, x):
    """``public()``
    Changeset in public phase."""
    # i18n: "public" is a keyword
    getargs(x, 0, 0, _("public takes no arguments"))
    getphase = repo._phasecache.phase

    def ispublic(r):
        return getphase(repo, r) == phases.public

    # cache=False: do not memoize the per-rev filter results
    return subset.filter(ispublic, cache=False)
1490 1490
def remote(repo, subset, x):
    """``remote([id [,path]])``
    Local revision that corresponds to the given identifier in a
    remote repository, if present. Here, the '.' identifier is a
    synonym for the current local branch.
    """

    import hg # avoid start-up nasties
    # i18n: "remote" is a keyword
    l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))

    q = '.'
    if len(l) > 0:
        # i18n: "remote" is a keyword
        q = getstring(l[0], _("remote requires a string id"))
    if q == '.':
        # '.' means "the branch of the working directory parent"
        q = repo['.'].branch()

    dest = ''
    if len(l) > 1:
        # i18n: "remote" is a keyword
        dest = getstring(l[1], _("remote requires a repository path"))
    dest = repo.ui.expandpath(dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    # resolve the identifier on the remote, then map it to a local rev
    n = other.lookup(q)
    if n in repo:
        r = repo[n].rev()
        if r in subset:
            return baseset([r])
    return baseset()
1525 1525
def removes(repo, subset, x):
    """``removes(pattern)``
    Changesets which remove files matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "removes" is a keyword
    pat = getstring(x, _("removes requires a pattern"))
    # 2 selects the "removed" field in checkstatus (presumably
    # 0=modified, 1=added — confirm against checkstatus's definition)
    return checkstatus(repo, subset, pat, 2)
1537 1537
def rev(repo, subset, x):
    """``rev(number)``
    Revision with the given numeric identifier.
    """
    # i18n: "rev" is a keyword
    args = getargs(x, 1, 1, _("rev requires one argument"))
    try:
        # i18n: "rev" is a keyword
        r = int(getstring(args[0], _("rev requires a number")))
    except (TypeError, ValueError):
        # i18n: "rev" is a keyword
        raise error.ParseError(_("rev expects a number"))
    # nullrev is always valid; anything else must exist in the changelog
    if r != node.nullrev and r not in repo.changelog:
        return baseset()
    return subset & baseset([r])
1553 1553
def matching(repo, subset, x):
    """``matching(revision [, field])``
    Changesets in which a given set of fields match the set of fields in the
    selected revision or set.

    To match more than one field pass the list of fields to match separated
    by spaces (e.g. ``author description``).

    Valid fields are most regular revision fields and some special fields.

    Regular revision fields are ``description``, ``author``, ``branch``,
    ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
    and ``diff``.
    Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
    contents of the revision. Two revisions matching their ``diff`` will
    also match their ``files``.

    Special fields are ``summary`` and ``metadata``:
    ``summary`` matches the first line of the description.
    ``metadata`` is equivalent to matching ``description user date``
    (i.e. it matches the main metadata fields).

    ``metadata`` is the default field which is used when no fields are
    specified. You can match more than one field at a time.
    """
    # i18n: "matching" is a keyword
    l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))

    revs = getset(repo, fullreposet(repo), l[0])

    fieldlist = ['metadata']
    if len(l) > 1:
        fieldlist = getstring(l[1],
                              # i18n: "matching" is a keyword
                              _("matching requires a string "
                                "as its second argument")).split()

    # Make sure that there are no repeated fields,
    # expand the 'special' 'metadata' field type
    # and check the 'files' whenever we check the 'diff'
    fields = []
    for field in fieldlist:
        if field == 'metadata':
            fields += ['user', 'description', 'date']
        elif field == 'diff':
            # a revision matching the diff must also match the files
            # since matching the diff is very costly, make sure to
            # also match the files first
            fields += ['files', 'diff']
        else:
            if field == 'author':
                field = 'user'
            fields.append(field)
    fields = set(fields)
    if 'summary' in fields and 'description' in fields:
        # If a revision matches its description it also matches its summary
        fields.discard('summary')

    # We may want to match more than one field
    # Not all fields take the same amount of time to be matched
    # Sort the selected fields in order of increasing matching cost
    fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
        'files', 'description', 'substate', 'diff']
    def fieldkeyfunc(f):
        try:
            return fieldorder.index(f)
        except ValueError:
            # assume an unknown field is very costly
            return len(fieldorder)
    fields = list(fields)
    fields.sort(key=fieldkeyfunc)

    # Each field will be matched with its own "getfield" function
    # which will be added to the getfieldfuncs array of functions
    getfieldfuncs = []
    _funcs = {
        'user': lambda r: repo[r].user(),
        'branch': lambda r: repo[r].branch(),
        'date': lambda r: repo[r].date(),
        'description': lambda r: repo[r].description(),
        'files': lambda r: repo[r].files(),
        'parents': lambda r: repo[r].parents(),
        'phase': lambda r: repo[r].phase(),
        'substate': lambda r: repo[r].substate,
        'summary': lambda r: repo[r].description().splitlines()[0],
        'diff': lambda r: list(repo[r].diff(git=True)),
    }
    for info in fields:
        getfield = _funcs.get(info, None)
        if getfield is None:
            raise error.ParseError(
                # i18n: "matching" is a keyword
                _("unexpected field name passed to matching: %s") % info)
        getfieldfuncs.append(getfield)
    # convert the getfield array of functions into a "getinfo" function
    # which returns an array of field values (or a single value if there
    # is only one field to match)
    getinfo = lambda r: [f(r) for f in getfieldfuncs]

    def matches(x):
        for rev in revs:
            target = getinfo(rev)
            match = True
            for n, f in enumerate(getfieldfuncs):
                if target[n] != f(x):
                    match = False
                    # fields are sorted cheapest-first; stop comparing
                    # the remaining (more expensive) fields as soon as
                    # one differs instead of evaluating all of them
                    break
            if match:
                return True
        return False

    return subset.filter(matches)
1665 1665
def reverse(repo, subset, x):
    """``reverse(set)``
    Reverse order of set.
    """
    l = getset(repo, subset, x)
    # reverse in place and return the same (mutated) set object
    l.reverse()
    return l
1673 1673
def roots(repo, subset, x):
    """``roots(set)``
    Changesets in set with no parent changeset in set.
    """
    # evaluate the argument over the whole repo so parent relationships
    # are computed against the full input set, not just 'subset'
    s = getset(repo, spanset(repo), x)
    # restrict the result candidates to what the caller asked for
    subset = baseset([r for r in s if r in subset])
    # drop every candidate that has a parent inside the input set
    cs = _children(repo, subset, s)
    return subset - cs
1682 1682
def secret(repo, subset, x):
    """``secret()``
    Changeset in secret phase."""
    # i18n: "secret" is a keyword
    getargs(x, 0, 0, _("secret takes no arguments"))
    getphase = repo._phasecache.phase
    def issecret(r):
        # keep only revisions whose phase is exactly 'secret'
        return getphase(repo, r) == phases.secret
    # phases may change between evaluations, so do not cache the result
    return subset.filter(issecret, cache=False)
1692 1692
def sort(repo, subset, x):
    """``sort(set[, [-]key...])``
    Sort set by keys. The default sort order is ascending, specify a key
    as ``-key`` to sort in descending order.

    The keys can be:

    - ``rev`` for the revision number,
    - ``branch`` for the branch name,
    - ``desc`` for the commit message (description),
    - ``user`` for user name (``author`` can be used as an alias),
    - ``date`` for the commit date
    """
    # i18n: "sort" is a keyword
    l = getargs(x, 1, 2, _("sort requires one or two arguments"))
    keys = "rev"
    if len(l) == 2:
        # i18n: "sort" is a keyword
        keys = getstring(l[1], _("sort spec must be a string"))

    s = l[0]
    keys = keys.split()
    l = []
    def invert(s):
        # complement every character so lexicographic order reverses,
        # giving descending sorts for string-valued keys
        return "".join(chr(255 - ord(c)) for c in s)
    revs = getset(repo, subset, s)
    # fast paths: plain revision-number sorts need no per-rev key tuples
    if keys == ["rev"]:
        revs.sort()
        return revs
    elif keys == ["-rev"]:
        revs.sort(reverse=True)
        return revs
    for r in revs:
        c = repo[r]
        e = []
        for k in keys:
            if k == 'rev':
                e.append(r)
            elif k == '-rev':
                e.append(-r)
            elif k == 'branch':
                e.append(c.branch())
            elif k == '-branch':
                e.append(invert(c.branch()))
            elif k == 'desc':
                e.append(c.description())
            elif k == '-desc':
                e.append(invert(c.description()))
            elif k in ('user', 'author'):
                # tuple membership instead of the former substring test
                # ("k in 'user author'"), which silently accepted bogus
                # keys like 'use' or 'r a' as a user sort
                e.append(c.user())
            elif k in ('-user', '-author'):
                e.append(invert(c.user()))
            elif k == 'date':
                e.append(c.date()[0])
            elif k == '-date':
                e.append(-c.date()[0])
            else:
                raise error.ParseError(_("unknown sort key %r") % k)
        # append the revision itself as a final tie-breaker
        e.append(r)
        l.append(e)
    l.sort()
    return baseset([e[-1] for e in l])
1755 1755
def _stringmatcher(pattern):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = _stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])
    """
    if pattern.startswith('re:'):
        # strip the 're:' prefix and use regex search semantics
        pattern = pattern[3:]
        try:
            regex = re.compile(pattern)
        except re.error, e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        # explicit literal match (allows matching strings that begin
        # with 're:')
        pattern = pattern[8:]
    # default: literal equality against the (possibly stripped) pattern
    return 'literal', pattern, pattern.__eq__
1794 1794
def _substringmatcher(pattern):
    # Like _stringmatcher, but literal patterns match as substrings
    # instead of requiring full equality.
    kind, pattern, matcher = _stringmatcher(pattern)
    if kind == 'literal':
        def matcher(s, _needle=pattern):
            return _needle in s
    return kind, pattern, matcher
1800 1800
def tag(repo, subset, x):
    """``tag([name])``
    The specified tag by name, or all tagged revisions if no name is given.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a tag that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "tag" is a keyword
    args = getargs(x, 0, 1, _("tag takes one or no arguments"))
    cl = repo.changelog
    if args:
        pattern = getstring(args[0],
                            # i18n: "tag" is a keyword
                            _('the argument to tag must be a string'))
        kind, pattern, matcher = _stringmatcher(pattern)
        if kind == 'literal':
            # avoid resolving all tags
            tn = repo._tagscache.tags.get(pattern, None)
            if tn is None:
                raise error.RepoLookupError(_("tag '%s' does not exist")
                                            % pattern)
            s = set([repo[tn].rev()])
        else:
            # regex (or other) pattern: scan the full tag list
            s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
    else:
        # no argument: every tagged revision, excluding the implicit 'tip'
        s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
    return subset & s
1829 1829
def tagged(repo, subset, x):
    # Synonym for tag(); deliberately left without a docstring,
    # presumably so it does not get its own entry in generated help --
    # verify before adding one.
    return tag(repo, subset, x)
1832 1832
def unstable(repo, subset, x):
    """``unstable()``
    Non-obsolete changesets with obsolete ancestors.
    """
    # i18n: "unstable" is a keyword
    getargs(x, 0, 0, _("unstable takes no arguments"))
    # the 'unstable' revisions as computed by the obsolete module
    unstables = obsmod.getrevs(repo, 'unstable')
    return subset & unstables
1841 1841
1842 1842
def user(repo, subset, x):
    """``user(string)``
    User name contains string. The match is case-insensitive.

    If `string` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a user that actually contains `re:`, use
    the prefix `literal:`.
    """
    # delegates to author(); the separate docstring gives 'user' its own
    # help text
    return author(repo, subset, x)
1852 1852
# for internal use
def _list(repo, subset, x):
    # Expand a \0-separated list of revision identifiers into the
    # corresponding revisions present in subset.
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    wanted = [repo[r].rev() for r in s.split('\0')]
    return baseset([r for r in wanted if r in subset])
1861 1861
# for internal use
def _intlist(repo, subset, x):
    # Expand a \0-separated list of integer revision numbers into the
    # corresponding revisions present in subset.
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    wanted = [int(r) for r in s.split('\0')]
    return baseset([r for r in wanted if r in subset])
1870 1870
# for internal use
def _hexlist(repo, subset, x):
    # Expand a \0-separated list of hex node ids into the corresponding
    # revisions present in subset.
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    cl = repo.changelog
    wanted = [cl.rev(node.bin(r)) for r in s.split('\0')]
    return baseset([r for r in wanted if r in subset])
1880 1880
# Map of revset predicate name -> implementation.  Each function takes
# (repo, subset, x) where x is the parsed argument tree, and returns the
# matching revisions.  Names beginning with '_' are internal-only and
# not exposed to users.
symbols = {
    "adds": adds,
    "all": getall,
    "ancestor": ancestor,
    "ancestors": ancestors,
    "_firstancestors": _firstancestors,
    "author": author,
    "bisect": bisect,
    "bisected": bisected,
    "bookmark": bookmark,
    "branch": branch,
    "branchpoint": branchpoint,
    "bumped": bumped,
    "bundle": bundle,
    "children": children,
    "closed": closed,
    "contains": contains,
    "converted": converted,
    "date": date,
    "desc": desc,
    "descendants": descendants,
    "_firstdescendants": _firstdescendants,
    "destination": destination,
    "divergent": divergent,
    "draft": draft,
    "extinct": extinct,
    "extra": extra,
    "file": hasfile,
    "filelog": filelog,
    "first": first,
    "follow": follow,
    "_followfirst": _followfirst,
    "grep": grep,
    "head": head,
    "heads": heads,
    "hidden": hidden,
    "id": node_,
    "keyword": keyword,
    "last": last,
    "limit": limit,
    "_matchfiles": _matchfiles,
    "max": maxrev,
    "merge": merge,
    "min": minrev,
    "modifies": modifies,
    "named": named,
    "obsolete": obsolete,
    "only": only,
    "origin": origin,
    "outgoing": outgoing,
    "p1": p1,
    "p2": p2,
    "parents": parents,
    "present": present,
    "public": public,
    "remote": remote,
    "removes": removes,
    "rev": rev,
    "reverse": reverse,
    "roots": roots,
    "sort": sort,
    "secret": secret,
    "matching": matching,
    "tag": tag,
    "tagged": tagged,
    "user": user,
    "unstable": unstable,
    "_list": _list,
    "_intlist": _intlist,
    "_hexlist": _hexlist,
}
1952 1952
# symbols which can't be used for a DoS attack for any given input
# (e.g. those which accept regexes as plain strings shouldn't be included)
# functions that just return a lot of changesets (like all) don't count here
# (must stay a subset of the keys of 'symbols' above)
safesymbols = set([
    "adds",
    "all",
    "ancestor",
    "ancestors",
    "_firstancestors",
    "author",
    "bisect",
    "bisected",
    "bookmark",
    "branch",
    "branchpoint",
    "bumped",
    "bundle",
    "children",
    "closed",
    "converted",
    "date",
    "desc",
    "descendants",
    "_firstdescendants",
    "destination",
    "divergent",
    "draft",
    "extinct",
    "extra",
    "file",
    "filelog",
    "first",
    "follow",
    "_followfirst",
    "head",
    "heads",
    "hidden",
    "id",
    "keyword",
    "last",
    "limit",
    "_matchfiles",
    "max",
    "merge",
    "min",
    "modifies",
    "obsolete",
    "only",
    "origin",
    "outgoing",
    "p1",
    "p2",
    "parents",
    "present",
    "public",
    "remote",
    "removes",
    "rev",
    "reverse",
    "roots",
    "sort",
    "secret",
    "matching",
    "tag",
    "tagged",
    "user",
    "unstable",
    "_list",
    "_intlist",
    "_hexlist",
])
2024 2024
# Map of parse-tree node type -> evaluation function.  These are the
# operator/structural nodes (as opposed to the named predicates in
# 'symbols', which are dispatched through "func").
methods = {
    "range": rangeset,
    "dagrange": dagrange,
    "string": stringset,
    "symbol": symbolset,
    "and": andset,
    "or": orset,
    "not": notset,
    "list": listset,
    "func": func,
    "ancestor": ancestorspec,
    "parent": parentspec,
    "parentpost": p1,
    "only": only,
    "onlypost": only,
}
2041 2041
def optimize(x, small):
    """Optimize a parsed revset tree for evaluation.

    Returns a ``(weight, tree)`` pair: ``tree`` is an equivalent
    (possibly rewritten) parse tree and ``weight`` is a heuristic cost
    estimate, used to evaluate the cheaper operand of an 'and' first.
    ``small`` hints that the caller prefers expressions likely to
    produce few revisions.
    """
    if x is None:
        return 0, x

    smallbonus = 1
    if small:
        smallbonus = .5

    op = x[0]
    if op == 'minus':
        # rewrite "a - b" as "a and not b"
        return optimize(('and', x[1], ('not', x[2])), small)
    elif op == 'only':
        return optimize(('func', ('symbol', 'only'),
                         ('list', x[1], x[2])), small)
    elif op == 'dagrangepre':
        return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
    elif op == 'dagrangepost':
        return optimize(('func', ('symbol', 'descendants'), x[1]), small)
    elif op == 'rangepre':
        return optimize(('range', ('string', '0'), x[1]), small)
    elif op == 'rangepost':
        return optimize(('range', x[1], ('string', 'tip')), small)
    elif op == 'negate':
        return optimize(('string',
                         '-' + getstring(x[1], _("can't negate that"))), small)
    elif op in ('string', 'symbol', 'negate'):
        return smallbonus, x # single revisions are small
    elif op == 'and':
        wa, ta = optimize(x[1], True)
        wb, tb = optimize(x[2], True)

        # (::x and not ::y)/(not ::y and ::x) have a fast path
        def isonly(revs, bases):
            return (
                revs[0] == 'func'
                and getstring(revs[1], _('not a symbol')) == 'ancestors'
                and bases[0] == 'not'
                and bases[1][0] == 'func'
                and getstring(bases[1][1], _('not a symbol')) == 'ancestors')

        w = min(wa, wb)
        if isonly(ta, tb):
            return w, ('func', ('symbol', 'only'), ('list', ta[2], tb[1][2]))
        if isonly(tb, ta):
            return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2]))

        # evaluate the cheaper operand first
        if wa > wb:
            return w, (op, tb, ta)
        return w, (op, ta, tb)
    elif op == 'or':
        wa, ta = optimize(x[1], False)
        wb, tb = optimize(x[2], False)
        if wb < wa:
            wb, wa = wa, wb
        return max(wa, wb), (op, ta, tb)
    elif op == 'not':
        # postpone 'not' expansion
        o = optimize(x[1], not small)
        return o[0], (op, o[1])
    elif op == 'parentpost':
        o = optimize(x[1], small)
        return o[0], (op, o[1])
    elif op == 'group':
        return optimize(x[1], small)
    elif op in ('dagrange', 'range', 'list', 'parent', 'ancestorspec'):
        if op == 'parent':
            # x^:y means (x^) : y, not x ^ (:y)
            post = ('parentpost', x[1])
            if x[2][0] == 'dagrangepre':
                return optimize(('dagrange', post, x[2][1]), small)
            elif x[2][0] == 'rangepre':
                return optimize(('range', post, x[2][1]), small)

        wa, ta = optimize(x[1], small)
        wb, tb = optimize(x[2], small)
        return wa + wb, (op, ta, tb)
    elif op == 'func':
        f = getstring(x[1], _("not a symbol"))
        wa, ta = optimize(x[2], small)
        # Weight predicates by how expensive they are to evaluate.  Use
        # tuple membership rather than the former substring tests
        # against space-separated strings, which matched accidentally
        # (e.g. 'rev' inside "reverse limit first _intlist" gave rev()
        # weight 0).
        if f in ('author', 'branch', 'closed', 'date', 'desc', 'file',
                 'grep', 'keyword', 'outgoing', 'user'):
            w = 10 # slow
        elif f in ('modifies', 'adds', 'removes'):
            w = 30 # slower
        elif f == "contains":
            w = 100 # very slow
        elif f == "ancestor":
            w = 1 * smallbonus
        elif f in ('reverse', 'limit', 'first', '_intlist'):
            w = 0
        elif f == 'sort':
            w = 10 # assume most sorts look at changelog
        else:
            w = 1
        return w + wa, (op, x[1], ta)
    return 1, x
2137 2137
2138 2138 _aliasarg = ('func', ('symbol', '_aliasarg'))
2139 2139 def _getaliasarg(tree):
2140 2140 """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X))
2141 2141 return X, None otherwise.
2142 2142 """
2143 2143 if (len(tree) == 3 and tree[:2] == _aliasarg
2144 2144 and tree[2][0] == 'string'):
2145 2145 return tree[2][1]
2146 2146 return None
2147 2147
def _checkaliasarg(tree, known=None):
    """Check tree contains no _aliasarg construct or only ones which
    value is in known. Used to avoid alias placeholders injection.
    """
    if not isinstance(tree, tuple):
        return
    arg = _getaliasarg(tree)
    if arg is not None and (not known or arg not in known):
        # a placeholder the expander did not put there itself
        raise error.ParseError(_("not a function: %s") % '_aliasarg')
    for child in tree:
        _checkaliasarg(child, known)
2158 2158
# the set of valid characters for the initial letter of symbols in
# alias declarations and definitions
# (alphanumerics, '._@$', and any byte > 127; '$' supports alias
# arguments like $1 -- note: Python 2 byte-wise chr/xrange iteration)
_aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)]
                           if c.isalnum() or c in '._@$' or ord(c) > 127)
2163 2163
def _tokenizealias(program, lookup=None):
    """Parse alias declaration/definition into a stream of tokens

    This allows symbol names to use also ``$`` as an initial letter
    (for backward compatibility), and callers of this function should
    examine whether ``$`` is used also for unexpected symbols or not.
    """
    # same tokenizer as regular revsets, but with the widened set of
    # initial symbol letters (includes '$')
    return tokenize(program, lookup=lookup,
                    syminitletters=_aliassyminitletters)
2173 2173
def _parsealiasdecl(decl):
    """Parse alias declaration ``decl``

    This returns ``(name, tree, args, errorstr)`` tuple:

    - ``name``: of declared alias (may be ``decl`` itself at error)
    - ``tree``: parse result (or ``None`` at error)
    - ``args``: list of alias argument names (or None for symbol declaration)
    - ``errorstr``: detail about detected error (or None)

    >>> _parsealiasdecl('foo')
    ('foo', ('symbol', 'foo'), None, None)
    >>> _parsealiasdecl('$foo')
    ('$foo', None, None, "'$' not for alias arguments")
    >>> _parsealiasdecl('foo::bar')
    ('foo::bar', None, None, 'invalid format')
    >>> _parsealiasdecl('foo bar')
    ('foo bar', None, None, 'at 4: invalid token')
    >>> _parsealiasdecl('foo()')
    ('foo', ('func', ('symbol', 'foo')), [], None)
    >>> _parsealiasdecl('$foo()')
    ('$foo()', None, None, "'$' not for alias arguments")
    >>> _parsealiasdecl('foo($1, $2)')
    ('foo', ('func', ('symbol', 'foo')), ['$1', '$2'], None)
    >>> _parsealiasdecl('foo(bar_bar, baz.baz)')
    ('foo', ('func', ('symbol', 'foo')), ['bar_bar', 'baz.baz'], None)
    >>> _parsealiasdecl('foo($1, $2, nested($1, $2))')
    ('foo($1, $2, nested($1, $2))', None, None, 'invalid argument list')
    >>> _parsealiasdecl('foo(bar($1, $2))')
    ('foo(bar($1, $2))', None, None, 'invalid argument list')
    >>> _parsealiasdecl('foo("string")')
    ('foo("string")', None, None, 'invalid argument list')
    >>> _parsealiasdecl('foo($1, $2')
    ('foo($1, $2', None, None, 'at 10: unexpected token: end')
    >>> _parsealiasdecl('foo("string')
    ('foo("string', None, None, 'at 5: unterminated string')
    >>> _parsealiasdecl('foo($1, $2, $1)')
    ('foo', None, None, 'argument names collide with each other')
    """
    # use the alias tokenizer so '$'-prefixed names are tokenized instead
    # of rejected outright; they are validated explicitly below
    p = parser.parser(_tokenizealias, elements)
    try:
        tree, pos = p.parse(decl)
        if (pos != len(decl)):
            # trailing characters mean the declaration was not fully
            # consumed by the parser
            raise error.ParseError(_('invalid token'), pos)

        if isvalidsymbol(tree):
            # "name = ...." style
            name = getsymbol(tree)
            if name.startswith('$'):
                return (decl, None, None, _("'$' not for alias arguments"))
            return (name, ('symbol', name), None, None)

        if isvalidfunc(tree):
            # "name(arg, ....) = ...." style
            name = getfuncname(tree)
            if name.startswith('$'):
                return (decl, None, None, _("'$' not for alias arguments"))
            args = []
            for arg in getfuncargs(tree):
                # arguments must be plain symbols (no nesting, no strings)
                if not isvalidsymbol(arg):
                    return (decl, None, None, _("invalid argument list"))
                args.append(getsymbol(arg))
            if len(args) != len(set(args)):
                return (name, None, None,
                        _("argument names collide with each other"))
            return (name, ('func', ('symbol', name)), args, None)

        return (decl, None, None, _("invalid format"))
    except error.ParseError, inst:
        return (decl, None, None, parseerrordetail(inst))
2244 2244
def _parsealiasdefn(defn, args):
    """Parse alias definition ``defn``

    This function also replaces alias argument references in the
    specified definition by ``_aliasarg(ARGNAME)``.

    ``args`` is a list of alias argument names, or None if the alias
    is declared as a symbol.

    This returns "tree" as parsing result.

    >>> args = ['$1', '$2', 'foo']
    >>> print prettyformat(_parsealiasdefn('$1 or foo', args))
    (or
      (func
        ('symbol', '_aliasarg')
        ('string', '$1'))
      (func
        ('symbol', '_aliasarg')
        ('string', 'foo')))
    >>> try:
    ...     _parsealiasdefn('$1 or $bar', args)
    ... except error.ParseError, inst:
    ...     print parseerrordetail(inst)
    at 6: '$' not for alias arguments
    >>> args = ['$1', '$10', 'foo']
    >>> print prettyformat(_parsealiasdefn('$10 or foobar', args))
    (or
      (func
        ('symbol', '_aliasarg')
        ('string', '$10'))
      ('symbol', 'foobar'))
    >>> print prettyformat(_parsealiasdefn('"$1" or "foo"', args))
    (or
      ('string', '$1')
      ('string', 'foo'))
    """
    def tokenizedefn(program, lookup=None):
        # wraps _tokenizealias, rewriting argument-name symbols into the
        # _aliasarg('NAME') placeholder form on the fly
        if args:
            argset = set(args)
        else:
            argset = set()

        for t, value, pos in _tokenizealias(program, lookup=lookup):
            if t == 'symbol':
                if value in argset:
                    # emulate tokenization of "_aliasarg('ARGNAME')":
                    # "_aliasarg()" is an unknown symbol only used separate
                    # alias argument placeholders from regular strings.
                    yield ('symbol', '_aliasarg', pos)
                    yield ('(', None, pos)
                    yield ('string', value, pos)
                    yield (')', None, pos)
                    continue
                elif value.startswith('$'):
                    # '$'-names other than declared arguments are invalid
                    raise error.ParseError(_("'$' not for alias arguments"),
                                           pos)
            yield (t, value, pos)

    p = parser.parser(tokenizedefn, elements)
    tree, pos = p.parse(defn)
    if pos != len(defn):
        # trailing characters the parser did not consume
        raise error.ParseError(_('invalid token'), pos)
    return tree
2309 2309
class revsetalias(object):
    # whether own `error` information is already shown or not.
    # this avoids showing same warning multiple times at each `findaliases`.
    warned = False

    def __init__(self, name, value):
        '''Aliases like:

        h = heads(default)
        b($1) = ancestors($1) - ancestors(default)
        '''
        # name: alias name; tree: parsed declaration; args: argument
        # names (None for symbol-style aliases); error: parse failure
        # detail, or None on success
        self.name, self.tree, self.args, self.error = _parsealiasdecl(name)
        if self.error:
            self.error = _('failed to parse the declaration of revset alias'
                           ' "%s": %s') % (self.name, self.error)
            return

        try:
            # replacement: parse tree of the definition, with argument
            # references rewritten into _aliasarg placeholders
            self.replacement = _parsealiasdefn(value, self.args)
            # Check for placeholder injection
            _checkaliasarg(self.replacement, self.args)
        except error.ParseError, inst:
            self.error = _('failed to parse the definition of revset alias'
                           ' "%s": %s') % (self.name, parseerrordetail(inst))
2334 2334
2335 2335 def _getalias(aliases, tree):
2336 2336 """If tree looks like an unexpanded alias, return it. Return None
2337 2337 otherwise.
2338 2338 """
2339 2339 if isinstance(tree, tuple) and tree:
2340 2340 if tree[0] == 'symbol' and len(tree) == 2:
2341 2341 name = tree[1]
2342 2342 alias = aliases.get(name)
2343 2343 if alias and alias.args is None and alias.tree == tree:
2344 2344 return alias
2345 2345 if tree[0] == 'func' and len(tree) > 1:
2346 2346 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
2347 2347 name = tree[1][1]
2348 2348 alias = aliases.get(name)
2349 2349 if alias and alias.args is not None and alias.tree == tree[:2]:
2350 2350 return alias
2351 2351 return None
2352 2352
def _expandargs(tree, args):
    """Replace _aliasarg instances with the substitution value of the
    same name in args, recursively.
    """
    if not isinstance(tree, tuple) or not tree:
        return tree
    name = _getaliasarg(tree)
    if name is not None:
        # placeholder node: substitute the caller-supplied tree
        return args[name]
    return tuple(_expandargs(child, args) for child in tree)
2363 2363
def _expandaliases(aliases, tree, expanding, cache):
    """Expand aliases in tree, recursively.

    'aliases' is a dictionary mapping user defined aliases to
    revsetalias objects.
    """
    if not isinstance(tree, tuple):
        # Do not expand raw strings
        return tree
    alias = _getalias(aliases, tree)
    if alias is not None:
        if alias.error:
            raise util.Abort(alias.error)
        if alias in expanding:
            # 'expanding' holds the chain of aliases currently being
            # expanded; meeting one again means a cycle
            raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                     'detected') % alias.name)
        expanding.append(alias)
        if alias.name not in cache:
            # memoize the fully expanded replacement per alias name
            cache[alias.name] = _expandaliases(aliases, alias.replacement,
                                               expanding, cache)
        result = cache[alias.name]
        expanding.pop()
        if alias.args is not None:
            # function-style alias: substitute the actual arguments
            # (themselves alias-expanded) for the placeholders
            l = getlist(tree[2])
            if len(l) != len(alias.args):
                raise error.ParseError(
                    _('invalid number of arguments: %s') % len(l))
            l = [_expandaliases(aliases, a, [], cache) for a in l]
            result = _expandargs(result, dict(zip(alias.args, l)))
    else:
        # not an alias node: expand children recursively
        result = tuple(_expandaliases(aliases, t, expanding, cache)
                       for t in tree)
    return result
2397 2397
def findaliases(ui, tree, showwarning=None):
    """Expand user-configured [revsetalias] definitions in tree.

    When showwarning is given, it is invoked once for each broken alias
    definition that was never actually referenced by the query.
    """
    _checkaliasarg(tree)
    byname = {}
    for key, value in ui.configitems('revsetalias'):
        entry = revsetalias(key, value)
        byname[entry.name] = entry
    expanded = _expandaliases(byname, tree, [], {})
    if showwarning:
        # warn about problematic (but not referred) aliases
        for aliasname, entry in sorted(byname.iteritems()):
            if entry.error and not entry.warned:
                showwarning(_('warning: %s\n') % (entry.error))
                entry.warned = True
    return expanded
2412 2412
def foldconcat(tree):
    """Fold elements to be concatenated by `##` into a single string node.
    """
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return tree
    if tree[0] != '_concat':
        # plain node: fold any concatenations hiding in the children
        return tuple(foldconcat(t) for t in tree)

    def pieces(node):
        # yield the text of every leaf under a (possibly nested) _concat,
        # left to right
        for child in node[1:]:
            if child[0] == '_concat':
                for text in pieces(child):
                    yield text
            elif child[0] in ('string', 'symbol'):
                yield child[1]
            else:
                msg = _("\"##\" can't concatenate \"%s\" element") % (child[0])
                raise error.ParseError(msg)

    return ('string', ''.join(pieces(tree)))
2433 2433
def parse(spec, lookup=None):
    """Parse spec into a revset syntax tree; return (tree, pos)."""
    return parser.parser(tokenize, elements).parse(spec, lookup=lookup)
2437 2437
def match(ui, spec, repo=None):
    """Compile a revset specification into a matcher function.

    The returned callable takes (repo, subset=None) and evaluates the
    query against subset; when subset is omitted, the whole repository
    is queried.  Raises error.ParseError on a malformed spec.
    """
    if not spec:
        raise error.ParseError(_("empty query"))
    lookup = repo.__contains__ if repo else None
    tree, pos = parse(spec, lookup)
    if pos != len(spec):
        # trailing garbage after a syntactically complete expression
        raise error.ParseError(_("invalid token"), pos)
    if ui:
        tree = findaliases(ui, tree, showwarning=ui.warn)
    tree = foldconcat(tree)
    weight, tree = optimize(tree, True)
    def mfunc(repo, subset=None):
        if subset is None:
            subset = spanset(repo)
        if not util.safehasattr(subset, 'isascending'):
            # plain iterables are promoted to a smartset first
            subset = baseset(subset)
        return getset(repo, subset, tree)
    return mfunc
2458 2460
def formatspec(expr, *args):
    '''
    This is a convenience function for using revsets internally, and
    escapes arguments appropriately. Aliases are intentionally ignored
    so that intended expression behavior isn't accidentally subverted.

    Supported arguments:

    %r = revset expression, parenthesized
    %d = int(arg), no quoting
    %s = string(arg), escaped and single-quoted
    %b = arg.branch(), escaped and single-quoted
    %n = hex(arg), single-quoted
    %% = a literal '%'

    Prefixing the type with 'l' specifies a parenthesized list of that type.

    >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
    '(10 or 11):: and ((this()) or (that()))'
    >>> formatspec('%d:: and not %d::', 10, 20)
    '10:: and not 20::'
    >>> formatspec('%ld or %ld', [], [1])
    "_list('') or 1"
    >>> formatspec('keyword(%s)', 'foo\\xe9')
    "keyword('foo\\\\xe9')"
    >>> b = lambda: 'default'
    >>> b.branch = b
    >>> formatspec('branch(%b)', b)
    "branch('default')"
    >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
    "root(_list('a\\x00b\\x00c\\x00d'))"
    '''

    def _quote(thing):
        # single-quoted, backslash-escaped representation
        return repr(str(thing))

    def _fmtone(code, value):
        # render a single %-argument according to its type code
        if code == 'd':
            return str(int(value))
        if code == 's':
            return _quote(value)
        if code == 'r':
            parse(value) # make sure syntax errors are confined
            return '(%s)' % value
        if code == 'n':
            return _quote(node.hex(value))
        if code == 'b':
            return _quote(value.branch())

    def _fmtlist(values, code):
        # render a %l-argument: a parenthesized list of `code`-typed values
        size = len(values)
        if size == 0:
            return "_list('')"
        if size == 1:
            return _fmtone(code, values[0])
        if code == 'd':
            return "_intlist('%s')" % "\0".join(str(int(v)) for v in values)
        if code == 's':
            return "_list('%s')" % "\0".join(values)
        if code == 'n':
            return "_hexlist('%s')" % "\0".join(node.hex(v) for v in values)
        if code == 'b':
            return "_list('%s')" % "\0".join(v.branch() for v in values)
        # %lr: no joined-list form exists, split recursively and 'or' them
        half = size // 2
        return '(%s or %s)' % (_fmtlist(values[:half], code),
                               _fmtlist(values[half:], code))

    out = []
    pos = 0
    argindex = 0
    while pos < len(expr):
        ch = expr[pos]
        if ch == '%':
            pos += 1
            code = expr[pos]
            if code == '%':
                out.append(code)
            elif code in 'dsnbr':
                out.append(_fmtone(code, args[argindex]))
                argindex += 1
            elif code == 'l':
                # a list of some type: the next character gives the type
                pos += 1
                code = expr[pos]
                out.append(_fmtlist(list(args[argindex]), code))
                argindex += 1
            else:
                raise util.Abort('unexpected revspec format character %s'
                                 % code)
        else:
            out.append(ch)
        pos += 1

    return ''.join(out)
2552 2554
def prettyformat(tree):
    """Render a parse tree as an indented, human-readable multi-line string."""
    lines = []

    def visit(node, level):
        if not isinstance(node, tuple) or node[0] in ('string', 'symbol'):
            lines.append((level, str(node)))
            return
        lines.append((level, '(%s' % node[0]))
        for child in node[1:]:
            visit(child, level + 1)
        # close the paren on the most recently emitted line
        lines[-1] = (lines[-1][0], lines[-1][1] + ')')

    visit(tree, 0)
    return '\n'.join((' ' * level + text) for level, text in lines)
2567 2569
def depth(tree):
    """Return the nesting depth of a parse tree (non-tuple leaves are 0)."""
    if not isinstance(tree, tuple):
        return 0
    return 1 + max(depth(subtree) for subtree in tree)
2573 2575
def funcsused(tree):
    """Return the set of revset function names referenced anywhere in tree."""
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return set()
    funcs = set()
    for subtree in tree[1:]:
        funcs.update(funcsused(subtree))
    if tree[0] == 'func':
        # ('func', ('symbol', name), args...) -- record the function name
        funcs.add(tree[1][1])
    return funcs
2584 2586
class abstractsmartset(object):
    """Base class for all smartset implementations.

    A smartset is an ordered collection of revision numbers offering lazy
    iteration, fast membership testing and set arithmetic.  Subclasses must
    implement the abstract primitives below.
    """

    def __nonzero__(self):
        """True if the smartset is not empty"""
        raise NotImplementedError()

    def __contains__(self, rev):
        """provide fast membership testing"""
        raise NotImplementedError()

    def __iter__(self):
        """iterate the set in the order it is supposed to be iterated"""
        raise NotImplementedError()

    # Attributes containing a function to perform a fast iteration in a given
    # direction. A smartset can have none, one, or both defined.
    #
    # Default value is None instead of a function returning None to avoid
    # initializing an iterator just for testing if a fast method exists.
    fastasc = None
    fastdesc = None

    def isascending(self):
        """True if the set will iterate in ascending order"""
        raise NotImplementedError()

    def isdescending(self):
        """True if the set will iterate in descending order"""
        raise NotImplementedError()

    def min(self):
        """return the minimum element in the set"""
        if self.fastasc is not None:
            # the first element of an ascending iteration is the smallest
            for r in self.fastasc():
                return r
            raise ValueError('arg is an empty sequence')
        return min(self)

    def max(self):
        """return the maximum element in the set"""
        if self.fastdesc is not None:
            # the first element of a descending iteration is the largest
            for r in self.fastdesc():
                return r
            raise ValueError('arg is an empty sequence')
        return max(self)

    def first(self):
        """return the first element in the set (user iteration perspective)

        Return None if the set is empty"""
        raise NotImplementedError()

    def last(self):
        """return the last element in the set (user iteration perspective)

        Return None if the set is empty"""
        raise NotImplementedError()

    def __len__(self):
        """return the length of the smartsets

        This can be expensive on smartset that could be lazy otherwise."""
        raise NotImplementedError()

    def reverse(self):
        """reverse the expected iteration order"""
        raise NotImplementedError()

    def sort(self, reverse=True):
        """get the set to iterate in an ascending or descending order"""
        raise NotImplementedError()

    def __and__(self, other):
        """Returns a new object with the intersection of the two collections.

        This is part of the mandatory API for smartset."""
        return self.filter(other.__contains__, cache=False)

    def __add__(self, other):
        """Returns a new object with the union of the two collections.

        This is part of the mandatory API for smartset."""
        return addset(self, other)

    def __sub__(self, other):
        """Returns a new object with the subtraction of the two collections.

        This is part of the mandatory API for smartset."""
        c = other.__contains__
        return self.filter(lambda r: not c(r), cache=False)

    def filter(self, condition, cache=True):
        """Returns this smartset filtered by condition as a new smartset.

        `condition` is a callable which takes a revision number and returns a
        boolean.

        This is part of the mandatory API for smartset."""
        # builtins cannot be cached (no func_code attribute), but they also
        # do not need to be
        if cache and util.safehasattr(condition, 'func_code'):
            condition = util.cachefunc(condition)
        return filteredset(self, condition)
2687 2689
class baseset(abstractsmartset):
    """Basic data structure that represents a revset and contains the basic
    operation that it should be able to perform.

    Every method in this class should be implemented by any smartset class.
    """
    def __init__(self, data=()):
        # elements are kept in a plain list; _ascending stays None until
        # sort()/reverse() is called, meaning "iterate in insertion order"
        if not isinstance(data, list):
            data = list(data)
        self._list = data
        self._ascending = None

    @util.propertycache
    def _set(self):
        # lazily-built set of the elements, for O(1) membership tests
        return set(self._list)

    @util.propertycache
    def _asclist(self):
        # lazily-built ascending copy backing the ordered iterators
        asclist = self._list[:]
        asclist.sort()
        return asclist

    def __iter__(self):
        if self._ascending is None:
            return iter(self._list)
        elif self._ascending:
            return iter(self._asclist)
        else:
            return reversed(self._asclist)

    def fastasc(self):
        return iter(self._asclist)

    def fastdesc(self):
        return reversed(self._asclist)

    @util.propertycache
    def __contains__(self):
        # bind membership testing directly to the underlying set's method
        return self._set.__contains__

    def __nonzero__(self):
        return bool(self._list)

    def sort(self, reverse=False):
        self._ascending = not bool(reverse)

    def reverse(self):
        if self._ascending is None:
            # unordered: reversing flips the insertion order in place
            self._list.reverse()
        else:
            self._ascending = not self._ascending

    def __len__(self):
        return len(self._list)

    def isascending(self):
        """Returns True if the collection is ascending order, False if not.

        This is part of the mandatory API for smartset."""
        if len(self) <= 1:
            return True
        return self._ascending is not None and self._ascending

    def isdescending(self):
        """Returns True if the collection is descending order, False if not.

        This is part of the mandatory API for smartset."""
        if len(self) <= 1:
            return True
        return self._ascending is not None and not self._ascending

    def first(self):
        if self:
            if self._ascending is None:
                return self._list[0]
            elif self._ascending:
                return self._asclist[0]
            else:
                return self._asclist[-1]
        return None

    def last(self):
        if self:
            if self._ascending is None:
                return self._list[-1]
            elif self._ascending:
                return self._asclist[-1]
            else:
                return self._asclist[0]
        return None
2778 2780
class filteredset(abstractsmartset):
    """Duck type for baseset class which iterates lazily over the revisions in
    the subset and contains a function which tests for membership in the
    revset
    """
    def __init__(self, subset, condition=lambda x: True):
        """
        subset: a smartset providing the candidate revisions
        condition: a function that decide whether a revision in the subset
        belongs to the revset or not.
        """
        self._subset = subset
        self._condition = condition
        # memoized results of membership checks, keyed by revision
        self._cache = {}

    def __contains__(self, x):
        c = self._cache
        if x not in c:
            # cache both the subset membership and the condition result
            v = c[x] = x in self._subset and self._condition(x)
            return v
        return c[x]

    def __iter__(self):
        return self._iterfilter(self._subset)

    def _iterfilter(self, it):
        # generator yielding only the elements of `it` passing the condition
        cond = self._condition
        for x in it:
            if cond(x):
                yield x

    @property
    def fastasc(self):
        # fast ascending iteration is available iff the subset has one
        it = self._subset.fastasc
        if it is None:
            return None
        return lambda: self._iterfilter(it())

    @property
    def fastdesc(self):
        # fast descending iteration is available iff the subset has one
        it = self._subset.fastdesc
        if it is None:
            return None
        return lambda: self._iterfilter(it())

    def __nonzero__(self):
        for r in self:
            return True
        return False

    def __len__(self):
        # Basic implementation to be changed in future patches.
        l = baseset([r for r in self])
        return len(l)

    def sort(self, reverse=False):
        self._subset.sort(reverse=reverse)

    def reverse(self):
        self._subset.reverse()

    def isascending(self):
        return self._subset.isascending()

    def isdescending(self):
        return self._subset.isdescending()

    def first(self):
        for x in self:
            return x
        return None

    def last(self):
        """Return the last matching element in iteration order, or None.

        Bug fix: the previous code tested the bound methods themselves
        (`self._subset.isascending`), which are always truthy, so the
        descending branch was unreachable; it also wrongly used fastdesc
        for descending subsets, whose last element is their *smallest*.
        """
        it = None
        if self._subset.isascending():
            # ascending iteration: the last element is the largest match
            it = self.fastdesc
        elif self._subset.isdescending():
            # descending iteration: the last element is the smallest match
            it = self.fastasc
        if it is None:
            # slowly consume everything. This needs improvement
            it = lambda: reversed(list(self))
        for x in it():
            return x
        return None
2862 2864
class addset(abstractsmartset):
    """Represent the addition of two sets

    Wrapper structure for lazily adding two structures without losing much
    performance on the __contains__ method

    If the ascending attribute is set, that means the two structures are
    ordered in either an ascending or descending way. Therefore, we can add
    them maintaining the order by iterating over both at the same time
    """
    def __init__(self, revs1, revs2, ascending=None):
        self._r1 = revs1
        self._r2 = revs2
        self._iter = None
        self._ascending = ascending
        self._genlist = None
        self._asclist = None

    def __len__(self):
        return len(self._list)

    def __nonzero__(self):
        return bool(self._r1) or bool(self._r2)

    @util.propertycache
    def _list(self):
        # materialized, duplicate-free union of the two sets
        if not self._genlist:
            self._genlist = baseset(self._iterator())
        return self._genlist

    def _iterator(self):
        """Iterate over both collections without repeating elements

        If the ascending attribute is not set, iterate over the first one and
        then over the second one checking for membership on the first one so
        we don't yield any duplicates.

        If the ascending attribute is set, iterate over both collections at
        the same time, yielding only one value at a time in the given order.
        """
        if self._ascending is None:
            def gen():
                for r in self._r1:
                    yield r
                inr1 = self._r1.__contains__
                for r in self._r2:
                    if not inr1(r):
                        yield r
            gen = gen()
        else:
            iter1 = iter(self._r1)
            iter2 = iter(self._r2)
            gen = self._iterordered(self._ascending, iter1, iter2)
        return gen

    def __iter__(self):
        if self._ascending is None:
            if self._genlist:
                return iter(self._genlist)
            return iter(self._iterator())
        self._trysetasclist()
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is None:
            # consume the gen and try again
            self._list
            return iter(self)
        return it()

    def _trysetasclist(self):
        """populate the _asclist attribute if possible and necessary"""
        if self._genlist is not None and self._asclist is None:
            self._asclist = sorted(self._genlist)

    @property
    def fastasc(self):
        self._trysetasclist()
        if self._asclist is not None:
            return self._asclist.__iter__
        # merging is only possible when both sides can iterate ascending
        iter1 = self._r1.fastasc
        iter2 = self._r2.fastasc
        if None in (iter1, iter2):
            return None
        return lambda: self._iterordered(True, iter1(), iter2())

    @property
    def fastdesc(self):
        self._trysetasclist()
        if self._asclist is not None:
            return self._asclist.__reversed__
        # merging is only possible when both sides can iterate descending
        iter1 = self._r1.fastdesc
        iter2 = self._r2.fastdesc
        if None in (iter1, iter2):
            return None
        return lambda: self._iterordered(False, iter1(), iter2())

    def _iterordered(self, ascending, iter1, iter2):
        """produce an ordered iteration from two iterators with the same order

        The ascending is used to indicate the iteration direction.
        """
        # pick the comparison matching the direction once, up front
        # (bug fix: this initialization was accidentally duplicated)
        choice = max
        if ascending:
            choice = min

        val1 = None
        val2 = None
        try:
            # Consume both iterators in an ordered way until one is
            # empty
            while True:
                if val1 is None:
                    val1 = iter1.next()
                if val2 is None:
                    val2 = iter2.next()
                next = choice(val1, val2)
                yield next
                if val1 == next:
                    val1 = None
                if val2 == next:
                    val2 = None
        except StopIteration:
            # Flush any remaining values and consume the other one
            it = iter2
            if val1 is not None:
                yield val1
                it = iter1
            elif val2 is not None:
                # might have been equality and both are empty
                yield val2
            for val in it:
                yield val

    def __contains__(self, x):
        return x in self._r1 or x in self._r2

    def sort(self, reverse=False):
        """Sort the added set

        For this we use the cached list with all the generated values and if
        we know they are ascending or descending we can sort them in a smart
        way.
        """
        self._ascending = not reverse

    def isascending(self):
        return self._ascending is not None and self._ascending

    def isdescending(self):
        return self._ascending is not None and not self._ascending

    def reverse(self):
        if self._ascending is None:
            self._list.reverse()
        else:
            self._ascending = not self._ascending

    def first(self):
        for x in self:
            return x
        return None

    def last(self):
        # flip direction, take the new first element, flip back
        self.reverse()
        val = self.first()
        self.reverse()
        return val
3035 3037
class generatorset(abstractsmartset):
    """Wrap a generator for lazy iteration

    Wrapper structure for generators that provides lazy membership and can
    be iterated more than once.
    When asked for membership it generates values until either it finds the
    requested one or has gone through all the elements in the generator
    """
    def __init__(self, gen, iterasc=None):
        """
        gen: a generator producing the values for the generatorset.
        iterasc: optional ordering hint; True if gen yields ascending
        values, False if descending, None if the order is unknown.
        """
        self._gen = gen
        self._asclist = None
        self._cache = {}
        self._genlist = []
        self._finished = False
        self._ascending = True
        if iterasc is not None:
            # a known order allows fast iteration and early-exit membership
            if iterasc:
                self.fastasc = self._iterator
                self.__contains__ = self._asccontains
            else:
                self.fastdesc = self._iterator
                self.__contains__ = self._desccontains

    def __nonzero__(self):
        for r in self:
            return True
        return False

    def __contains__(self, x):
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True

        self._cache[x] = False
        return False

    def _asccontains(self, x):
        """version of contains optimised for ascending generator"""
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True
            if l > x:
                # ascending: once past x it can no longer appear
                break

        self._cache[x] = False
        return False

    def _desccontains(self, x):
        """version of contains optimised for descending generator"""
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True
            if l < x:
                # descending: once below x it can no longer appear
                break

        self._cache[x] = False
        return False

    def __iter__(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is not None:
            return it()
        # we need to consume the iterator
        for x in self._consumegen():
            pass
        # recall the same code
        return iter(self)

    def _iterator(self):
        if self._finished:
            return iter(self._genlist)

        # We have to use this complex iteration strategy to allow multiple
        # iterations at the same time. We need to be able to catch revision
        # removed from _consumegen and added to genlist in another instance.
        #
        # Getting rid of it would provide an about 15% speed up on this
        # iteration.
        genlist = self._genlist
        nextrev = self._consumegen().next
        _len = len # cache global lookup
        def gen():
            i = 0
            while True:
                if i < _len(genlist):
                    yield genlist[i]
                else:
                    yield nextrev()
                i += 1
        return gen()

    def _consumegen(self):
        # pull values from the wrapped generator, recording each one so
        # later iterations can replay them without re-running the generator
        cache = self._cache
        genlist = self._genlist.append
        for item in self._gen:
            cache[item] = True
            genlist(item)
            yield item
        if not self._finished:
            # generator exhausted: switch to fast sorted-list iteration
            self._finished = True
            asc = self._genlist[:]
            asc.sort()
            self._asclist = asc
            self.fastasc = asc.__iter__
            self.fastdesc = asc.__reversed__

    def __len__(self):
        for x in self._consumegen():
            pass
        return len(self._genlist)

    def sort(self, reverse=False):
        self._ascending = not reverse

    def reverse(self):
        self._ascending = not self._ascending

    def isascending(self):
        return self._ascending

    def isdescending(self):
        return not self._ascending

    def first(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is None:
            # we need to consume all and try again
            for x in self._consumegen():
                pass
            return self.first()
        if self:
            return it().next()
        return None

    def last(self):
        if self._ascending:
            it = self.fastdesc
        else:
            it = self.fastasc
        if it is None:
            # we need to consume all and try again
            for x in self._consumegen():
                pass
            # bug fix: this recursed into first() instead of last(), so a
            # not-yet-consumed set returned the wrong end of the iteration
            return self.last()
        if self:
            return it().next()
        return None
3204 3206
def spanset(repo, start=None, end=None):
    """factory function to dispatch between fullreposet and actual spanset

    Feel free to update all spanset call sites and kill this function at some
    point.
    """
    if start is not None or end is not None:
        return _spanset(repo, start, end)
    return fullreposet(repo)
3214 3216
3215 3217
class _spanset(abstractsmartset):
    """Duck type for baseset class which represents a range of revisions and
    can work lazily and without having all the range in memory

    Note that spanset(x, y) behave almost like xrange(x, y) except for two
    notable points:
    - when x > y it will be automatically descending,
    - revision filtered with this repoview will be skipped.

    """
    def __init__(self, repo, start=0, end=None):
        """
        start: first revision included in the set (default to 0)
        end: first revision excluded (last+1) (default to len(repo))

        Spanset will be descending if `end` < `start`.
        """
        if end is None:
            end = len(repo)
        self._ascending = start <= end
        if not self._ascending:
            # normalize bounds so that _start <= _end always holds; the
            # direction is tracked separately in _ascending
            start, end = end + 1, start + 1
        self._start = start
        self._end = end
        self._hiddenrevs = repo.changelog.filteredrevs

    def sort(self, reverse=False):
        self._ascending = not reverse

    def reverse(self):
        self._ascending = not self._ascending

    def _iterfilter(self, iterrange):
        # drop revisions hidden by the current repoview
        s = self._hiddenrevs
        for r in iterrange:
            if r not in s:
                yield r

    def __iter__(self):
        if self._ascending:
            return self.fastasc()
        else:
            return self.fastdesc()

    def fastasc(self):
        iterrange = xrange(self._start, self._end)
        if self._hiddenrevs:
            return self._iterfilter(iterrange)
        return iter(iterrange)

    def fastdesc(self):
        iterrange = xrange(self._end - 1, self._start - 1, -1)
        if self._hiddenrevs:
            return self._iterfilter(iterrange)
        return iter(iterrange)

    def __contains__(self, rev):
        hidden = self._hiddenrevs
        return ((self._start <= rev < self._end)
                and not (hidden and rev in hidden))

    def __nonzero__(self):
        for r in self:
            return True
        return False

    def __len__(self):
        if not self._hiddenrevs:
            return abs(self._end - self._start)
        else:
            count = 0
            start = self._start
            end = self._end
            for rev in self._hiddenrevs:
                # __init__ guarantees start <= end, so the former extra
                # check for "end < rev <= start" could never match and has
                # been removed as dead code
                if start <= rev < end:
                    count += 1
            return abs(self._end - self._start) - count

    def isascending(self):
        return self._ascending

    def isdescending(self):
        return not self._ascending

    def first(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        for x in it():
            return x
        return None

    def last(self):
        if self._ascending:
            it = self.fastdesc
        else:
            it = self.fastasc
        for x in it():
            return x
        return None
3319 3321
class fullreposet(_spanset):
    """Smartset spanning every revision in the repository.

    This class exists to host special optimization.
    """

    def __init__(self, repo):
        super(fullreposet, self).__init__(repo)

    def __and__(self, other):
        """Intersect with another set.

        Since self covers the whole repo, every valid member of `other` is
        already in self, so `self & other == other`.  This boldly assumes
        the other contains valid revs only.
        """
        if not util.safehasattr(other, 'isascending'):
            # `other` is not a smartset; make it one.  It was used with
            # "&", so assume it is set-like, and filter out hidden
            # revisions (this boldly assumes all smartsets are pure).
            other = baseset(other - self._hiddenrevs)
        # match our own iteration direction before handing it back
        other.sort(reverse=self.isdescending())
        return other
3346 3348
# tell hggettext to extract docstrings from these functions:
# (the predicate docstrings double as translatable user-facing help text)
i18nfunctions = symbols.values()
@@ -1,1108 +1,1108 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from mercurial.node import nullrev
10 10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 11 import pathutil
12 12 import match as matchmod
13 13 import os, errno, re, glob, tempfile
14 14
# Select the platform-specific implementation module: Windows gets
# scmwindows, everything else the POSIX variant.
if os.name == 'nt':
    import scmwindows as scmplatform
else:
    import scmposix as scmplatform

# re-export the platform's hgrc path helpers under stable names
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
22 22
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
75 75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> context mapping, letting ctx1 win on conflicts.
    # The subpaths from ctx2 are important when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    owners = {}
    for path in ctx2.substate:
        owners[path] = ctx2
    for path in ctx1.substate:
        owners[path] = ctx1
    for path in sorted(owners):
        yield path, owners[path].sub(path)
85 85
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
106 106
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not usable as a new label name."""
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise util.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        return
    raise util.Abort(_("cannot use an integer as a name"))
120 120
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise util.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
125 125
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
137 137
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames', 'warn')
    lowered = raw.lower()
    asbool = util.parsebool(raw)
    # Windows always aborts on unportable names; elsewhere honor the setting.
    abort = os.name == 'nt' or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore'
    if asbool is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
150 150
class casecollisionauditor(object):
    """Warn or abort when a new filename collides case-insensitively with
    an already-tracked file."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        tracked = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(tracked).split('\0'))
        self._dirstate = dirstate
        # _newfiles keeps us quiet when the same filename is checked twice
        # in one run.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
174 174
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            # only ENOENT is silenced; other I/O errors propagate
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # cache the bound __call__ on the instance so that subsequent
        # open() calls skip this method entirely
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        # read the full content of ``path`` in binary mode
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def readlines(self, path, mode='rb'):
        fp = self(path, mode=mode)
        try:
            return fp.readlines()
        finally:
            fp.close()

    def write(self, path, data):
        # overwrite ``path`` with ``data`` in binary mode
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        fp = self(path, mode=mode, notindexed=notindexed)
        try:
            return fp.writelines(data)
        finally:
            fp.close()

    def append(self, path, data):
        # append ``data`` to the end of ``path``
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    # The methods below are thin wrappers applying the corresponding
    # os/util operation to the path joined onto the vfs base.

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        # create a temp file under the vfs; the returned name is kept
        # relative to the vfs base (not the absolute tempfile name)
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
333 333
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        # createmode, when set, is applied to newly created files
        self.createmode = None
        # tri-state: None = unknown, decided lazily on first write
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # auditing disabled: accept every path
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # apply createmode to a freshly created file, if both a mode is
        # configured and the filesystem honors permission bits
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink tracks the hardlink count of the target: -1 = not checked,
        # 0 = file did not exist (or was unlinked), >1 triggers a copy
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break hardlinks before writing (copy-on-write)
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: fall back to a regular file holding the
            # link target
            self.write(dst, src)

    def join(self, path):
        if path:
            return os.path.join(self.base, path)
        else:
            return self.base
453 453
# alternate (historical) name for the vfs class
opener = vfs
455 455
class auditvfs(object):
    """Holds a wrapped vfs and forwards its ``mustaudit`` flag."""

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        # delegate to the wrapped vfs
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
467 467
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # rewrite the filename before delegating to the wrapped vfs
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        if not path:
            # empty/None path: the filter is not applicable
            return self.vfs.join(path)
        return self.vfs.join(self._filter(path))
483 483
# alternate (historical) name for filtervfs
filteropener = filtervfs
485 485
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes may pass through
        if mode in ('r', 'rb'):
            return self.vfs(path, mode, *args, **kw)
        raise util.Abort('this vfs is read only')
496 496
497 497
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the walk root itself are fatal
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; returns True when the
            # directory had not been seen before
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat, symlink cycles cannot be detected safely
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target, sharing the seen set so a
                        # cycle through the link is visited only once
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
545 545
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
558 558
# lazily computed cache for rcpath(); stays None until first use
_rcpath = None
560 560
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    env = os.environ.get('HGRCPATH')
    if env is None:
        _rcpath = osrcpath()
        return _rcpath
    # an explicitly set (possibly empty) HGRCPATH overrides the defaults
    _rcpath = []
    for p in env.split(os.pathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(p, f))
        else:
            _rcpath.append(p)
    return _rcpath
584 584
def revsingle(repo, revspec, default='.'):
    """Resolve a single revision spec, falling back to ``default``."""
    # an empty spec (but not the integer 0) selects the default revision
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec])
    if not revs:
        raise util.Abort(_('empty revision set'))
    return repo[revs.last()]
593 593
def revpair(repo, revs):
    """Resolve a list of specs into a (node, node-or-None) pair."""
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a lone spec without the range separator yields a single revision
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
619 619
# separator used by old-style "start:end" revision ranges
_revrangesep = ':'
621 621
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # map an empty spec (but not the integer 0) to the supplied default
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    seen, l = set(), revset.baseset([])
    for spec in revs:
        if l and not seen:
            # the first iteration deferred filling 'seen'; sync it now
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            # dedupe against revisions already collected
            dl = [r for r in m(repo) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo)

    return l
682 682
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded patterns pass through untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            expanded.append(kindpat)
    return expanded
701 701
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.'''
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    matcher = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                        default)

    def _warnbad(f, msg):
        ctx._repo.ui.warn("%s: %s\n" % (matcher.rel(f), msg))
    matcher.bad = _warnbad
    return matcher, pats
716 716
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return matcher
720 720
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
724 724
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files)
728 728
def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
    """Add new files and remove missing ones, reporting what was done.

    Returns 1 when an explicitly requested file was rejected or a subrepo
    addremove failed, else 0.
    """
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo is covered either by an exact match or by any pattern
        # file lying inside it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    # recurse into matching subrepositories first
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    origbad = m.bad
    def badfn(f, msg):
        # only complain about files that were explicitly requested
        if f in m.files():
            origbad(f, msg)
        rejected.append(f)

    # temporarily swap in badfn to collect rejected paths during the walk
    m.bad = badfn
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    m.bad = origbad

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
791 791
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files)
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        for abs in sorted(unknownset | set(deleted)):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly requested file that was rejected means failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
821 821
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    auditor = pathutil.pathauditor(repo.root)

    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(repo[None].substate), True,
                                False, full=False)
    for path, entry in walkresults.iteritems():
        state = dirstate[path]
        ondisk = bool(entry)
        if state == '?' and auditor.check(path):
            unknown.append(path)
        elif state != 'r' and not ondisk:
            deleted.append(path)
        elif state == 'r' and ondisk:
            forgotten.append(path)
        # for finding renames
        elif state == 'r' and not ondisk:
            removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed, forgotten
850 850
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        bothexact = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not bothexact:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
865 865
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all dirstate mutations happen under the working-dir lock
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.items():
            wctx.copy(old, new)
    finally:
        wlock.release()
878 878
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return

    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
897 897
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = sorted(r for r in requirements if r not in supported)
    for r in missings:
        # an empty or non-alphanumeric-leading entry means a damaged file
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
916 916
class filecachesubentry(object):
    '''Tracks stat information for a single file path.

    Remembers whether the filesystem can reliably report changes to the
    file (self._cacheable).  The value is tri-state: True/False once
    known, None while still undetermined.
    '''
    def __init__(self, path, stat):
        # path: the file to watch; stat: if true, record stat info now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        '''re-record stat info for the path, if it is cacheable'''
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        '''True if stat comparison can be trusted to detect changes'''
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''True if the path appears changed since the last recorded stat
        (or if we cannot tell); updates the recorded stat as a side effect'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        '''return util.cachestat for path, or None if the file is missing'''
        try:
            return util.cachestat(path)
        except OSError, e:
            # a missing file simply yields None; other errors propagate
            if e.errno != errno.ENOENT:
                raise
971 971
class filecacheentry(object):
    '''Groups one filecachesubentry per watched path.

    The entry is considered changed as soon as any of its paths is.
    '''
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits, mirroring an early return on first change
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        '''re-record stat info for every watched path'''
        for sub in self._entries:
            sub.refresh()
988 988
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths: file names (joined via self.join at access time) whose
        # changes invalidate the cached value
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped computation and the
        # attribute name it is cached under
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # fast path: a value already placed in the instance dict wins
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # underlying file changed: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

        obj._filecache[self.name] = entry

        # cache in the instance dict so the fast path above is taken next time
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # only the instance-dict copy is removed; the _filecache entry stays
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1064 1064
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # map is either a dirstate-like mapping (file -> state tuple) or
        # a plain iterable of file names; in the dirstate case, entries
        # whose state equals 'skip' are not counted
        self._dirs = {}
        if util.safehasattr(map, 'iteritems') and skip is not None:
            for fname, entry in map.iteritems():
                if entry[0] != skip:
                    self.addpath(fname)
        else:
            for fname in map:
                self.addpath(fname)

    def addpath(self, path):
        '''bump the reference count of each new ancestor of path'''
        counts = self._dirs
        for ancestor in finddirs(path):
            if ancestor in counts:
                counts[ancestor] += 1
                # once an ancestor is already known, the shallower ones
                # were counted when it was first inserted, so stop here
                return
            counts[ancestor] = 1

    def delpath(self, path):
        '''drop one reference from each ancestor of path'''
        counts = self._dirs
        for ancestor in finddirs(path):
            if counts[ancestor] > 1:
                counts[ancestor] -= 1
                # mirror of addpath: shallower ancestors still hold
                # references from this subtree, so stop here
                return
            del counts[ancestor]

    def __iter__(self):
        # iterating a dict yields its keys, same as iterkeys()
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
1100 1100
# prefer the C implementation of the dirs multiset when the parsers
# extension module provides one
# NOTE(review): 'parsers' is not imported in the visible part of this
# file -- presumably imported near the top; confirm
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
1103 1103
def finddirs(path):
    '''Generate every ancestor directory of a '/'-separated path.

    Deepest ancestor first: "a/b/c" yields "a/b" then "a".  A path with
    no separator yields nothing.
    '''
    cut = path.rfind('/')
    while cut >= 0:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now