py3: replace os.sep with pycompat.ossep (part 1 of 4)...
Pulkit Goyal
r30613:1112ff99 default
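This changeset is part of a series that replaces direct uses of os.sep with pycompat.ossep so that the path separator stays a bytes string under Python 3. The hunk below only shows unchanged context from mercurial/commands.py; the snippet that follows is an illustrative, hypothetical sketch of the substitution pattern (the splitpath helper is not taken from this changeset):

# hypothetical sketch of the os.sep -> pycompat.ossep substitution;
# assumed pattern for this series, not an actual hunk from r30613
from mercurial import pycompat

def splitpath(path):
    # before the series: path.split(os.sep) -- a str separator, which
    # breaks bytes paths on Python 3
    # after the series: the bytes-safe separator from pycompat
    return path.split(pycompat.ossep)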
@@ -1,6602 +1,6602 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import os
13 13 import re
14 14 import shlex
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 archival,
32 32 bookmarks,
33 33 bundle2,
34 34 changegroup,
35 35 cmdutil,
36 36 copies,
37 37 destutil,
38 38 dirstateguard,
39 39 discovery,
40 40 encoding,
41 41 error,
42 42 exchange,
43 43 extensions,
44 44 formatter,
45 45 graphmod,
46 46 hbisect,
47 47 help,
48 48 hg,
49 49 lock as lockmod,
50 50 merge as mergemod,
51 51 minirst,
52 52 obsolete,
53 53 patch,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 repair,
59 59 revlog,
60 60 revset,
61 61 scmutil,
62 62 server,
63 63 sshserver,
64 64 sslutil,
65 65 streamclone,
66 66 templatekw,
67 67 templater,
68 68 ui as uimod,
69 69 util,
70 70 )
71 71
72 72 release = lockmod.release
73 73
74 74 table = {}
75 75
76 76 command = cmdutil.command(table)
77 77
78 78 # label constants
79 79 # until 3.5, bookmarks.current was the advertised name, not
80 80 # bookmarks.active, so we must use both to avoid breaking old
81 81 # custom styles
82 82 activebookmarklabel = 'bookmarks.active bookmarks.current'
83 83
84 84 # common command options
85 85
86 86 globalopts = [
87 87 ('R', 'repository', '',
88 88 _('repository root directory or name of overlay bundle file'),
89 89 _('REPO')),
90 90 ('', 'cwd', '',
91 91 _('change working directory'), _('DIR')),
92 92 ('y', 'noninteractive', None,
93 93 _('do not prompt, automatically pick the first choice for all prompts')),
94 94 ('q', 'quiet', None, _('suppress output')),
95 95 ('v', 'verbose', None, _('enable additional output')),
96 96 ('', 'config', [],
97 97 _('set/override config option (use \'section.name=value\')'),
98 98 _('CONFIG')),
99 99 ('', 'debug', None, _('enable debugging output')),
100 100 ('', 'debugger', None, _('start debugger')),
101 101 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
102 102 _('ENCODE')),
103 103 ('', 'encodingmode', encoding.encodingmode,
104 104 _('set the charset encoding mode'), _('MODE')),
105 105 ('', 'traceback', None, _('always print a traceback on exception')),
106 106 ('', 'time', None, _('time how long the command takes')),
107 107 ('', 'profile', None, _('print command execution profile')),
108 108 ('', 'version', None, _('output version information and exit')),
109 109 ('h', 'help', None, _('display help and exit')),
110 110 ('', 'hidden', False, _('consider hidden changesets')),
111 111 ]
112 112
113 113 dryrunopts = [('n', 'dry-run', None,
114 114 _('do not perform actions, just print output'))]
115 115
116 116 remoteopts = [
117 117 ('e', 'ssh', '',
118 118 _('specify ssh command to use'), _('CMD')),
119 119 ('', 'remotecmd', '',
120 120 _('specify hg command to run on the remote side'), _('CMD')),
121 121 ('', 'insecure', None,
122 122 _('do not verify server certificate (ignoring web.cacerts config)')),
123 123 ]
124 124
125 125 walkopts = [
126 126 ('I', 'include', [],
127 127 _('include names matching the given patterns'), _('PATTERN')),
128 128 ('X', 'exclude', [],
129 129 _('exclude names matching the given patterns'), _('PATTERN')),
130 130 ]
131 131
132 132 commitopts = [
133 133 ('m', 'message', '',
134 134 _('use text as commit message'), _('TEXT')),
135 135 ('l', 'logfile', '',
136 136 _('read commit message from file'), _('FILE')),
137 137 ]
138 138
139 139 commitopts2 = [
140 140 ('d', 'date', '',
141 141 _('record the specified date as commit date'), _('DATE')),
142 142 ('u', 'user', '',
143 143 _('record the specified user as committer'), _('USER')),
144 144 ]
145 145
146 146 # hidden for now
147 147 formatteropts = [
148 148 ('T', 'template', '',
149 149 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
150 150 ]
151 151
152 152 templateopts = [
153 153 ('', 'style', '',
154 154 _('display using template map file (DEPRECATED)'), _('STYLE')),
155 155 ('T', 'template', '',
156 156 _('display with template'), _('TEMPLATE')),
157 157 ]
158 158
159 159 logopts = [
160 160 ('p', 'patch', None, _('show patch')),
161 161 ('g', 'git', None, _('use git extended diff format')),
162 162 ('l', 'limit', '',
163 163 _('limit number of changes displayed'), _('NUM')),
164 164 ('M', 'no-merges', None, _('do not show merges')),
165 165 ('', 'stat', None, _('output diffstat-style summary of changes')),
166 166 ('G', 'graph', None, _("show the revision DAG")),
167 167 ] + templateopts
168 168
169 169 diffopts = [
170 170 ('a', 'text', None, _('treat all files as text')),
171 171 ('g', 'git', None, _('use git extended diff format')),
172 172 ('', 'nodates', None, _('omit dates from diff headers'))
173 173 ]
174 174
175 175 diffwsopts = [
176 176 ('w', 'ignore-all-space', None,
177 177 _('ignore white space when comparing lines')),
178 178 ('b', 'ignore-space-change', None,
179 179 _('ignore changes in the amount of white space')),
180 180 ('B', 'ignore-blank-lines', None,
181 181 _('ignore changes whose lines are all blank')),
182 182 ]
183 183
184 184 diffopts2 = [
185 185 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
186 186 ('p', 'show-function', None, _('show which function each change is in')),
187 187 ('', 'reverse', None, _('produce a diff that undoes the changes')),
188 188 ] + diffwsopts + [
189 189 ('U', 'unified', '',
190 190 _('number of lines of context to show'), _('NUM')),
191 191 ('', 'stat', None, _('output diffstat-style summary of changes')),
192 192 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
193 193 ]
194 194
195 195 mergetoolopts = [
196 196 ('t', 'tool', '', _('specify merge tool')),
197 197 ]
198 198
199 199 similarityopts = [
200 200 ('s', 'similarity', '',
201 201 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
202 202 ]
203 203
204 204 subrepoopts = [
205 205 ('S', 'subrepos', None,
206 206 _('recurse into subrepositories'))
207 207 ]
208 208
209 209 debugrevlogopts = [
210 210 ('c', 'changelog', False, _('open changelog')),
211 211 ('m', 'manifest', False, _('open manifest')),
212 212 ('', 'dir', '', _('open directory manifest')),
213 213 ]
214 214
215 215 # Commands start here, listed alphabetically
216 216
217 217 @command('^add',
218 218 walkopts + subrepoopts + dryrunopts,
219 219 _('[OPTION]... [FILE]...'),
220 220 inferrepo=True)
221 221 def add(ui, repo, *pats, **opts):
222 222 """add the specified files on the next commit
223 223
224 224 Schedule files to be version controlled and added to the
225 225 repository.
226 226
227 227 The files will be added to the repository at the next commit. To
228 228 undo an add before that, see :hg:`forget`.
229 229
230 230 If no names are given, add all files to the repository (except
231 231 files matching ``.hgignore``).
232 232
233 233 .. container:: verbose
234 234
235 235 Examples:
236 236
237 237 - New (unknown) files are added
238 238 automatically by :hg:`add`::
239 239
240 240 $ ls
241 241 foo.c
242 242 $ hg status
243 243 ? foo.c
244 244 $ hg add
245 245 adding foo.c
246 246 $ hg status
247 247 A foo.c
248 248
249 249 - Specific files to be added can be specified::
250 250
251 251 $ ls
252 252 bar.c foo.c
253 253 $ hg status
254 254 ? bar.c
255 255 ? foo.c
256 256 $ hg add bar.c
257 257 $ hg status
258 258 A bar.c
259 259 ? foo.c
260 260
261 261 Returns 0 if all files are successfully added.
262 262 """
263 263
264 264 m = scmutil.match(repo[None], pats, opts)
265 265 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
266 266 return rejected and 1 or 0
267 267
268 268 @command('addremove',
269 269 similarityopts + subrepoopts + walkopts + dryrunopts,
270 270 _('[OPTION]... [FILE]...'),
271 271 inferrepo=True)
272 272 def addremove(ui, repo, *pats, **opts):
273 273 """add all new files, delete all missing files
274 274
275 275 Add all new files and remove all missing files from the
276 276 repository.
277 277
278 278 Unless names are given, new files are ignored if they match any of
279 279 the patterns in ``.hgignore``. As with add, these changes take
280 280 effect at the next commit.
281 281
282 282 Use the -s/--similarity option to detect renamed files. This
283 283 option takes a percentage between 0 (disabled) and 100 (files must
284 284 be identical) as its parameter. With a parameter greater than 0,
285 285 this compares every removed file with every added file and records
286 286 those similar enough as renames. Detecting renamed files this way
287 287 can be expensive. After using this option, :hg:`status -C` can be
288 288 used to check which files were identified as moved or renamed. If
289 289 not specified, -s/--similarity defaults to 100 and only renames of
290 290 identical files are detected.
291 291
292 292 .. container:: verbose
293 293
294 294 Examples:
295 295
296 296 - A number of files (bar.c and foo.c) are new,
297 297 while foobar.c has been removed (without using :hg:`remove`)
298 298 from the repository::
299 299
300 300 $ ls
301 301 bar.c foo.c
302 302 $ hg status
303 303 ! foobar.c
304 304 ? bar.c
305 305 ? foo.c
306 306 $ hg addremove
307 307 adding bar.c
308 308 adding foo.c
309 309 removing foobar.c
310 310 $ hg status
311 311 A bar.c
312 312 A foo.c
313 313 R foobar.c
314 314
315 315 - A file foobar.c was moved to foo.c without using :hg:`rename`.
316 316 Afterwards, it was edited slightly::
317 317
318 318 $ ls
319 319 foo.c
320 320 $ hg status
321 321 ! foobar.c
322 322 ? foo.c
323 323 $ hg addremove --similarity 90
324 324 removing foobar.c
325 325 adding foo.c
326 326 recording removal of foobar.c as rename to foo.c (94% similar)
327 327 $ hg status -C
328 328 A foo.c
329 329 foobar.c
330 330 R foobar.c
331 331
332 332 Returns 0 if all files are successfully added.
333 333 """
334 334 try:
335 335 sim = float(opts.get('similarity') or 100)
336 336 except ValueError:
337 337 raise error.Abort(_('similarity must be a number'))
338 338 if sim < 0 or sim > 100:
339 339 raise error.Abort(_('similarity must be between 0 and 100'))
340 340 matcher = scmutil.match(repo[None], pats, opts)
341 341 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
342 342
343 343 @command('^annotate|blame',
344 344 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
345 345 ('', 'follow', None,
346 346 _('follow copies/renames and list the filename (DEPRECATED)')),
347 347 ('', 'no-follow', None, _("don't follow copies and renames")),
348 348 ('a', 'text', None, _('treat all files as text')),
349 349 ('u', 'user', None, _('list the author (long with -v)')),
350 350 ('f', 'file', None, _('list the filename')),
351 351 ('d', 'date', None, _('list the date (short with -q)')),
352 352 ('n', 'number', None, _('list the revision number (default)')),
353 353 ('c', 'changeset', None, _('list the changeset')),
354 354 ('l', 'line-number', None, _('show line number at the first appearance'))
355 355 ] + diffwsopts + walkopts + formatteropts,
356 356 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
357 357 inferrepo=True)
358 358 def annotate(ui, repo, *pats, **opts):
359 359 """show changeset information by line for each file
360 360
361 361 List changes in files, showing the revision id responsible for
362 362 each line.
363 363
364 364 This command is useful for discovering when a change was made and
365 365 by whom.
366 366
367 367 If you include --file, --user, or --date, the revision number is
368 368 suppressed unless you also include --number.
369 369
370 370 Without the -a/--text option, annotate will avoid processing files
371 371 it detects as binary. With -a, annotate will annotate the file
372 372 anyway, although the results will probably be neither useful
373 373 nor desirable.
374 374
375 375 Returns 0 on success.
376 376 """
377 377 if not pats:
378 378 raise error.Abort(_('at least one filename or pattern is required'))
379 379
380 380 if opts.get('follow'):
381 381 # --follow is deprecated and now just an alias for -f/--file
382 382 # to mimic the behavior of Mercurial before version 1.5
383 383 opts['file'] = True
384 384
385 385 ctx = scmutil.revsingle(repo, opts.get('rev'))
386 386
387 387 fm = ui.formatter('annotate', opts)
388 388 if ui.quiet:
389 389 datefunc = util.shortdate
390 390 else:
391 391 datefunc = util.datestr
392 392 if ctx.rev() is None:
393 393 def hexfn(node):
394 394 if node is None:
395 395 return None
396 396 else:
397 397 return fm.hexfunc(node)
398 398 if opts.get('changeset'):
399 399 # omit "+" suffix which is appended to node hex
400 400 def formatrev(rev):
401 401 if rev is None:
402 402 return '%d' % ctx.p1().rev()
403 403 else:
404 404 return '%d' % rev
405 405 else:
406 406 def formatrev(rev):
407 407 if rev is None:
408 408 return '%d+' % ctx.p1().rev()
409 409 else:
410 410 return '%d ' % rev
411 411 def formathex(hex):
412 412 if hex is None:
413 413 return '%s+' % fm.hexfunc(ctx.p1().node())
414 414 else:
415 415 return '%s ' % hex
416 416 else:
417 417 hexfn = fm.hexfunc
418 418 formatrev = formathex = str
419 419
420 420 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
421 421 ('number', ' ', lambda x: x[0].rev(), formatrev),
422 422 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
423 423 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
424 424 ('file', ' ', lambda x: x[0].path(), str),
425 425 ('line_number', ':', lambda x: x[1], str),
426 426 ]
427 427 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
428 428
429 429 if (not opts.get('user') and not opts.get('changeset')
430 430 and not opts.get('date') and not opts.get('file')):
431 431 opts['number'] = True
432 432
433 433 linenumber = opts.get('line_number') is not None
434 434 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
435 435 raise error.Abort(_('at least one of -n/-c is required for -l'))
436 436
437 437 if fm.isplain():
438 438 def makefunc(get, fmt):
439 439 return lambda x: fmt(get(x))
440 440 else:
441 441 def makefunc(get, fmt):
442 442 return get
443 443 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
444 444 if opts.get(op)]
445 445 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
446 446 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
447 447 if opts.get(op))
448 448
449 449 def bad(x, y):
450 450 raise error.Abort("%s: %s" % (x, y))
451 451
452 452 m = scmutil.match(ctx, pats, opts, badfn=bad)
453 453
454 454 follow = not opts.get('no_follow')
455 455 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
456 456 whitespace=True)
457 457 for abs in ctx.walk(m):
458 458 fctx = ctx[abs]
459 459 if not opts.get('text') and util.binary(fctx.data()):
460 460 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
461 461 continue
462 462
463 463 lines = fctx.annotate(follow=follow, linenumber=linenumber,
464 464 diffopts=diffopts)
465 465 if not lines:
466 466 continue
467 467 formats = []
468 468 pieces = []
469 469
470 470 for f, sep in funcmap:
471 471 l = [f(n) for n, dummy in lines]
472 472 if fm.isplain():
473 473 sizes = [encoding.colwidth(x) for x in l]
474 474 ml = max(sizes)
475 475 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
476 476 else:
477 477 formats.append(['%s' for x in l])
478 478 pieces.append(l)
479 479
480 480 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
481 481 fm.startitem()
482 482 fm.write(fields, "".join(f), *p)
483 483 fm.write('line', ": %s", l[1])
484 484
485 485 if not lines[-1][1].endswith('\n'):
486 486 fm.plain('\n')
487 487
488 488 fm.end()
489 489
490 490 @command('archive',
491 491 [('', 'no-decode', None, _('do not pass files through decoders')),
492 492 ('p', 'prefix', '', _('directory prefix for files in archive'),
493 493 _('PREFIX')),
494 494 ('r', 'rev', '', _('revision to distribute'), _('REV')),
495 495 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
496 496 ] + subrepoopts + walkopts,
497 497 _('[OPTION]... DEST'))
498 498 def archive(ui, repo, dest, **opts):
499 499 '''create an unversioned archive of a repository revision
500 500
501 501 By default, the revision used is the parent of the working
502 502 directory; use -r/--rev to specify a different revision.
503 503
504 504 The archive type is automatically detected based on file
505 505 extension (to override, use -t/--type).
506 506
507 507 .. container:: verbose
508 508
509 509 Examples:
510 510
511 511 - create a zip file containing the 1.0 release::
512 512
513 513 hg archive -r 1.0 project-1.0.zip
514 514
515 515 - create a tarball excluding .hg files::
516 516
517 517 hg archive project.tar.gz -X ".hg*"
518 518
519 519 Valid types are:
520 520
521 521 :``files``: a directory full of files (default)
522 522 :``tar``: tar archive, uncompressed
523 523 :``tbz2``: tar archive, compressed using bzip2
524 524 :``tgz``: tar archive, compressed using gzip
525 525 :``uzip``: zip archive, uncompressed
526 526 :``zip``: zip archive, compressed using deflate
527 527
528 528 The exact name of the destination archive or directory is given
529 529 using a format string; see :hg:`help export` for details.
530 530
531 531 Each member added to an archive file has a directory prefix
532 532 prepended. Use -p/--prefix to specify a format string for the
533 533 prefix. The default is the basename of the archive, with suffixes
534 534 removed.
535 535
536 536 Returns 0 on success.
537 537 '''
538 538
539 539 ctx = scmutil.revsingle(repo, opts.get('rev'))
540 540 if not ctx:
541 541 raise error.Abort(_('no working directory: please specify a revision'))
542 542 node = ctx.node()
543 543 dest = cmdutil.makefilename(repo, dest, node)
544 544 if os.path.realpath(dest) == repo.root:
545 545 raise error.Abort(_('repository root cannot be destination'))
546 546
547 547 kind = opts.get('type') or archival.guesskind(dest) or 'files'
548 548 prefix = opts.get('prefix')
549 549
550 550 if dest == '-':
551 551 if kind == 'files':
552 552 raise error.Abort(_('cannot archive plain files to stdout'))
553 553 dest = cmdutil.makefileobj(repo, dest)
554 554 if not prefix:
555 555 prefix = os.path.basename(repo.root) + '-%h'
556 556
557 557 prefix = cmdutil.makefilename(repo, prefix, node)
558 558 matchfn = scmutil.match(ctx, [], opts)
559 559 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
560 560 matchfn, prefix, subrepos=opts.get('subrepos'))
561 561
562 562 @command('backout',
563 563 [('', 'merge', None, _('merge with old dirstate parent after backout')),
564 564 ('', 'commit', None,
565 565 _('commit if no conflicts were encountered (DEPRECATED)')),
566 566 ('', 'no-commit', None, _('do not commit')),
567 567 ('', 'parent', '',
568 568 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
569 569 ('r', 'rev', '', _('revision to backout'), _('REV')),
570 570 ('e', 'edit', False, _('invoke editor on commit messages')),
571 571 ] + mergetoolopts + walkopts + commitopts + commitopts2,
572 572 _('[OPTION]... [-r] REV'))
573 573 def backout(ui, repo, node=None, rev=None, **opts):
574 574 '''reverse effect of earlier changeset
575 575
576 576 Prepare a new changeset with the effect of REV undone in the
577 577 current working directory. If no conflicts were encountered,
578 578 it will be committed immediately.
579 579
580 580 If REV is the parent of the working directory, then this new changeset
581 581 is committed automatically (unless --no-commit is specified).
582 582
583 583 .. note::
584 584
585 585 :hg:`backout` cannot be used to fix either an unwanted or
586 586 incorrect merge.
587 587
588 588 .. container:: verbose
589 589
590 590 Examples:
591 591
592 592 - Reverse the effect of the parent of the working directory.
593 593 This backout will be committed immediately::
594 594
595 595 hg backout -r .
596 596
597 597 - Reverse the effect of previous bad revision 23::
598 598
599 599 hg backout -r 23
600 600
601 601 - Reverse the effect of previous bad revision 23 and
602 602 leave changes uncommitted::
603 603
604 604 hg backout -r 23 --no-commit
605 605 hg commit -m "Backout revision 23"
606 606
607 607 By default, the pending changeset will have one parent,
608 608 maintaining a linear history. With --merge, the pending
609 609 changeset will instead have two parents: the old parent of the
610 610 working directory and a new child of REV that simply undoes REV.
611 611
612 612 Before version 1.7, the behavior without --merge was equivalent
613 613 to specifying --merge followed by :hg:`update --clean .` to
614 614 cancel the merge and leave the child of REV as a head to be
615 615 merged separately.
616 616
617 617 See :hg:`help dates` for a list of formats valid for -d/--date.
618 618
619 619 See :hg:`help revert` for a way to restore files to the state
620 620 of another revision.
621 621
622 622 Returns 0 on success, 1 if nothing to backout or there are unresolved
623 623 files.
624 624 '''
625 625 wlock = lock = None
626 626 try:
627 627 wlock = repo.wlock()
628 628 lock = repo.lock()
629 629 return _dobackout(ui, repo, node, rev, **opts)
630 630 finally:
631 631 release(lock, wlock)
632 632
633 633 def _dobackout(ui, repo, node=None, rev=None, **opts):
634 634 if opts.get('commit') and opts.get('no_commit'):
635 635 raise error.Abort(_("cannot use --commit with --no-commit"))
636 636 if opts.get('merge') and opts.get('no_commit'):
637 637 raise error.Abort(_("cannot use --merge with --no-commit"))
638 638
639 639 if rev and node:
640 640 raise error.Abort(_("please specify just one revision"))
641 641
642 642 if not rev:
643 643 rev = node
644 644
645 645 if not rev:
646 646 raise error.Abort(_("please specify a revision to backout"))
647 647
648 648 date = opts.get('date')
649 649 if date:
650 650 opts['date'] = util.parsedate(date)
651 651
652 652 cmdutil.checkunfinished(repo)
653 653 cmdutil.bailifchanged(repo)
654 654 node = scmutil.revsingle(repo, rev).node()
655 655
656 656 op1, op2 = repo.dirstate.parents()
657 657 if not repo.changelog.isancestor(node, op1):
658 658 raise error.Abort(_('cannot backout change that is not an ancestor'))
659 659
660 660 p1, p2 = repo.changelog.parents(node)
661 661 if p1 == nullid:
662 662 raise error.Abort(_('cannot backout a change with no parents'))
663 663 if p2 != nullid:
664 664 if not opts.get('parent'):
665 665 raise error.Abort(_('cannot backout a merge changeset'))
666 666 p = repo.lookup(opts['parent'])
667 667 if p not in (p1, p2):
668 668 raise error.Abort(_('%s is not a parent of %s') %
669 669 (short(p), short(node)))
670 670 parent = p
671 671 else:
672 672 if opts.get('parent'):
673 673 raise error.Abort(_('cannot use --parent on non-merge changeset'))
674 674 parent = p1
675 675
676 676 # the backout should appear on the same branch
677 677 branch = repo.dirstate.branch()
678 678 bheads = repo.branchheads(branch)
679 679 rctx = scmutil.revsingle(repo, hex(parent))
680 680 if not opts.get('merge') and op1 != node:
681 681 dsguard = dirstateguard.dirstateguard(repo, 'backout')
682 682 try:
683 683 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
684 684 'backout')
685 685 stats = mergemod.update(repo, parent, True, True, node, False)
686 686 repo.setparents(op1, op2)
687 687 dsguard.close()
688 688 hg._showstats(repo, stats)
689 689 if stats[3]:
690 690 repo.ui.status(_("use 'hg resolve' to retry unresolved "
691 691 "file merges\n"))
692 692 return 1
693 693 finally:
694 694 ui.setconfig('ui', 'forcemerge', '', '')
695 695 lockmod.release(dsguard)
696 696 else:
697 697 hg.clean(repo, node, show_stats=False)
698 698 repo.dirstate.setbranch(branch)
699 699 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
700 700
701 701 if opts.get('no_commit'):
702 702 msg = _("changeset %s backed out, "
703 703 "don't forget to commit.\n")
704 704 ui.status(msg % short(node))
705 705 return 0
706 706
707 707 def commitfunc(ui, repo, message, match, opts):
708 708 editform = 'backout'
709 709 e = cmdutil.getcommiteditor(editform=editform, **opts)
710 710 if not message:
711 711 # we don't translate commit messages
712 712 message = "Backed out changeset %s" % short(node)
713 713 e = cmdutil.getcommiteditor(edit=True, editform=editform)
714 714 return repo.commit(message, opts.get('user'), opts.get('date'),
715 715 match, editor=e)
716 716 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
717 717 if not newnode:
718 718 ui.status(_("nothing changed\n"))
719 719 return 1
720 720 cmdutil.commitstatus(repo, newnode, branch, bheads)
721 721
722 722 def nice(node):
723 723 return '%d:%s' % (repo.changelog.rev(node), short(node))
724 724 ui.status(_('changeset %s backs out changeset %s\n') %
725 725 (nice(repo.changelog.tip()), nice(node)))
726 726 if opts.get('merge') and op1 != node:
727 727 hg.clean(repo, op1, show_stats=False)
728 728 ui.status(_('merging with changeset %s\n')
729 729 % nice(repo.changelog.tip()))
730 730 try:
731 731 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
732 732 'backout')
733 733 return hg.merge(repo, hex(repo.changelog.tip()))
734 734 finally:
735 735 ui.setconfig('ui', 'forcemerge', '', '')
736 736 return 0
737 737
738 738 @command('bisect',
739 739 [('r', 'reset', False, _('reset bisect state')),
740 740 ('g', 'good', False, _('mark changeset good')),
741 741 ('b', 'bad', False, _('mark changeset bad')),
742 742 ('s', 'skip', False, _('skip testing changeset')),
743 743 ('e', 'extend', False, _('extend the bisect range')),
744 744 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
745 745 ('U', 'noupdate', False, _('do not update to target'))],
746 746 _("[-gbsr] [-U] [-c CMD] [REV]"))
747 747 def bisect(ui, repo, rev=None, extra=None, command=None,
748 748 reset=None, good=None, bad=None, skip=None, extend=None,
749 749 noupdate=None):
750 750 """subdivision search of changesets
751 751
752 752 This command helps to find changesets which introduce problems. To
753 753 use, mark the earliest changeset you know exhibits the problem as
754 754 bad, then mark the latest changeset which is free from the problem
755 755 as good. Bisect will update your working directory to a revision
756 756 for testing (unless the -U/--noupdate option is specified). Once
757 757 you have performed tests, mark the working directory as good or
758 758 bad, and bisect will either update to another candidate changeset
759 759 or announce that it has found the bad revision.
760 760
761 761 As a shortcut, you can also use the revision argument to mark a
762 762 revision as good or bad without checking it out first.
763 763
764 764 If you supply a command, it will be used for automatic bisection.
765 765 The environment variable HG_NODE will contain the ID of the
766 766 changeset being tested. The exit status of the command will be
767 767 used to mark revisions as good or bad: status 0 means good, 125
768 768 means to skip the revision, 127 (command not found) will abort the
769 769 bisection, and any other non-zero exit status means the revision
770 770 is bad.
771 771
772 772 .. container:: verbose
773 773
774 774 Some examples:
775 775
776 776 - start a bisection with known bad revision 34, and good revision 12::
777 777
778 778 hg bisect --bad 34
779 779 hg bisect --good 12
780 780
781 781 - advance the current bisection by marking current revision as good or
782 782 bad::
783 783
784 784 hg bisect --good
785 785 hg bisect --bad
786 786
787 787 - mark the current revision, or a known revision, to be skipped (e.g. if
788 788 that revision is not usable because of another issue)::
789 789
790 790 hg bisect --skip
791 791 hg bisect --skip 23
792 792
793 793 - skip all revisions that do not touch directories ``foo`` or ``bar``::
794 794
795 795 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
796 796
797 797 - forget the current bisection::
798 798
799 799 hg bisect --reset
800 800
801 801 - use 'make && make tests' to automatically find the first broken
802 802 revision::
803 803
804 804 hg bisect --reset
805 805 hg bisect --bad 34
806 806 hg bisect --good 12
807 807 hg bisect --command "make && make tests"
808 808
809 809 - see all changesets whose states are already known in the current
810 810 bisection::
811 811
812 812 hg log -r "bisect(pruned)"
813 813
814 814 - see the changeset currently being bisected (especially useful
815 815 if running with -U/--noupdate)::
816 816
817 817 hg log -r "bisect(current)"
818 818
819 819 - see all changesets that took part in the current bisection::
820 820
821 821 hg log -r "bisect(range)"
822 822
823 823 - you can even get a nice graph::
824 824
825 825 hg log --graph -r "bisect(range)"
826 826
827 827 See :hg:`help revsets` for more about the `bisect()` keyword.
828 828
829 829 Returns 0 on success.
830 830 """
831 831 # backward compatibility
832 832 if rev in "good bad reset init".split():
833 833 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
834 834 cmd, rev, extra = rev, extra, None
835 835 if cmd == "good":
836 836 good = True
837 837 elif cmd == "bad":
838 838 bad = True
839 839 else:
840 840 reset = True
841 841 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
842 842 raise error.Abort(_('incompatible arguments'))
843 843
844 844 cmdutil.checkunfinished(repo)
845 845
846 846 if reset:
847 847 hbisect.resetstate(repo)
848 848 return
849 849
850 850 state = hbisect.load_state(repo)
851 851
852 852 # update state
853 853 if good or bad or skip:
854 854 if rev:
855 855 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
856 856 else:
857 857 nodes = [repo.lookup('.')]
858 858 if good:
859 859 state['good'] += nodes
860 860 elif bad:
861 861 state['bad'] += nodes
862 862 elif skip:
863 863 state['skip'] += nodes
864 864 hbisect.save_state(repo, state)
865 865 if not (state['good'] and state['bad']):
866 866 return
867 867
868 868 def mayupdate(repo, node, show_stats=True):
869 869 """common used update sequence"""
870 870 if noupdate:
871 871 return
872 872 cmdutil.bailifchanged(repo)
873 873 return hg.clean(repo, node, show_stats=show_stats)
874 874
875 875 displayer = cmdutil.show_changeset(ui, repo, {})
876 876
877 877 if command:
878 878 changesets = 1
879 879 if noupdate:
880 880 try:
881 881 node = state['current'][0]
882 882 except LookupError:
883 883 raise error.Abort(_('current bisect revision is unknown - '
884 884 'start a new bisect to fix'))
885 885 else:
886 886 node, p2 = repo.dirstate.parents()
887 887 if p2 != nullid:
888 888 raise error.Abort(_('current bisect revision is a merge'))
889 889 if rev:
890 890 node = repo[scmutil.revsingle(repo, rev, node)].node()
891 891 try:
892 892 while changesets:
893 893 # update state
894 894 state['current'] = [node]
895 895 hbisect.save_state(repo, state)
896 896 status = ui.system(command, environ={'HG_NODE': hex(node)})
897 897 if status == 125:
898 898 transition = "skip"
899 899 elif status == 0:
900 900 transition = "good"
901 901 # status < 0 means process was killed
902 902 elif status == 127:
903 903 raise error.Abort(_("failed to execute %s") % command)
904 904 elif status < 0:
905 905 raise error.Abort(_("%s killed") % command)
906 906 else:
907 907 transition = "bad"
908 908 state[transition].append(node)
909 909 ctx = repo[node]
910 910 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
911 911 hbisect.checkstate(state)
912 912 # bisect
913 913 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
914 914 # update to next check
915 915 node = nodes[0]
916 916 mayupdate(repo, node, show_stats=False)
917 917 finally:
918 918 state['current'] = [node]
919 919 hbisect.save_state(repo, state)
920 920 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
921 921 return
922 922
923 923 hbisect.checkstate(state)
924 924
925 925 # actually bisect
926 926 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
927 927 if extend:
928 928 if not changesets:
929 929 extendnode = hbisect.extendrange(repo, state, nodes, good)
930 930 if extendnode is not None:
931 931 ui.write(_("Extending search to changeset %d:%s\n")
932 932 % (extendnode.rev(), extendnode))
933 933 state['current'] = [extendnode.node()]
934 934 hbisect.save_state(repo, state)
935 935 return mayupdate(repo, extendnode.node())
936 936 raise error.Abort(_("nothing to extend"))
937 937
938 938 if changesets == 0:
939 939 hbisect.printresult(ui, repo, state, displayer, nodes, good)
940 940 else:
941 941 assert len(nodes) == 1 # only a single node can be tested next
942 942 node = nodes[0]
943 943 # compute the approximate number of remaining tests
944 944 tests, size = 0, 2
945 945 while size <= changesets:
946 946 tests, size = tests + 1, size * 2
947 947 rev = repo.changelog.rev(node)
948 948 ui.write(_("Testing changeset %d:%s "
949 949 "(%d changesets remaining, ~%d tests)\n")
950 950 % (rev, short(node), changesets, tests))
951 951 state['current'] = [node]
952 952 hbisect.save_state(repo, state)
953 953 return mayupdate(repo, node)
954 954
955 955 @command('bookmarks|bookmark',
956 956 [('f', 'force', False, _('force')),
957 957 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
958 958 ('d', 'delete', False, _('delete a given bookmark')),
959 959 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
960 960 ('i', 'inactive', False, _('mark a bookmark inactive')),
961 961 ] + formatteropts,
962 962 _('hg bookmarks [OPTIONS]... [NAME]...'))
963 963 def bookmark(ui, repo, *names, **opts):
964 964 '''create a new bookmark or list existing bookmarks
965 965
966 966 Bookmarks are labels on changesets to help track lines of development.
967 967 Bookmarks are unversioned and can be moved, renamed and deleted.
968 968 Deleting or moving a bookmark has no effect on the associated changesets.
969 969
970 970 Creating or updating to a bookmark causes it to be marked as 'active'.
971 971 The active bookmark is indicated with a '*'.
972 972 When a commit is made, the active bookmark will advance to the new commit.
973 973 A plain :hg:`update` will also advance an active bookmark, if possible.
974 974 Updating away from a bookmark will cause it to be deactivated.
975 975
976 976 Bookmarks can be pushed and pulled between repositories (see
977 977 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
978 978 diverged, a new 'divergent bookmark' of the form 'name@path' will
979 979 be created. Using :hg:`merge` will resolve the divergence.
980 980
981 981 A bookmark named '@' has the special property that :hg:`clone` will
982 982 check it out by default if it exists.
983 983
984 984 .. container:: verbose
985 985
986 986 Examples:
987 987
988 988 - create an active bookmark for a new line of development::
989 989
990 990 hg book new-feature
991 991
992 992 - create an inactive bookmark as a place marker::
993 993
994 994 hg book -i reviewed
995 995
996 996 - create an inactive bookmark on another changeset::
997 997
998 998 hg book -r .^ tested
999 999
1000 1000 - rename bookmark turkey to dinner::
1001 1001
1002 1002 hg book -m turkey dinner
1003 1003
1004 1004 - move the '@' bookmark from another branch::
1005 1005
1006 1006 hg book -f @
1007 1007 '''
1008 1008 force = opts.get('force')
1009 1009 rev = opts.get('rev')
1010 1010 delete = opts.get('delete')
1011 1011 rename = opts.get('rename')
1012 1012 inactive = opts.get('inactive')
1013 1013
1014 1014 def checkformat(mark):
1015 1015 mark = mark.strip()
1016 1016 if not mark:
1017 1017 raise error.Abort(_("bookmark names cannot consist entirely of "
1018 1018 "whitespace"))
1019 1019 scmutil.checknewlabel(repo, mark, 'bookmark')
1020 1020 return mark
1021 1021
1022 1022 def checkconflict(repo, mark, cur, force=False, target=None):
1023 1023 if mark in marks and not force:
1024 1024 if target:
1025 1025 if marks[mark] == target and target == cur:
1026 1026 # re-activating a bookmark
1027 1027 return
1028 1028 anc = repo.changelog.ancestors([repo[target].rev()])
1029 1029 bmctx = repo[marks[mark]]
1030 1030 divs = [repo[b].node() for b in marks
1031 1031 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1032 1032
1033 1033 # allow resolving a single divergent bookmark even if moving
1034 1034 # the bookmark across branches when a revision is specified
1035 1035 # that contains a divergent bookmark
1036 1036 if bmctx.rev() not in anc and target in divs:
1037 1037 bookmarks.deletedivergent(repo, [target], mark)
1038 1038 return
1039 1039
1040 1040 deletefrom = [b for b in divs
1041 1041 if repo[b].rev() in anc or b == target]
1042 1042 bookmarks.deletedivergent(repo, deletefrom, mark)
1043 1043 if bookmarks.validdest(repo, bmctx, repo[target]):
1044 1044 ui.status(_("moving bookmark '%s' forward from %s\n") %
1045 1045 (mark, short(bmctx.node())))
1046 1046 return
1047 1047 raise error.Abort(_("bookmark '%s' already exists "
1048 1048 "(use -f to force)") % mark)
1049 1049 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1050 1050 and not force):
1051 1051 raise error.Abort(
1052 1052 _("a bookmark cannot have the name of an existing branch"))
1053 1053
1054 1054 if delete and rename:
1055 1055 raise error.Abort(_("--delete and --rename are incompatible"))
1056 1056 if delete and rev:
1057 1057 raise error.Abort(_("--rev is incompatible with --delete"))
1058 1058 if rename and rev:
1059 1059 raise error.Abort(_("--rev is incompatible with --rename"))
1060 1060 if not names and (delete or rev):
1061 1061 raise error.Abort(_("bookmark name required"))
1062 1062
1063 1063 if delete or rename or names or inactive:
1064 1064 wlock = lock = tr = None
1065 1065 try:
1066 1066 wlock = repo.wlock()
1067 1067 lock = repo.lock()
1068 1068 cur = repo.changectx('.').node()
1069 1069 marks = repo._bookmarks
1070 1070 if delete:
1071 1071 tr = repo.transaction('bookmark')
1072 1072 for mark in names:
1073 1073 if mark not in marks:
1074 1074 raise error.Abort(_("bookmark '%s' does not exist") %
1075 1075 mark)
1076 1076 if mark == repo._activebookmark:
1077 1077 bookmarks.deactivate(repo)
1078 1078 del marks[mark]
1079 1079
1080 1080 elif rename:
1081 1081 tr = repo.transaction('bookmark')
1082 1082 if not names:
1083 1083 raise error.Abort(_("new bookmark name required"))
1084 1084 elif len(names) > 1:
1085 1085 raise error.Abort(_("only one new bookmark name allowed"))
1086 1086 mark = checkformat(names[0])
1087 1087 if rename not in marks:
1088 1088 raise error.Abort(_("bookmark '%s' does not exist")
1089 1089 % rename)
1090 1090 checkconflict(repo, mark, cur, force)
1091 1091 marks[mark] = marks[rename]
1092 1092 if repo._activebookmark == rename and not inactive:
1093 1093 bookmarks.activate(repo, mark)
1094 1094 del marks[rename]
1095 1095 elif names:
1096 1096 tr = repo.transaction('bookmark')
1097 1097 newact = None
1098 1098 for mark in names:
1099 1099 mark = checkformat(mark)
1100 1100 if newact is None:
1101 1101 newact = mark
1102 1102 if inactive and mark == repo._activebookmark:
1103 1103 bookmarks.deactivate(repo)
1104 1104 return
1105 1105 tgt = cur
1106 1106 if rev:
1107 1107 tgt = scmutil.revsingle(repo, rev).node()
1108 1108 checkconflict(repo, mark, cur, force, tgt)
1109 1109 marks[mark] = tgt
1110 1110 if not inactive and cur == marks[newact] and not rev:
1111 1111 bookmarks.activate(repo, newact)
1112 1112 elif cur != tgt and newact == repo._activebookmark:
1113 1113 bookmarks.deactivate(repo)
1114 1114 elif inactive:
1115 1115 if len(marks) == 0:
1116 1116 ui.status(_("no bookmarks set\n"))
1117 1117 elif not repo._activebookmark:
1118 1118 ui.status(_("no active bookmark\n"))
1119 1119 else:
1120 1120 bookmarks.deactivate(repo)
1121 1121 if tr is not None:
1122 1122 marks.recordchange(tr)
1123 1123 tr.close()
1124 1124 finally:
1125 1125 lockmod.release(tr, lock, wlock)
1126 1126 else: # show bookmarks
1127 1127 fm = ui.formatter('bookmarks', opts)
1128 1128 hexfn = fm.hexfunc
1129 1129 marks = repo._bookmarks
1130 1130 if len(marks) == 0 and fm.isplain():
1131 1131 ui.status(_("no bookmarks set\n"))
1132 1132 for bmark, n in sorted(marks.iteritems()):
1133 1133 active = repo._activebookmark
1134 1134 if bmark == active:
1135 1135 prefix, label = '*', activebookmarklabel
1136 1136 else:
1137 1137 prefix, label = ' ', ''
1138 1138
1139 1139 fm.startitem()
1140 1140 if not ui.quiet:
1141 1141 fm.plain(' %s ' % prefix, label=label)
1142 1142 fm.write('bookmark', '%s', bmark, label=label)
1143 1143 pad = " " * (25 - encoding.colwidth(bmark))
1144 1144 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1145 1145 repo.changelog.rev(n), hexfn(n), label=label)
1146 1146 fm.data(active=(bmark == active))
1147 1147 fm.plain('\n')
1148 1148 fm.end()
1149 1149
1150 1150 @command('branch',
1151 1151 [('f', 'force', None,
1152 1152 _('set branch name even if it shadows an existing branch')),
1153 1153 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1154 1154 _('[-fC] [NAME]'))
1155 1155 def branch(ui, repo, label=None, **opts):
1156 1156 """set or show the current branch name
1157 1157
1158 1158 .. note::
1159 1159
1160 1160 Branch names are permanent and global. Use :hg:`bookmark` to create a
1161 1161 light-weight bookmark instead. See :hg:`help glossary` for more
1162 1162 information about named branches and bookmarks.
1163 1163
1164 1164 With no argument, show the current branch name. With one argument,
1165 1165 set the working directory branch name (the branch will not exist
1166 1166 in the repository until the next commit). Standard practice
1167 1167 recommends that primary development take place on the 'default'
1168 1168 branch.
1169 1169
1170 1170 Unless -f/--force is specified, branch will not let you set a
1171 1171 branch name that already exists.
1172 1172
1173 1173 Use -C/--clean to reset the working directory branch to that of
1174 1174 the parent of the working directory, negating a previous branch
1175 1175 change.
1176 1176
1177 1177 Use the command :hg:`update` to switch to an existing branch. Use
1178 1178 :hg:`commit --close-branch` to mark this branch head as closed.
1179 1179 When all heads of a branch are closed, the branch will be
1180 1180 considered closed.
1181 1181
1182 1182 Returns 0 on success.
1183 1183 """
1184 1184 if label:
1185 1185 label = label.strip()
1186 1186
1187 1187 if not opts.get('clean') and not label:
1188 1188 ui.write("%s\n" % repo.dirstate.branch())
1189 1189 return
1190 1190
1191 1191 with repo.wlock():
1192 1192 if opts.get('clean'):
1193 1193 label = repo[None].p1().branch()
1194 1194 repo.dirstate.setbranch(label)
1195 1195 ui.status(_('reset working directory to branch %s\n') % label)
1196 1196 elif label:
1197 1197 if not opts.get('force') and label in repo.branchmap():
1198 1198 if label not in [p.branch() for p in repo[None].parents()]:
1199 1199 raise error.Abort(_('a branch of the same name already'
1200 1200 ' exists'),
1201 1201 # i18n: "it" refers to an existing branch
1202 1202 hint=_("use 'hg update' to switch to it"))
1203 1203 scmutil.checknewlabel(repo, label, 'branch')
1204 1204 repo.dirstate.setbranch(label)
1205 1205 ui.status(_('marked working directory as branch %s\n') % label)
1206 1206
1207 1207 # find any open named branches aside from default
1208 1208 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1209 1209 if n != "default" and not c]
1210 1210 if not others:
1211 1211 ui.status(_('(branches are permanent and global, '
1212 1212 'did you want a bookmark?)\n'))
1213 1213
1214 1214 @command('branches',
1215 1215 [('a', 'active', False,
1216 1216 _('show only branches that have unmerged heads (DEPRECATED)')),
1217 1217 ('c', 'closed', False, _('show normal and closed branches')),
1218 1218 ] + formatteropts,
1219 1219 _('[-c]'))
1220 1220 def branches(ui, repo, active=False, closed=False, **opts):
1221 1221 """list repository named branches
1222 1222
1223 1223 List the repository's named branches, indicating which ones are
1224 1224 inactive. If -c/--closed is specified, also list branches which have
1225 1225 been marked closed (see :hg:`commit --close-branch`).
1226 1226
1227 1227 Use the command :hg:`update` to switch to an existing branch.
1228 1228
1229 1229 Returns 0.
1230 1230 """
1231 1231
1232 1232 fm = ui.formatter('branches', opts)
1233 1233 hexfunc = fm.hexfunc
1234 1234
1235 1235 allheads = set(repo.heads())
1236 1236 branches = []
1237 1237 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1238 1238 isactive = not isclosed and bool(set(heads) & allheads)
1239 1239 branches.append((tag, repo[tip], isactive, not isclosed))
1240 1240 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1241 1241 reverse=True)
1242 1242
1243 1243 for tag, ctx, isactive, isopen in branches:
1244 1244 if active and not isactive:
1245 1245 continue
1246 1246 if isactive:
1247 1247 label = 'branches.active'
1248 1248 notice = ''
1249 1249 elif not isopen:
1250 1250 if not closed:
1251 1251 continue
1252 1252 label = 'branches.closed'
1253 1253 notice = _(' (closed)')
1254 1254 else:
1255 1255 label = 'branches.inactive'
1256 1256 notice = _(' (inactive)')
1257 1257 current = (tag == repo.dirstate.branch())
1258 1258 if current:
1259 1259 label = 'branches.current'
1260 1260
1261 1261 fm.startitem()
1262 1262 fm.write('branch', '%s', tag, label=label)
1263 1263 rev = ctx.rev()
1264 1264 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1265 1265 fmt = ' ' * padsize + ' %d:%s'
1266 1266 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1267 1267 label='log.changeset changeset.%s' % ctx.phasestr())
1268 1268 fm.data(active=isactive, closed=not isopen, current=current)
1269 1269 if not ui.quiet:
1270 1270 fm.plain(notice)
1271 1271 fm.plain('\n')
1272 1272 fm.end()
1273 1273
1274 1274 @command('bundle',
1275 1275 [('f', 'force', None, _('run even when the destination is unrelated')),
1276 1276 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1277 1277 _('REV')),
1278 1278 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1279 1279 _('BRANCH')),
1280 1280 ('', 'base', [],
1281 1281 _('a base changeset assumed to be available at the destination'),
1282 1282 _('REV')),
1283 1283 ('a', 'all', None, _('bundle all changesets in the repository')),
1284 1284 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1285 1285 ] + remoteopts,
1286 1286 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1287 1287 def bundle(ui, repo, fname, dest=None, **opts):
1288 1288 """create a changegroup file
1289 1289
1290 1290 Generate a changegroup file collecting changesets to be added
1291 1291 to a repository.
1292 1292
1293 1293 To create a bundle containing all changesets, use -a/--all
1294 1294 (or --base null). Otherwise, hg assumes the destination will have
1295 1295 all the nodes you specify with --base parameters. If --base is
1296 1296 not given, hg will assume the repository has all the nodes of the
1297 1297 destination, or of default-push/default if no destination is specified.
1298 1298
1299 1299 You can change bundle format with the -t/--type option. You can
1300 1300 specify a compression, a bundle version or both using a dash
1301 1301 (comp-version). The available compression methods are: none, bzip2,
1302 1302 and gzip (by default, bundles are compressed using bzip2). The
1303 1303 available formats are: v1, v2 (default to most suitable).
1304 1304
1305 1305 The bundle file can then be transferred using conventional means
1306 1306 and applied to another repository with the unbundle or pull
1307 1307 command. This is useful when direct push and pull are not
1308 1308 available or when exporting an entire repository is undesirable.
1309 1309
1310 1310 Applying bundles preserves all changeset contents including
1311 1311 permissions, copy/rename information, and revision history.
1312 1312
1313 1313 Returns 0 on success, 1 if no changes found.
1314 1314 """
1315 1315 revs = None
1316 1316 if 'rev' in opts:
1317 1317 revstrings = opts['rev']
1318 1318 revs = scmutil.revrange(repo, revstrings)
1319 1319 if revstrings and not revs:
1320 1320 raise error.Abort(_('no commits to bundle'))
1321 1321
1322 1322 bundletype = opts.get('type', 'bzip2').lower()
1323 1323 try:
1324 1324 bcompression, cgversion, params = exchange.parsebundlespec(
1325 1325 repo, bundletype, strict=False)
1326 1326 except error.UnsupportedBundleSpecification as e:
1327 1327 raise error.Abort(str(e),
1328 1328 hint=_("see 'hg help bundle' for supported "
1329 1329 "values for --type"))
1330 1330
1331 1331 # Packed bundles are a pseudo bundle format for now.
1332 1332 if cgversion == 's1':
1333 1333 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1334 1334 hint=_("use 'hg debugcreatestreamclonebundle'"))
1335 1335
1336 1336 if opts.get('all'):
1337 1337 if dest:
1338 1338 raise error.Abort(_("--all is incompatible with specifying "
1339 1339 "a destination"))
1340 1340 if opts.get('base'):
1341 1341 ui.warn(_("ignoring --base because --all was specified\n"))
1342 1342 base = ['null']
1343 1343 else:
1344 1344 base = scmutil.revrange(repo, opts.get('base'))
1345 1345 # TODO: get desired bundlecaps from command line.
1346 1346 bundlecaps = None
1347 1347 if cgversion not in changegroup.supportedoutgoingversions(repo):
1348 1348 raise error.Abort(_("repository does not support bundle version %s") %
1349 1349 cgversion)
1350 1350
1351 1351 if base:
1352 1352 if dest:
1353 1353 raise error.Abort(_("--base is incompatible with specifying "
1354 1354 "a destination"))
1355 1355 common = [repo.lookup(rev) for rev in base]
1356 1356 heads = revs and map(repo.lookup, revs) or None
1357 1357 outgoing = discovery.outgoing(repo, common, heads)
1358 1358 cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
1359 1359 bundlecaps=bundlecaps,
1360 1360 version=cgversion)
1361 1361 outgoing = None
1362 1362 else:
1363 1363 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1364 1364 dest, branches = hg.parseurl(dest, opts.get('branch'))
1365 1365 other = hg.peer(repo, opts, dest)
1366 1366 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1367 1367 heads = revs and map(repo.lookup, revs) or revs
1368 1368 outgoing = discovery.findcommonoutgoing(repo, other,
1369 1369 onlyheads=heads,
1370 1370 force=opts.get('force'),
1371 1371 portable=True)
1372 1372 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1373 1373 bundlecaps, version=cgversion)
1374 1374 if not cg:
1375 1375 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1376 1376 return 1
1377 1377
1378 1378 if cgversion == '01': #bundle1
1379 1379 if bcompression is None:
1380 1380 bcompression = 'UN'
1381 1381 bversion = 'HG10' + bcompression
1382 1382 bcompression = None
1383 1383 else:
1384 1384 assert cgversion == '02'
1385 1385 bversion = 'HG20'
1386 1386
1387 1387 bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression)
1388 1388
1389 1389 @command('cat',
1390 1390 [('o', 'output', '',
1391 1391 _('print output to file with formatted name'), _('FORMAT')),
1392 1392 ('r', 'rev', '', _('print the given revision'), _('REV')),
1393 1393 ('', 'decode', None, _('apply any matching decode filter')),
1394 1394 ] + walkopts,
1395 1395 _('[OPTION]... FILE...'),
1396 1396 inferrepo=True)
1397 1397 def cat(ui, repo, file1, *pats, **opts):
1398 1398 """output the current or given revision of files
1399 1399
1400 1400 Print the specified files as they were at the given revision. If
1401 1401 no revision is given, the parent of the working directory is used.
1402 1402
1403 1403 Output may be to a file, in which case the name of the file is
1404 1404 given using a format string. The formatting rules are as follows:
1405 1405
1406 1406 :``%%``: literal "%" character
1407 1407 :``%s``: basename of file being printed
1408 1408 :``%d``: dirname of file being printed, or '.' if in repository root
1409 1409 :``%p``: root-relative path name of file being printed
1410 1410 :``%H``: changeset hash (40 hexadecimal digits)
1411 1411 :``%R``: changeset revision number
1412 1412 :``%h``: short-form changeset hash (12 hexadecimal digits)
1413 1413 :``%r``: zero-padded changeset revision number
1414 1414 :``%b``: basename of the exporting repository
1415 1415
1416 1416 Returns 0 on success.
1417 1417 """
1418 1418 ctx = scmutil.revsingle(repo, opts.get('rev'))
1419 1419 m = scmutil.match(ctx, (file1,) + pats, opts)
1420 1420
1421 1421 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1422 1422
1423 1423 @command('^clone',
1424 1424 [('U', 'noupdate', None, _('the clone will include an empty working '
1425 1425 'directory (only a repository)')),
1426 1426 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1427 1427 _('REV')),
1428 1428 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1429 1429 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1430 1430 ('', 'pull', None, _('use pull protocol to copy metadata')),
1431 1431 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1432 1432 ] + remoteopts,
1433 1433 _('[OPTION]... SOURCE [DEST]'),
1434 1434 norepo=True)
1435 1435 def clone(ui, source, dest=None, **opts):
1436 1436 """make a copy of an existing repository
1437 1437
1438 1438 Create a copy of an existing repository in a new directory.
1439 1439
1440 1440 If no destination directory name is specified, it defaults to the
1441 1441 basename of the source.
1442 1442
1443 1443 The location of the source is added to the new repository's
1444 1444 ``.hg/hgrc`` file, as the default to be used for future pulls.
1445 1445
1446 1446 Only local paths and ``ssh://`` URLs are supported as
1447 1447 destinations. For ``ssh://`` destinations, no working directory or
1448 1448 ``.hg/hgrc`` will be created on the remote side.
1449 1449
1450 1450 If the source repository has a bookmark called '@' set, that
1451 1451 revision will be checked out in the new repository by default.
1452 1452
1453 1453 To check out a particular version, use -u/--update, or
1454 1454 -U/--noupdate to create a clone with no working directory.
1455 1455
1456 1456 To pull only a subset of changesets, specify one or more revisions
1457 1457 identifiers with -r/--rev or branches with -b/--branch. The
1458 1458 resulting clone will contain only the specified changesets and
1459 1459 their ancestors. These options (or 'clone src#rev dest') imply
1460 1460 --pull, even for local source repositories.
1461 1461
1462 1462 .. note::
1463 1463
1464 1464 Specifying a tag will include the tagged changeset but not the
1465 1465 changeset containing the tag.
1466 1466
1467 1467 .. container:: verbose
1468 1468
1469 1469 For efficiency, hardlinks are used for cloning whenever the
1470 1470 source and destination are on the same filesystem (note this
1471 1471 applies only to the repository data, not to the working
1472 1472 directory). Some filesystems, such as AFS, implement hardlinking
1473 1473 incorrectly, but do not report errors. In these cases, use the
1474 1474 --pull option to avoid hardlinking.
1475 1475
1476 1476 In some cases, you can clone repositories and the working
1477 1477 directory using full hardlinks with ::
1478 1478
1479 1479 $ cp -al REPO REPOCLONE
1480 1480
1481 1481 This is the fastest way to clone, but it is not always safe. The
1482 1482 operation is not atomic (making sure REPO is not modified during
1483 1483 the operation is up to you) and you have to make sure your
1484 1484 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1485 1485 so). Also, this is not compatible with certain extensions that
1486 1486 place their metadata under the .hg directory, such as mq.
1487 1487
1488 1488 Mercurial will update the working directory to the first applicable
1489 1489 revision from this list:
1490 1490
1491 1491 a) null if -U or the source repository has no changesets
1492 1492 b) if -u . and the source repository is local, the first parent of
1493 1493 the source repository's working directory
1494 1494 c) the changeset specified with -u (if a branch name, this means the
1495 1495 latest head of that branch)
1496 1496 d) the changeset specified with -r
1497 1497 e) the tipmost head specified with -b
1498 1498 f) the tipmost head specified with the url#branch source syntax
1499 1499 g) the revision marked with the '@' bookmark, if present
1500 1500 h) the tipmost head of the default branch
1501 1501 i) tip
1502 1502
1503 1503 When cloning from servers that support it, Mercurial may fetch
1504 1504 pre-generated data from a server-advertised URL. When this is done,
1505 1505 hooks operating on incoming changesets and changegroups may fire twice,
1506 1506 once for the bundle fetched from the URL and another for any additional
1507 1507 data not fetched from this URL. In addition, if an error occurs, the
1508 1508 repository may be rolled back to a partial clone. This behavior may
1509 1509 change in future releases. See :hg:`help -e clonebundles` for more.
1510 1510
1511 1511 Examples:
1512 1512
1513 1513 - clone a remote repository to a new directory named hg/::
1514 1514
1515 1515 hg clone https://www.mercurial-scm.org/repo/hg/
1516 1516
1517 1517 - create a lightweight local clone::
1518 1518
1519 1519 hg clone project/ project-feature/
1520 1520
1521 1521 - clone from an absolute path on an ssh server (note double-slash)::
1522 1522
1523 1523 hg clone ssh://user@server//home/projects/alpha/
1524 1524
1525 1525 - do a high-speed clone over a LAN while checking out a
1526 1526 specified version::
1527 1527
1528 1528 hg clone --uncompressed http://server/repo -u 1.5
1529 1529
1530 1530 - create a repository without changesets after a particular revision::
1531 1531
1532 1532 hg clone -r 04e544 experimental/ good/
1533 1533
1534 1534 - clone (and track) a particular named branch::
1535 1535
1536 1536 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1537 1537
1538 1538 See :hg:`help urls` for details on specifying URLs.
1539 1539
1540 1540 Returns 0 on success.
1541 1541 """
1542 1542 if opts.get('noupdate') and opts.get('updaterev'):
1543 1543 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1544 1544
1545 1545 r = hg.clone(ui, opts, source, dest,
1546 1546 pull=opts.get('pull'),
1547 1547 stream=opts.get('uncompressed'),
1548 1548 rev=opts.get('rev'),
1549 1549 update=opts.get('updaterev') or not opts.get('noupdate'),
1550 1550 branch=opts.get('branch'),
1551 1551 shareopts=opts.get('shareopts'))
1552 1552
1553 1553 return r is None
1554 1554
1555 1555 @command('^commit|ci',
1556 1556 [('A', 'addremove', None,
1557 1557 _('mark new/missing files as added/removed before committing')),
1558 1558 ('', 'close-branch', None,
1559 1559 _('mark a branch head as closed')),
1560 1560 ('', 'amend', None, _('amend the parent of the working directory')),
1561 1561 ('s', 'secret', None, _('use the secret phase for committing')),
1562 1562 ('e', 'edit', None, _('invoke editor on commit messages')),
1563 1563 ('i', 'interactive', None, _('use interactive mode')),
1564 1564 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1565 1565 _('[OPTION]... [FILE]...'),
1566 1566 inferrepo=True)
1567 1567 def commit(ui, repo, *pats, **opts):
1568 1568 """commit the specified files or all outstanding changes
1569 1569
1570 1570 Commit changes to the given files into the repository. Unlike a
1571 1571 centralized SCM, this operation is a local operation. See
1572 1572 :hg:`push` for a way to actively distribute your changes.
1573 1573
1574 1574 If a list of files is omitted, all changes reported by :hg:`status`
1575 1575 will be committed.
1576 1576
1577 1577 If you are committing the result of a merge, do not provide any
1578 1578 filenames or -I/-X filters.
1579 1579
1580 1580 If no commit message is specified, Mercurial starts your
1581 1581 configured editor where you can enter a message. In case your
1582 1582 commit fails, you will find a backup of your message in
1583 1583 ``.hg/last-message.txt``.
1584 1584
1585 1585 The --close-branch flag can be used to mark the current branch
1586 1586 head closed. When all heads of a branch are closed, the branch
1587 1587 will be considered closed and no longer listed.
1588 1588
1589 1589 The --amend flag can be used to amend the parent of the
1590 1590 working directory with a new commit that contains the changes
1591 1591 in the parent in addition to those currently reported by :hg:`status`,
1592 1592 if there are any. The old commit is stored in a backup bundle in
1593 1593 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1594 1594 on how to restore it).
1595 1595
1596 1596 Message, user and date are taken from the amended commit unless
1597 1597 specified. When a message isn't specified on the command line,
1598 1598 the editor will open with the message of the amended commit.
1599 1599
1600 1600 It is not possible to amend public changesets (see :hg:`help phases`)
1601 1601 or changesets that have children.
1602 1602
1603 1603 See :hg:`help dates` for a list of formats valid for -d/--date.
1604 1604
1605 1605 Returns 0 on success, 1 if nothing changed.
1606 1606
1607 1607 .. container:: verbose
1608 1608
1609 1609 Examples:
1610 1610
1611 1611 - commit all files ending in .py::
1612 1612
1613 1613 hg commit --include "set:**.py"
1614 1614
1615 1615 - commit all non-binary files::
1616 1616
1617 1617 hg commit --exclude "set:binary()"
1618 1618
1619 1619 - amend the current commit and set the date to now::
1620 1620
1621 1621 hg commit --amend --date now
1622 1622 """
1623 1623 wlock = lock = None
1624 1624 try:
1625 1625 wlock = repo.wlock()
1626 1626 lock = repo.lock()
1627 1627 return _docommit(ui, repo, *pats, **opts)
1628 1628 finally:
1629 1629 release(lock, wlock)
1630 1630
1631 1631 def _docommit(ui, repo, *pats, **opts):
1632 1632 if opts.get('interactive'):
1633 1633 opts.pop('interactive')
1634 1634 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1635 1635 cmdutil.recordfilter, *pats, **opts)
1636 1636 # ret can be 0 (no changes to record) or the value returned by
1637 1637 # commit(): 1 if nothing changed, or None on success.
1638 1638 return 1 if ret == 0 else ret
1639 1639
1640 1640 if opts.get('subrepos'):
1641 1641 if opts.get('amend'):
1642 1642 raise error.Abort(_('cannot amend with --subrepos'))
1643 1643 # Let --subrepos on the command line override config setting.
1644 1644 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1645 1645
1646 1646 cmdutil.checkunfinished(repo, commit=True)
1647 1647
1648 1648 branch = repo[None].branch()
1649 1649 bheads = repo.branchheads(branch)
1650 1650
1651 1651 extra = {}
1652 1652 if opts.get('close_branch'):
1653 1653 extra['close'] = 1
1654 1654
1655 1655 if not bheads:
1656 1656 raise error.Abort(_('can only close branch heads'))
1657 1657 elif opts.get('amend'):
1658 1658 if repo[None].parents()[0].p1().branch() != branch and \
1659 1659 repo[None].parents()[0].p2().branch() != branch:
1660 1660 raise error.Abort(_('can only close branch heads'))
1661 1661
1662 1662 if opts.get('amend'):
1663 1663 if ui.configbool('ui', 'commitsubrepos'):
1664 1664 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1665 1665
1666 1666 old = repo['.']
1667 1667 if not old.mutable():
1668 1668 raise error.Abort(_('cannot amend public changesets'))
1669 1669 if len(repo[None].parents()) > 1:
1670 1670 raise error.Abort(_('cannot amend while merging'))
1671 1671 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1672 1672 if not allowunstable and old.children():
1673 1673 raise error.Abort(_('cannot amend changeset with children'))
1674 1674
1675 1675 # Currently histedit gets confused if an amend happens while histedit
1676 1676 # is in progress. Since we have a checkunfinished command, we are
1677 1677 # temporarily honoring it.
1678 1678 #
1679 1679 # Note: eventually this guard will be removed. Please do not expect
1680 1680 # this behavior to remain.
1681 1681 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1682 1682 cmdutil.checkunfinished(repo)
1683 1683
1684 1684 # commitfunc is used only for temporary amend commit by cmdutil.amend
1685 1685 def commitfunc(ui, repo, message, match, opts):
1686 1686 return repo.commit(message,
1687 1687 opts.get('user') or old.user(),
1688 1688 opts.get('date') or old.date(),
1689 1689 match,
1690 1690 extra=extra)
1691 1691
1692 1692 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1693 1693 if node == old.node():
1694 1694 ui.status(_("nothing changed\n"))
1695 1695 return 1
1696 1696 else:
1697 1697 def commitfunc(ui, repo, message, match, opts):
1698 1698 backup = ui.backupconfig('phases', 'new-commit')
1699 1699 baseui = repo.baseui
1700 1700 basebackup = baseui.backupconfig('phases', 'new-commit')
1701 1701 try:
1702 1702 if opts.get('secret'):
1703 1703 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1704 1704 # Propagate to subrepos
1705 1705 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1706 1706
1707 1707 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1708 1708 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1709 1709 return repo.commit(message, opts.get('user'), opts.get('date'),
1710 1710 match,
1711 1711 editor=editor,
1712 1712 extra=extra)
1713 1713 finally:
1714 1714 ui.restoreconfig(backup)
1715 1715 repo.baseui.restoreconfig(basebackup)
1716 1716
1717 1717
1718 1718 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1719 1719
1720 1720 if not node:
1721 1721 stat = cmdutil.postcommitstatus(repo, pats, opts)
1722 1722 if stat[3]:
1723 1723 ui.status(_("nothing changed (%d missing files, see "
1724 1724 "'hg status')\n") % len(stat[3]))
1725 1725 else:
1726 1726 ui.status(_("nothing changed\n"))
1727 1727 return 1
1728 1728
1729 1729 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1730 1730
1731 1731 @command('config|showconfig|debugconfig',
1732 1732 [('u', 'untrusted', None, _('show untrusted configuration options')),
1733 1733 ('e', 'edit', None, _('edit user config')),
1734 1734 ('l', 'local', None, _('edit repository config')),
1735 1735 ('g', 'global', None, _('edit global config'))] + formatteropts,
1736 1736 _('[-u] [NAME]...'),
1737 1737 optionalrepo=True)
1738 1738 def config(ui, repo, *values, **opts):
1739 1739 """show combined config settings from all hgrc files
1740 1740
1741 1741 With no arguments, print names and values of all config items.
1742 1742
1743 1743 With one argument of the form section.name, print just the value
1744 1744 of that config item.
1745 1745
1746 1746 With multiple arguments, print names and values of all config
1747 1747 items with matching section names.
1748 1748
1749 1749 With --edit, start an editor on the user-level config file. With
1750 1750 --global, edit the system-wide config file. With --local, edit the
1751 1751 repository-level config file.
1752 1752
1753 1753 With --debug, the source (filename and line number) is printed
1754 1754 for each config item.
1755 1755
1756 1756 See :hg:`help config` for more information about config files.
1757 1757
1758 1758 Returns 0 on success, 1 if NAME does not exist.
1759 1759
1760 1760 """
1761 1761
1762 1762 if opts.get('edit') or opts.get('local') or opts.get('global'):
1763 1763 if opts.get('local') and opts.get('global'):
1764 1764 raise error.Abort(_("can't use --local and --global together"))
1765 1765
1766 1766 if opts.get('local'):
1767 1767 if not repo:
1768 1768 raise error.Abort(_("can't use --local outside a repository"))
1769 1769 paths = [repo.join('hgrc')]
1770 1770 elif opts.get('global'):
1771 1771 paths = scmutil.systemrcpath()
1772 1772 else:
1773 1773 paths = scmutil.userrcpath()
1774 1774
1775 1775 for f in paths:
1776 1776 if os.path.exists(f):
1777 1777 break
1778 1778 else:
1779 1779 if opts.get('global'):
1780 1780 samplehgrc = uimod.samplehgrcs['global']
1781 1781 elif opts.get('local'):
1782 1782 samplehgrc = uimod.samplehgrcs['local']
1783 1783 else:
1784 1784 samplehgrc = uimod.samplehgrcs['user']
1785 1785
1786 1786 f = paths[0]
1787 1787 fp = open(f, "w")
1788 1788 fp.write(samplehgrc)
1789 1789 fp.close()
1790 1790
1791 1791 editor = ui.geteditor()
1792 1792 ui.system("%s \"%s\"" % (editor, f),
1793 1793 onerr=error.Abort, errprefix=_("edit failed"))
1794 1794 return
1795 1795
1796 1796 fm = ui.formatter('config', opts)
1797 1797 for f in scmutil.rcpath():
1798 1798 ui.debug('read config from: %s\n' % f)
1799 1799 untrusted = bool(opts.get('untrusted'))
1800 1800 if values:
1801 1801 sections = [v for v in values if '.' not in v]
1802 1802 items = [v for v in values if '.' in v]
1803 1803 if len(items) > 1 or (items and sections):
1804 1804 raise error.Abort(_('only one config item permitted'))
1805 1805 matched = False
1806 1806 for section, name, value in ui.walkconfig(untrusted=untrusted):
1807 1807 value = str(value)
1808 1808 if fm.isplain():
1809 1809 value = value.replace('\n', '\\n')
1810 1810 entryname = section + '.' + name
1811 1811 if values:
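# a bare section name selects every item in that section; a full
# 'section.name' selects exactly one item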
1812 1812 for v in values:
1813 1813 if v == section:
1814 1814 fm.startitem()
1815 1815 fm.condwrite(ui.debugflag, 'source', '%s: ',
1816 1816 ui.configsource(section, name, untrusted))
1817 1817 fm.write('name value', '%s=%s\n', entryname, value)
1818 1818 matched = True
1819 1819 elif v == entryname:
1820 1820 fm.startitem()
1821 1821 fm.condwrite(ui.debugflag, 'source', '%s: ',
1822 1822 ui.configsource(section, name, untrusted))
1823 1823 fm.write('value', '%s\n', value)
1824 1824 fm.data(name=entryname)
1825 1825 matched = True
1826 1826 else:
1827 1827 fm.startitem()
1828 1828 fm.condwrite(ui.debugflag, 'source', '%s: ',
1829 1829 ui.configsource(section, name, untrusted))
1830 1830 fm.write('name value', '%s=%s\n', entryname, value)
1831 1831 matched = True
1832 1832 fm.end()
1833 1833 if matched:
1834 1834 return 0
1835 1835 return 1
1836 1836
1837 1837 @command('copy|cp',
1838 1838 [('A', 'after', None, _('record a copy that has already occurred')),
1839 1839 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1840 1840 ] + walkopts + dryrunopts,
1841 1841 _('[OPTION]... [SOURCE]... DEST'))
1842 1842 def copy(ui, repo, *pats, **opts):
1843 1843 """mark files as copied for the next commit
1844 1844
1845 1845 Mark dest as having copies of source files. If dest is a
1846 1846 directory, copies are put in that directory. If dest is a file,
1847 1847 the source must be a single file.
1848 1848
1849 1849 By default, this command copies the contents of files as they
1850 1850 exist in the working directory. If invoked with -A/--after, the
1851 1851 operation is recorded, but no copying is performed.
1852 1852
1853 1853 This command takes effect with the next commit. To undo a copy
1854 1854 before that, see :hg:`revert`.
1855 1855
1856 1856 Returns 0 on success, 1 if errors are encountered.
1857 1857 """
1858 1858 with repo.wlock(False):
1859 1859 return cmdutil.copy(ui, repo, pats, opts)
1860 1860
1861 1861 @command('debuginstall', [] + formatteropts, '', norepo=True)
1862 1862 def debuginstall(ui, **opts):
1863 1863 '''test Mercurial installation
1864 1864
1865 1865 Returns 0 on success.
1866 1866 '''
1867 1867
1868 1868 def writetemp(contents):
1869 1869 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1870 1870 f = os.fdopen(fd, "wb")
1871 1871 f.write(contents)
1872 1872 f.close()
1873 1873 return name
1874 1874
1875 1875 problems = 0
1876 1876
1877 1877 fm = ui.formatter('debuginstall', opts)
1878 1878 fm.startitem()
1879 1879
1880 1880 # encoding
1881 1881 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1882 1882 err = None
1883 1883 try:
1884 1884 encoding.fromlocal("test")
1885 1885 except error.Abort as inst:
1886 1886 err = inst
1887 1887 problems += 1
1888 1888 fm.condwrite(err, 'encodingerror', _(" %s\n"
1889 1889 " (check that your locale is properly set)\n"), err)
1890 1890
1891 1891 # Python
1892 1892 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1893 1893 sys.executable)
1894 1894 fm.write('pythonver', _("checking Python version (%s)\n"),
1895 1895 ("%d.%d.%d" % sys.version_info[:3]))
1896 1896 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1897 1897 os.path.dirname(os.__file__))
1898 1898
1899 1899 security = set(sslutil.supportedprotocols)
1900 1900 if sslutil.hassni:
1901 1901 security.add('sni')
1902 1902
1903 1903 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1904 1904 fm.formatlist(sorted(security), name='protocol',
1905 1905 fmt='%s', sep=','))
1906 1906
1907 1907 # These are warnings, not errors. So don't increment problem count. This
1908 1908 # may change in the future.
1909 1909 if 'tls1.2' not in security:
1910 1910 fm.plain(_(' TLS 1.2 not supported by Python install; '
1911 1911 'network connections lack modern security\n'))
1912 1912 if 'sni' not in security:
1913 1913 fm.plain(_(' SNI not supported by Python install; may have '
1914 1914 'connectivity issues with some servers\n'))
1915 1915
1916 1916 # TODO print CA cert info
1917 1917
1918 1918 # hg version
1919 1919 hgver = util.version()
1920 1920 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1921 1921 hgver.split('+')[0])
1922 1922 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1923 1923 '+'.join(hgver.split('+')[1:]))
1924 1924
1925 1925 # compiled modules
1926 1926 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1927 1927 policy.policy)
1928 1928 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1929 1929 os.path.dirname(__file__))
1930 1930
1931 1931 err = None
1932 1932 try:
1933 1933 from . import (
1934 1934 base85,
1935 1935 bdiff,
1936 1936 mpatch,
1937 1937 osutil,
1938 1938 )
1939 1939 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1940 1940 except Exception as inst:
1941 1941 err = inst
1942 1942 problems += 1
1943 1943 fm.condwrite(err, 'extensionserror', " %s\n", err)
1944 1944
1945 1945 compengines = util.compengines._engines.values()
1946 1946 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1947 1947 fm.formatlist(sorted(e.name() for e in compengines),
1948 1948 name='compengine', fmt='%s', sep=', '))
1949 1949 fm.write('compenginesavail', _('checking available compression engines '
1950 1950 '(%s)\n'),
1951 1951 fm.formatlist(sorted(e.name() for e in compengines
1952 1952 if e.available()),
1953 1953 name='compengine', fmt='%s', sep=', '))
1954 1954
1955 1955 # templates
1956 1956 p = templater.templatepaths()
1957 1957 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1958 1958 fm.condwrite(not p, '', _(" no template directories found\n"))
1959 1959 if p:
1960 1960 m = templater.templatepath("map-cmdline.default")
1961 1961 if m:
1962 1962 # template found, check if it is working
1963 1963 err = None
1964 1964 try:
1965 1965 templater.templater.frommapfile(m)
1966 1966 except Exception as inst:
1967 1967 err = inst
1968 1968 p = None
1969 1969 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1970 1970 else:
1971 1971 p = None
1972 1972 fm.condwrite(p, 'defaulttemplate',
1973 1973 _("checking default template (%s)\n"), m)
1974 1974 fm.condwrite(not m, 'defaulttemplatenotfound',
1975 1975 _(" template '%s' not found\n"), "default")
1976 1976 if not p:
1977 1977 problems += 1
1978 1978 fm.condwrite(not p, '',
1979 1979 _(" (templates seem to have been installed incorrectly)\n"))
1980 1980
1981 1981 # editor
1982 1982 editor = ui.geteditor()
1983 1983 editor = util.expandpath(editor)
1984 1984 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1985 1985 cmdpath = util.findexe(shlex.split(editor)[0])
1986 1986 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1987 1987 _(" No commit editor set and can't find %s in PATH\n"
1988 1988 " (specify a commit editor in your configuration"
1989 1989 " file)\n"), not cmdpath and editor == 'vi' and editor)
1990 1990 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1991 1991 _(" Can't find editor '%s' in PATH\n"
1992 1992 " (specify a commit editor in your configuration"
1993 1993 " file)\n"), not cmdpath and editor)
1994 1994 if not cmdpath and editor != 'vi':
1995 1995 problems += 1
1996 1996
1997 1997 # check username
1998 1998 username = None
1999 1999 err = None
2000 2000 try:
2001 2001 username = ui.username()
2002 2002 except error.Abort as e:
2003 2003 err = e
2004 2004 problems += 1
2005 2005
2006 2006 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
2007 2007 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
2008 2008 " (specify a username in your configuration file)\n"), err)
2009 2009
2010 2010 fm.condwrite(not problems, '',
2011 2011 _("no problems detected\n"))
2012 2012 if not problems:
2013 2013 fm.data(problems=problems)
2014 2014 fm.condwrite(problems, 'problems',
2015 2015 _("%d problems detected,"
2016 2016 " please check your install!\n"), problems)
2017 2017 fm.end()
2018 2018
2019 2019 return problems
2020 2020
2021 2021 @command('debugknown', [], _('REPO ID...'), norepo=True)
2022 2022 def debugknown(ui, repopath, *ids, **opts):
2023 2023 """test whether node ids are known to a repo
2024 2024
2025 2025 Every ID must be a full-length hex node id string. Returns a list of 0s
2026 2026 and 1s indicating unknown/known.
2027 2027 """
2028 2028 repo = hg.peer(ui, opts, repopath)
2029 2029 if not repo.capable('known'):
2030 2030 raise error.Abort("known() not supported by target repository")
2031 2031 flags = repo.known([bin(s) for s in ids])
2032 2032 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2033 2033
2034 2034 @command('debuglabelcomplete', [], _('LABEL...'))
2035 2035 def debuglabelcomplete(ui, repo, *args):
2036 2036 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2037 2037 debugnamecomplete(ui, repo, *args)
2038 2038
2039 2039 @command('debugmergestate', [], '')
2040 2040 def debugmergestate(ui, repo, *args):
2041 2041 """print merge state
2042 2042
2043 2043 Use --verbose to print out information about whether v1 or v2 merge state
2044 2044 was chosen."""
2045 2045 def _hashornull(h):
2046 2046 if h == nullhex:
2047 2047 return 'null'
2048 2048 else:
2049 2049 return h
2050 2050
2051 2051 def printrecords(version):
2052 2052 ui.write(('* version %s records\n') % version)
2053 2053 if version == 1:
2054 2054 records = v1records
2055 2055 else:
2056 2056 records = v2records
2057 2057
2058 2058 for rtype, record in records:
2059 2059 # pretty print some record types
2060 2060 if rtype == 'L':
2061 2061 ui.write(('local: %s\n') % record)
2062 2062 elif rtype == 'O':
2063 2063 ui.write(('other: %s\n') % record)
2064 2064 elif rtype == 'm':
2065 2065 driver, mdstate = record.split('\0', 1)
2066 2066 ui.write(('merge driver: %s (state "%s")\n')
2067 2067 % (driver, mdstate))
2068 2068 elif rtype in 'FDC':
2069 2069 r = record.split('\0')
2070 2070 f, state, hash, lfile, afile, anode, ofile = r[0:7]
2071 2071 if version == 1:
2072 2072 onode = 'not stored in v1 format'
2073 2073 flags = r[7]
2074 2074 else:
2075 2075 onode, flags = r[7:9]
2076 2076 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
2077 2077 % (f, rtype, state, _hashornull(hash)))
2078 2078 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
2079 2079 ui.write((' ancestor path: %s (node %s)\n')
2080 2080 % (afile, _hashornull(anode)))
2081 2081 ui.write((' other path: %s (node %s)\n')
2082 2082 % (ofile, _hashornull(onode)))
2083 2083 elif rtype == 'f':
2084 2084 filename, rawextras = record.split('\0', 1)
2085 2085 extras = rawextras.split('\0')
2086 2086 i = 0
2087 2087 extrastrings = []
2088 2088 while i < len(extras):
2089 2089 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
2090 2090 i += 2
2091 2091
2092 2092 ui.write(('file extras: %s (%s)\n')
2093 2093 % (filename, ', '.join(extrastrings)))
2094 2094 elif rtype == 'l':
2095 2095 labels = record.split('\0', 2)
2096 2096 labels = [l for l in labels if len(l) > 0]
2097 2097 ui.write(('labels:\n'))
2098 2098 ui.write((' local: %s\n' % labels[0]))
2099 2099 ui.write((' other: %s\n' % labels[1]))
2100 2100 if len(labels) > 2:
2101 2101 ui.write((' base: %s\n' % labels[2]))
2102 2102 else:
2103 2103 ui.write(('unrecognized entry: %s\t%s\n')
2104 2104 % (rtype, record.replace('\0', '\t')))
2105 2105
2106 2106 # Avoid mergestate.read() since it may raise an exception for unsupported
2107 2107 # merge state records. We shouldn't be doing this, but this is OK since this
2108 2108 # command is pretty low-level.
2109 2109 ms = mergemod.mergestate(repo)
2110 2110
2111 2111 # sort so that reasonable information is on top
2112 2112 v1records = ms._readrecordsv1()
2113 2113 v2records = ms._readrecordsv2()
2114 2114 order = 'LOml'
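# record types listed in 'order' sort in that order; any other type
# sorts after them, ordered by its payload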
2115 2115 def key(r):
2116 2116 idx = order.find(r[0])
2117 2117 if idx == -1:
2118 2118 return (1, r[1])
2119 2119 else:
2120 2120 return (0, idx)
2121 2121 v1records.sort(key=key)
2122 2122 v2records.sort(key=key)
2123 2123
2124 2124 if not v1records and not v2records:
2125 2125 ui.write(('no merge state found\n'))
2126 2126 elif not v2records:
2127 2127 ui.note(('no version 2 merge state\n'))
2128 2128 printrecords(1)
2129 2129 elif ms._v1v2match(v1records, v2records):
2130 2130 ui.note(('v1 and v2 states match: using v2\n'))
2131 2131 printrecords(2)
2132 2132 else:
2133 2133 ui.note(('v1 and v2 states mismatch: using v1\n'))
2134 2134 printrecords(1)
2135 2135 if ui.verbose:
2136 2136 printrecords(2)
2137 2137
2138 2138 @command('debugnamecomplete', [], _('NAME...'))
2139 2139 def debugnamecomplete(ui, repo, *args):
2140 2140 '''complete "names" - tags, open branch names, bookmark names'''
2141 2141
2142 2142 names = set()
2143 2143 # since we previously only listed open branches, we will handle that
2144 2144 # specially (after this for loop)
2145 2145 for name, ns in repo.names.iteritems():
2146 2146 if name != 'branches':
2147 2147 names.update(ns.listnames(repo))
2148 2148 names.update(tag for (tag, heads, tip, closed)
2149 2149 in repo.branchmap().iterbranches() if not closed)
2150 2150 completions = set()
2151 2151 if not args:
2152 2152 args = ['']
2153 2153 for a in args:
2154 2154 completions.update(n for n in names if n.startswith(a))
2155 2155 ui.write('\n'.join(sorted(completions)))
2156 2156 ui.write('\n')
2157 2157
2158 2158 @command('debuglocks',
2159 2159 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2160 2160 ('W', 'force-wlock', None,
2161 2161 _('free the working state lock (DANGEROUS)'))],
2162 2162 _('[OPTION]...'))
2163 2163 def debuglocks(ui, repo, **opts):
2164 2164 """show or modify state of locks
2165 2165
2166 2166 By default, this command will show which locks are held. This
2167 2167 includes the user and process holding the lock, the amount of time
2168 2168 the lock has been held, and the machine name where the process is
2169 2169 running if it's not local.
2170 2170
2171 2171 Locks protect the integrity of Mercurial's data, so should be
2172 2172 treated with care. System crashes or other interruptions may cause
2173 2173 locks to not be properly released, though Mercurial will usually
2174 2174 detect and remove such stale locks automatically.
2175 2175
2176 2176 However, detecting stale locks may not always be possible (for
2177 2177 instance, on a shared filesystem). Removing locks may also be
2178 2178 blocked by filesystem permissions.
2179 2179
2180 2180 Returns 0 if no locks are held.
2181 2181
2182 2182 """
2183 2183
2184 2184 if opts.get('force_lock'):
2185 2185 repo.svfs.unlink('lock')
2186 2186 if opts.get('force_wlock'):
2187 2187 repo.vfs.unlink('wlock')
2188 2188 if opts.get('force_lock') or opts.get('force_wlock'):
2189 2189 return 0
2190 2190
2191 2191 now = time.time()
2192 2192 held = 0
2193 2193
2194 2194 def report(vfs, name, method):
2195 2195 # this causes stale locks to get reaped for more accurate reporting
2196 2196 try:
2197 2197 l = method(False)
2198 2198 except error.LockHeld:
2199 2199 l = None
2200 2200
2201 2201 if l:
2202 2202 l.release()
2203 2203 else:
2204 2204 try:
2205 2205 stat = vfs.lstat(name)
2206 2206 age = now - stat.st_mtime
2207 2207 user = util.username(stat.st_uid)
2208 2208 locker = vfs.readlock(name)
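# a lock file normally records its holder as 'host:pid'; anything
# else is printed verbatim below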
2209 2209 if ":" in locker:
2210 2210 host, pid = locker.split(':')
2211 2211 if host == socket.gethostname():
2212 2212 locker = 'user %s, process %s' % (user, pid)
2213 2213 else:
2214 2214 locker = 'user %s, process %s, host %s' \
2215 2215 % (user, pid, host)
2216 2216 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
2217 2217 return 1
2218 2218 except OSError as e:
2219 2219 if e.errno != errno.ENOENT:
2220 2220 raise
2221 2221
2222 2222 ui.write(("%-6s free\n") % (name + ":"))
2223 2223 return 0
2224 2224
2225 2225 held += report(repo.svfs, "lock", repo.lock)
2226 2226 held += report(repo.vfs, "wlock", repo.wlock)
2227 2227
2228 2228 return held
2229 2229
2230 2230 @command('debugobsolete',
2231 2231 [('', 'flags', 0, _('markers flag')),
2232 2232 ('', 'record-parents', False,
2233 2233 _('record parent information for the precursor')),
2234 2234 ('r', 'rev', [], _('display markers relevant to REV')),
2235 2235 ('', 'index', False, _('display index of the marker')),
2236 2236 ('', 'delete', [], _('delete markers specified by indices')),
2237 2237 ] + commitopts2 + formatteropts,
2238 2238 _('[OBSOLETED [REPLACEMENT ...]]'))
2239 2239 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2240 2240 """create arbitrary obsolete marker
2241 2241
2242 2242 With no arguments, displays the list of obsolescence markers."""
2243 2243
2244 2244 def parsenodeid(s):
2245 2245 try:
2246 2246 # We do not use revsingle/revrange functions here to accept
2247 2247 # arbitrary node identifiers, possibly not present in the
2248 2248 # local repository.
2249 2249 n = bin(s)
2250 2250 if len(n) != len(nullid):
2251 2251 raise TypeError()
2252 2252 return n
2253 2253 except TypeError:
2254 2254 raise error.Abort('changeset references must be full hexadecimal '
2255 2255 'node identifiers')
2256 2256
2257 2257 if opts.get('delete'):
2258 2258 indices = []
2259 2259 for v in opts.get('delete'):
2260 2260 try:
2261 2261 indices.append(int(v))
2262 2262 except ValueError:
2263 2263 raise error.Abort(_('invalid index value: %r') % v,
2264 2264 hint=_('use integers for indices'))
2265 2265
2266 2266 if repo.currenttransaction():
2267 2267 raise error.Abort(_('cannot delete obsmarkers in the middle '
2268 2268 'of a transaction.'))
2269 2269
2270 2270 with repo.lock():
2271 2271 n = repair.deleteobsmarkers(repo.obsstore, indices)
2272 2272 ui.write(_('deleted %i obsolescence markers\n') % n)
2273 2273
2274 2274 return
2275 2275
2276 2276 if precursor is not None:
2277 2277 if opts['rev']:
2278 2278 raise error.Abort('cannot select revision when creating marker')
2279 2279 metadata = {}
2280 2280 metadata['user'] = opts['user'] or ui.username()
2281 2281 succs = tuple(parsenodeid(succ) for succ in successors)
2282 2282 l = repo.lock()
2283 2283 try:
2284 2284 tr = repo.transaction('debugobsolete')
2285 2285 try:
2286 2286 date = opts.get('date')
2287 2287 if date:
2288 2288 date = util.parsedate(date)
2289 2289 else:
2290 2290 date = None
2291 2291 prec = parsenodeid(precursor)
2292 2292 parents = None
2293 2293 if opts['record_parents']:
2294 2294 if prec not in repo.unfiltered():
2295 2295 raise error.Abort('cannot use --record-parents on '
2296 2296 'unknown changesets')
2297 2297 parents = repo.unfiltered()[prec].parents()
2298 2298 parents = tuple(p.node() for p in parents)
2299 2299 repo.obsstore.create(tr, prec, succs, opts['flags'],
2300 2300 parents=parents, date=date,
2301 2301 metadata=metadata)
2302 2302 tr.close()
2303 2303 except ValueError as exc:
2304 2304 raise error.Abort(_('bad obsmarker input: %s') % exc)
2305 2305 finally:
2306 2306 tr.release()
2307 2307 finally:
2308 2308 l.release()
2309 2309 else:
2310 2310 if opts['rev']:
2311 2311 revs = scmutil.revrange(repo, opts['rev'])
2312 2312 nodes = [repo[r].node() for r in revs]
2313 2313 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2314 2314 markers.sort(key=lambda x: x._data)
2315 2315 else:
2316 2316 markers = obsolete.getmarkers(repo)
2317 2317
2318 2318 markerstoiter = markers
2319 2319 isrelevant = lambda m: True
2320 2320 if opts.get('rev') and opts.get('index'):
2321 2321 markerstoiter = obsolete.getmarkers(repo)
2322 2322 markerset = set(markers)
2323 2323 isrelevant = lambda m: m in markerset
2324 2324
2325 2325 fm = ui.formatter('debugobsolete', opts)
2326 2326 for i, m in enumerate(markerstoiter):
2327 2327 if not isrelevant(m):
2328 2328 # marker can be irrelevant when we're iterating over a set
2329 2329 # of markers (markerstoiter) which is bigger than the set
2330 2330 # of markers we want to display (markers)
2331 2331 # this can happen if both --index and --rev options are
2332 2332 # provided and thus we need to iterate over all of the markers
2333 2333 # to get the correct indices, but only display the ones that
2334 2334 # are relevant to --rev value
2335 2335 continue
2336 2336 fm.startitem()
2337 2337 ind = i if opts.get('index') else None
2338 2338 cmdutil.showmarker(fm, m, index=ind)
2339 2339 fm.end()
2340 2340
2341 2341 @command('debugpathcomplete',
2342 2342 [('f', 'full', None, _('complete an entire path')),
2343 2343 ('n', 'normal', None, _('show only normal files')),
2344 2344 ('a', 'added', None, _('show only added files')),
2345 2345 ('r', 'removed', None, _('show only removed files'))],
2346 2346 _('FILESPEC...'))
2347 2347 def debugpathcomplete(ui, repo, *specs, **opts):
2348 2348 '''complete part or all of a tracked path
2349 2349
2350 2350 This command supports shells that offer path name completion. It
2351 2351 currently completes only files already known to the dirstate.
2352 2352
2353 2353 Completion extends only to the next path segment unless
2354 2354 --full is specified, in which case entire paths are used.'''
2355 2355
2356 2356 def complete(path, acceptable):
2357 2357 dirstate = repo.dirstate
2358 2358 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
2359 rootdir = repo.root + os.sep
2359 rootdir = repo.root + pycompat.ossep
2360 2360 if spec != repo.root and not spec.startswith(rootdir):
2361 2361 return [], []
2362 2362 if os.path.isdir(spec):
2363 2363 spec += '/'
2364 2364 spec = spec[len(rootdir):]
2365 2365 fixpaths = pycompat.ossep != '/'
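# the dirstate always stores paths with '/' separators, so on
# platforms whose native separator differs we translate to and from it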
2366 2366 if fixpaths:
2367 spec = spec.replace(os.sep, '/')
2367 spec = spec.replace(pycompat.ossep, '/')
2368 2368 speclen = len(spec)
2369 2369 fullpaths = opts['full']
2370 2370 files, dirs = set(), set()
2371 2371 adddir, addfile = dirs.add, files.add
2372 2372 for f, st in dirstate.iteritems():
2373 2373 if f.startswith(spec) and st[0] in acceptable:
2374 2374 if fixpaths:
2375 f = f.replace('/', os.sep)
2375 f = f.replace('/', pycompat.ossep)
2376 2376 if fullpaths:
2377 2377 addfile(f)
2378 2378 continue
2379 s = f.find(os.sep, speclen)
2379 s = f.find(pycompat.ossep, speclen)
2380 2380 if s >= 0:
2381 2381 adddir(f[:s])
2382 2382 else:
2383 2383 addfile(f)
2384 2384 return files, dirs
2385 2385
2386 2386 acceptable = ''
2387 2387 if opts['normal']:
2388 2388 acceptable += 'nm'
2389 2389 if opts['added']:
2390 2390 acceptable += 'a'
2391 2391 if opts['removed']:
2392 2392 acceptable += 'r'
2393 2393 cwd = repo.getcwd()
2394 2394 if not specs:
2395 2395 specs = ['.']
2396 2396
2397 2397 files, dirs = set(), set()
2398 2398 for spec in specs:
2399 2399 f, d = complete(spec, acceptable or 'nmar')
2400 2400 files.update(f)
2401 2401 dirs.update(d)
2402 2402 files.update(dirs)
2403 2403 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2404 2404 ui.write('\n')
2405 2405
2406 2406 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2407 2407 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2408 2408 '''access the pushkey key/value protocol
2409 2409
2410 2410 With two args, list the keys in the given namespace.
2411 2411
2412 2412 With five args, set a key to new if it currently is set to old.
2413 2413 Reports success or failure.
2414 2414 '''
2415 2415
2416 2416 target = hg.peer(ui, {}, repopath)
2417 2417 if keyinfo:
2418 2418 key, old, new = keyinfo
2419 2419 r = target.pushkey(namespace, key, old, new)
2420 2420 ui.status(str(r) + '\n')
2421 2421 return not r
2422 2422 else:
2423 2423 for k, v in sorted(target.listkeys(namespace).iteritems()):
2424 2424 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2425 2425 v.encode('string-escape')))
2426 2426
2427 2427 @command('debugpvec', [], _('A B'))
2428 2428 def debugpvec(ui, repo, a, b=None):
2429 2429 ca = scmutil.revsingle(repo, a)
2430 2430 cb = scmutil.revsingle(repo, b)
2431 2431 pa = pvec.ctxpvec(ca)
2432 2432 pb = pvec.ctxpvec(cb)
2433 2433 if pa == pb:
2434 2434 rel = "="
2435 2435 elif pa > pb:
2436 2436 rel = ">"
2437 2437 elif pa < pb:
2438 2438 rel = "<"
2439 2439 elif pa | pb:
2440 2440 rel = "|"
2441 2441 ui.write(_("a: %s\n") % pa)
2442 2442 ui.write(_("b: %s\n") % pb)
2443 2443 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2444 2444 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2445 2445 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2446 2446 pa.distance(pb), rel))
2447 2447
2448 2448 @command('debugrebuilddirstate|debugrebuildstate',
2449 2449 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2450 2450 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2451 2451 'the working copy parent')),
2452 2452 ],
2453 2453 _('[-r REV]'))
2454 2454 def debugrebuilddirstate(ui, repo, rev, **opts):
2455 2455 """rebuild the dirstate as it would look like for the given revision
2456 2456
2457 2457 If no revision is specified, the working directory's first parent will be used.
2458 2458
2459 2459 The dirstate will be set to the files of the given revision.
2460 2460 The actual working directory content or existing dirstate
2461 2461 information such as adds or removes is not considered.
2462 2462
2463 2463 ``minimal`` will only rebuild the dirstate status for files that claim to be
2464 2464 tracked but are not in the parent manifest, or that exist in the parent
2465 2465 manifest but are not in the dirstate. It will not change adds, removes, or
2466 2466 modified files that are in the working copy parent.
2467 2467
2468 2468 One use of this command is to make the next :hg:`status` invocation
2469 2469 check the actual file content.
2470 2470 """
2471 2471 ctx = scmutil.revsingle(repo, rev)
2472 2472 with repo.wlock():
2473 2473 dirstate = repo.dirstate
2474 2474 changedfiles = None
2475 2475 # See command doc for what minimal does.
2476 2476 if opts.get('minimal'):
2477 2477 manifestfiles = set(ctx.manifest().keys())
2478 2478 dirstatefiles = set(dirstate)
2479 2479 manifestonly = manifestfiles - dirstatefiles
2480 2480 dsonly = dirstatefiles - manifestfiles
2481 2481 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2482 2482 changedfiles = manifestonly | dsnotadded
2483 2483
2484 2484 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2485 2485
2486 2486 @command('debugrebuildfncache', [], '')
2487 2487 def debugrebuildfncache(ui, repo):
2488 2488 """rebuild the fncache file"""
2489 2489 repair.rebuildfncache(ui, repo)
2490 2490
2491 2491 @command('debugrename',
2492 2492 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2493 2493 _('[-r REV] FILE'))
2494 2494 def debugrename(ui, repo, file1, *pats, **opts):
2495 2495 """dump rename information"""
2496 2496
2497 2497 ctx = scmutil.revsingle(repo, opts.get('rev'))
2498 2498 m = scmutil.match(ctx, (file1,) + pats, opts)
2499 2499 for abs in ctx.walk(m):
2500 2500 fctx = ctx[abs]
2501 2501 o = fctx.filelog().renamed(fctx.filenode())
2502 2502 rel = m.rel(abs)
2503 2503 if o:
2504 2504 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2505 2505 else:
2506 2506 ui.write(_("%s not renamed\n") % rel)
2507 2507
2508 2508 @command('debugrevlog', debugrevlogopts +
2509 2509 [('d', 'dump', False, _('dump index data'))],
2510 2510 _('-c|-m|FILE'),
2511 2511 optionalrepo=True)
2512 2512 def debugrevlog(ui, repo, file_=None, **opts):
2513 2513 """show data and statistics about a revlog"""
2514 2514 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2515 2515
2516 2516 if opts.get("dump"):
2517 2517 numrevs = len(r)
2518 2518 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2519 2519 " rawsize totalsize compression heads chainlen\n"))
2520 2520 ts = 0
2521 2521 heads = set()
2522 2522
2523 2523 for rev in xrange(numrevs):
2524 2524 dbase = r.deltaparent(rev)
2525 2525 if dbase == -1:
2526 2526 dbase = rev
2527 2527 cbase = r.chainbase(rev)
2528 2528 clen = r.chainlen(rev)
2529 2529 p1, p2 = r.parentrevs(rev)
2530 2530 rs = r.rawsize(rev)
2531 2531 ts = ts + rs
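# a revision remains a head until one of its children is seen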
2532 2532 heads -= set(r.parentrevs(rev))
2533 2533 heads.add(rev)
2534 2534 try:
2535 2535 compression = ts / r.end(rev)
2536 2536 except ZeroDivisionError:
2537 2537 compression = 0
2538 2538 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2539 2539 "%11d %5d %8d\n" %
2540 2540 (rev, p1, p2, r.start(rev), r.end(rev),
2541 2541 r.start(dbase), r.start(cbase),
2542 2542 r.start(p1), r.start(p2),
2543 2543 rs, ts, compression, len(heads), clen))
2544 2544 return 0
2545 2545
2546 2546 v = r.version
2547 2547 format = v & 0xFFFF
2548 2548 flags = []
2549 2549 gdelta = False
2550 2550 if v & revlog.REVLOGNGINLINEDATA:
2551 2551 flags.append('inline')
2552 2552 if v & revlog.REVLOGGENERALDELTA:
2553 2553 gdelta = True
2554 2554 flags.append('generaldelta')
2555 2555 if not flags:
2556 2556 flags = ['(none)']
2557 2557
2558 2558 nummerges = 0
2559 2559 numfull = 0
2560 2560 numprev = 0
2561 2561 nump1 = 0
2562 2562 nump2 = 0
2563 2563 numother = 0
2564 2564 nump1prev = 0
2565 2565 nump2prev = 0
2566 2566 chainlengths = []
2567 2567
2568 2568 datasize = [None, 0, 0]
2569 2569 fullsize = [None, 0, 0]
2570 2570 deltasize = [None, 0, 0]
2571 2571 chunktypecounts = {}
2572 2572 chunktypesizes = {}
2573 2573
2574 2574 def addsize(size, l):
2575 2575 if l[0] is None or size < l[0]:
2576 2576 l[0] = size
2577 2577 if size > l[1]:
2578 2578 l[1] = size
2579 2579 l[2] += size
2580 2580
2581 2581 numrevs = len(r)
2582 2582 for rev in xrange(numrevs):
2583 2583 p1, p2 = r.parentrevs(rev)
2584 2584 delta = r.deltaparent(rev)
2585 2585 if format > 0:
2586 2586 addsize(r.rawsize(rev), datasize)
2587 2587 if p2 != nullrev:
2588 2588 nummerges += 1
2589 2589 size = r.length(rev)
2590 2590 if delta == nullrev:
2591 2591 chainlengths.append(0)
2592 2592 numfull += 1
2593 2593 addsize(size, fullsize)
2594 2594 else:
2595 2595 chainlengths.append(chainlengths[delta] + 1)
2596 2596 addsize(size, deltasize)
2597 2597 if delta == rev - 1:
2598 2598 numprev += 1
2599 2599 if delta == p1:
2600 2600 nump1prev += 1
2601 2601 elif delta == p2:
2602 2602 nump2prev += 1
2603 2603 elif delta == p1:
2604 2604 nump1 += 1
2605 2605 elif delta == p2:
2606 2606 nump2 += 1
2607 2607 elif delta != nullrev:
2608 2608 numother += 1
2609 2609
2610 2610 # Obtain data on the raw chunks in the revlog.
2611 2611 chunk = r._chunkraw(rev, rev)[1]
2612 2612 if chunk:
2613 2613 chunktype = chunk[0]
2614 2614 else:
2615 2615 chunktype = 'empty'
2616 2616
2617 2617 if chunktype not in chunktypecounts:
2618 2618 chunktypecounts[chunktype] = 0
2619 2619 chunktypesizes[chunktype] = 0
2620 2620
2621 2621 chunktypecounts[chunktype] += 1
2622 2622 chunktypesizes[chunktype] += size
2623 2623
2624 2624 # Adjust size min value for empty cases
2625 2625 for size in (datasize, fullsize, deltasize):
2626 2626 if size[0] is None:
2627 2627 size[0] = 0
2628 2628
2629 2629 numdeltas = numrevs - numfull
2630 2630 numoprev = numprev - nump1prev - nump2prev
2631 2631 totalrawsize = datasize[2]
2632 2632 datasize[2] /= numrevs
2633 2633 fulltotal = fullsize[2]
2634 2634 fullsize[2] /= numfull
2635 2635 deltatotal = deltasize[2]
2636 2636 if numrevs - numfull > 0:
2637 2637 deltasize[2] /= numrevs - numfull
2638 2638 totalsize = fulltotal + deltatotal
2639 2639 avgchainlen = sum(chainlengths) / numrevs
2640 2640 maxchainlen = max(chainlengths)
2641 2641 compratio = 1
2642 2642 if totalsize:
2643 2643 compratio = totalrawsize / totalsize
2644 2644
2645 2645 basedfmtstr = '%%%dd\n'
2646 2646 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2647 2647
2648 2648 def dfmtstr(max):
2649 2649 return basedfmtstr % len(str(max))
2650 2650 def pcfmtstr(max, padding=0):
2651 2651 return basepcfmtstr % (len(str(max)), ' ' * padding)
2652 2652
2653 2653 def pcfmt(value, total):
2654 2654 if total:
2655 2655 return (value, 100 * float(value) / total)
2656 2656 else:
2657 2657 return value, 100.0
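# e.g. pcfmt(3, 12) == (3, 25.0), which the width-adjusted format strings
# above render as something like '    3 (25.00%)'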
2658 2658
2659 2659 ui.write(('format : %d\n') % format)
2660 2660 ui.write(('flags : %s\n') % ', '.join(flags))
2661 2661
2662 2662 ui.write('\n')
2663 2663 fmt = pcfmtstr(totalsize)
2664 2664 fmt2 = dfmtstr(totalsize)
2665 2665 ui.write(('revisions : ') + fmt2 % numrevs)
2666 2666 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2667 2667 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2668 2668 ui.write(('revisions : ') + fmt2 % numrevs)
2669 2669 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2670 2670 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2671 2671 ui.write(('revision size : ') + fmt2 % totalsize)
2672 2672 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2673 2673 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2674 2674
2675 2675 def fmtchunktype(chunktype):
2676 2676 if chunktype == 'empty':
2677 2677 return ' %s : ' % chunktype
2678 2678 elif chunktype in string.ascii_letters:
2679 2679 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2680 2680 else:
2681 2681 return ' 0x%s : ' % hex(chunktype)
2682 2682
2683 2683 ui.write('\n')
2684 2684 ui.write(('chunks : ') + fmt2 % numrevs)
2685 2685 for chunktype in sorted(chunktypecounts):
2686 2686 ui.write(fmtchunktype(chunktype))
2687 2687 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2688 2688 ui.write(('chunks size : ') + fmt2 % totalsize)
2689 2689 for chunktype in sorted(chunktypecounts):
2690 2690 ui.write(fmtchunktype(chunktype))
2691 2691 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2692 2692
2693 2693 ui.write('\n')
2694 2694 fmt = dfmtstr(max(avgchainlen, compratio))
2695 2695 ui.write(('avg chain length : ') + fmt % avgchainlen)
2696 2696 ui.write(('max chain length : ') + fmt % maxchainlen)
2697 2697 ui.write(('compression ratio : ') + fmt % compratio)
2698 2698
2699 2699 if format > 0:
2700 2700 ui.write('\n')
2701 2701 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2702 2702 % tuple(datasize))
2703 2703 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2704 2704 % tuple(fullsize))
2705 2705 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2706 2706 % tuple(deltasize))
2707 2707
2708 2708 if numdeltas > 0:
2709 2709 ui.write('\n')
2710 2710 fmt = pcfmtstr(numdeltas)
2711 2711 fmt2 = pcfmtstr(numdeltas, 4)
2712 2712 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2713 2713 if numprev > 0:
2714 2714 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2715 2715 numprev))
2716 2716 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2717 2717 numprev))
2718 2718 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2719 2719 numprev))
2720 2720 if gdelta:
2721 2721 ui.write(('deltas against p1 : ')
2722 2722 + fmt % pcfmt(nump1, numdeltas))
2723 2723 ui.write(('deltas against p2 : ')
2724 2724 + fmt % pcfmt(nump2, numdeltas))
2725 2725 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2726 2726 numdeltas))
2727 2727
2728 2728 @command('debugrevspec',
2729 2729 [('', 'optimize', None,
2730 2730 _('print parsed tree after optimizing (DEPRECATED)')),
2731 2731 ('p', 'show-stage', [],
2732 2732 _('print parsed tree at the given stage'), _('NAME')),
2733 2733 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2734 2734 ('', 'verify-optimized', False, _('verify optimized result')),
2735 2735 ],
2736 2736 ('REVSPEC'))
2737 2737 def debugrevspec(ui, repo, expr, **opts):
2738 2738 """parse and apply a revision specification
2739 2739
2740 2740 Use the -p/--show-stage option to print the parsed tree at the given stages.
2741 2741 Use -p all to print the tree at every stage.
2742 2742
2743 2743 Use --verify-optimized to compare the optimized result with the unoptimized
2744 2744 one. Returns 1 if the optimized result differs.
2745 2745 """
2746 2746 stages = [
2747 2747 ('parsed', lambda tree: tree),
2748 2748 ('expanded', lambda tree: revset.expandaliases(ui, tree)),
2749 2749 ('concatenated', revset.foldconcat),
2750 2750 ('analyzed', revset.analyze),
2751 2751 ('optimized', revset.optimize),
2752 2752 ]
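# each stage transforms the tree produced by the previous one;
# --no-optimized below simply drops the final 'optimized' stage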
2753 2753 if opts['no_optimized']:
2754 2754 stages = stages[:-1]
2755 2755 if opts['verify_optimized'] and opts['no_optimized']:
2756 2756 raise error.Abort(_('cannot use --verify-optimized with '
2757 2757 '--no-optimized'))
2758 2758 stagenames = set(n for n, f in stages)
2759 2759
2760 2760 showalways = set()
2761 2761 showchanged = set()
2762 2762 if ui.verbose and not opts['show_stage']:
2763 2763 # show parsed tree by --verbose (deprecated)
2764 2764 showalways.add('parsed')
2765 2765 showchanged.update(['expanded', 'concatenated'])
2766 2766 if opts['optimize']:
2767 2767 showalways.add('optimized')
2768 2768 if opts['show_stage'] and opts['optimize']:
2769 2769 raise error.Abort(_('cannot use --optimize with --show-stage'))
2770 2770 if opts['show_stage'] == ['all']:
2771 2771 showalways.update(stagenames)
2772 2772 else:
2773 2773 for n in opts['show_stage']:
2774 2774 if n not in stagenames:
2775 2775 raise error.Abort(_('invalid stage name: %s') % n)
2776 2776 showalways.update(opts['show_stage'])
2777 2777
2778 2778 treebystage = {}
2779 2779 printedtree = None
2780 2780 tree = revset.parse(expr, lookup=repo.__contains__)
2781 2781 for n, f in stages:
2782 2782 treebystage[n] = tree = f(tree)
2783 2783 if n in showalways or (n in showchanged and tree != printedtree):
2784 2784 if opts['show_stage'] or n != 'parsed':
2785 2785 ui.write(("* %s:\n") % n)
2786 2786 ui.write(revset.prettyformat(tree), "\n")
2787 2787 printedtree = tree
2788 2788
2789 2789 if opts['verify_optimized']:
2790 2790 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2791 2791 brevs = revset.makematcher(treebystage['optimized'])(repo)
2792 2792 if ui.verbose:
2793 2793 ui.note(("* analyzed set:\n"), revset.prettyformatset(arevs), "\n")
2794 2794 ui.note(("* optimized set:\n"), revset.prettyformatset(brevs), "\n")
2795 2795 arevs = list(arevs)
2796 2796 brevs = list(brevs)
2797 2797 if arevs == brevs:
2798 2798 return 0
2799 2799 ui.write(('--- analyzed\n'), label='diff.file_a')
2800 2800 ui.write(('+++ optimized\n'), label='diff.file_b')
2801 2801 sm = difflib.SequenceMatcher(None, arevs, brevs)
2802 2802 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2803 2803 if tag in ('delete', 'replace'):
2804 2804 for c in arevs[alo:ahi]:
2805 2805 ui.write('-%s\n' % c, label='diff.deleted')
2806 2806 if tag in ('insert', 'replace'):
2807 2807 for c in brevs[blo:bhi]:
2808 2808 ui.write('+%s\n' % c, label='diff.inserted')
2809 2809 if tag == 'equal':
2810 2810 for c in arevs[alo:ahi]:
2811 2811 ui.write(' %s\n' % c)
2812 2812 return 1
2813 2813
2814 2814 func = revset.makematcher(tree)
2815 2815 revs = func(repo)
2816 2816 if ui.verbose:
2817 2817 ui.note(("* set:\n"), revset.prettyformatset(revs), "\n")
2818 2818 for c in revs:
2819 2819 ui.write("%s\n" % c)
2820 2820
2821 2821 @command('debugsetparents', [], _('REV1 [REV2]'))
2822 2822 def debugsetparents(ui, repo, rev1, rev2=None):
2823 2823 """manually set the parents of the current working directory
2824 2824
2825 2825 This is useful for writing repository conversion tools, but should
2826 2826 be used with care. For example, neither the working directory nor the
2827 2827 dirstate is updated, so file status may be incorrect after running this
2828 2828 command.
2829 2829
2830 2830 Returns 0 on success.
2831 2831 """
2832 2832
2833 2833 r1 = scmutil.revsingle(repo, rev1).node()
2834 2834 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2835 2835
2836 2836 with repo.wlock():
2837 2837 repo.setparents(r1, r2)
2838 2838
2839 2839 @command('debugdirstate|debugstate',
2840 2840 [('', 'nodates', None, _('do not display the saved mtime')),
2841 2841 ('', 'datesort', None, _('sort by saved mtime'))],
2842 2842 _('[OPTION]...'))
2843 2843 def debugstate(ui, repo, **opts):
2844 2844 """show the contents of the current dirstate"""
2845 2845
2846 2846 nodates = opts.get('nodates')
2847 2847 datesort = opts.get('datesort')
2848 2848
2849 2849 timestr = ""
2850 2850 if datesort:
2851 2851 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2852 2852 else:
2853 2853 keyfunc = None # sort by filename
2854 2854 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2855 2855 if ent[3] == -1:
2856 2856 timestr = 'unset '
2857 2857 elif nodates:
2858 2858 timestr = 'set '
2859 2859 else:
2860 2860 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2861 2861 time.localtime(ent[3]))
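# the 0o20000 bit of the recorded st_mode distinguishes symlinks
# (S_IFLNK) from regular files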
2862 2862 if ent[1] & 0o20000:
2863 2863 mode = 'lnk'
2864 2864 else:
2865 2865 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
2866 2866 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2867 2867 for f in repo.dirstate.copies():
2868 2868 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2869 2869
2870 2870 @command('debugsub',
2871 2871 [('r', 'rev', '',
2872 2872 _('revision to check'), _('REV'))],
2873 2873 _('[-r REV] [REV]'))
2874 2874 def debugsub(ui, repo, rev=None):
2875 2875 ctx = scmutil.revsingle(repo, rev, None)
2876 2876 for k, v in sorted(ctx.substate.items()):
2877 2877 ui.write(('path %s\n') % k)
2878 2878 ui.write((' source %s\n') % v[0])
2879 2879 ui.write((' revision %s\n') % v[1])
2880 2880
2881 2881 @command('debugsuccessorssets',
2882 2882 [],
2883 2883 _('[REV]'))
2884 2884 def debugsuccessorssets(ui, repo, *revs):
2885 2885 """show set of successors for revision
2886 2886
2887 2887 A successors set of changeset A is a consistent group of revisions that
2888 2888 succeed A. It contains non-obsolete changesets only.
2889 2889
2890 2890 In most cases a changeset A has a single successors set containing a single
2891 2891 successor (changeset A replaced by A').
2892 2892
2893 2893 A changeset that is made obsolete with no successors is called "pruned".
2894 2894 Such changesets have no successors sets at all.
2895 2895
2896 2896 A changeset that has been "split" will have a successors set containing
2897 2897 more than one successor.
2898 2898
2899 2899 A changeset that has been rewritten in multiple different ways is called
2900 2900 "divergent". Such changesets have multiple successor sets (each of which
2901 2901 may also be split, i.e. have multiple successors).
2902 2902
2903 2903 Results are displayed as follows::
2904 2904
2905 2905 <rev1>
2906 2906 <successors-1A>
2907 2907 <rev2>
2908 2908 <successors-2A>
2909 2909 <successors-2B1> <successors-2B2> <successors-2B3>
2910 2910
2911 2911 Here rev2 has two possible (i.e. divergent) successors sets. The first
2912 2912 holds one element, whereas the second holds three (i.e. the changeset has
2913 2913 been split).
2914 2914 """
2915 2915 # passed to successorssets caching computation from one call to another
2916 2916 cache = {}
2917 2917 ctx2str = str
2918 2918 node2str = short
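# with --debug, show full 40-digit hashes instead of the short forms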
2919 2919 if ui.debug():
2920 2920 def ctx2str(ctx):
2921 2921 return ctx.hex()
2922 2922 node2str = hex
2923 2923 for rev in scmutil.revrange(repo, revs):
2924 2924 ctx = repo[rev]
2925 2925 ui.write('%s\n' % ctx2str(ctx))
2926 2926 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2927 2927 if succsset:
2928 2928 ui.write(' ')
2929 2929 ui.write(node2str(succsset[0]))
2930 2930 for node in succsset[1:]:
2931 2931 ui.write(' ')
2932 2932 ui.write(node2str(node))
2933 2933 ui.write('\n')
2934 2934
2935 2935 @command('debugtemplate',
2936 2936 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2937 2937 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2938 2938 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2939 2939 optionalrepo=True)
2940 2940 def debugtemplate(ui, repo, tmpl, **opts):
2941 2941 """parse and apply a template
2942 2942
2943 2943 If -r/--rev is given, the template is processed as a log template and
2944 2944 applied to the given changesets. Otherwise, it is processed as a generic
2945 2945 template.
2946 2946
2947 2947 Use --verbose to print the parsed tree.
2948 2948 """
2949 2949 revs = None
2950 2950 if opts['rev']:
2951 2951 if repo is None:
2952 2952 raise error.RepoError(_('there is no Mercurial repository here '
2953 2953 '(.hg not found)'))
2954 2954 revs = scmutil.revrange(repo, opts['rev'])
2955 2955
2956 2956 props = {}
2957 2957 for d in opts['define']:
2958 2958 try:
2959 2959 k, v = (e.strip() for e in d.split('=', 1))
2960 2960 if not k:
2961 2961 raise ValueError
2962 2962 props[k] = v
2963 2963 except ValueError:
2964 2964 raise error.Abort(_('malformed keyword definition: %s') % d)
2965 2965
2966 2966 if ui.verbose:
2967 2967 aliases = ui.configitems('templatealias')
2968 2968 tree = templater.parse(tmpl)
2969 2969 ui.note(templater.prettyformat(tree), '\n')
2970 2970 newtree = templater.expandaliases(tree, aliases)
2971 2971 if newtree != tree:
2972 2972 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2973 2973
2974 2974 mapfile = None
2975 2975 if revs is None:
2976 2976 k = 'debugtemplate'
2977 2977 t = formatter.maketemplater(ui, k, tmpl)
2978 2978 ui.write(templater.stringify(t(k, **props)))
2979 2979 else:
2980 2980 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2981 2981 mapfile, buffered=False)
2982 2982 for r in revs:
2983 2983 displayer.show(repo[r], **props)
2984 2984 displayer.close()
2985 2985
2986 2986 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
2987 2987 def debugwalk(ui, repo, *pats, **opts):
2988 2988 """show how files match on given patterns"""
2989 2989 m = scmutil.match(repo[None], pats, opts)
2990 2990 items = list(repo.walk(m))
2991 2991 if not items:
2992 2992 return
2993 2993 f = lambda fn: fn
2994 2994 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2995 2995 f = lambda fn: util.normpath(fn)
2996 2996 fmt = 'f %%-%ds %%-%ds %%s' % (
2997 2997 max([len(abs) for abs in items]),
2998 2998 max([len(m.rel(abs)) for abs in items]))
2999 2999 for abs in items:
3000 3000 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
3001 3001 ui.write("%s\n" % line.rstrip())
3002 3002
3003 3003 @command('debugwireargs',
3004 3004 [('', 'three', '', 'three'),
3005 3005 ('', 'four', '', 'four'),
3006 3006 ('', 'five', '', 'five'),
3007 3007 ] + remoteopts,
3008 3008 _('REPO [OPTIONS]... [ONE [TWO]]'),
3009 3009 norepo=True)
3010 3010 def debugwireargs(ui, repopath, *vals, **opts):
3011 3011 repo = hg.peer(ui, opts, repopath)
3012 3012 for opt in remoteopts:
3013 3013 del opts[opt[1]]
3014 3014 args = {}
3015 3015 for k, v in opts.iteritems():
3016 3016 if v:
3017 3017 args[k] = v
3018 3018 # run twice to check that we don't mess up the stream for the next command
3019 3019 res1 = repo.debugwireargs(*vals, **args)
3020 3020 res2 = repo.debugwireargs(*vals, **args)
3021 3021 ui.write("%s\n" % res1)
3022 3022 if res1 != res2:
3023 3023 ui.warn("%s\n" % res2)
3024 3024
3025 3025 @command('^diff',
3026 3026 [('r', 'rev', [], _('revision'), _('REV')),
3027 3027 ('c', 'change', '', _('change made by revision'), _('REV'))
3028 3028 ] + diffopts + diffopts2 + walkopts + subrepoopts,
3029 3029 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
3030 3030 inferrepo=True)
3031 3031 def diff(ui, repo, *pats, **opts):
3032 3032 """diff repository (or selected files)
3033 3033
3034 3034 Show differences between revisions for the specified files.
3035 3035
3036 3036 Differences between files are shown using the unified diff format.
3037 3037
3038 3038 .. note::
3039 3039
3040 3040 :hg:`diff` may generate unexpected results for merges, as it will
3041 3041 default to comparing against the working directory's first
3042 3042 parent changeset if no revisions are specified.
3043 3043
3044 3044 When two revision arguments are given, then changes are shown
3045 3045 between those revisions. If only one revision is specified then
3046 3046 that revision is compared to the working directory, and, when no
3047 3047 revisions are specified, the working directory files are compared
3048 3048 to its first parent.
3049 3049
3050 3050 Alternatively you can specify -c/--change with a revision to see
3051 3051 the changes in that changeset relative to its first parent.
3052 3052
3053 3053 Without the -a/--text option, diff will avoid generating diffs of
3054 3054 files it detects as binary. With -a, diff will generate a diff
3055 3055 anyway, probably with undesirable results.
3056 3056
3057 3057 Use the -g/--git option to generate diffs in the git extended diff
3058 3058 format. For more information, read :hg:`help diffs`.
3059 3059
3060 3060 .. container:: verbose
3061 3061
3062 3062 Examples:
3063 3063
3064 3064 - compare a file in the current working directory to its parent::
3065 3065
3066 3066 hg diff foo.c
3067 3067
3068 3068 - compare two historical versions of a directory, with rename info::
3069 3069
3070 3070 hg diff --git -r 1.0:1.2 lib/
3071 3071
3072 3072 - get change stats relative to the last change on some date::
3073 3073
3074 3074 hg diff --stat -r "date('may 2')"
3075 3075
3076 3076 - diff all newly-added files that contain a keyword::
3077 3077
3078 3078 hg diff "set:added() and grep(GNU)"
3079 3079
3080 3080 - compare a revision and its parents::
3081 3081
3082 3082 hg diff -c 9353 # compare against first parent
3083 3083 hg diff -r 9353^:9353 # same using revset syntax
3084 3084 hg diff -r 9353^2:9353 # compare against the second parent
3085 3085
3086 3086 Returns 0 on success.
3087 3087 """
3088 3088
3089 3089 revs = opts.get('rev')
3090 3090 change = opts.get('change')
3091 3091 stat = opts.get('stat')
3092 3092 reverse = opts.get('reverse')
3093 3093
3094 3094 if revs and change:
3095 3095 msg = _('cannot specify --rev and --change at the same time')
3096 3096 raise error.Abort(msg)
3097 3097 elif change:
3098 3098 node2 = scmutil.revsingle(repo, change, None).node()
3099 3099 node1 = repo[node2].p1().node()
3100 3100 else:
3101 3101 node1, node2 = scmutil.revpair(repo, revs)
3102 3102
3103 3103 if reverse:
3104 3104 node1, node2 = node2, node1
3105 3105
3106 3106 diffopts = patch.diffallopts(ui, opts)
3107 3107 m = scmutil.match(repo[node2], pats, opts)
3108 3108 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
3109 3109 listsubrepos=opts.get('subrepos'),
3110 3110 root=opts.get('root'))
3111 3111
3112 3112 @command('^export',
3113 3113 [('o', 'output', '',
3114 3114 _('print output to file with formatted name'), _('FORMAT')),
3115 3115 ('', 'switch-parent', None, _('diff against the second parent')),
3116 3116 ('r', 'rev', [], _('revisions to export'), _('REV')),
3117 3117 ] + diffopts,
3118 3118 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
3119 3119 def export(ui, repo, *changesets, **opts):
3120 3120 """dump the header and diffs for one or more changesets
3121 3121
3122 3122 Print the changeset header and diffs for one or more revisions.
3123 3123 If no revision is given, the parent of the working directory is used.
3124 3124
3125 3125 The information shown in the changeset header is: author, date,
3126 3126 branch name (if non-default), changeset hash, parent(s) and commit
3127 3127 comment.
3128 3128
3129 3129 .. note::
3130 3130
3131 3131 :hg:`export` may generate unexpected diff output for merge
3132 3132 changesets, as it will compare the merge changeset against its
3133 3133 first parent only.
3134 3134
3135 3135 Output may be to a file, in which case the name of the file is
3136 3136 given using a format string. The formatting rules are as follows:
3137 3137
3138 3138 :``%%``: literal "%" character
3139 3139 :``%H``: changeset hash (40 hexadecimal digits)
3140 3140 :``%N``: number of patches being generated
3141 3141 :``%R``: changeset revision number
3142 3142 :``%b``: basename of the exporting repository
3143 3143 :``%h``: short-form changeset hash (12 hexadecimal digits)
3144 3144 :``%m``: first line of the commit message (only alphanumeric characters)
3145 3145 :``%n``: zero-padded sequence number, starting at 1
3146 3146 :``%r``: zero-padded changeset revision number
3147 3147
3148 3148 Without the -a/--text option, export will avoid generating diffs
3149 3149 of files it detects as binary. With -a, export will generate a
3150 3150 diff anyway, probably with undesirable results.
3151 3151
3152 3152 Use the -g/--git option to generate diffs in the git extended diff
3153 3153 format. See :hg:`help diffs` for more information.
3154 3154
3155 3155 With the --switch-parent option, the diff will be against the
3156 3156 second parent. It can be useful to review a merge.
3157 3157
3158 3158 .. container:: verbose
3159 3159
3160 3160 Examples:
3161 3161
3162 3162 - use export and import to transplant a bugfix to the current
3163 3163 branch::
3164 3164
3165 3165 hg export -r 9353 | hg import -
3166 3166
3167 3167 - export all the changesets between two revisions to a file with
3168 3168 rename information::
3169 3169
3170 3170 hg export --git -r 123:150 > changes.txt
3171 3171
3172 3172 - split outgoing changes into a series of patches with
3173 3173 descriptive names::
3174 3174
3175 3175 hg export -r "outgoing()" -o "%n-%m.patch"
3176 3176
3177 3177 Returns 0 on success.
3178 3178 """
3179 3179 changesets += tuple(opts.get('rev', []))
3180 3180 if not changesets:
3181 3181 changesets = ['.']
3182 3182 revs = scmutil.revrange(repo, changesets)
3183 3183 if not revs:
3184 3184 raise error.Abort(_("export requires at least one changeset"))
3185 3185 if len(revs) > 1:
3186 3186 ui.note(_('exporting patches:\n'))
3187 3187 else:
3188 3188 ui.note(_('exporting patch:\n'))
3189 3189 cmdutil.export(repo, revs, template=opts.get('output'),
3190 3190 switch_parent=opts.get('switch_parent'),
3191 3191 opts=patch.diffallopts(ui, opts))
3192 3192
3193 3193 @command('files',
3194 3194 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3195 3195 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3196 3196 ] + walkopts + formatteropts + subrepoopts,
3197 3197 _('[OPTION]... [FILE]...'))
3198 3198 def files(ui, repo, *pats, **opts):
3199 3199 """list tracked files
3200 3200
3201 3201 Print files under Mercurial control in the working directory or
3202 3202     the specified revision that match the given patterns (excluding removed files).
3203 3203 Files can be specified as filenames or filesets.
3204 3204
3205 3205 If no files are given to match, this command prints the names
3206 3206 of all files under Mercurial control.
3207 3207
3208 3208 .. container:: verbose
3209 3209
3210 3210 Examples:
3211 3211
3212 3212 - list all files under the current directory::
3213 3213
3214 3214 hg files .
3215 3215
3216 3216     - show sizes and flags for the current revision::
3217 3217
3218 3218 hg files -vr .
3219 3219
3220 3220 - list all files named README::
3221 3221
3222 3222 hg files -I "**/README"
3223 3223
3224 3224 - list all binary files::
3225 3225
3226 3226 hg files "set:binary()"
3227 3227
3228 3228 - find files containing a regular expression::
3229 3229
3230 3230 hg files "set:grep('bob')"
3231 3231
3232 3232 - search tracked file contents with xargs and grep::
3233 3233
3234 3234 hg files -0 | xargs -0 grep foo
3235 3235
3236 3236 See :hg:`help patterns` and :hg:`help filesets` for more information
3237 3237 on specifying file patterns.
3238 3238
3239 3239 Returns 0 if a match is found, 1 otherwise.
3240 3240
3241 3241 """
3242 3242 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3243 3243
3244 3244 end = '\n'
3245 3245 if opts.get('print0'):
3246 3246 end = '\0'
3247 3247 fmt = '%s' + end
3248 3248
3249 3249 m = scmutil.match(ctx, pats, opts)
3250 3250 with ui.formatter('files', opts) as fm:
3251 3251 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
3252 3252
3253 3253 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
3254 3254 def forget(ui, repo, *pats, **opts):
3255 3255 """forget the specified files on the next commit
3256 3256
3257 3257 Mark the specified files so they will no longer be tracked
3258 3258 after the next commit.
3259 3259
3260 3260 This only removes files from the current branch, not from the
3261 3261 entire project history, and it does not delete them from the
3262 3262 working directory.
3263 3263
3264 3264 To delete the file from the working directory, see :hg:`remove`.
3265 3265
3266 3266 To undo a forget before the next commit, see :hg:`add`.
3267 3267
3268 3268 .. container:: verbose
3269 3269
3270 3270 Examples:
3271 3271
3272 3272 - forget newly-added binary files::
3273 3273
3274 3274 hg forget "set:added() and binary()"
3275 3275
3276 3276 - forget files that would be excluded by .hgignore::
3277 3277
3278 3278 hg forget "set:hgignore()"
3279 3279
3280 3280 Returns 0 on success.
3281 3281 """
3282 3282
3283 3283 if not pats:
3284 3284 raise error.Abort(_('no files specified'))
3285 3285
3286 3286 m = scmutil.match(repo[None], pats, opts)
3287 3287 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3288 3288 return rejected and 1 or 0
3289 3289
3290 3290 @command(
3291 3291 'graft',
3292 3292 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3293 3293 ('c', 'continue', False, _('resume interrupted graft')),
3294 3294 ('e', 'edit', False, _('invoke editor on commit messages')),
3295 3295 ('', 'log', None, _('append graft info to log message')),
3296 3296 ('f', 'force', False, _('force graft')),
3297 3297 ('D', 'currentdate', False,
3298 3298 _('record the current date as commit date')),
3299 3299 ('U', 'currentuser', False,
3300 3300      _('record the current user as committer'))]
3301 3301 + commitopts2 + mergetoolopts + dryrunopts,
3302 3302 _('[OPTION]... [-r REV]... REV...'))
3303 3303 def graft(ui, repo, *revs, **opts):
3304 3304 '''copy changes from other branches onto the current branch
3305 3305
3306 3306 This command uses Mercurial's merge logic to copy individual
3307 3307 changes from other branches without merging branches in the
3308 3308 history graph. This is sometimes known as 'backporting' or
3309 3309 'cherry-picking'. By default, graft will copy user, date, and
3310 3310 description from the source changesets.
3311 3311
3312 3312 Changesets that are ancestors of the current revision, that have
3313 3313 already been grafted, or that are merges will be skipped.
3314 3314
3315 3315 If --log is specified, log messages will have a comment appended
3316 3316 of the form::
3317 3317
3318 3318 (grafted from CHANGESETHASH)
3319 3319
3320 3320 If --force is specified, revisions will be grafted even if they
3321 3321 are already ancestors of or have been grafted to the destination.
3322 3322 This is useful when the revisions have since been backed out.
3323 3323
3324 3324 If a graft merge results in conflicts, the graft process is
3325 3325 interrupted so that the current merge can be manually resolved.
3326 3326 Once all conflicts are addressed, the graft process can be
3327 3327 continued with the -c/--continue option.
3328 3328
3329 3329 .. note::
3330 3330
3331 3331 The -c/--continue option does not reapply earlier options, except
3332 3332 for --force.
3333 3333
3334 3334 .. container:: verbose
3335 3335
3336 3336 Examples:
3337 3337
3338 3338 - copy a single change to the stable branch and edit its description::
3339 3339
3340 3340 hg update stable
3341 3341 hg graft --edit 9393
3342 3342
3343 3343 - graft a range of changesets with one exception, updating dates::
3344 3344
3345 3345 hg graft -D "2085::2093 and not 2091"
3346 3346
3347 3347 - continue a graft after resolving conflicts::
3348 3348
3349 3349 hg graft -c
3350 3350
3351 3351 - show the source of a grafted changeset::
3352 3352
3353 3353 hg log --debug -r .
3354 3354
3355 3355 - show revisions sorted by date::
3356 3356
3357 3357 hg log -r "sort(all(), date)"
3358 3358
3359 3359 See :hg:`help revisions` and :hg:`help revsets` for more about
3360 3360 specifying revisions.
3361 3361
3362 3362 Returns 0 on successful completion.
3363 3363 '''
3364 3364 with repo.wlock():
3365 3365 return _dograft(ui, repo, *revs, **opts)
3366 3366
3367 3367 def _dograft(ui, repo, *revs, **opts):
3368 3368 if revs and opts.get('rev'):
3369 3369 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
3370 3370 'revision ordering!\n'))
3371 3371
3372 3372 revs = list(revs)
3373 3373 revs.extend(opts.get('rev'))
3374 3374
3375 3375 if not opts.get('user') and opts.get('currentuser'):
3376 3376 opts['user'] = ui.username()
3377 3377 if not opts.get('date') and opts.get('currentdate'):
3378 3378 opts['date'] = "%d %d" % util.makedate()
3379 3379
3380 3380 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3381 3381
3382 3382 cont = False
3383 3383 if opts.get('continue'):
3384 3384 cont = True
3385 3385 if revs:
3386 3386 raise error.Abort(_("can't specify --continue and revisions"))
3387 3387 # read in unfinished revisions
3388 3388 try:
3389 3389 nodes = repo.vfs.read('graftstate').splitlines()
3390 3390 revs = [repo[node].rev() for node in nodes]
3391 3391 except IOError as inst:
3392 3392 if inst.errno != errno.ENOENT:
3393 3393 raise
3394 3394 cmdutil.wrongtooltocontinue(repo, _('graft'))
3395 3395 else:
3396 3396 cmdutil.checkunfinished(repo)
3397 3397 cmdutil.bailifchanged(repo)
3398 3398 if not revs:
3399 3399 raise error.Abort(_('no revisions specified'))
3400 3400 revs = scmutil.revrange(repo, revs)
3401 3401
3402 3402 skipped = set()
3403 3403 # check for merges
3404 3404 for rev in repo.revs('%ld and merge()', revs):
3405 3405 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3406 3406 skipped.add(rev)
3407 3407 revs = [r for r in revs if r not in skipped]
3408 3408 if not revs:
3409 3409 return -1
3410 3410
3411 3411 # Don't check in the --continue case, in effect retaining --force across
3412 3412 # --continues. That's because without --force, any revisions we decided to
3413 3413 # skip would have been filtered out here, so they wouldn't have made their
3414 3414 # way to the graftstate. With --force, any revisions we would have otherwise
3415 3415 # skipped would not have been filtered out, and if they hadn't been applied
3416 3416 # already, they'd have been in the graftstate.
3417 3417 if not (cont or opts.get('force')):
3418 3418 # check for ancestors of dest branch
3419 3419 crev = repo['.'].rev()
3420 3420 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3421 3421 # XXX make this lazy in the future
3422 3422 # don't mutate while iterating, create a copy
3423 3423 for rev in list(revs):
3424 3424 if rev in ancestors:
3425 3425 ui.warn(_('skipping ancestor revision %d:%s\n') %
3426 3426 (rev, repo[rev]))
3427 3427 # XXX remove on list is slow
3428 3428 revs.remove(rev)
3429 3429 if not revs:
3430 3430 return -1
3431 3431
3432 3432 # analyze revs for earlier grafts
3433 3433 ids = {}
3434 3434 for ctx in repo.set("%ld", revs):
3435 3435 ids[ctx.hex()] = ctx.rev()
3436 3436 n = ctx.extra().get('source')
3437 3437 if n:
3438 3438 ids[n] = ctx.rev()
3439 3439
3440 3440 # check ancestors for earlier grafts
3441 3441 ui.debug('scanning for duplicate grafts\n')
3442 3442
3443 3443 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3444 3444 ctx = repo[rev]
3445 3445 n = ctx.extra().get('source')
3446 3446 if n in ids:
3447 3447 try:
3448 3448 r = repo[n].rev()
3449 3449 except error.RepoLookupError:
3450 3450 r = None
3451 3451 if r in revs:
3452 3452 ui.warn(_('skipping revision %d:%s '
3453 3453 '(already grafted to %d:%s)\n')
3454 3454 % (r, repo[r], rev, ctx))
3455 3455 revs.remove(r)
3456 3456 elif ids[n] in revs:
3457 3457 if r is None:
3458 3458 ui.warn(_('skipping already grafted revision %d:%s '
3459 3459 '(%d:%s also has unknown origin %s)\n')
3460 3460 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
3461 3461 else:
3462 3462 ui.warn(_('skipping already grafted revision %d:%s '
3463 3463 '(%d:%s also has origin %d:%s)\n')
3464 3464 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
3465 3465 revs.remove(ids[n])
3466 3466 elif ctx.hex() in ids:
3467 3467 r = ids[ctx.hex()]
3468 3468 ui.warn(_('skipping already grafted revision %d:%s '
3469 3469 '(was grafted from %d:%s)\n') %
3470 3470 (r, repo[r], rev, ctx))
3471 3471 revs.remove(r)
3472 3472 if not revs:
3473 3473 return -1
3474 3474
3475 3475 for pos, ctx in enumerate(repo.set("%ld", revs)):
3476 3476 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
3477 3477 ctx.description().split('\n', 1)[0])
3478 3478 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3479 3479 if names:
3480 3480 desc += ' (%s)' % ' '.join(names)
3481 3481 ui.status(_('grafting %s\n') % desc)
3482 3482 if opts.get('dry_run'):
3483 3483 continue
3484 3484
3485 3485 source = ctx.extra().get('source')
3486 3486 extra = {}
3487 3487 if source:
3488 3488 extra['source'] = source
3489 3489 extra['intermediate-source'] = ctx.hex()
3490 3490 else:
3491 3491 extra['source'] = ctx.hex()
3492 3492 user = ctx.user()
3493 3493 if opts.get('user'):
3494 3494 user = opts['user']
3495 3495 date = ctx.date()
3496 3496 if opts.get('date'):
3497 3497 date = opts['date']
3498 3498 message = ctx.description()
3499 3499 if opts.get('log'):
3500 3500 message += '\n(grafted from %s)' % ctx.hex()
3501 3501
3502 3502 # we don't merge the first commit when continuing
3503 3503 if not cont:
3504 3504 # perform the graft merge with p1(rev) as 'ancestor'
3505 3505 try:
3506 3506 # ui.forcemerge is an internal variable, do not document
3507 3507 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3508 3508 'graft')
3509 3509 stats = mergemod.graft(repo, ctx, ctx.p1(),
3510 3510 ['local', 'graft'])
3511 3511 finally:
3512 3512 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3513 3513 # report any conflicts
3514 3514 if stats and stats[3] > 0:
3515 3515 # write out state for --continue
3516 3516 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3517 3517 repo.vfs.write('graftstate', ''.join(nodelines))
3518 3518 extra = ''
3519 3519 if opts.get('user'):
3520 3520 extra += ' --user %s' % util.shellquote(opts['user'])
3521 3521 if opts.get('date'):
3522 3522 extra += ' --date %s' % util.shellquote(opts['date'])
3523 3523 if opts.get('log'):
3524 3524 extra += ' --log'
3525 3525                 hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
3526 3526 raise error.Abort(
3527 3527 _("unresolved conflicts, can't continue"),
3528 3528 hint=hint)
3529 3529 else:
3530 3530 cont = False
3531 3531
3532 3532 # commit
3533 3533 node = repo.commit(text=message, user=user,
3534 3534 date=date, extra=extra, editor=editor)
3535 3535 if node is None:
3536 3536 ui.warn(
3537 3537 _('note: graft of %d:%s created no changes to commit\n') %
3538 3538 (ctx.rev(), ctx))
3539 3539
3540 3540 # remove state when we complete successfully
3541 3541 if not opts.get('dry_run'):
3542 3542 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3543 3543
3544 3544 return 0
3545 3545
3546 3546 @command('grep',
3547 3547 [('0', 'print0', None, _('end fields with NUL')),
3548 3548 ('', 'all', None, _('print all revisions that match')),
3549 3549 ('a', 'text', None, _('treat all files as text')),
3550 3550 ('f', 'follow', None,
3551 3551 _('follow changeset history,'
3552 3552 ' or file history across copies and renames')),
3553 3553 ('i', 'ignore-case', None, _('ignore case when matching')),
3554 3554 ('l', 'files-with-matches', None,
3555 3555 _('print only filenames and revisions that match')),
3556 3556 ('n', 'line-number', None, _('print matching line numbers')),
3557 3557 ('r', 'rev', [],
3558 3558 _('only search files changed within revision range'), _('REV')),
3559 3559 ('u', 'user', None, _('list the author (long with -v)')),
3560 3560 ('d', 'date', None, _('list the date (short with -q)')),
3561 3561 ] + formatteropts + walkopts,
3562 3562 _('[OPTION]... PATTERN [FILE]...'),
3563 3563 inferrepo=True)
3564 3564 def grep(ui, repo, pattern, *pats, **opts):
3565 3565 """search revision history for a pattern in specified files
3566 3566
3567 3567 Search revision history for a regular expression in the specified
3568 3568 files or the entire project.
3569 3569
3570 3570 By default, grep prints the most recent revision number for each
3571 3571 file in which it finds a match. To get it to print every revision
3572 3572 that contains a change in match status ("-" for a match that becomes
3573 3573 a non-match, or "+" for a non-match that becomes a match), use the
3574 3574 --all flag.
3575 3575
3576 3576 PATTERN can be any Python (roughly Perl-compatible) regular
3577 3577 expression.
3578 3578
3579 3579 If no FILEs are specified (and -f/--follow isn't set), all files in
3580 3580 the repository are searched, including those that don't exist in the
3581 3581 current branch or have been deleted in a prior changeset.
3582 3582
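    .. container:: verbose

      Examples (illustrative invocations; they use only the options
      listed above):

      - show the most recent revision of each file that contains "TODO",
        with the matching line numbers::

          hg grep -n TODO

      - show every revision in which the match status of "TODO" changed::

          hg grep --all TODO
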
3583 3583 Returns 0 if a match is found, 1 otherwise.
3584 3584 """
3585 3585 reflags = re.M
3586 3586 if opts.get('ignore_case'):
3587 3587 reflags |= re.I
3588 3588 try:
3589 3589 regexp = util.re.compile(pattern, reflags)
3590 3590 except re.error as inst:
3591 3591 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3592 3592 return 1
3593 3593 sep, eol = ':', '\n'
3594 3594 if opts.get('print0'):
3595 3595 sep = eol = '\0'
3596 3596
3597 3597 getfile = util.lrucachefunc(repo.file)
3598 3598
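    # scan 'body' with the compiled pattern and, for each match, yield a
    # (line number, start column, end column, line text) tuple; the
    # columns are relative to the start of the matching line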
3599 3599 def matchlines(body):
3600 3600 begin = 0
3601 3601 linenum = 0
3602 3602 while begin < len(body):
3603 3603 match = regexp.search(body, begin)
3604 3604 if not match:
3605 3605 break
3606 3606 mstart, mend = match.span()
3607 3607 linenum += body.count('\n', begin, mstart) + 1
3608 3608 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3609 3609 begin = body.find('\n', mend) + 1 or len(body) + 1
3610 3610 lend = begin - 1
3611 3611 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3612 3612
3613 3613 class linestate(object):
3614 3614 def __init__(self, line, linenum, colstart, colend):
3615 3615 self.line = line
3616 3616 self.linenum = linenum
3617 3617 self.colstart = colstart
3618 3618 self.colend = colend
3619 3619
3620 3620 def __hash__(self):
3621 3621 return hash((self.linenum, self.line))
3622 3622
3623 3623 def __eq__(self, other):
3624 3624 return self.line == other.line
3625 3625
3626 3626 def findpos(self):
3627 3627 """Iterate all (start, end) indices of matches"""
3628 3628 yield self.colstart, self.colend
3629 3629 p = self.colend
3630 3630 while p < len(self.line):
3631 3631 m = regexp.search(self.line, p)
3632 3632 if not m:
3633 3633 break
3634 3634 yield m.span()
3635 3635 p = m.end()
3636 3636
3637 3637 matches = {}
3638 3638 copies = {}
3639 3639 def grepbody(fn, rev, body):
3640 3640 matches[rev].setdefault(fn, [])
3641 3641 m = matches[rev][fn]
3642 3642 for lnum, cstart, cend, line in matchlines(body):
3643 3643 s = linestate(line, lnum, cstart, cend)
3644 3644 m.append(s)
3645 3645
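    # compare the parent and child lists of linestate objects and yield
    # ('-', state) for matches present only in the parent and ('+', state)
    # for matches present only in the child (a changed line yields both)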
3646 3646 def difflinestates(a, b):
3647 3647 sm = difflib.SequenceMatcher(None, a, b)
3648 3648 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3649 3649 if tag == 'insert':
3650 3650 for i in xrange(blo, bhi):
3651 3651 yield ('+', b[i])
3652 3652 elif tag == 'delete':
3653 3653 for i in xrange(alo, ahi):
3654 3654 yield ('-', a[i])
3655 3655 elif tag == 'replace':
3656 3656 for i in xrange(alo, ahi):
3657 3657 yield ('-', a[i])
3658 3658 for i in xrange(blo, bhi):
3659 3659 yield ('+', b[i])
3660 3660
3661 3661 def display(fm, fn, ctx, pstates, states):
3662 3662 rev = ctx.rev()
3663 3663 if fm.isplain():
3664 3664 formatuser = ui.shortuser
3665 3665 else:
3666 3666 formatuser = str
3667 3667 if ui.quiet:
3668 3668 datefmt = '%Y-%m-%d'
3669 3669 else:
3670 3670 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
3671 3671 found = False
3672 3672 @util.cachefunc
3673 3673 def binary():
3674 3674 flog = getfile(fn)
3675 3675 return util.binary(flog.read(ctx.filenode(fn)))
3676 3676
3677 3677 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
3678 3678 if opts.get('all'):
3679 3679 iter = difflinestates(pstates, states)
3680 3680 else:
3681 3681 iter = [('', l) for l in states]
3682 3682 for change, l in iter:
3683 3683 fm.startitem()
3684 3684 fm.data(node=fm.hexfunc(ctx.node()))
3685 3685 cols = [
3686 3686 ('filename', fn, True),
3687 3687 ('rev', rev, True),
3688 3688 ('linenumber', l.linenum, opts.get('line_number')),
3689 3689 ]
3690 3690 if opts.get('all'):
3691 3691 cols.append(('change', change, True))
3692 3692 cols.extend([
3693 3693 ('user', formatuser(ctx.user()), opts.get('user')),
3694 3694 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
3695 3695 ])
3696 3696 lastcol = next(name for name, data, cond in reversed(cols) if cond)
3697 3697 for name, data, cond in cols:
3698 3698 field = fieldnamemap.get(name, name)
3699 3699 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
3700 3700 if cond and name != lastcol:
3701 3701 fm.plain(sep, label='grep.sep')
3702 3702 if not opts.get('files_with_matches'):
3703 3703 fm.plain(sep, label='grep.sep')
3704 3704 if not opts.get('text') and binary():
3705 3705 fm.plain(_(" Binary file matches"))
3706 3706 else:
3707 3707 displaymatches(fm.nested('texts'), l)
3708 3708 fm.plain(eol)
3709 3709 found = True
3710 3710 if opts.get('files_with_matches'):
3711 3711 break
3712 3712 return found
3713 3713
3714 3714 def displaymatches(fm, l):
3715 3715 p = 0
3716 3716 for s, e in l.findpos():
3717 3717 if p < s:
3718 3718 fm.startitem()
3719 3719 fm.write('text', '%s', l.line[p:s])
3720 3720 fm.data(matched=False)
3721 3721 fm.startitem()
3722 3722 fm.write('text', '%s', l.line[s:e], label='grep.match')
3723 3723 fm.data(matched=True)
3724 3724 p = e
3725 3725 if p < len(l.line):
3726 3726 fm.startitem()
3727 3727 fm.write('text', '%s', l.line[p:])
3728 3728 fm.data(matched=False)
3729 3729 fm.end()
3730 3730
3731 3731 skip = {}
3732 3732 revfiles = {}
3733 3733 matchfn = scmutil.match(repo[None], pats, opts)
3734 3734 found = False
3735 3735 follow = opts.get('follow')
3736 3736
3737 3737 def prep(ctx, fns):
3738 3738 rev = ctx.rev()
3739 3739 pctx = ctx.p1()
3740 3740 parent = pctx.rev()
3741 3741 matches.setdefault(rev, {})
3742 3742 matches.setdefault(parent, {})
3743 3743 files = revfiles.setdefault(rev, [])
3744 3744 for fn in fns:
3745 3745 flog = getfile(fn)
3746 3746 try:
3747 3747 fnode = ctx.filenode(fn)
3748 3748 except error.LookupError:
3749 3749 continue
3750 3750
3751 3751 copied = flog.renamed(fnode)
3752 3752 copy = follow and copied and copied[0]
3753 3753 if copy:
3754 3754 copies.setdefault(rev, {})[fn] = copy
3755 3755 if fn in skip:
3756 3756 if copy:
3757 3757 skip[copy] = True
3758 3758 continue
3759 3759 files.append(fn)
3760 3760
3761 3761 if fn not in matches[rev]:
3762 3762 grepbody(fn, rev, flog.read(fnode))
3763 3763
3764 3764 pfn = copy or fn
3765 3765 if pfn not in matches[parent]:
3766 3766 try:
3767 3767 fnode = pctx.filenode(pfn)
3768 3768 grepbody(pfn, parent, flog.read(fnode))
3769 3769 except error.LookupError:
3770 3770 pass
3771 3771
3772 3772 fm = ui.formatter('grep', opts)
3773 3773 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3774 3774 rev = ctx.rev()
3775 3775 parent = ctx.p1().rev()
3776 3776 for fn in sorted(revfiles.get(rev, [])):
3777 3777 states = matches[rev][fn]
3778 3778 copy = copies.get(rev, {}).get(fn)
3779 3779 if fn in skip:
3780 3780 if copy:
3781 3781 skip[copy] = True
3782 3782 continue
3783 3783 pstates = matches.get(parent, {}).get(copy or fn, [])
3784 3784 if pstates or states:
3785 3785 r = display(fm, fn, ctx, pstates, states)
3786 3786 found = found or r
3787 3787 if r and not opts.get('all'):
3788 3788 skip[fn] = True
3789 3789 if copy:
3790 3790 skip[copy] = True
3791 3791 del matches[rev]
3792 3792 del revfiles[rev]
3793 3793 fm.end()
3794 3794
3795 3795 return not found
3796 3796
3797 3797 @command('heads',
3798 3798 [('r', 'rev', '',
3799 3799 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3800 3800 ('t', 'topo', False, _('show topological heads only')),
3801 3801 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3802 3802 ('c', 'closed', False, _('show normal and closed branch heads')),
3803 3803 ] + templateopts,
3804 3804 _('[-ct] [-r STARTREV] [REV]...'))
3805 3805 def heads(ui, repo, *branchrevs, **opts):
3806 3806 """show branch heads
3807 3807
3808 3808 With no arguments, show all open branch heads in the repository.
3809 3809 Branch heads are changesets that have no descendants on the
3810 3810 same branch. They are where development generally takes place and
3811 3811 are the usual targets for update and merge operations.
3812 3812
3813 3813 If one or more REVs are given, only open branch heads on the
3814 3814 branches associated with the specified changesets are shown. This
3815 3815 means that you can use :hg:`heads .` to see the heads on the
3816 3816 currently checked-out branch.
3817 3817
3818 3818 If -c/--closed is specified, also show branch heads marked closed
3819 3819 (see :hg:`commit --close-branch`).
3820 3820
3821 3821 If STARTREV is specified, only those heads that are descendants of
3822 3822 STARTREV will be displayed.
3823 3823
3824 3824 If -t/--topo is specified, named branch mechanics will be ignored and only
3825 3825 topological heads (changesets with no children) will be shown.
3826 3826
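    .. container:: verbose

      Examples (illustrative invocations based on the behavior described
      above):

      - show the open heads of the branch you are currently working on::

          hg heads .

      - show only topological heads (changesets with no children)::

          hg heads -t
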
3827 3827 Returns 0 if matching heads are found, 1 if not.
3828 3828 """
3829 3829
3830 3830 start = None
3831 3831 if 'rev' in opts:
3832 3832 start = scmutil.revsingle(repo, opts['rev'], None).node()
3833 3833
3834 3834 if opts.get('topo'):
3835 3835 heads = [repo[h] for h in repo.heads(start)]
3836 3836 else:
3837 3837 heads = []
3838 3838 for branch in repo.branchmap():
3839 3839 heads += repo.branchheads(branch, start, opts.get('closed'))
3840 3840 heads = [repo[h] for h in heads]
3841 3841
3842 3842 if branchrevs:
3843 3843 branches = set(repo[br].branch() for br in branchrevs)
3844 3844 heads = [h for h in heads if h.branch() in branches]
3845 3845
3846 3846 if opts.get('active') and branchrevs:
3847 3847 dagheads = repo.heads(start)
3848 3848 heads = [h for h in heads if h.node() in dagheads]
3849 3849
3850 3850 if branchrevs:
3851 3851 haveheads = set(h.branch() for h in heads)
3852 3852 if branches - haveheads:
3853 3853 headless = ', '.join(b for b in branches - haveheads)
3854 3854 msg = _('no open branch heads found on branches %s')
3855 3855 if opts.get('rev'):
3856 3856 msg += _(' (started at %s)') % opts['rev']
3857 3857 ui.warn((msg + '\n') % headless)
3858 3858
3859 3859 if not heads:
3860 3860 return 1
3861 3861
3862 3862 heads = sorted(heads, key=lambda x: -x.rev())
3863 3863 displayer = cmdutil.show_changeset(ui, repo, opts)
3864 3864 for ctx in heads:
3865 3865 displayer.show(ctx)
3866 3866 displayer.close()
3867 3867
3868 3868 @command('help',
3869 3869 [('e', 'extension', None, _('show only help for extensions')),
3870 3870 ('c', 'command', None, _('show only help for commands')),
3871 3871 ('k', 'keyword', None, _('show topics matching keyword')),
3872 3872 ('s', 'system', [], _('show help for specific platform(s)')),
3873 3873 ],
3874 3874 _('[-ecks] [TOPIC]'),
3875 3875 norepo=True)
3876 3876 def help_(ui, name=None, **opts):
3877 3877 """show help for a given topic or a help overview
3878 3878
3879 3879 With no arguments, print a list of commands with short help messages.
3880 3880
3881 3881 Given a topic, extension, or command name, print help for that
3882 3882 topic.
3883 3883
3884 3884 Returns 0 if successful.
3885 3885 """
3886 3886
3887 3887 textwidth = ui.configint('ui', 'textwidth', 78)
3888 3888 termwidth = ui.termwidth() - 2
3889 3889 if textwidth <= 0 or termwidth < textwidth:
3890 3890 textwidth = termwidth
3891 3891
3892 3892 keep = opts.get('system') or []
3893 3893 if len(keep) == 0:
3894 3894 if sys.platform.startswith('win'):
3895 3895 keep.append('windows')
3896 3896 elif sys.platform == 'OpenVMS':
3897 3897 keep.append('vms')
3898 3898 elif sys.platform == 'plan9':
3899 3899 keep.append('plan9')
3900 3900 else:
3901 3901 keep.append('unix')
3902 3902 keep.append(sys.platform.lower())
3903 3903 if ui.verbose:
3904 3904 keep.append('verbose')
3905 3905
3906 3906 section = None
3907 3907 subtopic = None
3908 3908 if name and '.' in name:
3909 3909 name, remaining = name.split('.', 1)
3910 3910 remaining = encoding.lower(remaining)
3911 3911 if '.' in remaining:
3912 3912 subtopic, section = remaining.split('.', 1)
3913 3913 else:
3914 3914 if name in help.subtopics:
3915 3915 subtopic = remaining
3916 3916 else:
3917 3917 section = remaining
3918 3918
3919 3919 text = help.help_(ui, name, subtopic=subtopic, **opts)
3920 3920
3921 3921 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3922 3922 section=section)
3923 3923
3924 3924 # We could have been given a weird ".foo" section without a name
3925 3925 # to look for, or we could have simply failed to found "foo.bar"
3926 3926 # because bar isn't a section of foo
3927 3927 if section and not (formatted and name):
3928 3928 raise error.Abort(_("help section not found"))
3929 3929
3930 3930 if 'verbose' in pruned:
3931 3931 keep.append('omitted')
3932 3932 else:
3933 3933 keep.append('notomitted')
3934 3934 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3935 3935 section=section)
3936 3936 ui.write(formatted)
3937 3937
3938 3938
3939 3939 @command('identify|id',
3940 3940 [('r', 'rev', '',
3941 3941 _('identify the specified revision'), _('REV')),
3942 3942 ('n', 'num', None, _('show local revision number')),
3943 3943 ('i', 'id', None, _('show global revision id')),
3944 3944 ('b', 'branch', None, _('show branch')),
3945 3945 ('t', 'tags', None, _('show tags')),
3946 3946 ('B', 'bookmarks', None, _('show bookmarks')),
3947 3947 ] + remoteopts,
3948 3948 _('[-nibtB] [-r REV] [SOURCE]'),
3949 3949 optionalrepo=True)
3950 3950 def identify(ui, repo, source=None, rev=None,
3951 3951 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3952 3952 """identify the working directory or specified revision
3953 3953
3954 3954 Print a summary identifying the repository state at REV using one or
3955 3955 two parent hash identifiers, followed by a "+" if the working
3956 3956 directory has uncommitted changes, the branch name (if not default),
3957 3957 a list of tags, and a list of bookmarks.
3958 3958
3959 3959 When REV is not given, print a summary of the current state of the
3960 3960 repository.
3961 3961
3962 3962 Specifying a path to a repository root or Mercurial bundle will
3963 3963 cause lookup to operate on that repository/bundle.
3964 3964
3965 3965 .. container:: verbose
3966 3966
3967 3967 Examples:
3968 3968
3969 3969 - generate a build identifier for the working directory::
3970 3970
3971 3971 hg id --id > build-id.dat
3972 3972
3973 3973 - find the revision corresponding to a tag::
3974 3974
3975 3975 hg id -n -r 1.3
3976 3976
3977 3977 - check the most recent revision of a remote repository::
3978 3978
3979 3979 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3980 3980
3981 3981 See :hg:`log` for generating more information about specific revisions,
3982 3982 including full hash identifiers.
3983 3983
3984 3984 Returns 0 if successful.
3985 3985 """
3986 3986
3987 3987 if not repo and not source:
3988 3988 raise error.Abort(_("there is no Mercurial repository here "
3989 3989 "(.hg not found)"))
3990 3990
3991 3991 if ui.debugflag:
3992 3992 hexfunc = hex
3993 3993 else:
3994 3994 hexfunc = short
3995 3995 default = not (num or id or branch or tags or bookmarks)
3996 3996 output = []
3997 3997 revs = []
3998 3998
3999 3999 if source:
4000 4000 source, branches = hg.parseurl(ui.expandpath(source))
4001 4001 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4002 4002 repo = peer.local()
4003 4003 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4004 4004
4005 4005 if not repo:
4006 4006 if num or branch or tags:
4007 4007 raise error.Abort(
4008 4008 _("can't query remote revision number, branch, or tags"))
4009 4009 if not rev and revs:
4010 4010 rev = revs[0]
4011 4011 if not rev:
4012 4012 rev = "tip"
4013 4013
4014 4014 remoterev = peer.lookup(rev)
4015 4015 if default or id:
4016 4016 output = [hexfunc(remoterev)]
4017 4017
4018 4018 def getbms():
4019 4019 bms = []
4020 4020
4021 4021 if 'bookmarks' in peer.listkeys('namespaces'):
4022 4022 hexremoterev = hex(remoterev)
4023 4023 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4024 4024 if bmr == hexremoterev]
4025 4025
4026 4026 return sorted(bms)
4027 4027
4028 4028 if bookmarks:
4029 4029 output.extend(getbms())
4030 4030 elif default and not ui.quiet:
4031 4031 # multiple bookmarks for a single parent separated by '/'
4032 4032 bm = '/'.join(getbms())
4033 4033 if bm:
4034 4034 output.append(bm)
4035 4035 else:
4036 4036 ctx = scmutil.revsingle(repo, rev, None)
4037 4037
4038 4038 if ctx.rev() is None:
4039 4039 ctx = repo[None]
4040 4040 parents = ctx.parents()
4041 4041 taglist = []
4042 4042 for p in parents:
4043 4043 taglist.extend(p.tags())
4044 4044
4045 4045 changed = ""
4046 4046 if default or id or num:
4047 4047 if (any(repo.status())
4048 4048 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4049 4049 changed = '+'
4050 4050 if default or id:
4051 4051 output = ["%s%s" %
4052 4052 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4053 4053 if num:
4054 4054 output.append("%s%s" %
4055 4055 ('+'.join([str(p.rev()) for p in parents]), changed))
4056 4056 else:
4057 4057 if default or id:
4058 4058 output = [hexfunc(ctx.node())]
4059 4059 if num:
4060 4060 output.append(str(ctx.rev()))
4061 4061 taglist = ctx.tags()
4062 4062
4063 4063 if default and not ui.quiet:
4064 4064 b = ctx.branch()
4065 4065 if b != 'default':
4066 4066 output.append("(%s)" % b)
4067 4067
4068 4068 # multiple tags for a single parent separated by '/'
4069 4069 t = '/'.join(taglist)
4070 4070 if t:
4071 4071 output.append(t)
4072 4072
4073 4073 # multiple bookmarks for a single parent separated by '/'
4074 4074 bm = '/'.join(ctx.bookmarks())
4075 4075 if bm:
4076 4076 output.append(bm)
4077 4077 else:
4078 4078 if branch:
4079 4079 output.append(ctx.branch())
4080 4080
4081 4081 if tags:
4082 4082 output.extend(taglist)
4083 4083
4084 4084 if bookmarks:
4085 4085 output.extend(ctx.bookmarks())
4086 4086
4087 4087 ui.write("%s\n" % ' '.join(output))
4088 4088
4089 4089 @command('import|patch',
4090 4090 [('p', 'strip', 1,
4091 4091 _('directory strip option for patch. This has the same '
4092 4092 'meaning as the corresponding patch option'), _('NUM')),
4093 4093 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4094 4094 ('e', 'edit', False, _('invoke editor on commit messages')),
4095 4095 ('f', 'force', None,
4096 4096 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4097 4097 ('', 'no-commit', None,
4098 4098 _("don't commit, just update the working directory")),
4099 4099 ('', 'bypass', None,
4100 4100 _("apply patch without touching the working directory")),
4101 4101 ('', 'partial', None,
4102 4102 _('commit even if some hunks fail')),
4103 4103 ('', 'exact', None,
4104 4104 _('abort if patch would apply lossily')),
4105 4105 ('', 'prefix', '',
4106 4106 _('apply patch to subdirectory'), _('DIR')),
4107 4107 ('', 'import-branch', None,
4108 4108 _('use any branch information in patch (implied by --exact)'))] +
4109 4109 commitopts + commitopts2 + similarityopts,
4110 4110 _('[OPTION]... PATCH...'))
4111 4111 def import_(ui, repo, patch1=None, *patches, **opts):
4112 4112 """import an ordered set of patches
4113 4113
4114 4114 Import a list of patches and commit them individually (unless
4115 4115 --no-commit is specified).
4116 4116
4117 4117 To read a patch from standard input, use "-" as the patch name. If
4118 4118 a URL is specified, the patch will be downloaded from there.
4119 4119
4120 4120 Import first applies changes to the working directory (unless
4121 4121     --bypass is specified) and will abort if there are outstanding
4122 4122     changes.
4123 4123
4124 4124 Use --bypass to apply and commit patches directly to the
4125 4125 repository, without affecting the working directory. Without
4126 4126 --exact, patches will be applied on top of the working directory
4127 4127 parent revision.
4128 4128
4129 4129 You can import a patch straight from a mail message. Even patches
4130 4130 as attachments work (to use the body part, it must have type
4131 4131 text/plain or text/x-patch). From and Subject headers of email
4132 4132     text/plain or text/x-patch). The From and Subject headers of the email
4133 4133     message are used as the default committer and commit message. All
4134 4134 message.
4135 4135
4136 4136 If the imported patch was generated by :hg:`export`, user and
4137 4137     description from the patch override values from the message headers and
4138 4138     body. Values given on the command line with -m/--message and -u/--user
4139 4139 override these.
4140 4140
4141 4141 If --exact is specified, import will set the working directory to
4142 4142 the parent of each patch before applying it, and will abort if the
4143 4143 resulting changeset has a different ID than the one recorded in
4144 4144 the patch. This will guard against various ways that portable
4145 4145 patch formats and mail systems might fail to transfer Mercurial
4146 4146 data or metadata. See :hg:`bundle` for lossless transmission.
4147 4147
4148 4148 Use --partial to ensure a changeset will be created from the patch
4149 4149 even if some hunks fail to apply. Hunks that fail to apply will be
4150 4150 written to a <target-file>.rej file. Conflicts can then be resolved
4151 4151 by hand before :hg:`commit --amend` is run to update the created
4152 4152 changeset. This flag exists to let people import patches that
4153 4153 partially apply without losing the associated metadata (author,
4154 4154 date, description, ...).
4155 4155
4156 4156 .. note::
4157 4157
4158 4158 When no hunks apply cleanly, :hg:`import --partial` will create
4159 4159 an empty changeset, importing only the patch metadata.
4160 4160
4161 4161 With -s/--similarity, hg will attempt to discover renames and
4162 4162 copies in the patch in the same way as :hg:`addremove`.
4163 4163
4164 4164 It is possible to use external patch programs to perform the patch
4165 4165 by setting the ``ui.patch`` configuration option. For the default
4166 4166 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4167 4167 See :hg:`help config` for more information about configuration
4168 4168 files and how to use these options.
4169 4169
4170 4170 See :hg:`help dates` for a list of formats valid for -d/--date.
4171 4171
4172 4172 .. container:: verbose
4173 4173
4174 4174 Examples:
4175 4175
4176 4176 - import a traditional patch from a website and detect renames::
4177 4177
4178 4178 hg import -s 80 http://example.com/bugfix.patch
4179 4179
4180 4180 - import a changeset from an hgweb server::
4181 4181
4182 4182 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4183 4183
4184 4184     - import all the patches in a Unix-style mbox::
4185 4185
4186 4186 hg import incoming-patches.mbox
4187 4187
4188 4188 - attempt to exactly restore an exported changeset (not always
4189 4189 possible)::
4190 4190
4191 4191 hg import --exact proposed-fix.patch
4192 4192
4193 4193 - use an external tool to apply a patch which is too fuzzy for
4194 4194       the default internal tool::
4195 4195
4196 4196 hg import --config ui.patch="patch --merge" fuzzy.patch
4197 4197
4198 4198     - change the default fuzzing from 2 to a less strict 7::
4199 4199
4200 4200       hg import --config patch.fuzz=7 fuzz.patch
4201 4201
4202 4202 Returns 0 on success, 1 on partial success (see --partial).
4203 4203 """
4204 4204
4205 4205 if not patch1:
4206 4206 raise error.Abort(_('need at least one patch to import'))
4207 4207
4208 4208 patches = (patch1,) + patches
4209 4209
4210 4210 date = opts.get('date')
4211 4211 if date:
4212 4212 opts['date'] = util.parsedate(date)
4213 4213
4214 4214 exact = opts.get('exact')
4215 4215 update = not opts.get('bypass')
4216 4216 if not update and opts.get('no_commit'):
4217 4217 raise error.Abort(_('cannot use --no-commit with --bypass'))
4218 4218 try:
4219 4219 sim = float(opts.get('similarity') or 0)
4220 4220 except ValueError:
4221 4221 raise error.Abort(_('similarity must be a number'))
4222 4222 if sim < 0 or sim > 100:
4223 4223 raise error.Abort(_('similarity must be between 0 and 100'))
4224 4224 if sim and not update:
4225 4225 raise error.Abort(_('cannot use --similarity with --bypass'))
4226 4226 if exact:
4227 4227 if opts.get('edit'):
4228 4228 raise error.Abort(_('cannot use --exact with --edit'))
4229 4229 if opts.get('prefix'):
4230 4230 raise error.Abort(_('cannot use --exact with --prefix'))
4231 4231
4232 4232 base = opts["base"]
4233 4233 wlock = dsguard = lock = tr = None
4234 4234 msgs = []
4235 4235 ret = 0
4236 4236
4237 4237
4238 4238 try:
4239 4239 wlock = repo.wlock()
4240 4240
4241 4241 if update:
4242 4242 cmdutil.checkunfinished(repo)
4243 4243 if (exact or not opts.get('force')):
4244 4244 cmdutil.bailifchanged(repo)
4245 4245
4246 4246 if not opts.get('no_commit'):
4247 4247 lock = repo.lock()
4248 4248 tr = repo.transaction('import')
4249 4249 else:
4250 4250 dsguard = dirstateguard.dirstateguard(repo, 'import')
4251 4251 parents = repo[None].parents()
4252 4252 for patchurl in patches:
4253 4253 if patchurl == '-':
4254 4254 ui.status(_('applying patch from stdin\n'))
4255 4255 patchfile = ui.fin
4256 4256 patchurl = 'stdin' # for error message
4257 4257 else:
4258 4258 patchurl = os.path.join(base, patchurl)
4259 4259 ui.status(_('applying %s\n') % patchurl)
4260 4260 patchfile = hg.openpath(ui, patchurl)
4261 4261
4262 4262 haspatch = False
4263 4263 for hunk in patch.split(patchfile):
4264 4264 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4265 4265 parents, opts,
4266 4266 msgs, hg.clean)
4267 4267 if msg:
4268 4268 haspatch = True
4269 4269 ui.note(msg + '\n')
4270 4270 if update or exact:
4271 4271 parents = repo[None].parents()
4272 4272 else:
4273 4273 parents = [repo[node]]
4274 4274 if rej:
4275 4275 ui.write_err(_("patch applied partially\n"))
4276 4276 ui.write_err(_("(fix the .rej files and run "
4277 4277 "`hg commit --amend`)\n"))
4278 4278 ret = 1
4279 4279 break
4280 4280
4281 4281 if not haspatch:
4282 4282 raise error.Abort(_('%s: no diffs found') % patchurl)
4283 4283
4284 4284 if tr:
4285 4285 tr.close()
4286 4286 if msgs:
4287 4287 repo.savecommitmessage('\n* * *\n'.join(msgs))
4288 4288 if dsguard:
4289 4289 dsguard.close()
4290 4290 return ret
4291 4291 finally:
4292 4292 if tr:
4293 4293 tr.release()
4294 4294 release(lock, dsguard, wlock)
4295 4295
4296 4296 @command('incoming|in',
4297 4297 [('f', 'force', None,
4298 4298 _('run even if remote repository is unrelated')),
4299 4299 ('n', 'newest-first', None, _('show newest record first')),
4300 4300 ('', 'bundle', '',
4301 4301 _('file to store the bundles into'), _('FILE')),
4302 4302 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4303 4303 ('B', 'bookmarks', False, _("compare bookmarks")),
4304 4304 ('b', 'branch', [],
4305 4305 _('a specific branch you would like to pull'), _('BRANCH')),
4306 4306 ] + logopts + remoteopts + subrepoopts,
4307 4307 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4308 4308 def incoming(ui, repo, source="default", **opts):
4309 4309 """show new changesets found in source
4310 4310
4311 4311 Show new changesets found in the specified path/URL or the default
4312 4312 pull location. These are the changesets that would have been pulled
4313 4313     if a pull had been requested at the time you issued this command.
4314 4314
4315 4315     See :hg:`pull` for valid source format details.
4316 4316
4317 4317 .. container:: verbose
4318 4318
4319 4319 With -B/--bookmarks, the result of bookmark comparison between
4320 4320 local and remote repositories is displayed. With -v/--verbose,
4321 4321 status is also displayed for each bookmark like below::
4322 4322
4323 4323 BM1 01234567890a added
4324 4324 BM2 1234567890ab advanced
4325 4325 BM3 234567890abc diverged
4326 4326 BM4 34567890abcd changed
4327 4327
4328 4328 The action taken locally when pulling depends on the
4329 4329 status of each bookmark:
4330 4330
4331 4331 :``added``: pull will create it
4332 4332 :``advanced``: pull will update it
4333 4333 :``diverged``: pull will create a divergent bookmark
4334 4334 :``changed``: result depends on remote changesets
4335 4335
4336 4336       From the point of view of pulling behavior, bookmarks
4337 4337       existing only in the remote repository are treated as ``added``,
4338 4338       even if they are in fact locally deleted.
4339 4339
4340 4340 .. container:: verbose
4341 4341
4342 4342       For a remote repository, using --bundle avoids downloading the
4343 4343 changesets twice if the incoming is followed by a pull.
4344 4344
4345 4345 Examples:
4346 4346
4347 4347 - show incoming changes with patches and full description::
4348 4348
4349 4349 hg incoming -vp
4350 4350
4351 4351 - show incoming changes excluding merges, store a bundle::
4352 4352
4353 4353 hg in -vpM --bundle incoming.hg
4354 4354 hg pull incoming.hg
4355 4355
4356 4356 - briefly list changes inside a bundle::
4357 4357
4358 4358 hg in changes.hg -T "{desc|firstline}\\n"
4359 4359
4360 4360 Returns 0 if there are incoming changes, 1 otherwise.
4361 4361 """
4362 4362 if opts.get('graph'):
4363 4363 cmdutil.checkunsupportedgraphflags([], opts)
4364 4364 def display(other, chlist, displayer):
4365 4365 revdag = cmdutil.graphrevs(other, chlist, opts)
4366 4366 cmdutil.displaygraph(ui, repo, revdag, displayer,
4367 4367 graphmod.asciiedges)
4368 4368
4369 4369 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4370 4370 return 0
4371 4371
4372 4372 if opts.get('bundle') and opts.get('subrepos'):
4373 4373 raise error.Abort(_('cannot combine --bundle and --subrepos'))
4374 4374
4375 4375 if opts.get('bookmarks'):
4376 4376 source, branches = hg.parseurl(ui.expandpath(source),
4377 4377 opts.get('branch'))
4378 4378 other = hg.peer(repo, opts, source)
4379 4379 if 'bookmarks' not in other.listkeys('namespaces'):
4380 4380 ui.warn(_("remote doesn't support bookmarks\n"))
4381 4381 return 0
4382 4382 ui.status(_('comparing with %s\n') % util.hidepassword(source))
4383 4383 return bookmarks.incoming(ui, repo, other)
4384 4384
4385 4385 repo._subtoppath = ui.expandpath(source)
4386 4386 try:
4387 4387 return hg.incoming(ui, repo, source, opts)
4388 4388 finally:
4389 4389 del repo._subtoppath
4390 4390
4391 4391
4392 4392 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
4393 4393 norepo=True)
4394 4394 def init(ui, dest=".", **opts):
4395 4395 """create a new repository in the given directory
4396 4396
4397 4397 Initialize a new repository in the given directory. If the given
4398 4398 directory does not exist, it will be created.
4399 4399
4400 4400 If no directory is given, the current directory is used.
4401 4401
4402 4402 It is possible to specify an ``ssh://`` URL as the destination.
4403 4403 See :hg:`help urls` for more information.
4404 4404
4405 4405 Returns 0 on success.
4406 4406 """
4407 4407 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4408 4408
4409 4409 @command('locate',
4410 4410 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4411 4411 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4412 4412 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4413 4413 ] + walkopts,
4414 4414 _('[OPTION]... [PATTERN]...'))
4415 4415 def locate(ui, repo, *pats, **opts):
4416 4416 """locate files matching specific patterns (DEPRECATED)
4417 4417
4418 4418 Print files under Mercurial control in the working directory whose
4419 4419 names match the given patterns.
4420 4420
4421 4421 By default, this command searches all directories in the working
4422 4422 directory. To search just the current directory and its
4423 4423 subdirectories, use "--include .".
4424 4424
4425 4425 If no patterns are given to match, this command prints the names
4426 4426 of all files under Mercurial control in the working directory.
4427 4427
4428 4428 If you want to feed the output of this command into the "xargs"
4429 4429 command, use the -0 option to both this command and "xargs". This
4430 4430 will avoid the problem of "xargs" treating single filenames that
4431 4431 contain whitespace as multiple filenames.
4432 4432
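    For example, the following (an illustrative pipeline) counts the bytes
    in every tracked file, handling names that contain whitespace::

      hg locate -0 | xargs -0 wc -c
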
4433 4433 See :hg:`help files` for a more versatile command.
4434 4434
4435 4435 Returns 0 if a match is found, 1 otherwise.
4436 4436 """
4437 4437 if opts.get('print0'):
4438 4438 end = '\0'
4439 4439 else:
4440 4440 end = '\n'
4441 4441 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4442 4442
4443 4443 ret = 1
4444 4444 ctx = repo[rev]
4445 4445 m = scmutil.match(ctx, pats, opts, default='relglob',
4446 4446 badfn=lambda x, y: False)
4447 4447
4448 4448 for abs in ctx.matches(m):
4449 4449 if opts.get('fullpath'):
4450 4450 ui.write(repo.wjoin(abs), end)
4451 4451 else:
4452 4452 ui.write(((pats and m.rel(abs)) or abs), end)
4453 4453 ret = 0
4454 4454
4455 4455 return ret
4456 4456
4457 4457 @command('^log|history',
4458 4458 [('f', 'follow', None,
4459 4459 _('follow changeset history, or file history across copies and renames')),
4460 4460 ('', 'follow-first', None,
4461 4461 _('only follow the first parent of merge changesets (DEPRECATED)')),
4462 4462 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
4463 4463 ('C', 'copies', None, _('show copied files')),
4464 4464 ('k', 'keyword', [],
4465 4465 _('do case-insensitive search for a given text'), _('TEXT')),
4466 4466 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
4467 4467 ('', 'removed', None, _('include revisions where files were removed')),
4468 4468 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4469 4469 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4470 4470 ('', 'only-branch', [],
4471 4471 _('show only changesets within the given named branch (DEPRECATED)'),
4472 4472 _('BRANCH')),
4473 4473 ('b', 'branch', [],
4474 4474 _('show changesets within the given named branch'), _('BRANCH')),
4475 4475 ('P', 'prune', [],
4476 4476 _('do not display revision or any of its ancestors'), _('REV')),
4477 4477 ] + logopts + walkopts,
4478 4478 _('[OPTION]... [FILE]'),
4479 4479 inferrepo=True)
4480 4480 def log(ui, repo, *pats, **opts):
4481 4481 """show revision history of entire repository or files
4482 4482
4483 4483 Print the revision history of the specified files or the entire
4484 4484 project.
4485 4485
4486 4486 If no revision range is specified, the default is ``tip:0`` unless
4487 4487 --follow is set, in which case the working directory parent is
4488 4488 used as the starting revision.
4489 4489
4490 4490 File history is shown without following rename or copy history of
4491 4491 files. Use -f/--follow with a filename to follow history across
4492 4492 renames and copies. --follow without a filename will only show
4493 4493 ancestors or descendants of the starting revision.
4494 4494
4495 4495 By default this command prints revision number and changeset id,
4496 4496 tags, non-trivial parents, user, date and time, and a summary for
4497 4497 each commit. When the -v/--verbose switch is used, the list of
4498 4498 changed files and full commit message are shown.
4499 4499
4500 4500 With --graph the revisions are shown as an ASCII art DAG with the most
4501 4501 recent changeset at the top.
4502 4502 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4503 4503 and '+' represents a fork where the changeset from the lines below is a
4504 4504 parent of the 'o' merge on the same line.
4505 4505
4506 4506 .. note::
4507 4507
4508 4508 :hg:`log --patch` may generate unexpected diff output for merge
4509 4509 changesets, as it will only compare the merge changeset against
4510 4510 its first parent. Also, only files different from BOTH parents
4511 4511 will appear in files:.
4512 4512
4513 4513 .. note::
4514 4514
4515 4515 For performance reasons, :hg:`log FILE` may omit duplicate changes
4516 4516 made on branches and will not show removals or mode changes. To
4517 4517 see all such changes, use the --removed switch.
4518 4518
4519 4519 .. container:: verbose
4520 4520
4521 4521 Some examples:
4522 4522
4523 4523 - changesets with full descriptions and file lists::
4524 4524
4525 4525 hg log -v
4526 4526
4527 4527 - changesets ancestral to the working directory::
4528 4528
4529 4529 hg log -f
4530 4530
4531 4531 - last 10 commits on the current branch::
4532 4532
4533 4533 hg log -l 10 -b .
4534 4534
4535 4535 - changesets showing all modifications of a file, including removals::
4536 4536
4537 4537 hg log --removed file.c
4538 4538
4539 4539 - all changesets that touch a directory, with diffs, excluding merges::
4540 4540
4541 4541 hg log -Mp lib/
4542 4542
4543 4543 - all revision numbers that match a keyword::
4544 4544
4545 4545 hg log -k bug --template "{rev}\\n"
4546 4546
4547 4547 - the full hash identifier of the working directory parent::
4548 4548
4549 4549 hg log -r . --template "{node}\\n"
4550 4550
4551 4551 - list available log templates::
4552 4552
4553 4553 hg log -T list
4554 4554
4555 4555 - check if a given changeset is included in a tagged release::
4556 4556
4557 4557 hg log -r "a21ccf and ancestor(1.9)"
4558 4558
4559 4559 - find all changesets by some user in a date range::
4560 4560
4561 4561 hg log -k alice -d "may 2008 to jul 2008"
4562 4562
4563 4563 - summary of all changesets after the last tag::
4564 4564
4565 4565 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4566 4566
4567 4567 See :hg:`help dates` for a list of formats valid for -d/--date.
4568 4568
4569 4569 See :hg:`help revisions` and :hg:`help revsets` for more about
4570 4570 specifying and ordering revisions.
4571 4571
4572 4572 See :hg:`help templates` for more about pre-packaged styles and
4573 4573 specifying custom templates.
4574 4574
4575 4575 Returns 0 on success.
4576 4576
4577 4577 """
4578 4578 if opts.get('follow') and opts.get('rev'):
4579 4579 opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
4580 4580 del opts['follow']
4581 4581
4582 4582 if opts.get('graph'):
4583 4583 return cmdutil.graphlog(ui, repo, *pats, **opts)
4584 4584
4585 4585 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4586 4586 limit = cmdutil.loglimit(opts)
4587 4587 count = 0
4588 4588
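# with -C/--copies, precompute a rename lookup; if an explicit revset was
# given, endrev caps copy tracing just above its highest revision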
4589 4589 getrenamed = None
4590 4590 if opts.get('copies'):
4591 4591 endrev = None
4592 4592 if opts.get('rev'):
4593 4593 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4594 4594 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4595 4595
4596 4596 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4597 4597 for rev in revs:
4598 4598 if count == limit:
4599 4599 break
4600 4600 ctx = repo[rev]
4601 4601 copies = None
4602 4602 if getrenamed is not None and rev:
4603 4603 copies = []
4604 4604 for fn in ctx.files():
4605 4605 rename = getrenamed(fn, rev)
4606 4606 if rename:
4607 4607 copies.append((fn, rename[0]))
4608 4608 if filematcher:
4609 4609 revmatchfn = filematcher(ctx.rev())
4610 4610 else:
4611 4611 revmatchfn = None
4612 4612 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4613 4613 if displayer.flush(ctx):
4614 4614 count += 1
4615 4615
4616 4616 displayer.close()
4617 4617
4618 4618 @command('manifest',
4619 4619 [('r', 'rev', '', _('revision to display'), _('REV')),
4620 4620 ('', 'all', False, _("list files from all revisions"))]
4621 4621 + formatteropts,
4622 4622 _('[-r REV]'))
4623 4623 def manifest(ui, repo, node=None, rev=None, **opts):
4624 4624 """output the current or given revision of the project manifest
4625 4625
4626 4626 Print a list of version controlled files for the given revision.
4627 4627 If no revision is given, the first parent of the working directory
4628 4628 is used, or the null revision if no revision is checked out.
4629 4629
4630 4630 With -v, print file permissions, symlink and executable bits.
4631 4631 With --debug, print file revision hashes.
4632 4632
4633 4633 If option --all is specified, the list of all files from all revisions
4634 4634 is printed. This includes deleted and renamed files.
4635 4635
4636 4636 Returns 0 on success.
4637 4637 """
4638 4638
4639 4639 fm = ui.formatter('manifest', opts)
4640 4640
4641 4641 if opts.get('all'):
4642 4642 if rev or node:
4643 4643 raise error.Abort(_("can't specify a revision with --all"))
4644 4644
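# with --all, walk the store instead of a manifest: every filelog lives at
# data/<path>.i, so stripping that prefix and suffix recovers the name of
# every file ever tracked, including deleted and renamed ones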
4645 4645 res = []
4646 4646 prefix = "data/"
4647 4647 suffix = ".i"
4648 4648 plen = len(prefix)
4649 4649 slen = len(suffix)
4650 4650 with repo.lock():
4651 4651 for fn, b, size in repo.store.datafiles():
4652 4652 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4653 4653 res.append(fn[plen:-slen])
4654 4654 for f in res:
4655 4655 fm.startitem()
4656 4656 fm.write("path", '%s\n', f)
4657 4657 fm.end()
4658 4658 return
4659 4659
4660 4660 if rev and node:
4661 4661 raise error.Abort(_("please specify just one revision"))
4662 4662
4663 4663 if not node:
4664 4664 node = rev
4665 4665
4666 4666 char = {'l': '@', 'x': '*', '': ''}
4667 4667 mode = {'l': '644', 'x': '755', '': '644'}
4668 4668 ctx = scmutil.revsingle(repo, node)
4669 4669 mf = ctx.manifest()
4670 4670 for f in ctx:
4671 4671 fm.startitem()
4672 4672 fl = ctx[f].flags()
4673 4673 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4674 4674 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4675 4675 fm.write('path', '%s\n', f)
4676 4676 fm.end()
4677 4677
4678 4678 @command('^merge',
4679 4679 [('f', 'force', None,
4680 4680 _('force a merge including outstanding changes (DEPRECATED)')),
4681 4681 ('r', 'rev', '', _('revision to merge'), _('REV')),
4682 4682 ('P', 'preview', None,
4683 4683 _('review revisions to merge (no merge is performed)'))
4684 4684 ] + mergetoolopts,
4685 4685 _('[-P] [[-r] REV]'))
4686 4686 def merge(ui, repo, node=None, **opts):
4687 4687 """merge another revision into working directory
4688 4688
4689 4689 The current working directory is updated with all changes made in
4690 4690 the requested revision since the last common predecessor revision.
4691 4691
4692 4692 Files that changed between either parent are marked as changed for
4693 4693 the next commit and a commit must be performed before any further
4694 4694 updates to the repository are allowed. The next commit will have
4695 4695 two parents.
4696 4696
4697 4697 ``--tool`` can be used to specify the merge tool used for file
4698 4698 merges. It overrides the HGMERGE environment variable and your
4699 4699 configuration files. See :hg:`help merge-tools` for options.
4700 4700
4701 4701 If no revision is specified and the working directory's parent is a
4702 4702 head revision whose branch contains exactly one other head, that other
4703 4703 head is merged by default. Otherwise, an explicit revision with which
4704 4704 to merge must be provided.
4705 4705
4706 4706 See :hg:`help resolve` for information on handling file conflicts.
4707 4707
4708 4708 To undo an uncommitted merge, use :hg:`update --clean .` which
4709 4709 will check out a clean copy of the original merge parent, losing
4710 4710 all changes.
4711 4711
4712 4712 Returns 0 on success, 1 if there are unresolved files.
4713 4713 """
4714 4714
4715 4715 if opts.get('rev') and node:
4716 4716 raise error.Abort(_("please specify just one revision"))
4717 4717 if not node:
4718 4718 node = opts.get('rev')
4719 4719
4720 4720 if node:
4721 4721 node = scmutil.revsingle(repo, node).node()
4722 4722
4723 4723 if not node:
4724 4724 node = repo[destutil.destmerge(repo)].node()
4725 4725
4726 4726 if opts.get('preview'):
4727 4727 # find nodes that are ancestors of p2 but not of p1
4728 4728 p1 = repo.lookup('.')
4729 4729 p2 = repo.lookup(node)
4730 4730 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4731 4731
4732 4732 displayer = cmdutil.show_changeset(ui, repo, opts)
4733 4733 for node in nodes:
4734 4734 displayer.show(repo[node])
4735 4735 displayer.close()
4736 4736 return 0
4737 4737
4738 4738 try:
4739 4739 # ui.forcemerge is an internal variable, do not document
4740 4740 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4741 4741 force = opts.get('force')
4742 4742 labels = ['working copy', 'merge rev']
4743 4743 return hg.merge(repo, node, force=force, mergeforce=force,
4744 4744 labels=labels)
4745 4745 finally:
4746 4746 ui.setconfig('ui', 'forcemerge', '', 'merge')
4747 4747
4748 4748 @command('outgoing|out',
4749 4749 [('f', 'force', None, _('run even when the destination is unrelated')),
4750 4750 ('r', 'rev', [],
4751 4751 _('a changeset intended to be included in the destination'), _('REV')),
4752 4752 ('n', 'newest-first', None, _('show newest record first')),
4753 4753 ('B', 'bookmarks', False, _('compare bookmarks')),
4754 4754 ('b', 'branch', [], _('a specific branch you would like to push'),
4755 4755 _('BRANCH')),
4756 4756 ] + logopts + remoteopts + subrepoopts,
4757 4757 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4758 4758 def outgoing(ui, repo, dest=None, **opts):
4759 4759 """show changesets not found in the destination
4760 4760
4761 4761 Show changesets not found in the specified destination repository
4762 4762 or the default push location. These are the changesets that would
4763 4763 be pushed if a push was requested.
4764 4764
4765 4765 See pull for details of valid destination formats.
4766 4766
4767 4767 .. container:: verbose
4768 4768
4769 4769 With -B/--bookmarks, the result of bookmark comparison between
4770 4770 local and remote repositories is displayed. With -v/--verbose,
4771 4771 status is also displayed for each bookmark like below::
4772 4772
4773 4773 BM1 01234567890a added
4774 4774 BM2 deleted
4775 4775 BM3 234567890abc advanced
4776 4776 BM4 34567890abcd diverged
4777 4777 BM5 4567890abcde changed
4778 4778
4779 4779 The action taken when pushing depends on the
4780 4780 status of each bookmark:
4781 4781
4782 4782 :``added``: push with ``-B`` will create it
4783 4783 :``deleted``: push with ``-B`` will delete it
4784 4784 :``advanced``: push will update it
4785 4785 :``diverged``: push with ``-B`` will update it
4786 4786 :``changed``: push with ``-B`` will update it
4787 4787
4788 4788 From the point of view of pushing behavior, bookmarks
4789 4789 existing only in the remote repository are treated as
4790 4790 ``deleted``, even if they were in fact added remotely.
4791 4791
4792 4792 Returns 0 if there are outgoing changes, 1 otherwise.
4793 4793 """
4794 4794 if opts.get('graph'):
4795 4795 cmdutil.checkunsupportedgraphflags([], opts)
4796 4796 o, other = hg._outgoing(ui, repo, dest, opts)
4797 4797 if not o:
4798 4798 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4799 4799 return
4800 4800
4801 4801 revdag = cmdutil.graphrevs(repo, o, opts)
4802 4802 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4803 4803 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
4804 4804 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4805 4805 return 0
4806 4806
4807 4807 if opts.get('bookmarks'):
4808 4808 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4809 4809 dest, branches = hg.parseurl(dest, opts.get('branch'))
4810 4810 other = hg.peer(repo, opts, dest)
4811 4811 if 'bookmarks' not in other.listkeys('namespaces'):
4812 4812 ui.warn(_("remote doesn't support bookmarks\n"))
4813 4813 return 0
4814 4814 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4815 4815 return bookmarks.outgoing(ui, repo, other)
4816 4816
4817 4817 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4818 4818 try:
4819 4819 return hg.outgoing(ui, repo, dest, opts)
4820 4820 finally:
4821 4821 del repo._subtoppath
4822 4822
4823 4823 @command('parents',
4824 4824 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4825 4825 ] + templateopts,
4826 4826 _('[-r REV] [FILE]'),
4827 4827 inferrepo=True)
4828 4828 def parents(ui, repo, file_=None, **opts):
4829 4829 """show the parents of the working directory or revision (DEPRECATED)
4830 4830
4831 4831 Print the working directory's parent revisions. If a revision is
4832 4832 given via -r/--rev, the parent of that revision will be printed.
4833 4833 If a file argument is given, the revision in which the file was
4834 4834 last changed (before the working directory revision or the
4835 4835 argument to --rev if given) is printed.
4836 4836
4837 4837 This command is equivalent to::
4838 4838
4839 4839 hg log -r "p1()+p2()" or
4840 4840 hg log -r "p1(REV)+p2(REV)" or
4841 4841 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
4842 4842 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
4843 4843
4844 4844 See :hg:`summary` and :hg:`help revsets` for related information.
4845 4845
4846 4846 Returns 0 on success.
4847 4847 """
4848 4848
4849 4849 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4850 4850
4851 4851 if file_:
4852 4852 m = scmutil.match(ctx, (file_,), opts)
4853 4853 if m.anypats() or len(m.files()) != 1:
4854 4854 raise error.Abort(_('can only specify an explicit filename'))
4855 4855 file_ = m.files()[0]
4856 4856 filenodes = []
4857 4857 for cp in ctx.parents():
4858 4858 if not cp:
4859 4859 continue
4860 4860 try:
4861 4861 filenodes.append(cp.filenode(file_))
4862 4862 except error.LookupError:
4863 4863 pass
4864 4864 if not filenodes:
4865 4865 raise error.Abort(_("'%s' not found in manifest!") % file_)
4866 4866 p = []
4867 4867 for fn in filenodes:
4868 4868 fctx = repo.filectx(file_, fileid=fn)
4869 4869 p.append(fctx.node())
4870 4870 else:
4871 4871 p = [cp.node() for cp in ctx.parents()]
4872 4872
4873 4873 displayer = cmdutil.show_changeset(ui, repo, opts)
4874 4874 for n in p:
4875 4875 if n != nullid:
4876 4876 displayer.show(repo[n])
4877 4877 displayer.close()
4878 4878
4879 4879 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
4880 4880 def paths(ui, repo, search=None, **opts):
4881 4881 """show aliases for remote repositories
4882 4882
4883 4883 Show definition of symbolic path name NAME. If no name is given,
4884 4884 show definition of all available names.
4885 4885
4886 4886 Option -q/--quiet suppresses all output when searching for NAME
4887 4887 and shows only the path names when listing all definitions.
4888 4888
4889 4889 Path names are defined in the [paths] section of your
4890 4890 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4891 4891 repository, ``.hg/hgrc`` is used, too.
4892 4892
4893 4893 The path names ``default`` and ``default-push`` have a special
4894 4894 meaning. When performing a push or pull operation, they are used
4895 4895 as fallbacks if no location is specified on the command-line.
4896 4896 When ``default-push`` is set, it will be used for push and
4897 4897 ``default`` will be used for pull; otherwise ``default`` is used
4898 4898 as the fallback for both. When cloning a repository, the clone
4899 4899 source is written as ``default`` in ``.hg/hgrc``.
4900 4900
4901 4901 .. note::
4902 4902
4903 4903 ``default`` and ``default-push`` apply to all inbound (e.g.
4904 4904 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
4905 4905 and :hg:`bundle`) operations.
4906 4906
4907 4907 See :hg:`help urls` for more information.
4908 4908
4909 4909 Returns 0 on success.
4910 4910 """
4911 4911 if search:
4912 4912 pathitems = [(name, path) for name, path in ui.paths.iteritems()
4913 4913 if name == search]
4914 4914 else:
4915 4915 pathitems = sorted(ui.paths.iteritems())
4916 4916
4917 4917 fm = ui.formatter('paths', opts)
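# passwords embedded in URLs are masked only for plain (human-readable)
# output; templated/machine-readable output keeps the raw location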
4918 4918 if fm.isplain():
4919 4919 hidepassword = util.hidepassword
4920 4920 else:
4921 4921 hidepassword = str
4922 4922 if ui.quiet:
4923 4923 namefmt = '%s\n'
4924 4924 else:
4925 4925 namefmt = '%s = '
4926 4926 showsubopts = not search and not ui.quiet
4927 4927
4928 4928 for name, path in pathitems:
4929 4929 fm.startitem()
4930 4930 fm.condwrite(not search, 'name', namefmt, name)
4931 4931 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
4932 4932 for subopt, value in sorted(path.suboptions.items()):
4933 4933 assert subopt not in ('name', 'url')
4934 4934 if showsubopts:
4935 4935 fm.plain('%s:%s = ' % (name, subopt))
4936 4936 fm.condwrite(showsubopts, subopt, '%s\n', value)
4937 4937
4938 4938 fm.end()
4939 4939
4940 4940 if search and not pathitems:
4941 4941 if not ui.quiet:
4942 4942 ui.warn(_("not found!\n"))
4943 4943 return 1
4944 4944 else:
4945 4945 return 0
4946 4946
4947 4947 @command('phase',
4948 4948 [('p', 'public', False, _('set changeset phase to public')),
4949 4949 ('d', 'draft', False, _('set changeset phase to draft')),
4950 4950 ('s', 'secret', False, _('set changeset phase to secret')),
4951 4951 ('f', 'force', False, _('allow to move boundary backward')),
4952 4952 ('r', 'rev', [], _('target revision'), _('REV')),
4953 4953 ],
4954 4954 _('[-p|-d|-s] [-f] [-r] [REV...]'))
4955 4955 def phase(ui, repo, *revs, **opts):
4956 4956 """set or show the current phase name
4957 4957
4958 4958 With no argument, show the phase name of the current revision(s).
4959 4959
4960 4960 With one of -p/--public, -d/--draft or -s/--secret, change the
4961 4961 phase value of the specified revisions.
4962 4962
4963 4963 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4964 4964 lower phase to a higher phase. Phases are ordered as follows::
4965 4965
4966 4966 public < draft < secret
4967 4967
4968 4968 Returns 0 on success, 1 if some phases could not be changed.
4969 4969
4970 4970 (For more information about the phases concept, see :hg:`help phases`.)
4971 4971 """
4972 4972 # search for a unique phase argument
4973 4973 targetphase = None
4974 4974 for idx, name in enumerate(phases.phasenames):
4975 4975 if opts[name]:
4976 4976 if targetphase is not None:
4977 4977 raise error.Abort(_('only one phase can be specified'))
4978 4978 targetphase = idx
4979 4979
4980 4980 # look for specified revision
4981 4981 revs = list(revs)
4982 4982 revs.extend(opts['rev'])
4983 4983 if not revs:
4984 4984 # display both parents as the second parent phase can influence
4985 4985 # the phase of a merge commit
4986 4986 revs = [c.rev() for c in repo[None].parents()]
4987 4987
4988 4988 revs = scmutil.revrange(repo, revs)
4989 4989
4990 4990 lock = None
4991 4991 ret = 0
4992 4992 if targetphase is None:
4993 4993 # display
4994 4994 for r in revs:
4995 4995 ctx = repo[r]
4996 4996 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4997 4997 else:
4998 4998 tr = None
4999 4999 lock = repo.lock()
5000 5000 try:
5001 5001 tr = repo.transaction("phase")
5002 5002 # set phase
5003 5003 if not revs:
5004 5004 raise error.Abort(_('empty revision set'))
5005 5005 nodes = [repo[r].node() for r in revs]
5006 5006 # moving revisions from public to draft may hide them
5007 5007 # We have to check result on an unfiltered repository
5008 5008 unfi = repo.unfiltered()
5009 5009 getphase = unfi._phasecache.phase
5010 5010 olddata = [getphase(unfi, r) for r in unfi]
5011 5011 phases.advanceboundary(repo, tr, targetphase, nodes)
5012 5012 if opts['force']:
5013 5013 phases.retractboundary(repo, tr, targetphase, nodes)
5014 5014 tr.close()
5015 5015 finally:
5016 5016 if tr is not None:
5017 5017 tr.release()
5018 5018 lock.release()
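# with the lock released, re-read phases from the unfiltered repo to count
# how many changesets actually changed phase and to spot nodes that stayed
# below the requested phase (moving them up requires --force)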
5019 5019 getphase = unfi._phasecache.phase
5020 5020 newdata = [getphase(unfi, r) for r in unfi]
5021 5021 changes = sum(newdata[r] != olddata[r] for r in unfi)
5022 5022 cl = unfi.changelog
5023 5023 rejected = [n for n in nodes
5024 5024 if newdata[cl.rev(n)] < targetphase]
5025 5025 if rejected:
5026 5026 ui.warn(_('cannot move %i changesets to a higher '
5027 5027 'phase, use --force\n') % len(rejected))
5028 5028 ret = 1
5029 5029 if changes:
5030 5030 msg = _('phase changed for %i changesets\n') % changes
5031 5031 if ret:
5032 5032 ui.status(msg)
5033 5033 else:
5034 5034 ui.note(msg)
5035 5035 else:
5036 5036 ui.warn(_('no phases changed\n'))
5037 5037 return ret
5038 5038
5039 5039 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
5040 5040 """Run after a changegroup has been added via pull/unbundle
5041 5041
5042 5042 This takes arguments below:
5043 5043
5044 5044 :modheads: change of heads by pull/unbundle
5045 5045 :optupdate: updating working directory is needed or not
5046 5046 :checkout: update destination revision (or None to default destination)
5047 5047 :brev: a name, which might be a bookmark to be activated after updating
5048 5048 """
5049 5049 if modheads == 0:
5050 5050 return
5051 5051 if optupdate:
5052 5052 try:
5053 5053 return hg.updatetotally(ui, repo, checkout, brev)
5054 5054 except error.UpdateAbort as inst:
5055 5055 msg = _("not updating: %s") % str(inst)
5056 5056 hint = inst.hint
5057 5057 raise error.UpdateAbort(msg, hint=hint)
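# when the pull/unbundle added heads, suggest 'hg heads'/'hg merge' depending
# on how many new heads landed on the current branch; otherwise suggest an
# update to get a working copy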
5058 5058 if modheads > 1:
5059 5059 currentbranchheads = len(repo.branchheads())
5060 5060 if currentbranchheads == modheads:
5061 5061 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5062 5062 elif currentbranchheads > 1:
5063 5063 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5064 5064 "merge)\n"))
5065 5065 else:
5066 5066 ui.status(_("(run 'hg heads' to see heads)\n"))
5067 5067 else:
5068 5068 ui.status(_("(run 'hg update' to get a working copy)\n"))
5069 5069
5070 5070 @command('^pull',
5071 5071 [('u', 'update', None,
5072 5072 _('update to new branch head if changesets were pulled')),
5073 5073 ('f', 'force', None, _('run even when remote repository is unrelated')),
5074 5074 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5075 5075 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5076 5076 ('b', 'branch', [], _('a specific branch you would like to pull'),
5077 5077 _('BRANCH')),
5078 5078 ] + remoteopts,
5079 5079 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5080 5080 def pull(ui, repo, source="default", **opts):
5081 5081 """pull changes from the specified source
5082 5082
5083 5083 Pull changes from a remote repository to a local one.
5084 5084
5085 5085 This finds all changes from the repository at the specified path
5086 5086 or URL and adds them to a local repository (the current one unless
5087 5087 -R is specified). By default, this does not update the copy of the
5088 5088 project in the working directory.
5089 5089
5090 5090 Use :hg:`incoming` if you want to see what would have been added
5091 5091 by a pull at the time you issued this command. If you then decide
5092 5092 to add those changes to the repository, you should use :hg:`pull
5093 5093 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5094 5094
5095 5095 If SOURCE is omitted, the 'default' path will be used.
5096 5096 See :hg:`help urls` for more information.
5097 5097
5098 5098 Specifying a bookmark as ``.`` is equivalent to specifying the active
5099 5099 bookmark's name.
5100 5100
5101 5101 Returns 0 on success, 1 if an update had unresolved files.
5102 5102 """
5103 5103 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5104 5104 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5105 5105 other = hg.peer(repo, opts, source)
5106 5106 try:
5107 5107 revs, checkout = hg.addbranchrevs(repo, other, branches,
5108 5108 opts.get('rev'))
5109 5109
5111 5111 pullopargs = {}
5112 5112 if opts.get('bookmark'):
5113 5113 if not revs:
5114 5114 revs = []
5115 5115 # The list of bookmarks used here is not the one used to actually
5116 5116 # update the bookmark name. This can result in the revision pulled
5117 5117 # not ending up with the name of the bookmark because of a race
5118 5118 # condition on the server. (See issue 4689 for details)
5119 5119 remotebookmarks = other.listkeys('bookmarks')
5120 5120 pullopargs['remotebookmarks'] = remotebookmarks
5121 5121 for b in opts['bookmark']:
5122 5122 b = repo._bookmarks.expandname(b)
5123 5123 if b not in remotebookmarks:
5124 5124 raise error.Abort(_('remote bookmark %s not found!') % b)
5125 5125 revs.append(remotebookmarks[b])
5126 5126
5127 5127 if revs:
5128 5128 try:
5129 5129 # When 'rev' is a bookmark name, we cannot guarantee that it
5130 5130 # will be updated with that name because of a race condition
5131 5131 # server side. (See issue 4689 for details)
5132 5132 oldrevs = revs
5133 5133 revs = [] # actually, nodes
5134 5134 for r in oldrevs:
5135 5135 node = other.lookup(r)
5136 5136 revs.append(node)
5137 5137 if r == checkout:
5138 5138 checkout = node
5139 5139 except error.CapabilityError:
5140 5140 err = _("other repository doesn't support revision lookup, "
5141 5141 "so a rev cannot be specified.")
5142 5142 raise error.Abort(err)
5143 5143
5144 5144 pullopargs.update(opts.get('opargs', {}))
5145 5145 modheads = exchange.pull(repo, other, heads=revs,
5146 5146 force=opts.get('force'),
5147 5147 bookmarks=opts.get('bookmark', ()),
5148 5148 opargs=pullopargs).cgresult
5149 5149
5150 5150 # brev is a name, which might be a bookmark to be activated at
5151 5151 # the end of the update. In other words, it is an explicit
5152 5152 # destination of the update
5153 5153 brev = None
5154 5154
5155 5155 if checkout:
5156 5156 checkout = str(repo.changelog.rev(checkout))
5157 5157
5158 5158 # order below depends on implementation of
5159 5159 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5160 5160 # because 'checkout' is determined without it.
5161 5161 if opts.get('rev'):
5162 5162 brev = opts['rev'][0]
5163 5163 elif opts.get('branch'):
5164 5164 brev = opts['branch'][0]
5165 5165 else:
5166 5166 brev = branches[0]
5167 5167 repo._subtoppath = source
5168 5168 try:
5169 5169 ret = postincoming(ui, repo, modheads, opts.get('update'),
5170 5170 checkout, brev)
5171 5171
5172 5172 finally:
5173 5173 del repo._subtoppath
5174 5174
5175 5175 finally:
5176 5176 other.close()
5177 5177 return ret
5178 5178
5179 5179 @command('^push',
5180 5180 [('f', 'force', None, _('force push')),
5181 5181 ('r', 'rev', [],
5182 5182 _('a changeset intended to be included in the destination'),
5183 5183 _('REV')),
5184 5184 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5185 5185 ('b', 'branch', [],
5186 5186 _('a specific branch you would like to push'), _('BRANCH')),
5187 5187 ('', 'new-branch', False, _('allow pushing a new branch')),
5188 5188 ] + remoteopts,
5189 5189 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5190 5190 def push(ui, repo, dest=None, **opts):
5191 5191 """push changes to the specified destination
5192 5192
5193 5193 Push changesets from the local repository to the specified
5194 5194 destination.
5195 5195
5196 5196 This operation is symmetrical to pull: it is identical to a pull
5197 5197 in the destination repository from the current one.
5198 5198
5199 5199 By default, push will not allow creation of new heads at the
5200 5200 destination, since multiple heads would make it unclear which head
5201 5201 to use. In this situation, it is recommended to pull and merge
5202 5202 before pushing.
5203 5203
5204 5204 Use --new-branch if you want to allow push to create a new named
5205 5205 branch that is not present at the destination. This allows you to
5206 5206 only create a new branch without forcing other changes.
5207 5207
5208 5208 .. note::
5209 5209
5210 5210 Extra care should be taken with the -f/--force option,
5211 5211 which will push all new heads on all branches, an action which will
5212 5212 almost always cause confusion for collaborators.
5213 5213
5214 5214 If -r/--rev is used, the specified revision and all its ancestors
5215 5215 will be pushed to the remote repository.
5216 5216
5217 5217 If -B/--bookmark is used, the specified bookmarked revision, its
5218 5218 ancestors, and the bookmark will be pushed to the remote
5219 5219 repository. Specifying ``.`` is equivalent to specifying the active
5220 5220 bookmark's name.
5221 5221
5222 5222 Please see :hg:`help urls` for important details about ``ssh://``
5223 5223 URLs. If DESTINATION is omitted, a default path will be used.
5224 5224
5225 5225 Returns 0 if push was successful, 1 if nothing to push.
5226 5226 """
5227 5227
5228 5228 if opts.get('bookmark'):
5229 5229 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5230 5230 for b in opts['bookmark']:
5231 5231 # translate -B options to -r so changesets get pushed
5232 5232 b = repo._bookmarks.expandname(b)
5233 5233 if b in repo._bookmarks:
5234 5234 opts.setdefault('rev', []).append(b)
5235 5235 else:
5236 5236 # if we try to push a deleted bookmark, translate it to null
5237 5237 # this lets simultaneous -r, -b options continue working
5238 5238 opts.setdefault('rev', []).append("null")
5239 5239
5240 5240 path = ui.paths.getpath(dest, default=('default-push', 'default'))
5241 5241 if not path:
5242 5242 raise error.Abort(_('default repository not configured!'),
5243 5243 hint=_("see 'hg help config.paths'"))
5244 5244 dest = path.pushloc or path.loc
5245 5245 branches = (path.branch, opts.get('branch') or [])
5246 5246 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5247 5247 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5248 5248 other = hg.peer(repo, opts, dest)
5249 5249
5250 5250 if revs:
5251 5251 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5252 5252 if not revs:
5253 5253 raise error.Abort(_("specified revisions evaluate to an empty set"),
5254 5254 hint=_("use different revision arguments"))
5255 5255 elif path.pushrev:
5256 5256 # It doesn't make any sense to specify ancestor revisions. So limit
5257 5257 # to DAG heads to make discovery simpler.
5258 5258 expr = revset.formatspec('heads(%r)', path.pushrev)
5259 5259 revs = scmutil.revrange(repo, [expr])
5260 5260 revs = [repo[rev].node() for rev in revs]
5261 5261 if not revs:
5262 5262 raise error.Abort(_('default push revset for path evaluates to an '
5263 5263 'empty set'))
5264 5264
5265 5265 repo._subtoppath = dest
5266 5266 try:
5267 5267 # push subrepos depth-first for coherent ordering
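# a result of 0 from a subrepository push is treated as failure here:
# the outer push stops and the command reports failure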
5268 5268 c = repo['']
5269 5269 subs = c.substate # only repos that are committed
5270 5270 for s in sorted(subs):
5271 5271 result = c.sub(s).push(opts)
5272 5272 if result == 0:
5273 5273 return not result
5274 5274 finally:
5275 5275 del repo._subtoppath
5276 5276 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5277 5277 newbranch=opts.get('new_branch'),
5278 5278 bookmarks=opts.get('bookmark', ()),
5279 5279 opargs=opts.get('opargs'))
5280 5280
5281 5281 result = not pushop.cgresult
5282 5282
5283 5283 if pushop.bkresult is not None:
5284 5284 if pushop.bkresult == 2:
5285 5285 result = 2
5286 5286 elif not result and pushop.bkresult:
5287 5287 result = 2
5288 5288
5289 5289 return result
5290 5290
5291 5291 @command('recover', [])
5292 5292 def recover(ui, repo):
5293 5293 """roll back an interrupted transaction
5294 5294
5295 5295 Recover from an interrupted commit or pull.
5296 5296
5297 5297 This command tries to fix the repository status after an
5298 5298 interrupted operation. It should only be necessary when Mercurial
5299 5299 suggests it.
5300 5300
5301 5301 Returns 0 if successful, 1 if nothing to recover or verify fails.
5302 5302 """
5303 5303 if repo.recover():
5304 5304 return hg.verify(repo)
5305 5305 return 1
5306 5306
5307 5307 @command('^remove|rm',
5308 5308 [('A', 'after', None, _('record delete for missing files')),
5309 5309 ('f', 'force', None,
5310 5310 _('forget added files, delete modified files')),
5311 5311 ] + subrepoopts + walkopts,
5312 5312 _('[OPTION]... FILE...'),
5313 5313 inferrepo=True)
5314 5314 def remove(ui, repo, *pats, **opts):
5315 5315 """remove the specified files on the next commit
5316 5316
5317 5317 Schedule the indicated files for removal from the current branch.
5318 5318
5319 5319 This command schedules the files to be removed at the next commit.
5320 5320 To undo a remove before that, see :hg:`revert`. To undo added
5321 5321 files, see :hg:`forget`.
5322 5322
5323 5323 .. container:: verbose
5324 5324
5325 5325 -A/--after can be used to remove only files that have already
5326 5326 been deleted, -f/--force can be used to force deletion, and -Af
5327 5327 can be used to remove files from the next revision without
5328 5328 deleting them from the working directory.
5329 5329
5330 5330 The following table details the behavior of remove for different
5331 5331 file states (columns) and option combinations (rows). The file
5332 5332 states are Added [A], Clean [C], Modified [M] and Missing [!]
5333 5333 (as reported by :hg:`status`). The actions are Warn, Remove
5334 5334 (from branch) and Delete (from disk):
5335 5335
5336 5336 ========= == == == ==
5337 5337 opt/state A C M !
5338 5338 ========= == == == ==
5339 5339 none W RD W R
5340 5340 -f R RD RD R
5341 5341 -A W W W R
5342 5342 -Af R R R R
5343 5343 ========= == == == ==
5344 5344
5345 5345 .. note::
5346 5346
5347 5347 :hg:`remove` never deletes files in Added [A] state from the
5348 5348 working directory, not even if ``--force`` is specified.
5349 5349
5350 5350 Returns 0 on success, 1 if any warnings encountered.
5351 5351 """
5352 5352
5353 5353 after, force = opts.get('after'), opts.get('force')
5354 5354 if not pats and not after:
5355 5355 raise error.Abort(_('no files specified'))
5356 5356
5357 5357 m = scmutil.match(repo[None], pats, opts)
5358 5358 subrepos = opts.get('subrepos')
5359 5359 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
5360 5360
5361 5361 @command('rename|move|mv',
5362 5362 [('A', 'after', None, _('record a rename that has already occurred')),
5363 5363 ('f', 'force', None, _('forcibly copy over an existing managed file')),
5364 5364 ] + walkopts + dryrunopts,
5365 5365 _('[OPTION]... SOURCE... DEST'))
5366 5366 def rename(ui, repo, *pats, **opts):
5367 5367 """rename files; equivalent of copy + remove
5368 5368
5369 5369 Mark dest as copies of sources; mark sources for deletion. If dest
5370 5370 is a directory, copies are put in that directory. If dest is a
5371 5371 file, there can only be one source.
5372 5372
5373 5373 By default, this command copies the contents of files as they
5374 5374 exist in the working directory. If invoked with -A/--after, the
5375 5375 operation is recorded, but no copying is performed.
5376 5376
5377 5377 This command takes effect at the next commit. To undo a rename
5378 5378 before that, see :hg:`revert`.
5379 5379
5380 5380 Returns 0 on success, 1 if errors are encountered.
5381 5381 """
5382 5382 with repo.wlock(False):
5383 5383 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5384 5384
5385 5385 @command('resolve',
5386 5386 [('a', 'all', None, _('select all unresolved files')),
5387 5387 ('l', 'list', None, _('list state of files needing merge')),
5388 5388 ('m', 'mark', None, _('mark files as resolved')),
5389 5389 ('u', 'unmark', None, _('mark files as unresolved')),
5390 5390 ('n', 'no-status', None, _('hide status prefix'))]
5391 5391 + mergetoolopts + walkopts + formatteropts,
5392 5392 _('[OPTION]... [FILE]...'),
5393 5393 inferrepo=True)
5394 5394 def resolve(ui, repo, *pats, **opts):
5395 5395 """redo merges or set/view the merge status of files
5396 5396
5397 5397 Merges with unresolved conflicts are often the result of
5398 5398 non-interactive merging using the ``internal:merge`` configuration
5399 5399 setting, or a command-line merge tool like ``diff3``. The resolve
5400 5400 command is used to manage the files involved in a merge, after
5401 5401 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5402 5402 working directory must have two parents). See :hg:`help
5403 5403 merge-tools` for information on configuring merge tools.
5404 5404
5405 5405 The resolve command can be used in the following ways:
5406 5406
5407 5407 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
5408 5408 files, discarding any previous merge attempts. Re-merging is not
5409 5409 performed for files already marked as resolved. Use ``--all/-a``
5410 5410 to select all unresolved files. ``--tool`` can be used to specify
5411 5411 the merge tool used for the given files. It overrides the HGMERGE
5412 5412 environment variable and your configuration files. Previous file
5413 5413 contents are saved with a ``.orig`` suffix.
5414 5414
5415 5415 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5416 5416 (e.g. after having manually fixed up the files). The default is
5417 5417 to mark all unresolved files.
5418 5418
5419 5419 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5420 5420 default is to mark all resolved files.
5421 5421
5422 5422 - :hg:`resolve -l`: list files which had or still have conflicts.
5423 5423 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5424 5424
5425 5425 .. note::
5426 5426
5427 5427 Mercurial will not let you commit files with unresolved merge
5428 5428 conflicts. You must use :hg:`resolve -m ...` before you can
5429 5429 commit after a conflicting merge.
5430 5430
5431 5431 Returns 0 on success, 1 if any files fail a resolve attempt.
5432 5432 """
5433 5433
5434 5434 flaglist = 'all mark unmark list no_status'.split()
5435 5435 all, mark, unmark, show, nostatus = \
5436 5436 [opts.get(o) for o in flaglist]
5437 5437
5438 5438 if (show and (mark or unmark)) or (mark and unmark):
5439 5439 raise error.Abort(_("too many options specified"))
5440 5440 if pats and all:
5441 5441 raise error.Abort(_("can't specify --all and patterns"))
5442 5442 if not (all or pats or show or mark or unmark):
5443 5443 raise error.Abort(_('no files or directories specified'),
5444 5444 hint=('use --all to re-merge all unresolved files'))
5445 5445
5446 5446 if show:
5447 5447 fm = ui.formatter('resolve', opts)
5448 5448 ms = mergemod.mergestate.read(repo)
5449 5449 m = scmutil.match(repo[None], pats, opts)
5450 5450 for f in ms:
5451 5451 if not m(f):
5452 5452 continue
5453 5453 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
5454 5454 'd': 'driverresolved'}[ms[f]]
5455 5455 fm.startitem()
5456 5456 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
5457 5457 fm.write('path', '%s\n', f, label=l)
5458 5458 fm.end()
5459 5459 return 0
5460 5460
5461 5461 with repo.wlock():
5462 5462 ms = mergemod.mergestate.read(repo)
5463 5463
5464 5464 if not (ms.active() or repo.dirstate.p2() != nullid):
5465 5465 raise error.Abort(
5466 5466 _('resolve command not applicable when not merging'))
5467 5467
5468 5468 wctx = repo[None]
5469 5469
5470 5470 if ms.mergedriver and ms.mdstate() == 'u':
5471 5471 proceed = mergemod.driverpreprocess(repo, ms, wctx)
5472 5472 ms.commit()
5473 5473 # allow mark and unmark to go through
5474 5474 if not mark and not unmark and not proceed:
5475 5475 return 1
5476 5476
5477 5477 m = scmutil.match(wctx, pats, opts)
5478 5478 ret = 0
5479 5479 didwork = False
5480 5480 runconclude = False
5481 5481
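# first pass: preresolve every selected file; merges that cannot be
# completed there are queued in tocomplete and finished by the second
# pass (ms.resolve) further down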
5482 5482 tocomplete = []
5483 5483 for f in ms:
5484 5484 if not m(f):
5485 5485 continue
5486 5486
5487 5487 didwork = True
5488 5488
5489 5489 # don't let driver-resolved files be marked, and run the conclude
5490 5490 # step if asked to resolve
5491 5491 if ms[f] == "d":
5492 5492 exact = m.exact(f)
5493 5493 if mark:
5494 5494 if exact:
5495 5495 ui.warn(_('not marking %s as it is driver-resolved\n')
5496 5496 % f)
5497 5497 elif unmark:
5498 5498 if exact:
5499 5499 ui.warn(_('not unmarking %s as it is driver-resolved\n')
5500 5500 % f)
5501 5501 else:
5502 5502 runconclude = True
5503 5503 continue
5504 5504
5505 5505 if mark:
5506 5506 ms.mark(f, "r")
5507 5507 elif unmark:
5508 5508 ms.mark(f, "u")
5509 5509 else:
5510 5510 # backup pre-resolve (merge uses .orig for its own purposes)
5511 5511 a = repo.wjoin(f)
5512 5512 try:
5513 5513 util.copyfile(a, a + ".resolve")
5514 5514 except (IOError, OSError) as inst:
5515 5515 if inst.errno != errno.ENOENT:
5516 5516 raise
5517 5517
5518 5518 try:
5519 5519 # preresolve file
5520 5520 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5521 5521 'resolve')
5522 5522 complete, r = ms.preresolve(f, wctx)
5523 5523 if not complete:
5524 5524 tocomplete.append(f)
5525 5525 elif r:
5526 5526 ret = 1
5527 5527 finally:
5528 5528 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5529 5529 ms.commit()
5530 5530
5531 5531 # replace filemerge's .orig file with our resolve file, but only
5532 5532 # for merges that are complete
5533 5533 if complete:
5534 5534 try:
5535 5535 util.rename(a + ".resolve",
5536 5536 scmutil.origpath(ui, repo, a))
5537 5537 except OSError as inst:
5538 5538 if inst.errno != errno.ENOENT:
5539 5539 raise
5540 5540
5541 5541 for f in tocomplete:
5542 5542 try:
5543 5543 # resolve file
5544 5544 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5545 5545 'resolve')
5546 5546 r = ms.resolve(f, wctx)
5547 5547 if r:
5548 5548 ret = 1
5549 5549 finally:
5550 5550 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5551 5551 ms.commit()
5552 5552
5553 5553 # replace filemerge's .orig file with our resolve file
5554 5554 a = repo.wjoin(f)
5555 5555 try:
5556 5556 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
5557 5557 except OSError as inst:
5558 5558 if inst.errno != errno.ENOENT:
5559 5559 raise
5560 5560
5561 5561 ms.commit()
5562 5562 ms.recordactions()
5563 5563
5564 5564 if not didwork and pats:
5565 5565 hint = None
5566 5566 if not any([p for p in pats if p.find(':') >= 0]):
5567 5567 pats = ['path:%s' % p for p in pats]
5568 5568 m = scmutil.match(wctx, pats, opts)
5569 5569 for f in ms:
5570 5570 if not m(f):
5571 5571 continue
5572 5572 flags = ''.join(['-%s ' % o[0] for o in flaglist
5573 5573 if opts.get(o)])
5574 5574 hint = _("(try: hg resolve %s%s)\n") % (
5575 5575 flags,
5576 5576 ' '.join(pats))
5577 5577 break
5578 5578 ui.warn(_("arguments do not match paths that need resolving\n"))
5579 5579 if hint:
5580 5580 ui.warn(hint)
5581 5581 elif ms.mergedriver and ms.mdstate() != 's':
5582 5582 # run conclude step when either a driver-resolved file is requested
5583 5583 # or there are no driver-resolved files
5584 5584 # we can't use 'ret' to determine whether any files are unresolved
5585 5585 # because we might not have tried to resolve some
5586 5586 if ((runconclude or not list(ms.driverresolved()))
5587 5587 and not list(ms.unresolved())):
5588 5588 proceed = mergemod.driverconclude(repo, ms, wctx)
5589 5589 ms.commit()
5590 5590 if not proceed:
5591 5591 return 1
5592 5592
5593 5593 # Nudge users into finishing an unfinished operation
5594 5594 unresolvedf = list(ms.unresolved())
5595 5595 driverresolvedf = list(ms.driverresolved())
5596 5596 if not unresolvedf and not driverresolvedf:
5597 5597 ui.status(_('(no more unresolved files)\n'))
5598 5598 cmdutil.checkafterresolved(repo)
5599 5599 elif not unresolvedf:
5600 5600 ui.status(_('(no more unresolved files -- '
5601 5601 'run "hg resolve --all" to conclude)\n'))
5602 5602
5603 5603 return ret
5604 5604
5605 5605 @command('revert',
5606 5606 [('a', 'all', None, _('revert all changes when no arguments given')),
5607 5607 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5608 5608 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5609 5609 ('C', 'no-backup', None, _('do not save backup copies of files')),
5610 5610 ('i', 'interactive', None,
5611 5611 _('interactively select the changes (EXPERIMENTAL)')),
5612 5612 ] + walkopts + dryrunopts,
5613 5613 _('[OPTION]... [-r REV] [NAME]...'))
5614 5614 def revert(ui, repo, *pats, **opts):
5615 5615 """restore files to their checkout state
5616 5616
5617 5617 .. note::
5618 5618
5619 5619 To check out earlier revisions, you should use :hg:`update REV`.
5620 5620 To cancel an uncommitted merge (and lose your changes),
5621 5621 use :hg:`update --clean .`.
5622 5622
5623 5623 With no revision specified, revert the specified files or directories
5624 5624 to the contents they had in the parent of the working directory.
5625 5625 This restores the contents of files to an unmodified
5626 5626 state and unschedules adds, removes, copies, and renames. If the
5627 5627 working directory has two parents, you must explicitly specify a
5628 5628 revision.
5629 5629
5630 5630 Using the -r/--rev or -d/--date options, revert the given files or
5631 5631 directories to their states as of a specific revision. Because
5632 5632 revert does not change the working directory parents, this will
5633 5633 cause these files to appear modified. This can be helpful to "back
5634 5634 out" some or all of an earlier change. See :hg:`backout` for a
5635 5635 related method.
5636 5636
5637 5637 Modified files are saved with a .orig suffix before reverting.
5638 5638 To disable these backups, use --no-backup. It is possible to store
5639 5639 the backup files in a custom directory relative to the root of the
5640 5640 repository by setting the ``ui.origbackuppath`` configuration
5641 5641 option.
5642 5642
5643 5643 See :hg:`help dates` for a list of formats valid for -d/--date.
5644 5644
5645 5645 See :hg:`help backout` for a way to reverse the effect of an
5646 5646 earlier changeset.
5647 5647
5648 5648 Returns 0 on success.
5649 5649 """
5650 5650
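# --date is translated to the tipmost revision matching that date; it
# cannot be combined with an explicit --rev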
5651 5651 if opts.get("date"):
5652 5652 if opts.get("rev"):
5653 5653 raise error.Abort(_("you can't specify a revision and a date"))
5654 5654 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5655 5655
5656 5656 parent, p2 = repo.dirstate.parents()
5657 5657 if not opts.get('rev') and p2 != nullid:
5658 5658 # revert after merge is a trap for new users (issue2915)
5659 5659 raise error.Abort(_('uncommitted merge with no revision specified'),
5660 5660 hint=_("use 'hg update' or see 'hg help revert'"))
5661 5661
5662 5662 ctx = scmutil.revsingle(repo, opts.get('rev'))
5663 5663
5664 5664 if (not (pats or opts.get('include') or opts.get('exclude') or
5665 5665 opts.get('all') or opts.get('interactive'))):
5666 5666 msg = _("no files or directories specified")
5667 5667 if p2 != nullid:
5668 5668 hint = _("uncommitted merge, use --all to discard all changes,"
5669 5669 " or 'hg update -C .' to abort the merge")
5670 5670 raise error.Abort(msg, hint=hint)
5671 5671 dirty = any(repo.status())
5672 5672 node = ctx.node()
5673 5673 if node != parent:
5674 5674 if dirty:
5675 5675 hint = _("uncommitted changes, use --all to discard all"
5676 5676 " changes, or 'hg update %s' to update") % ctx.rev()
5677 5677 else:
5678 5678 hint = _("use --all to revert all files,"
5679 5679 " or 'hg update %s' to update") % ctx.rev()
5680 5680 elif dirty:
5681 5681 hint = _("uncommitted changes, use --all to discard all changes")
5682 5682 else:
5683 5683 hint = _("use --all to revert all files")
5684 5684 raise error.Abort(msg, hint=hint)
5685 5685
5686 5686 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5687 5687
5688 5688 @command('rollback', dryrunopts +
5689 5689 [('f', 'force', False, _('ignore safety measures'))])
5690 5690 def rollback(ui, repo, **opts):
5691 5691 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5692 5692
5693 5693 Please use :hg:`commit --amend` instead of rollback to correct
5694 5694 mistakes in the last commit.
5695 5695
5696 5696 This command should be used with care. There is only one level of
5697 5697 rollback, and there is no way to undo a rollback. It will also
5698 5698 restore the dirstate at the time of the last transaction, losing
5699 5699 any dirstate changes since that time. This command does not alter
5700 5700 the working directory.
5701 5701
5702 5702 Transactions are used to encapsulate the effects of all commands
5703 5703 that create new changesets or propagate existing changesets into a
5704 5704 repository.
5705 5705
5706 5706 .. container:: verbose
5707 5707
5708 5708 For example, the following commands are transactional, and their
5709 5709 effects can be rolled back:
5710 5710
5711 5711 - commit
5712 5712 - import
5713 5713 - pull
5714 5714 - push (with this repository as the destination)
5715 5715 - unbundle
5716 5716
5717 5717 To avoid permanent data loss, rollback will refuse to roll back a
5718 5718 commit transaction if it isn't checked out. Use --force to
5719 5719 override this protection.
5720 5720
5721 5721 The rollback command can be entirely disabled by setting the
5722 5722 ``ui.rollback`` configuration setting to false. If you're here
5723 5723 because you want to use rollback and it's disabled, you can
5724 5724 re-enable the command by setting ``ui.rollback`` to true.
5725 5725
5726 5726 This command is not intended for use on public repositories. Once
5727 5727 changes are visible for pull by other users, rolling a transaction
5728 5728 back locally is ineffective (someone else may already have pulled
5729 5729 the changes). Furthermore, a race is possible with readers of the
5730 5730 repository; for example, an in-progress pull from the repository
5731 5731 may fail if a rollback is performed.
5732 5732
5733 5733 Returns 0 on success, 1 if no rollback data is available.
5734 5734 """
5735 5735 if not ui.configbool('ui', 'rollback', True):
5736 5736 raise error.Abort(_('rollback is disabled because it is unsafe'),
5737 5737 hint=('see `hg help -v rollback` for information'))
5738 5738 return repo.rollback(dryrun=opts.get('dry_run'),
5739 5739 force=opts.get('force'))
5740 5740
5741 5741 @command('root', [])
5742 5742 def root(ui, repo):
5743 5743 """print the root (top) of the current working directory
5744 5744
5745 5745 Print the root directory of the current repository.
5746 5746
5747 5747 Returns 0 on success.
5748 5748 """
5749 5749 ui.write(repo.root + "\n")
5750 5750
5751 5751 @command('^serve',
5752 5752 [('A', 'accesslog', '', _('name of access log file to write to'),
5753 5753 _('FILE')),
5754 5754 ('d', 'daemon', None, _('run server in background')),
5755 5755 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
5756 5756 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5757 5757 # use string type, then we can check if something was passed
5758 5758 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5759 5759 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5760 5760 _('ADDR')),
5761 5761 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5762 5762 _('PREFIX')),
5763 5763 ('n', 'name', '',
5764 5764 _('name to show in web pages (default: working directory)'), _('NAME')),
5765 5765 ('', 'web-conf', '',
5766 5766 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
5767 5767 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5768 5768 _('FILE')),
5769 5769 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5770 5770 ('', 'stdio', None, _('for remote clients')),
5771 5771 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5772 5772 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5773 5773 ('', 'style', '', _('template style to use'), _('STYLE')),
5774 5774 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5775 5775 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5776 5776 _('[OPTION]...'),
5777 5777 optionalrepo=True)
5778 5778 def serve(ui, repo, **opts):
5779 5779 """start stand-alone webserver
5780 5780
5781 5781 Start a local HTTP repository browser and pull server. You can use
5782 5782 this for ad-hoc sharing and browsing of repositories. It is
5783 5783 recommended to use a real web server to serve a repository for
5784 5784 longer periods of time.
5785 5785
5786 5786 Please note that the server does not implement access control.
5787 5787 This means that, by default, anybody can read from the server and
5788 5788 nobody can write to it. Set the ``web.allow_push``
5789 5789 option to ``*`` to allow everybody to push to the server. You
5790 5790 should use a real web server if you need to authenticate users.
5791 5791
5792 5792 By default, the server logs accesses to stdout and errors to
5793 5793 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5794 5794 files.
5795 5795
5796 5796 To have the server choose a free port number to listen on, specify
5797 5797 a port number of 0; in this case, the server will print the port
5798 5798 number it uses.
5799 5799
5800 5800 Returns 0 on success.
5801 5801 """
5802 5802
5803 5803 if opts["stdio"] and opts["cmdserver"]:
5804 5804 raise error.Abort(_("cannot use --stdio with --cmdserver"))
5805 5805
5806 5806 if opts["stdio"]:
5807 5807 if repo is None:
5808 5808 raise error.RepoError(_("there is no Mercurial repository here"
5809 5809 " (.hg not found)"))
5810 5810 s = sshserver.sshserver(ui, repo)
5811 5811 s.serve_forever()
5812 5812
5813 5813 service = server.createservice(ui, repo, opts)
5814 5814 return server.runservice(opts, initfn=service.init, runfn=service.run)
5815 5815
5816 5816 @command('^status|st',
5817 5817 [('A', 'all', None, _('show status of all files')),
5818 5818 ('m', 'modified', None, _('show only modified files')),
5819 5819 ('a', 'added', None, _('show only added files')),
5820 5820 ('r', 'removed', None, _('show only removed files')),
5821 5821 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5822 5822 ('c', 'clean', None, _('show only files without changes')),
5823 5823 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5824 5824 ('i', 'ignored', None, _('show only ignored files')),
5825 5825 ('n', 'no-status', None, _('hide status prefix')),
5826 5826 ('C', 'copies', None, _('show source of copied files')),
5827 5827 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5828 5828 ('', 'rev', [], _('show difference from revision'), _('REV')),
5829 5829 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5830 5830 ] + walkopts + subrepoopts + formatteropts,
5831 5831 _('[OPTION]... [FILE]...'),
5832 5832 inferrepo=True)
5833 5833 def status(ui, repo, *pats, **opts):
5834 5834 """show changed files in the working directory
5835 5835
5836 5836 Show status of files in the repository. If names are given, only
5837 5837 files that match are shown. Files that are clean, ignored, or
5838 5838 the source of a copy/move operation are not listed unless
5839 5839 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5840 5840 Unless options described with "show only ..." are given, the
5841 5841 options -mardu are used.
5842 5842
5843 5843 Option -q/--quiet hides untracked (unknown and ignored) files
5844 5844 unless explicitly requested with -u/--unknown or -i/--ignored.
5845 5845
5846 5846 .. note::
5847 5847
5848 5848 :hg:`status` may appear to disagree with diff if permissions have
5849 5849 changed or a merge has occurred. The standard diff format does
5850 5850 not report permission changes and diff only reports changes
5851 5851 relative to one merge parent.
5852 5852
5853 5853 If one revision is given, it is used as the base revision.
5854 5854 If two revisions are given, the differences between them are
5855 5855 shown. The --change option can also be used as a shortcut to list
5856 5856 the changed files of a revision from its first parent.
5857 5857
5858 5858 The codes used to show the status of files are::
5859 5859
5860 5860 M = modified
5861 5861 A = added
5862 5862 R = removed
5863 5863 C = clean
5864 5864 ! = missing (deleted by non-hg command, but still tracked)
5865 5865 ? = not tracked
5866 5866 I = ignored
5867 5867 = origin of the previous file (with --copies)
5868 5868
5869 5869 .. container:: verbose
5870 5870
5871 5871 Examples:
5872 5872
5873 5873 - show changes in the working directory relative to a
5874 5874 changeset::
5875 5875
5876 5876 hg status --rev 9353
5877 5877
5878 5878 - show changes in the working directory relative to the
5879 5879 current directory (see :hg:`help patterns` for more information)::
5880 5880
5881 5881 hg status re:
5882 5882
5883 5883 - show all changes including copies in an existing changeset::
5884 5884
5885 5885 hg status --copies --change 9353
5886 5886
5887 5887 - get a NUL separated list of added files, suitable for xargs::
5888 5888
5889 5889 hg status -an0
5890 5890
5891 5891 Returns 0 on success.
5892 5892 """
5893 5893
5894 5894 revs = opts.get('rev')
5895 5895 change = opts.get('change')
5896 5896
5897 5897 if revs and change:
5898 5898 msg = _('cannot specify --rev and --change at the same time')
5899 5899 raise error.Abort(msg)
5900 5900 elif change:
5901 5901 node2 = scmutil.revsingle(repo, change, None).node()
5902 5902 node1 = repo[node2].p1().node()
5903 5903 else:
5904 5904 node1, node2 = scmutil.revpair(repo, revs)
5905 5905
5906 5906 if pats:
5907 5907 cwd = repo.getcwd()
5908 5908 else:
5909 5909 cwd = ''
5910 5910
5911 5911 if opts.get('print0'):
5912 5912 end = '\0'
5913 5913 else:
5914 5914 end = '\n'
5915 5915 copy = {}
5916 5916 states = 'modified added removed deleted unknown ignored clean'.split()
5917 5917 show = [k for k in states if opts.get(k)]
5918 5918 if opts.get('all'):
5919 5919 show += ui.quiet and (states[:4] + ['clean']) or states
5920 5920 if not show:
5921 5921 if ui.quiet:
5922 5922 show = states[:4]
5923 5923 else:
5924 5924 show = states[:5]
5925 5925
5926 5926 m = scmutil.match(repo[node2], pats, opts)
5927 5927 stat = repo.status(node1, node2, m,
5928 5928 'ignored' in show, 'clean' in show, 'unknown' in show,
5929 5929 opts.get('subrepos'))
5930 5930 changestates = zip(states, 'MAR!?IC', stat)
5931 5931
5932 5932 if (opts.get('all') or opts.get('copies')
5933 5933 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
5934 5934 copy = copies.pathcopies(repo[node1], repo[node2], m)
5935 5935
5936 5936 fm = ui.formatter('status', opts)
5937 5937 fmt = '%s' + end
5938 5938 showchar = not opts.get('no_status')
5939 5939
5940 5940 for state, char, files in changestates:
5941 5941 if state in show:
5942 5942 label = 'status.' + state
5943 5943 for f in files:
5944 5944 fm.startitem()
5945 5945 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5946 5946 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5947 5947 if f in copy:
5948 5948 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5949 5949 label='status.copied')
5950 5950 fm.end()
5951 5951
5952 5952 @command('^summary|sum',
5953 5953 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5954 5954 def summary(ui, repo, **opts):
5955 5955 """summarize working directory state
5956 5956
5957 5957 This generates a brief summary of the working directory state,
5958 5958 including parents, branch, commit status, phase and available updates.
5959 5959
5960 5960 With the --remote option, this will check the default paths for
5961 5961 incoming and outgoing changes. This can be time-consuming.
5962 5962
5963 5963 Returns 0 on success.
5964 5964 """
5965 5965
5966 5966 ctx = repo[None]
5967 5967 parents = ctx.parents()
5968 5968 pnode = parents[0].node()
5969 5969 marks = []
5970 5970
5971 5971 ms = None
5972 5972 try:
5973 5973 ms = mergemod.mergestate.read(repo)
5974 5974 except error.UnsupportedMergeRecords as e:
5975 5975 s = ' '.join(e.recordtypes)
5976 5976 ui.warn(
5977 5977 _('warning: merge state has unsupported record types: %s\n') % s)
5978 5978 unresolved = 0
5979 5979 else:
5980 5980 unresolved = [f for f in ms if ms[f] == 'u']
5981 5981
5982 5982 for p in parents:
5983 5983 # label with log.changeset (instead of log.parent) since this
5984 5984 # shows a working directory parent *changeset*:
5985 5985 # i18n: column positioning for "hg summary"
5986 5986 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5987 5987 label='log.changeset changeset.%s' % p.phasestr())
5988 5988 ui.write(' '.join(p.tags()), label='log.tag')
5989 5989 if p.bookmarks():
5990 5990 marks.extend(p.bookmarks())
5991 5991 if p.rev() == -1:
5992 5992 if not len(repo):
5993 5993 ui.write(_(' (empty repository)'))
5994 5994 else:
5995 5995 ui.write(_(' (no revision checked out)'))
5996 5996 ui.write('\n')
5997 5997 if p.description():
5998 5998 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5999 5999 label='log.summary')
6000 6000
6001 6001 branch = ctx.branch()
6002 6002 bheads = repo.branchheads(branch)
6003 6003 # i18n: column positioning for "hg summary"
6004 6004 m = _('branch: %s\n') % branch
6005 6005 if branch != 'default':
6006 6006 ui.write(m, label='log.branch')
6007 6007 else:
6008 6008 ui.status(m, label='log.branch')
6009 6009
6010 6010 if marks:
6011 6011 active = repo._activebookmark
6012 6012 # i18n: column positioning for "hg summary"
6013 6013 ui.write(_('bookmarks:'), label='log.bookmark')
6014 6014 if active is not None:
6015 6015 if active in marks:
6016 6016 ui.write(' *' + active, label=activebookmarklabel)
6017 6017 marks.remove(active)
6018 6018 else:
6019 6019 ui.write(' [%s]' % active, label=activebookmarklabel)
6020 6020 for m in marks:
6021 6021 ui.write(' ' + m, label='log.bookmark')
6022 6022 ui.write('\n', label='log.bookmark')
6023 6023
6024 6024 status = repo.status(unknown=True)
6025 6025
6026 6026 c = repo.dirstate.copies()
6027 6027 copied, renamed = [], []
6028 6028 for d, s in c.iteritems():
6029 6029 if s in status.removed:
6030 6030 status.removed.remove(s)
6031 6031 renamed.append(d)
6032 6032 else:
6033 6033 copied.append(d)
6034 6034 if d in status.added:
6035 6035 status.added.remove(d)
6036 6036
6037 6037 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6038 6038
6039 6039 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6040 6040 (ui.label(_('%d added'), 'status.added'), status.added),
6041 6041 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6042 6042 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6043 6043 (ui.label(_('%d copied'), 'status.copied'), copied),
6044 6044 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6045 6045 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6046 6046 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6047 6047 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6048 6048 t = []
6049 6049 for l, s in labels:
6050 6050 if s:
6051 6051 t.append(l % len(s))
6052 6052
6053 6053 t = ', '.join(t)
6054 6054 cleanworkdir = False
6055 6055
6056 6056 if repo.vfs.exists('graftstate'):
6057 6057 t += _(' (graft in progress)')
6058 6058 if repo.vfs.exists('updatestate'):
6059 6059 t += _(' (interrupted update)')
6060 6060 elif len(parents) > 1:
6061 6061 t += _(' (merge)')
6062 6062 elif branch != parents[0].branch():
6063 6063 t += _(' (new branch)')
6064 6064 elif (parents[0].closesbranch() and
6065 6065 pnode in repo.branchheads(branch, closed=True)):
6066 6066 t += _(' (head closed)')
6067 6067 elif not (status.modified or status.added or status.removed or renamed or
6068 6068 copied or subs):
6069 6069 t += _(' (clean)')
6070 6070 cleanworkdir = True
6071 6071 elif pnode not in bheads:
6072 6072 t += _(' (new branch head)')
6073 6073
6074 6074 if parents:
6075 6075 pendingphase = max(p.phase() for p in parents)
6076 6076 else:
6077 6077 pendingphase = phases.public
6078 6078
6079 6079 if pendingphase > phases.newcommitphase(ui):
6080 6080 t += ' (%s)' % phases.phasenames[pendingphase]
6081 6081
6082 6082 if cleanworkdir:
6083 6083 # i18n: column positioning for "hg summary"
6084 6084 ui.status(_('commit: %s\n') % t.strip())
6085 6085 else:
6086 6086 # i18n: column positioning for "hg summary"
6087 6087 ui.write(_('commit: %s\n') % t.strip())
6088 6088
6089 6089 # all ancestors of branch heads - all ancestors of parent = new csets
6090 6090 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6091 6091 bheads))
6092 6092
6093 6093 if new == 0:
6094 6094 # i18n: column positioning for "hg summary"
6095 6095 ui.status(_('update: (current)\n'))
6096 6096 elif pnode not in bheads:
6097 6097 # i18n: column positioning for "hg summary"
6098 6098 ui.write(_('update: %d new changesets (update)\n') % new)
6099 6099 else:
6100 6100 # i18n: column positioning for "hg summary"
6101 6101 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6102 6102 (new, len(bheads)))
6103 6103
6104 6104 t = []
6105 6105 draft = len(repo.revs('draft()'))
6106 6106 if draft:
6107 6107 t.append(_('%d draft') % draft)
6108 6108 secret = len(repo.revs('secret()'))
6109 6109 if secret:
6110 6110 t.append(_('%d secret') % secret)
6111 6111
6112 6112 if draft or secret:
6113 6113 ui.status(_('phases: %s\n') % ', '.join(t))
6114 6114
6115 6115 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6116 6116 for trouble in ("unstable", "divergent", "bumped"):
6117 6117 numtrouble = len(repo.revs(trouble + "()"))
6118 6118 # We write all the possibilities to ease translation
6119 6119 troublemsg = {
6120 6120 "unstable": _("unstable: %d changesets"),
6121 6121 "divergent": _("divergent: %d changesets"),
6122 6122 "bumped": _("bumped: %d changesets"),
6123 6123 }
6124 6124 if numtrouble > 0:
6125 6125 ui.status(troublemsg[trouble] % numtrouble + "\n")
6126 6126
6127 6127 cmdutil.summaryhooks(ui, repo)
6128 6128
6129 6129 if opts.get('remote'):
6130 6130 needsincoming, needsoutgoing = True, True
6131 6131 else:
6132 6132 needsincoming, needsoutgoing = False, False
6133 6133 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6134 6134 if i:
6135 6135 needsincoming = True
6136 6136 if o:
6137 6137 needsoutgoing = True
6138 6138 if not needsincoming and not needsoutgoing:
6139 6139 return
6140 6140
6141 6141 def getincoming():
6142 6142 source, branches = hg.parseurl(ui.expandpath('default'))
6143 6143 sbranch = branches[0]
6144 6144 try:
6145 6145 other = hg.peer(repo, {}, source)
6146 6146 except error.RepoError:
6147 6147 if opts.get('remote'):
6148 6148 raise
6149 6149 return source, sbranch, None, None, None
6150 6150 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6151 6151 if revs:
6152 6152 revs = [other.lookup(rev) for rev in revs]
6153 6153 ui.debug('comparing with %s\n' % util.hidepassword(source))
6154 6154 repo.ui.pushbuffer()
6155 6155 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6156 6156 repo.ui.popbuffer()
6157 6157 return source, sbranch, other, commoninc, commoninc[1]
6158 6158
6159 6159 if needsincoming:
6160 6160 source, sbranch, sother, commoninc, incoming = getincoming()
6161 6161 else:
6162 6162 source = sbranch = sother = commoninc = incoming = None
6163 6163
6164 6164 def getoutgoing():
6165 6165 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6166 6166 dbranch = branches[0]
6167 6167 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6168 6168 if source != dest:
6169 6169 try:
6170 6170 dother = hg.peer(repo, {}, dest)
6171 6171 except error.RepoError:
6172 6172 if opts.get('remote'):
6173 6173 raise
6174 6174 return dest, dbranch, None, None
6175 6175 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6176 6176 elif sother is None:
6177 6177 # there is no explicit destination peer, but source one is invalid
6178 6178 return dest, dbranch, None, None
6179 6179 else:
6180 6180 dother = sother
6181 6181 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6182 6182 common = None
6183 6183 else:
6184 6184 common = commoninc
6185 6185 if revs:
6186 6186 revs = [repo.lookup(rev) for rev in revs]
6187 6187 repo.ui.pushbuffer()
6188 6188 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6189 6189 commoninc=common)
6190 6190 repo.ui.popbuffer()
6191 6191 return dest, dbranch, dother, outgoing
6192 6192
6193 6193 if needsoutgoing:
6194 6194 dest, dbranch, dother, outgoing = getoutgoing()
6195 6195 else:
6196 6196 dest = dbranch = dother = outgoing = None
6197 6197
6198 6198 if opts.get('remote'):
6199 6199 t = []
6200 6200 if incoming:
6201 6201 t.append(_('1 or more incoming'))
6202 6202 o = outgoing.missing
6203 6203 if o:
6204 6204 t.append(_('%d outgoing') % len(o))
6205 6205 other = dother or sother
6206 6206 if 'bookmarks' in other.listkeys('namespaces'):
6207 6207 counts = bookmarks.summary(repo, other)
6208 6208 if counts[0] > 0:
6209 6209 t.append(_('%d incoming bookmarks') % counts[0])
6210 6210 if counts[1] > 0:
6211 6211 t.append(_('%d outgoing bookmarks') % counts[1])
6212 6212
6213 6213 if t:
6214 6214 # i18n: column positioning for "hg summary"
6215 6215 ui.write(_('remote: %s\n') % (', '.join(t)))
6216 6216 else:
6217 6217 # i18n: column positioning for "hg summary"
6218 6218 ui.status(_('remote: (synced)\n'))
6219 6219
6220 6220 cmdutil.summaryremotehooks(ui, repo, opts,
6221 6221 ((source, sbranch, sother, commoninc),
6222 6222 (dest, dbranch, dother, outgoing)))
6223 6223
6224 6224 @command('tag',
6225 6225 [('f', 'force', None, _('force tag')),
6226 6226 ('l', 'local', None, _('make the tag local')),
6227 6227 ('r', 'rev', '', _('revision to tag'), _('REV')),
6228 6228 ('', 'remove', None, _('remove a tag')),
6229 6229 # -l/--local is already there, commitopts cannot be used
6230 6230 ('e', 'edit', None, _('invoke editor on commit messages')),
6231 6231 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6232 6232 ] + commitopts2,
6233 6233 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6234 6234 def tag(ui, repo, name1, *names, **opts):
6235 6235 """add one or more tags for the current or given revision
6236 6236
6237 6237 Name a particular revision using <name>.
6238 6238
6239 6239 Tags are used to name particular revisions of the repository and are
6240 6240 very useful for comparing different revisions, going back to significant
6241 6241 earlier versions, or marking branch points as releases. Changing
6242 6242 an existing tag is normally disallowed; use -f/--force to override.
6243 6243
6244 6244 If no revision is given, the parent of the working directory is
6245 6245 used.
6246 6246
6247 6247 To facilitate version control, distribution, and merging of tags,
6248 6248 they are stored as a file named ".hgtags" which is managed similarly
6249 6249 to other project files and can be hand-edited if necessary. This
6250 6250 also means that tagging creates a new commit. The file
6251 6251 ".hg/localtags" is used for local tags (not shared among
6252 6252 repositories).
6253 6253
6254 6254 Tag commits are usually made at the head of a branch. If the parent
6255 6255 of the working directory is not a branch head, :hg:`tag` aborts; use
6256 6256 -f/--force to force the tag commit to be based on a non-head
6257 6257 changeset.
6258 6258
6259 6259 See :hg:`help dates` for a list of formats valid for -d/--date.
6260 6260
6261 6261 Since tag names have priority over branch names during revision
6262 6262 lookup, using an existing branch name as a tag name is discouraged.
6263 6263
6264 6264 Returns 0 on success.
6265 6265 """
6266 6266 wlock = lock = None
6267 6267 try:
6268 6268 wlock = repo.wlock()
6269 6269 lock = repo.lock()
6270 6270 rev_ = "."
6271 6271 names = [t.strip() for t in (name1,) + names]
6272 6272 if len(names) != len(set(names)):
6273 6273 raise error.Abort(_('tag names must be unique'))
6274 6274 for n in names:
6275 6275 scmutil.checknewlabel(repo, n, 'tag')
6276 6276 if not n:
6277 6277 raise error.Abort(_('tag names cannot consist entirely of '
6278 6278 'whitespace'))
6279 6279 if opts.get('rev') and opts.get('remove'):
6280 6280 raise error.Abort(_("--rev and --remove are incompatible"))
6281 6281 if opts.get('rev'):
6282 6282 rev_ = opts['rev']
6283 6283 message = opts.get('message')
6284 6284 if opts.get('remove'):
6285 6285 if opts.get('local'):
6286 6286 expectedtype = 'local'
6287 6287 else:
6288 6288 expectedtype = 'global'
6289 6289
6290 6290 for n in names:
6291 6291 if not repo.tagtype(n):
6292 6292 raise error.Abort(_("tag '%s' does not exist") % n)
6293 6293 if repo.tagtype(n) != expectedtype:
6294 6294 if expectedtype == 'global':
6295 6295 raise error.Abort(_("tag '%s' is not a global tag") % n)
6296 6296 else:
6297 6297 raise error.Abort(_("tag '%s' is not a local tag") % n)
6298 6298 rev_ = 'null'
6299 6299 if not message:
6300 6300 # we don't translate commit messages
6301 6301 message = 'Removed tag %s' % ', '.join(names)
6302 6302 elif not opts.get('force'):
6303 6303 for n in names:
6304 6304 if n in repo.tags():
6305 6305 raise error.Abort(_("tag '%s' already exists "
6306 6306 "(use -f to force)") % n)
6307 6307 if not opts.get('local'):
6308 6308 p1, p2 = repo.dirstate.parents()
6309 6309 if p2 != nullid:
6310 6310 raise error.Abort(_('uncommitted merge'))
6311 6311 bheads = repo.branchheads()
6312 6312 if not opts.get('force') and bheads and p1 not in bheads:
6313 6313 raise error.Abort(_('working directory is not at a branch head '
6314 6314 '(use -f to force)'))
6315 6315 r = scmutil.revsingle(repo, rev_).node()
6316 6316
6317 6317 if not message:
6318 6318 # we don't translate commit messages
6319 6319 message = ('Added tag %s for changeset %s' %
6320 6320 (', '.join(names), short(r)))
6321 6321
6322 6322 date = opts.get('date')
6323 6323 if date:
6324 6324 date = util.parsedate(date)
6325 6325
6326 6326 if opts.get('remove'):
6327 6327 editform = 'tag.remove'
6328 6328 else:
6329 6329 editform = 'tag.add'
6330 6330 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6331 6331
6332 6332 # don't allow tagging the null rev
6333 6333 if (not opts.get('remove') and
6334 6334 scmutil.revsingle(repo, rev_).rev() == nullrev):
6335 6335 raise error.Abort(_("cannot tag null revision"))
6336 6336
6337 6337 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6338 6338 editor=editor)
6339 6339 finally:
6340 6340 release(lock, wlock)
6341 6341
6342 6342 @command('tags', formatteropts, '')
6343 6343 def tags(ui, repo, **opts):
6344 6344 """list repository tags
6345 6345
6346 6346 This lists both regular and local tags. When the -v/--verbose
6347 6347 switch is used, a third column "local" is printed for local tags.
6348 6348 When the -q/--quiet switch is used, only the tag name is printed.
6349 6349
6350 6350 Returns 0 on success.
6351 6351 """
6352 6352
6353 6353 fm = ui.formatter('tags', opts)
6354 6354 hexfunc = fm.hexfunc
6355 6355 tagtype = ""
6356 6356
6357 6357 for t, n in reversed(repo.tagslist()):
6358 6358 hn = hexfunc(n)
6359 6359 label = 'tags.normal'
6360 6360 tagtype = ''
6361 6361 if repo.tagtype(t) == 'local':
6362 6362 label = 'tags.local'
6363 6363 tagtype = 'local'
6364 6364
6365 6365 fm.startitem()
6366 6366 fm.write('tag', '%s', t, label=label)
6367 6367 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6368 6368 fm.condwrite(not ui.quiet, 'rev node', fmt,
6369 6369 repo.changelog.rev(n), hn, label=label)
6370 6370 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6371 6371 tagtype, label=label)
6372 6372 fm.plain('\n')
6373 6373 fm.end()
6374 6374
6375 6375 @command('tip',
6376 6376 [('p', 'patch', None, _('show patch')),
6377 6377 ('g', 'git', None, _('use git extended diff format')),
6378 6378 ] + templateopts,
6379 6379 _('[-p] [-g]'))
6380 6380 def tip(ui, repo, **opts):
6381 6381 """show the tip revision (DEPRECATED)
6382 6382
6383 6383 The tip revision (usually just called the tip) is the changeset
6384 6384 most recently added to the repository (and therefore the most
6385 6385 recently changed head).
6386 6386
6387 6387 If you have just made a commit, that commit will be the tip. If
6388 6388 you have just pulled changes from another repository, the tip of
6389 6389 that repository becomes the current tip. The "tip" tag is special
6390 6390 and cannot be renamed or assigned to a different changeset.
6391 6391
6392 6392 This command is deprecated, please use :hg:`heads` instead.
6393 6393
6394 6394 Returns 0 on success.
6395 6395 """
6396 6396 displayer = cmdutil.show_changeset(ui, repo, opts)
6397 6397 displayer.show(repo['tip'])
6398 6398 displayer.close()
6399 6399
6400 6400 @command('unbundle',
6401 6401 [('u', 'update', None,
6402 6402 _('update to new branch head if changesets were unbundled'))],
6403 6403 _('[-u] FILE...'))
6404 6404 def unbundle(ui, repo, fname1, *fnames, **opts):
6405 6405 """apply one or more changegroup files
6406 6406
6407 6407 Apply one or more compressed changegroup files generated by the
6408 6408 bundle command.
6409 6409
6410 6410 Returns 0 on success, 1 if an update has unresolved files.
6411 6411 """
6412 6412 fnames = (fname1,) + fnames
6413 6413
6414 6414 with repo.lock():
6415 6415 for fname in fnames:
6416 6416 f = hg.openpath(ui, fname)
6417 6417 gen = exchange.readbundle(ui, f, fname)
6418 6418 if isinstance(gen, bundle2.unbundle20):
6419 6419 tr = repo.transaction('unbundle')
6420 6420 try:
6421 6421 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
6422 6422 url='bundle:' + fname)
6423 6423 tr.close()
6424 6424 except error.BundleUnknownFeatureError as exc:
6425 6425 raise error.Abort(_('%s: unknown bundle feature, %s')
6426 6426 % (fname, exc),
6427 6427 hint=_("see https://mercurial-scm.org/"
6428 6428 "wiki/BundleFeature for more "
6429 6429 "information"))
6430 6430 finally:
6431 6431 if tr:
6432 6432 tr.release()
6433 6433 changes = [r.get('return', 0)
6434 6434 for r in op.records['changegroup']]
6435 6435 modheads = changegroup.combineresults(changes)
6436 6436 elif isinstance(gen, streamclone.streamcloneapplier):
6437 6437 raise error.Abort(
6438 6438 _('packed bundles cannot be applied with '
6439 6439 '"hg unbundle"'),
6440 6440 hint=_('use "hg debugapplystreamclonebundle"'))
6441 6441 else:
6442 6442 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
6443 6443
6444 6444 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
6445 6445
6446 6446 @command('^update|up|checkout|co',
6447 6447 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6448 6448 ('c', 'check', None, _('require clean working directory')),
6449 6449 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6450 6450 ('r', 'rev', '', _('revision'), _('REV'))
6451 6451 ] + mergetoolopts,
6452 6452 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6453 6453 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6454 6454 tool=None):
6455 6455 """update working directory (or switch revisions)
6456 6456
6457 6457 Update the repository's working directory to the specified
6458 6458 changeset. If no changeset is specified, update to the tip of the
6459 6459 current named branch and move the active bookmark (see :hg:`help
6460 6460 bookmarks`).
6461 6461
6462 6462 Update sets the working directory's parent revision to the specified
6463 6463 changeset (see :hg:`help parents`).
6464 6464
6465 6465 If the changeset is not a descendant or ancestor of the working
6466 6466 directory's parent, the update is aborted. With the -c/--check
6467 6467 option, the working directory is checked for uncommitted changes; if
6468 6468 none are found, the working directory is updated to the specified
6469 6469 changeset.
6470 6470
6471 6471 .. container:: verbose
6472 6472
6473 6473 The following rules apply when the working directory contains
6474 6474 uncommitted changes:
6475 6475
6476 6476 1. If neither -c/--check nor -C/--clean is specified, and if
6477 6477 the requested changeset is an ancestor or descendant of
6478 6478 the working directory's parent, the uncommitted changes
6479 6479 are merged into the requested changeset and the merged
6480 6480 result is left uncommitted. If the requested changeset is
6481 6481 not an ancestor or descendant (that is, it is on another
6482 6482 branch), the update is aborted and the uncommitted changes
6483 6483 are preserved.
6484 6484
6485 6485 2. With the -c/--check option, the update is aborted and the
6486 6486 uncommitted changes are preserved.
6487 6487
6488 6488 3. With the -C/--clean option, uncommitted changes are discarded and
6489 6489 the working directory is updated to the requested changeset.
6490 6490
6491 6491 To cancel an uncommitted merge (and lose your changes), use
6492 6492 :hg:`update --clean .`.
6493 6493
6494 6494 Use null as the changeset to remove the working directory (like
6495 6495 :hg:`clone -U`).
6496 6496
6497 6497 If you want to revert just one file to an older revision, use
6498 6498 :hg:`revert [-r REV] NAME`.
6499 6499
6500 6500 See :hg:`help dates` for a list of formats valid for -d/--date.
6501 6501
6502 6502 Returns 0 on success, 1 if there are unresolved files.
6503 6503 """
6504 6504 if rev and node:
6505 6505 raise error.Abort(_("please specify just one revision"))
6506 6506
6507 6507 if rev is None or rev == '':
6508 6508 rev = node
6509 6509
6510 6510 if date and rev is not None:
6511 6511 raise error.Abort(_("you can't specify a revision and a date"))
6512 6512
6513 6513 if check and clean:
6514 6514 raise error.Abort(_("cannot specify both -c/--check and -C/--clean"))
6515 6515
6516 6516 with repo.wlock():
6517 6517 cmdutil.clearunfinished(repo)
6518 6518
6519 6519 if date:
6520 6520 rev = cmdutil.finddate(ui, repo, date)
6521 6521
6522 6522 # if we defined a bookmark, we have to remember the original name
6523 6523 brev = rev
6524 6524 rev = scmutil.revsingle(repo, rev, rev).rev()
6525 6525
6526 6526 if check:
6527 6527 cmdutil.bailifchanged(repo, merge=False)
6528 6528
6529 6529 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6530 6530
6531 6531 return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check)
6532 6532
6533 6533 @command('verify', [])
6534 6534 def verify(ui, repo):
6535 6535 """verify the integrity of the repository
6536 6536
6537 6537 Verify the integrity of the current repository.
6538 6538
6539 6539 This will perform an extensive check of the repository's
6540 6540 integrity, validating the hashes and checksums of each entry in
6541 6541 the changelog, manifest, and tracked files, as well as the
6542 6542 integrity of their crosslinks and indices.
6543 6543
6544 6544 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
6545 6545 for more information about recovery from corruption of the
6546 6546 repository.
6547 6547
6548 6548 Returns 0 on success, 1 if errors are encountered.
6549 6549 """
6550 6550 return hg.verify(repo)
6551 6551
6552 6552 @command('version', [] + formatteropts, norepo=True)
6553 6553 def version_(ui, **opts):
6554 6554 """output version and copyright information"""
6555 6555 fm = ui.formatter("version", opts)
6556 6556 fm.startitem()
6557 6557 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
6558 6558 util.version())
6559 6559 license = _(
6560 6560 "(see https://mercurial-scm.org for more information)\n"
6561 6561 "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
6562 6562 "This is free software; see the source for copying conditions. "
6563 6563 "There is NO\nwarranty; "
6564 6564 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6565 6565 )
6566 6566 if not ui.quiet:
6567 6567 fm.plain(license)
6568 6568
6569 6569 if ui.verbose:
6570 6570 fm.plain(_("\nEnabled extensions:\n\n"))
6571 6571 # format names and versions into columns
6572 6572 names = []
6573 6573 vers = []
6574 6574 isinternals = []
6575 6575 for name, module in extensions.extensions():
6576 6576 names.append(name)
6577 6577 vers.append(extensions.moduleversion(module) or None)
6578 6578 isinternals.append(extensions.ismoduleinternal(module))
6579 6579 fn = fm.nested("extensions")
6580 6580 if names:
6581 6581 namefmt = " %%-%ds " % max(len(n) for n in names)
6582 6582 places = [_("external"), _("internal")]
6583 6583 for n, v, p in zip(names, vers, isinternals):
6584 6584 fn.startitem()
6585 6585 fn.condwrite(ui.verbose, "name", namefmt, n)
6586 6586 if ui.verbose:
6587 6587 fn.plain("%s " % places[p])
6588 6588 fn.data(bundled=p)
6589 6589 fn.condwrite(ui.verbose and v, "ver", "%s", v)
6590 6590 if ui.verbose:
6591 6591 fn.plain("\n")
6592 6592 fn.end()
6593 6593 fm.end()
6594 6594
6595 6595 def loadcmdtable(ui, name, cmdtable):
6596 6596 """Load command functions from specified cmdtable
6597 6597 """
6598 6598 overrides = [cmd for cmd in cmdtable if cmd in table]
6599 6599 if overrides:
6600 6600 ui.warn(_("extension '%s' overrides commands: %s\n")
6601 6601 % (name, " ".join(overrides)))
6602 6602 table.update(cmdtable)
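# Illustrative sketch, not part of this change: the cmdtable shape that
# loadcmdtable() above expects from an extension.  The extension command
# below is hypothetical; the decorator pattern mirrors the
# cmdutil.command(table) registration used at the top of this module.
from mercurial import cmdutil
from mercurial.i18n import _

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('hello', [], _('hg hello'))
def hello(ui, repo, **opts):
    """print a short greeting (illustrative only)"""
    ui.write('hello from %s\n' % repo.root)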
@@ -1,3245 +1,3246
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 from __future__ import absolute_import
17 17
18 18 import bz2
19 19 import calendar
20 20 import collections
21 21 import datetime
22 22 import errno
23 23 import gc
24 24 import hashlib
25 25 import imp
26 26 import os
27 27 import platform as pyplatform
28 28 import re as remod
29 29 import shutil
30 30 import signal
31 31 import socket
32 32 import stat
33 33 import string
34 34 import subprocess
35 35 import sys
36 36 import tempfile
37 37 import textwrap
38 38 import time
39 39 import traceback
40 40 import zlib
41 41
42 42 from . import (
43 43 encoding,
44 44 error,
45 45 i18n,
46 46 osutil,
47 47 parsers,
48 48 pycompat,
49 49 )
50 50
51 51 empty = pycompat.empty
52 52 httplib = pycompat.httplib
53 53 httpserver = pycompat.httpserver
54 54 pickle = pycompat.pickle
55 55 queue = pycompat.queue
56 56 socketserver = pycompat.socketserver
57 57 stderr = pycompat.stderr
58 58 stdin = pycompat.stdin
59 59 stdout = pycompat.stdout
60 60 stringio = pycompat.stringio
61 61 urlerr = pycompat.urlerr
62 62 urlparse = pycompat.urlparse
63 63 urlreq = pycompat.urlreq
64 64 xmlrpclib = pycompat.xmlrpclib
65 65
66 66 if os.name == 'nt':
67 67 from . import windows as platform
68 68 stdout = platform.winstdout(pycompat.stdout)
69 69 else:
70 70 from . import posix as platform
71 71
72 72 _ = i18n._
73 73
74 74 bindunixsocket = platform.bindunixsocket
75 75 cachestat = platform.cachestat
76 76 checkexec = platform.checkexec
77 77 checklink = platform.checklink
78 78 copymode = platform.copymode
79 79 executablepath = platform.executablepath
80 80 expandglobs = platform.expandglobs
81 81 explainexit = platform.explainexit
82 82 findexe = platform.findexe
83 83 gethgcmd = platform.gethgcmd
84 84 getuser = platform.getuser
85 85 getpid = os.getpid
86 86 groupmembers = platform.groupmembers
87 87 groupname = platform.groupname
88 88 hidewindow = platform.hidewindow
89 89 isexec = platform.isexec
90 90 isowner = platform.isowner
91 91 localpath = platform.localpath
92 92 lookupreg = platform.lookupreg
93 93 makedir = platform.makedir
94 94 nlinks = platform.nlinks
95 95 normpath = platform.normpath
96 96 normcase = platform.normcase
97 97 normcasespec = platform.normcasespec
98 98 normcasefallback = platform.normcasefallback
99 99 openhardlinks = platform.openhardlinks
100 100 oslink = platform.oslink
101 101 parsepatchoutput = platform.parsepatchoutput
102 102 pconvert = platform.pconvert
103 103 poll = platform.poll
104 104 popen = platform.popen
105 105 posixfile = platform.posixfile
106 106 quotecommand = platform.quotecommand
107 107 readpipe = platform.readpipe
108 108 rename = platform.rename
109 109 removedirs = platform.removedirs
110 110 samedevice = platform.samedevice
111 111 samefile = platform.samefile
112 112 samestat = platform.samestat
113 113 setbinary = platform.setbinary
114 114 setflags = platform.setflags
115 115 setsignalhandler = platform.setsignalhandler
116 116 shellquote = platform.shellquote
117 117 spawndetached = platform.spawndetached
118 118 split = platform.split
119 119 sshargs = platform.sshargs
120 120 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
121 121 statisexec = platform.statisexec
122 122 statislink = platform.statislink
123 123 testpid = platform.testpid
124 124 umask = platform.umask
125 125 unlink = platform.unlink
126 126 unlinkpath = platform.unlinkpath
127 127 username = platform.username
128 128
129 129 # Python compatibility
130 130
131 131 _notset = object()
132 132
133 133 # disable Python's problematic floating point timestamps (issue4836)
134 134 # (Python hypocritically says you shouldn't change this behavior in
135 135 # libraries, and sure enough Mercurial is not a library.)
136 136 os.stat_float_times(False)
137 137
138 138 def safehasattr(thing, attr):
139 139 return getattr(thing, attr, _notset) is not _notset
140 140
141 141 DIGESTS = {
142 142 'md5': hashlib.md5,
143 143 'sha1': hashlib.sha1,
144 144 'sha512': hashlib.sha512,
145 145 }
146 146 # List of digest types from strongest to weakest
147 147 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
148 148
149 149 for k in DIGESTS_BY_STRENGTH:
150 150 assert k in DIGESTS
151 151
152 152 class digester(object):
153 153 """helper to compute digests.
154 154
155 155 This helper can be used to compute one or more digests given their name.
156 156
157 157 >>> d = digester(['md5', 'sha1'])
158 158 >>> d.update('foo')
159 159 >>> [k for k in sorted(d)]
160 160 ['md5', 'sha1']
161 161 >>> d['md5']
162 162 'acbd18db4cc2f85cedef654fccc4a4d8'
163 163 >>> d['sha1']
164 164 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
165 165 >>> digester.preferred(['md5', 'sha1'])
166 166 'sha1'
167 167 """
168 168
169 169 def __init__(self, digests, s=''):
170 170 self._hashes = {}
171 171 for k in digests:
172 172 if k not in DIGESTS:
173 173 raise Abort(_('unknown digest type: %s') % k)
174 174 self._hashes[k] = DIGESTS[k]()
175 175 if s:
176 176 self.update(s)
177 177
178 178 def update(self, data):
179 179 for h in self._hashes.values():
180 180 h.update(data)
181 181
182 182 def __getitem__(self, key):
183 183 if key not in DIGESTS:
184 184 raise Abort(_('unknown digest type: %s') % key)
185 185 return self._hashes[key].hexdigest()
186 186
187 187 def __iter__(self):
188 188 return iter(self._hashes)
189 189
190 190 @staticmethod
191 191 def preferred(supported):
192 192 """returns the strongest digest type in both supported and DIGESTS."""
193 193
194 194 for k in DIGESTS_BY_STRENGTH:
195 195 if k in supported:
196 196 return k
197 197 return None
198 198
199 199 class digestchecker(object):
200 200 """file handle wrapper that additionally checks content against a given
201 201 size and digests.
202 202
203 203 d = digestchecker(fh, size, {'md5': '...'})
204 204
205 205 When multiple digests are given, all of them are validated.
206 206 """
207 207
208 208 def __init__(self, fh, size, digests):
209 209 self._fh = fh
210 210 self._size = size
211 211 self._got = 0
212 212 self._digests = dict(digests)
213 213 self._digester = digester(self._digests.keys())
214 214
215 215 def read(self, length=-1):
216 216 content = self._fh.read(length)
217 217 self._digester.update(content)
218 218 self._got += len(content)
219 219 return content
220 220
221 221 def validate(self):
222 222 if self._size != self._got:
223 223 raise Abort(_('size mismatch: expected %d, got %d') %
224 224 (self._size, self._got))
225 225 for k, v in self._digests.items():
226 226 if v != self._digester[k]:
227 227 # i18n: first parameter is a digest name
228 228 raise Abort(_('%s mismatch: expected %s, got %s') %
229 229 (k, v, self._digester[k]))
230 230
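# Usage sketch (assuming the mercurial package is importable): wrap a file
# handle so that whatever is read through it is checked against an expected
# size and digest when validate() is called.
import hashlib
import tempfile
from mercurial import util

data = b'some payload'
tmp = tempfile.NamedTemporaryFile()
tmp.write(data)
tmp.flush()
tmp.seek(0)
fh = util.digestchecker(tmp, len(data),
                        {'sha1': hashlib.sha1(data).hexdigest()})
fh.read()        # only bytes read through the wrapper are counted
fh.validate()    # raises util.Abort on a size or digest mismatch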
231 231 try:
232 232 buffer = buffer
233 233 except NameError:
234 234 if not pycompat.ispy3:
235 235 def buffer(sliceable, offset=0):
236 236 return sliceable[offset:]
237 237 else:
238 238 def buffer(sliceable, offset=0):
239 239 return memoryview(sliceable)[offset:]
240 240
241 241 closefds = os.name == 'posix'
242 242
243 243 _chunksize = 4096
244 244
245 245 class bufferedinputpipe(object):
246 246 """a manually buffered input pipe
247 247
248 248 Python will not let us use buffered IO and lazy reading with 'polling' at
249 249 the same time. We cannot probe the buffer state and select will not detect
250 250 that data are ready to read if they are already buffered.
251 251
252 252 This class lets us work around that by implementing its own buffering
253 253 (allowing efficient readline) while offering a way to know if the buffer is
254 254 empty from the output (allowing collaboration of the buffer with polling).
255 255
256 256 This class lives in the 'util' module because it makes use of the 'os'
257 257 module from the python stdlib.
258 258 """
259 259
260 260 def __init__(self, input):
261 261 self._input = input
262 262 self._buffer = []
263 263 self._eof = False
264 264 self._lenbuf = 0
265 265
266 266 @property
267 267 def hasbuffer(self):
268 268 """True is any data is currently buffered
269 269
270 270 This will be used externally as a pre-step for polling IO. If there is
271 271 already data then no polling should be set in place."""
272 272 return bool(self._buffer)
273 273
274 274 @property
275 275 def closed(self):
276 276 return self._input.closed
277 277
278 278 def fileno(self):
279 279 return self._input.fileno()
280 280
281 281 def close(self):
282 282 return self._input.close()
283 283
284 284 def read(self, size):
285 285 while (not self._eof) and (self._lenbuf < size):
286 286 self._fillbuffer()
287 287 return self._frombuffer(size)
288 288
289 289 def readline(self, *args, **kwargs):
290 290 if 1 < len(self._buffer):
291 291 # this should not happen because both read and readline end with a
292 292 # _frombuffer call that collapses it.
293 293 self._buffer = [''.join(self._buffer)]
294 294 self._lenbuf = len(self._buffer[0])
295 295 lfi = -1
296 296 if self._buffer:
297 297 lfi = self._buffer[-1].find('\n')
298 298 while (not self._eof) and lfi < 0:
299 299 self._fillbuffer()
300 300 if self._buffer:
301 301 lfi = self._buffer[-1].find('\n')
302 302 size = lfi + 1
303 303 if lfi < 0: # end of file
304 304 size = self._lenbuf
305 305 elif 1 < len(self._buffer):
306 306 # we need to take previous chunks into account
307 307 size += self._lenbuf - len(self._buffer[-1])
308 308 return self._frombuffer(size)
309 309
310 310 def _frombuffer(self, size):
311 311 """return at most 'size' data from the buffer
312 312
313 313 The data are removed from the buffer."""
314 314 if size == 0 or not self._buffer:
315 315 return ''
316 316 buf = self._buffer[0]
317 317 if 1 < len(self._buffer):
318 318 buf = ''.join(self._buffer)
319 319
320 320 data = buf[:size]
321 321 buf = buf[len(data):]
322 322 if buf:
323 323 self._buffer = [buf]
324 324 self._lenbuf = len(buf)
325 325 else:
326 326 self._buffer = []
327 327 self._lenbuf = 0
328 328 return data
329 329
330 330 def _fillbuffer(self):
331 331 """read data to the buffer"""
332 332 data = os.read(self._input.fileno(), _chunksize)
333 333 if not data:
334 334 self._eof = True
335 335 else:
336 336 self._lenbuf += len(data)
337 337 self._buffer.append(data)
338 338
339 339 def popen2(cmd, env=None, newlines=False):
340 340 # Setting bufsize to -1 lets the system decide the buffer size.
341 341 # The default for bufsize is 0, meaning unbuffered. This leads to
342 342 # poor performance on Mac OS X: http://bugs.python.org/issue4194
343 343 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
344 344 close_fds=closefds,
345 345 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
346 346 universal_newlines=newlines,
347 347 env=env)
348 348 return p.stdin, p.stdout
349 349
350 350 def popen3(cmd, env=None, newlines=False):
351 351 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
352 352 return stdin, stdout, stderr
353 353
354 354 def popen4(cmd, env=None, newlines=False, bufsize=-1):
355 355 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
356 356 close_fds=closefds,
357 357 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
358 358 stderr=subprocess.PIPE,
359 359 universal_newlines=newlines,
360 360 env=env)
361 361 return p.stdin, p.stdout, p.stderr, p
362 362
363 363 def version():
364 364 """Return version information if available."""
365 365 try:
366 366 from . import __version__
367 367 return __version__.version
368 368 except ImportError:
369 369 return 'unknown'
370 370
371 371 def versiontuple(v=None, n=4):
372 372 """Parses a Mercurial version string into an N-tuple.
373 373
374 374 The version string to be parsed is specified with the ``v`` argument.
375 375 If it isn't defined, the current Mercurial version string will be parsed.
376 376
377 377 ``n`` can be 2, 3, or 4. Here is how some version strings map to
378 378 returned values:
379 379
380 380 >>> v = '3.6.1+190-df9b73d2d444'
381 381 >>> versiontuple(v, 2)
382 382 (3, 6)
383 383 >>> versiontuple(v, 3)
384 384 (3, 6, 1)
385 385 >>> versiontuple(v, 4)
386 386 (3, 6, 1, '190-df9b73d2d444')
387 387
388 388 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
389 389 (3, 6, 1, '190-df9b73d2d444+20151118')
390 390
391 391 >>> v = '3.6'
392 392 >>> versiontuple(v, 2)
393 393 (3, 6)
394 394 >>> versiontuple(v, 3)
395 395 (3, 6, None)
396 396 >>> versiontuple(v, 4)
397 397 (3, 6, None, None)
398 398
399 399 >>> v = '3.9-rc'
400 400 >>> versiontuple(v, 2)
401 401 (3, 9)
402 402 >>> versiontuple(v, 3)
403 403 (3, 9, None)
404 404 >>> versiontuple(v, 4)
405 405 (3, 9, None, 'rc')
406 406
407 407 >>> v = '3.9-rc+2-02a8fea4289b'
408 408 >>> versiontuple(v, 2)
409 409 (3, 9)
410 410 >>> versiontuple(v, 3)
411 411 (3, 9, None)
412 412 >>> versiontuple(v, 4)
413 413 (3, 9, None, 'rc+2-02a8fea4289b')
414 414 """
415 415 if not v:
416 416 v = version()
417 417 parts = remod.split('[\+-]', v, 1)
418 418 if len(parts) == 1:
419 419 vparts, extra = parts[0], None
420 420 else:
421 421 vparts, extra = parts
422 422
423 423 vints = []
424 424 for i in vparts.split('.'):
425 425 try:
426 426 vints.append(int(i))
427 427 except ValueError:
428 428 break
429 429 # (3, 6) -> (3, 6, None)
430 430 while len(vints) < 3:
431 431 vints.append(None)
432 432
433 433 if n == 2:
434 434 return (vints[0], vints[1])
435 435 if n == 3:
436 436 return (vints[0], vints[1], vints[2])
437 437 if n == 4:
438 438 return (vints[0], vints[1], vints[2], extra)
439 439
440 440 # used by parsedate
441 441 defaultdateformats = (
442 442 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
443 443 '%Y-%m-%dT%H:%M', # without seconds
444 444 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
445 445 '%Y-%m-%dT%H%M', # without seconds
446 446 '%Y-%m-%d %H:%M:%S', # our common legal variant
447 447 '%Y-%m-%d %H:%M', # without seconds
448 448 '%Y-%m-%d %H%M%S', # without :
449 449 '%Y-%m-%d %H%M', # without seconds
450 450 '%Y-%m-%d %I:%M:%S%p',
451 451 '%Y-%m-%d %H:%M',
452 452 '%Y-%m-%d %I:%M%p',
453 453 '%Y-%m-%d',
454 454 '%m-%d',
455 455 '%m/%d',
456 456 '%m/%d/%y',
457 457 '%m/%d/%Y',
458 458 '%a %b %d %H:%M:%S %Y',
459 459 '%a %b %d %I:%M:%S%p %Y',
460 460 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
461 461 '%b %d %H:%M:%S %Y',
462 462 '%b %d %I:%M:%S%p %Y',
463 463 '%b %d %H:%M:%S',
464 464 '%b %d %I:%M:%S%p',
465 465 '%b %d %H:%M',
466 466 '%b %d %I:%M%p',
467 467 '%b %d %Y',
468 468 '%b %d',
469 469 '%H:%M:%S',
470 470 '%I:%M:%S%p',
471 471 '%H:%M',
472 472 '%I:%M%p',
473 473 )
474 474
475 475 extendeddateformats = defaultdateformats + (
476 476 "%Y",
477 477 "%Y-%m",
478 478 "%b",
479 479 "%b %Y",
480 480 )
481 481
482 482 def cachefunc(func):
483 483 '''cache the result of function calls'''
484 484 # XXX doesn't handle keyword args
485 485 if func.__code__.co_argcount == 0:
486 486 cache = []
487 487 def f():
488 488 if len(cache) == 0:
489 489 cache.append(func())
490 490 return cache[0]
491 491 return f
492 492 cache = {}
493 493 if func.__code__.co_argcount == 1:
494 494 # we gain a small amount of time because
495 495 # we don't need to pack/unpack the list
496 496 def f(arg):
497 497 if arg not in cache:
498 498 cache[arg] = func(arg)
499 499 return cache[arg]
500 500 else:
501 501 def f(*args):
502 502 if args not in cache:
503 503 cache[args] = func(*args)
504 504 return cache[args]
505 505
506 506 return f
507 507
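# Usage sketch (assuming the mercurial package is importable): cachefunc
# memoizes by positional arguments, so a second call with the same argument
# is served from the cache instead of re-running the function.
from mercurial import util

calls = []
def square(n):
    calls.append(n)
    return n * n

cached = util.cachefunc(square)
print(cached(4), cached(4))   # 16 16
print(len(calls))             # 1 -- the underlying function ran only once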
508 508 class sortdict(dict):
509 509 '''a simple sorted dictionary'''
510 510 def __init__(self, data=None):
511 511 self._list = []
512 512 if data:
513 513 self.update(data)
514 514 def copy(self):
515 515 return sortdict(self)
516 516 def __setitem__(self, key, val):
517 517 if key in self:
518 518 self._list.remove(key)
519 519 self._list.append(key)
520 520 dict.__setitem__(self, key, val)
521 521 def __iter__(self):
522 522 return self._list.__iter__()
523 523 def update(self, src):
524 524 if isinstance(src, dict):
525 525 src = src.iteritems()
526 526 for k, v in src:
527 527 self[k] = v
528 528 def clear(self):
529 529 dict.clear(self)
530 530 self._list = []
531 531 def items(self):
532 532 return [(k, self[k]) for k in self._list]
533 533 def __delitem__(self, key):
534 534 dict.__delitem__(self, key)
535 535 self._list.remove(key)
536 536 def pop(self, key, *args, **kwargs):
537 537 dict.pop(self, key, *args, **kwargs)
538 538 try:
539 539 self._list.remove(key)
540 540 except ValueError:
541 541 pass
542 542 def keys(self):
543 543 return self._list
544 544 def iterkeys(self):
545 545 return self._list.__iter__()
546 546 def iteritems(self):
547 547 for k in self._list:
548 548 yield k, self[k]
549 549 def insert(self, index, key, val):
550 550 self._list.insert(index, key)
551 551 dict.__setitem__(self, key, val)
552 552 def __repr__(self):
553 553 if not self:
554 554 return '%s()' % self.__class__.__name__
555 555 return '%s(%r)' % (self.__class__.__name__, self.items())
556 556
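# Usage sketch: sortdict preserves insertion order, and re-assigning an
# existing key moves it to the end, matching __setitem__ above.
from mercurial import util

d = util.sortdict()
d['b'] = 1
d['a'] = 2
d['b'] = 3            # 'b' is re-inserted and now sorts after 'a'
print(d.keys())       # ['a', 'b']
print(d.items())      # [('a', 2), ('b', 3)]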
557 557 class _lrucachenode(object):
558 558 """A node in a doubly linked list.
559 559
560 560 Holds a reference to nodes on either side as well as a key-value
561 561 pair for the dictionary entry.
562 562 """
563 563 __slots__ = (u'next', u'prev', u'key', u'value')
564 564
565 565 def __init__(self):
566 566 self.next = None
567 567 self.prev = None
568 568
569 569 self.key = _notset
570 570 self.value = None
571 571
572 572 def markempty(self):
573 573 """Mark the node as emptied."""
574 574 self.key = _notset
575 575
576 576 class lrucachedict(object):
577 577 """Dict that caches most recent accesses and sets.
578 578
579 579 The dict consists of an actual backing dict - indexed by original
580 580 key - and a doubly linked circular list defining the order of entries in
581 581 the cache.
582 582
583 583 The head node is the newest entry in the cache. If the cache is full,
584 584 we recycle head.prev and make it the new head. Cache accesses result in
585 585 the node being moved to before the existing head and being marked as the
586 586 new head node.
587 587 """
588 588 def __init__(self, max):
589 589 self._cache = {}
590 590
591 591 self._head = head = _lrucachenode()
592 592 head.prev = head
593 593 head.next = head
594 594 self._size = 1
595 595 self._capacity = max
596 596
597 597 def __len__(self):
598 598 return len(self._cache)
599 599
600 600 def __contains__(self, k):
601 601 return k in self._cache
602 602
603 603 def __iter__(self):
604 604 # We don't have to iterate in cache order, but why not.
605 605 n = self._head
606 606 for i in range(len(self._cache)):
607 607 yield n.key
608 608 n = n.next
609 609
610 610 def __getitem__(self, k):
611 611 node = self._cache[k]
612 612 self._movetohead(node)
613 613 return node.value
614 614
615 615 def __setitem__(self, k, v):
616 616 node = self._cache.get(k)
617 617 # Replace existing value and mark as newest.
618 618 if node is not None:
619 619 node.value = v
620 620 self._movetohead(node)
621 621 return
622 622
623 623 if self._size < self._capacity:
624 624 node = self._addcapacity()
625 625 else:
626 626 # Grab the last/oldest item.
627 627 node = self._head.prev
628 628
629 629 # At capacity. Kill the old entry.
630 630 if node.key is not _notset:
631 631 del self._cache[node.key]
632 632
633 633 node.key = k
634 634 node.value = v
635 635 self._cache[k] = node
636 636 # And mark it as newest entry. No need to adjust order since it
637 637 # is already self._head.prev.
638 638 self._head = node
639 639
640 640 def __delitem__(self, k):
641 641 node = self._cache.pop(k)
642 642 node.markempty()
643 643
644 644 # Temporarily mark as newest item before re-adjusting head to make
645 645 # this node the oldest item.
646 646 self._movetohead(node)
647 647 self._head = node.next
648 648
649 649 # Additional dict methods.
650 650
651 651 def get(self, k, default=None):
652 652 try:
653 653 return self._cache[k].value
654 654 except KeyError:
655 655 return default
656 656
657 657 def clear(self):
658 658 n = self._head
659 659 while n.key is not _notset:
660 660 n.markempty()
661 661 n = n.next
662 662
663 663 self._cache.clear()
664 664
665 665 def copy(self):
666 666 result = lrucachedict(self._capacity)
667 667 n = self._head.prev
668 668 # Iterate in oldest-to-newest order, so the copy has the right ordering
669 669 for i in range(len(self._cache)):
670 670 result[n.key] = n.value
671 671 n = n.prev
672 672 return result
673 673
674 674 def _movetohead(self, node):
675 675 """Mark a node as the newest, making it the new head.
676 676
677 677 When a node is accessed, it becomes the freshest entry in the LRU
678 678 list, which is denoted by self._head.
679 679
680 680 Visually, let's make ``N`` the new head node (* denotes head):
681 681
682 682 previous/oldest <-> head <-> next/next newest
683 683
684 684 ----<->--- A* ---<->-----
685 685 | |
686 686 E <-> D <-> N <-> C <-> B
687 687
688 688 To:
689 689
690 690 ----<->--- N* ---<->-----
691 691 | |
692 692 E <-> D <-> C <-> B <-> A
693 693
694 694 This requires the following moves:
695 695
696 696 C.next = D (node.prev.next = node.next)
697 697 D.prev = C (node.next.prev = node.prev)
698 698 E.next = N (head.prev.next = node)
699 699 N.prev = E (node.prev = head.prev)
700 700 N.next = A (node.next = head)
701 701 A.prev = N (head.prev = node)
702 702 """
703 703 head = self._head
704 704 # C.next = D
705 705 node.prev.next = node.next
706 706 # D.prev = C
707 707 node.next.prev = node.prev
708 708 # N.prev = E
709 709 node.prev = head.prev
710 710 # N.next = A
711 711 # It is tempting to do just "head" here, however if node is
712 712 # adjacent to head, this will do bad things.
713 713 node.next = head.prev.next
714 714 # E.next = N
715 715 node.next.prev = node
716 716 # A.prev = N
717 717 node.prev.next = node
718 718
719 719 self._head = node
720 720
721 721 def _addcapacity(self):
722 722 """Add a node to the circular linked list.
723 723
724 724 The new node is inserted before the head node.
725 725 """
726 726 head = self._head
727 727 node = _lrucachenode()
728 728 head.prev.next = node
729 729 node.prev = head.prev
730 730 node.next = head
731 731 head.prev = node
732 732 self._size += 1
733 733 return node
734 734
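# Usage sketch: once the configured capacity is reached, inserting a new key
# recycles the least recently used node, so the oldest untouched entry
# disappears from the cache.
from mercurial import util

d = util.lrucachedict(2)
d['a'] = 1
d['b'] = 2
d['a']                 # touching 'a' makes it the newest entry
d['c'] = 3             # at capacity, so the oldest entry ('b') is evicted
print('b' in d)        # False
print(d['a'], d['c'])  # 1 3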
735 735 def lrucachefunc(func):
736 736 '''cache most recent results of function calls'''
737 737 cache = {}
738 738 order = collections.deque()
739 739 if func.__code__.co_argcount == 1:
740 740 def f(arg):
741 741 if arg not in cache:
742 742 if len(cache) > 20:
743 743 del cache[order.popleft()]
744 744 cache[arg] = func(arg)
745 745 else:
746 746 order.remove(arg)
747 747 order.append(arg)
748 748 return cache[arg]
749 749 else:
750 750 def f(*args):
751 751 if args not in cache:
752 752 if len(cache) > 20:
753 753 del cache[order.popleft()]
754 754 cache[args] = func(*args)
755 755 else:
756 756 order.remove(args)
757 757 order.append(args)
758 758 return cache[args]
759 759
760 760 return f
761 761
762 762 class propertycache(object):
763 763 def __init__(self, func):
764 764 self.func = func
765 765 self.name = func.__name__
766 766 def __get__(self, obj, type=None):
767 767 result = self.func(obj)
768 768 self.cachevalue(obj, result)
769 769 return result
770 770
771 771 def cachevalue(self, obj, value):
772 772 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
773 773 obj.__dict__[self.name] = value
774 774
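# Usage sketch: propertycache computes the value once per instance and then
# stores it in the instance __dict__, so later lookups never re-run the
# function (the class below is made up for illustration).
from mercurial import util

class repoinfo(object):
    @util.propertycache
    def answer(self):
        print('computing...')
        return 42

r = repoinfo()
print(r.answer)   # prints 'computing...' then 42
print(r.answer)   # 42, served from r.__dict__ without recomputation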
775 775 def pipefilter(s, cmd):
776 776 '''filter string S through command CMD, returning its output'''
777 777 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
778 778 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
779 779 pout, perr = p.communicate(s)
780 780 return pout
781 781
782 782 def tempfilter(s, cmd):
783 783 '''filter string S through a pair of temporary files with CMD.
784 784 CMD is used as a template to create the real command to be run,
785 785 with the strings INFILE and OUTFILE replaced by the real names of
786 786 the temporary files generated.'''
787 787 inname, outname = None, None
788 788 try:
789 789 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
790 790 fp = os.fdopen(infd, 'wb')
791 791 fp.write(s)
792 792 fp.close()
793 793 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
794 794 os.close(outfd)
795 795 cmd = cmd.replace('INFILE', inname)
796 796 cmd = cmd.replace('OUTFILE', outname)
797 797 code = os.system(cmd)
798 798 if sys.platform == 'OpenVMS' and code & 1:
799 799 code = 0
800 800 if code:
801 801 raise Abort(_("command '%s' failed: %s") %
802 802 (cmd, explainexit(code)))
803 803 return readfile(outname)
804 804 finally:
805 805 try:
806 806 if inname:
807 807 os.unlink(inname)
808 808 except OSError:
809 809 pass
810 810 try:
811 811 if outname:
812 812 os.unlink(outname)
813 813 except OSError:
814 814 pass
815 815
816 816 filtertable = {
817 817 'tempfile:': tempfilter,
818 818 'pipe:': pipefilter,
819 819 }
820 820
821 821 def filter(s, cmd):
822 822 "filter a string through a command that transforms its input to its output"
823 823 for name, fn in filtertable.iteritems():
824 824 if cmd.startswith(name):
825 825 return fn(s, cmd[len(name):].lstrip())
826 826 return pipefilter(s, cmd)
827 827
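# Usage sketch (POSIX only, assuming 'tr' is on PATH): the 'pipe:' prefix in
# filtertable routes the string through pipefilter, i.e. a shell command that
# reads stdin and writes stdout.
from mercurial import util

print(util.filter('mercurial\n', 'pipe: tr a-z A-Z'))   # MERCURIAL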
828 828 def binary(s):
829 829 """return true if a string is binary data"""
830 830 return bool(s and '\0' in s)
831 831
832 832 def increasingchunks(source, min=1024, max=65536):
833 833 '''return no less than min bytes per chunk while data remains,
834 834 doubling min after each chunk until it reaches max'''
835 835 def log2(x):
836 836 if not x:
837 837 return 0
838 838 i = 0
839 839 while x:
840 840 x >>= 1
841 841 i += 1
842 842 return i - 1
843 843
844 844 buf = []
845 845 blen = 0
846 846 for chunk in source:
847 847 buf.append(chunk)
848 848 blen += len(chunk)
849 849 if blen >= min:
850 850 if min < max:
851 851 min = min << 1
852 852 nmin = 1 << log2(blen)
853 853 if nmin > min:
854 854 min = nmin
855 855 if min > max:
856 856 min = max
857 857 yield ''.join(buf)
858 858 blen = 0
859 859 buf = []
860 860 if buf:
861 861 yield ''.join(buf)
862 862
863 863 Abort = error.Abort
864 864
865 865 def always(fn):
866 866 return True
867 867
868 868 def never(fn):
869 869 return False
870 870
871 871 def nogc(func):
872 872 """disable garbage collector
873 873
874 874 Python's garbage collector triggers a GC each time a certain number of
875 875 container objects (the number being defined by gc.get_threshold()) are
876 876 allocated even when marked not to be tracked by the collector. Tracking has
877 877 no effect on when GCs are triggered, only on what objects the GC looks
878 878 into. As a workaround, disable GC while building complex (huge)
879 879 containers.
880 880
881 881 This garbage collector issue has been fixed in 2.7.
882 882 """
883 883 if sys.version_info >= (2, 7):
884 884 return func
885 885 def wrapper(*args, **kwargs):
886 886 gcenabled = gc.isenabled()
887 887 gc.disable()
888 888 try:
889 889 return func(*args, **kwargs)
890 890 finally:
891 891 if gcenabled:
892 892 gc.enable()
893 893 return wrapper
894 894
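# Illustrative sketch (hypothetical function): nogc is meant to decorate
# code that builds very large containers; on Python >= 2.7 it returns the
# function unchanged, on older versions it disables GC around the call.
#
#   @nogc
#   def buildbigmap(items):
#       return dict(items)
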
895 895 def pathto(root, n1, n2):
896 896 '''return the relative path from one place to another.
897 897 root should use os.sep to separate directories
898 898 n1 should use os.sep to separate directories
899 899 n2 should use "/" to separate directories
900 900 returns an os.sep-separated path.
901 901
902 902 If n1 is a relative path, it is assumed to be
903 903 relative to root.
904 904 n2 should always be relative to root.
905 905 '''
906 906 if not n1:
907 907 return localpath(n2)
908 908 if os.path.isabs(n1):
909 909 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
910 910 return os.path.join(root, localpath(n2))
911 911 n2 = '/'.join((pconvert(root), n2))
912 912 a, b = splitpath(n1), n2.split('/')
913 913 a.reverse()
914 914 b.reverse()
915 915 while a and b and a[-1] == b[-1]:
916 916 a.pop()
917 917 b.pop()
918 918 b.reverse()
919 return os.sep.join((['..'] * len(a)) + b) or '.'
919 return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
920 920
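# Illustrative sketch of pathto() with hypothetical paths, assuming a
# platform where the path separator is '/': n1 is taken relative to root,
# n2 is always '/'-separated, and the result climbs out of n1's components
# before descending into n2.
#
#   pathto('/repo', 'src', 'docs/readme')          # -> '../docs/readme'
#   pathto('/repo', 'src/main.py', 'src/util.py')  # -> '../util.py'
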
921 921 def mainfrozen():
922 922 """return True if we are a frozen executable.
923 923
924 924 The code supports py2exe (most common, Windows only) and tools/freeze
925 925 (portable, not much used).
926 926 """
927 927 return (safehasattr(sys, "frozen") or # new py2exe
928 928 safehasattr(sys, "importers") or # old py2exe
929 929 imp.is_frozen(u"__main__")) # tools/freeze
930 930
931 931 # the location of data files matching the source code
932 932 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
933 933 # executable version (py2exe) doesn't support __file__
934 934 datapath = os.path.dirname(sys.executable)
935 935 else:
936 936 datapath = os.path.dirname(__file__)
937 937
938 938 if not isinstance(datapath, bytes):
939 939 datapath = pycompat.fsencode(datapath)
940 940
941 941 i18n.setdatapath(datapath)
942 942
943 943 _hgexecutable = None
944 944
945 945 def hgexecutable():
946 946 """return location of the 'hg' executable.
947 947
948 948 Defaults to $HG or 'hg' in the search path.
949 949 """
950 950 if _hgexecutable is None:
951 951 hg = os.environ.get('HG')
952 952 mainmod = sys.modules['__main__']
953 953 if hg:
954 954 _sethgexecutable(hg)
955 955 elif mainfrozen():
956 956 if getattr(sys, 'frozen', None) == 'macosx_app':
957 957 # Env variable set by py2app
958 958 _sethgexecutable(os.environ['EXECUTABLEPATH'])
959 959 else:
960 960 _sethgexecutable(sys.executable)
961 961 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
962 962 _sethgexecutable(mainmod.__file__)
963 963 else:
964 964 exe = findexe('hg') or os.path.basename(sys.argv[0])
965 965 _sethgexecutable(exe)
966 966 return _hgexecutable
967 967
968 968 def _sethgexecutable(path):
969 969 """set location of the 'hg' executable"""
970 970 global _hgexecutable
971 971 _hgexecutable = path
972 972
973 973 def _isstdout(f):
974 974 fileno = getattr(f, 'fileno', None)
975 975 return fileno and fileno() == sys.__stdout__.fileno()
976 976
977 977 def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
978 978 '''enhanced shell command execution.
979 979 run with environment maybe modified, maybe in different dir.
980 980
981 981 if command fails and onerr is None, return status, else raise onerr
982 982 object as exception.
983 983
984 984 if out is specified, it is assumed to be a file-like object that has a
985 985 write() method. stdout and stderr will be redirected to out.'''
986 986 if environ is None:
987 987 environ = {}
988 988 try:
989 989 stdout.flush()
990 990 except Exception:
991 991 pass
992 992 def py2shell(val):
993 993 'convert python object into string that is useful to shell'
994 994 if val is None or val is False:
995 995 return '0'
996 996 if val is True:
997 997 return '1'
998 998 return str(val)
999 999 origcmd = cmd
1000 1000 cmd = quotecommand(cmd)
1001 1001 if sys.platform == 'plan9' and (sys.version_info[0] == 2
1002 1002 and sys.version_info[1] < 7):
1003 1003 # subprocess kludge to work around issues in half-baked Python
1004 1004 # ports, notably bichued/python:
1005 1005 if cwd is not None:
1006 1006 os.chdir(cwd)
1007 1007 rc = os.system(cmd)
1008 1008 else:
1009 1009 env = dict(os.environ)
1010 1010 env.update((k, py2shell(v)) for k, v in environ.iteritems())
1011 1011 env['HG'] = hgexecutable()
1012 1012 if out is None or _isstdout(out):
1013 1013 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
1014 1014 env=env, cwd=cwd)
1015 1015 else:
1016 1016 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1017 1017 env=env, cwd=cwd, stdout=subprocess.PIPE,
1018 1018 stderr=subprocess.STDOUT)
1019 1019 for line in iter(proc.stdout.readline, ''):
1020 1020 out.write(line)
1021 1021 proc.wait()
1022 1022 rc = proc.returncode
1023 1023 if sys.platform == 'OpenVMS' and rc & 1:
1024 1024 rc = 0
1025 1025 if rc and onerr:
1026 1026 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
1027 1027 explainexit(rc)[0])
1028 1028 if errprefix:
1029 1029 errmsg = '%s: %s' % (errprefix, errmsg)
1030 1030 raise onerr(errmsg)
1031 1031 return rc
1032 1032
1033 1033 def checksignature(func):
1034 1034 '''wrap a function with code to check for calling errors'''
1035 1035 def check(*args, **kwargs):
1036 1036 try:
1037 1037 return func(*args, **kwargs)
1038 1038 except TypeError:
1039 1039 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1040 1040 raise error.SignatureError
1041 1041 raise
1042 1042
1043 1043 return check
1044 1044
1045 1045 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1046 1046 '''copy a file, preserving mode and optionally other stat info like
1047 1047 atime/mtime
1048 1048
1049 1049 The checkambig argument is used with filestat, and is useful only if the
1050 1050 destination file is guarded by a lock (e.g. repo.lock or
1051 1051 repo.wlock).
1052 1052
1053 1053 copystat and checkambig should be exclusive.
1054 1054 '''
1055 1055 assert not (copystat and checkambig)
1056 1056 oldstat = None
1057 1057 if os.path.lexists(dest):
1058 1058 if checkambig:
1059 1059 oldstat = checkambig and filestat(dest)
1060 1060 unlink(dest)
1061 1061 # hardlinks are problematic on CIFS, quietly ignore this flag
1062 1062 # until we find a way to work around it cleanly (issue4546)
1063 1063 if False and hardlink:
1064 1064 try:
1065 1065 oslink(src, dest)
1066 1066 return
1067 1067 except (IOError, OSError):
1068 1068 pass # fall back to normal copy
1069 1069 if os.path.islink(src):
1070 1070 os.symlink(os.readlink(src), dest)
1071 1071 # copytime is ignored for symlinks, but in general copytime isn't needed
1072 1072 # for them anyway
1073 1073 else:
1074 1074 try:
1075 1075 shutil.copyfile(src, dest)
1076 1076 if copystat:
1077 1077 # copystat also copies mode
1078 1078 shutil.copystat(src, dest)
1079 1079 else:
1080 1080 shutil.copymode(src, dest)
1081 1081 if oldstat and oldstat.stat:
1082 1082 newstat = filestat(dest)
1083 1083 if newstat.isambig(oldstat):
1084 1084 # stat of copied file is ambiguous to original one
1085 1085 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1086 1086 os.utime(dest, (advanced, advanced))
1087 1087 except shutil.Error as inst:
1088 1088 raise Abort(str(inst))
1089 1089
1090 1090 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
1091 1091 """Copy a directory tree using hardlinks if possible."""
1092 1092 num = 0
1093 1093
1094 1094 if hardlink is None:
1095 1095 hardlink = (os.stat(src).st_dev ==
1096 1096 os.stat(os.path.dirname(dst)).st_dev)
1097 1097 if hardlink:
1098 1098 topic = _('linking')
1099 1099 else:
1100 1100 topic = _('copying')
1101 1101
1102 1102 if os.path.isdir(src):
1103 1103 os.mkdir(dst)
1104 1104 for name, kind in osutil.listdir(src):
1105 1105 srcname = os.path.join(src, name)
1106 1106 dstname = os.path.join(dst, name)
1107 1107 def nprog(t, pos):
1108 1108 if pos is not None:
1109 1109 return progress(t, pos + num)
1110 1110 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
1111 1111 num += n
1112 1112 else:
1113 1113 if hardlink:
1114 1114 try:
1115 1115 oslink(src, dst)
1116 1116 except (IOError, OSError):
1117 1117 hardlink = False
1118 1118 shutil.copy(src, dst)
1119 1119 else:
1120 1120 shutil.copy(src, dst)
1121 1121 num += 1
1122 1122 progress(topic, num)
1123 1123 progress(topic, None)
1124 1124
1125 1125 return hardlink, num
1126 1126
1127 1127 _winreservednames = '''con prn aux nul
1128 1128 com1 com2 com3 com4 com5 com6 com7 com8 com9
1129 1129 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
1130 1130 _winreservedchars = ':*?"<>|'
1131 1131 def checkwinfilename(path):
1132 1132 r'''Check that the base-relative path is a valid filename on Windows.
1133 1133 Returns None if the path is ok, or a UI string describing the problem.
1134 1134
1135 1135 >>> checkwinfilename("just/a/normal/path")
1136 1136 >>> checkwinfilename("foo/bar/con.xml")
1137 1137 "filename contains 'con', which is reserved on Windows"
1138 1138 >>> checkwinfilename("foo/con.xml/bar")
1139 1139 "filename contains 'con', which is reserved on Windows"
1140 1140 >>> checkwinfilename("foo/bar/xml.con")
1141 1141 >>> checkwinfilename("foo/bar/AUX/bla.txt")
1142 1142 "filename contains 'AUX', which is reserved on Windows"
1143 1143 >>> checkwinfilename("foo/bar/bla:.txt")
1144 1144 "filename contains ':', which is reserved on Windows"
1145 1145 >>> checkwinfilename("foo/bar/b\07la.txt")
1146 1146 "filename contains '\\x07', which is invalid on Windows"
1147 1147 >>> checkwinfilename("foo/bar/bla ")
1148 1148 "filename ends with ' ', which is not allowed on Windows"
1149 1149 >>> checkwinfilename("../bar")
1150 1150 >>> checkwinfilename("foo\\")
1151 1151 "filename ends with '\\', which is invalid on Windows"
1152 1152 >>> checkwinfilename("foo\\/bar")
1153 1153 "directory name ends with '\\', which is invalid on Windows"
1154 1154 '''
1155 1155 if path.endswith('\\'):
1156 1156 return _("filename ends with '\\', which is invalid on Windows")
1157 1157 if '\\/' in path:
1158 1158 return _("directory name ends with '\\', which is invalid on Windows")
1159 1159 for n in path.replace('\\', '/').split('/'):
1160 1160 if not n:
1161 1161 continue
1162 1162 for c in n:
1163 1163 if c in _winreservedchars:
1164 1164 return _("filename contains '%s', which is reserved "
1165 1165 "on Windows") % c
1166 1166 if ord(c) <= 31:
1167 1167 return _("filename contains %r, which is invalid "
1168 1168 "on Windows") % c
1169 1169 base = n.split('.')[0]
1170 1170 if base and base.lower() in _winreservednames:
1171 1171 return _("filename contains '%s', which is reserved "
1172 1172 "on Windows") % base
1173 1173 t = n[-1]
1174 1174 if t in '. ' and n not in '..':
1175 1175 return _("filename ends with '%s', which is not allowed "
1176 1176 "on Windows") % t
1177 1177
1178 1178 if os.name == 'nt':
1179 1179 checkosfilename = checkwinfilename
1180 1180 else:
1181 1181 checkosfilename = platform.checkosfilename
1182 1182
1183 1183 def makelock(info, pathname):
1184 1184 try:
1185 1185 return os.symlink(info, pathname)
1186 1186 except OSError as why:
1187 1187 if why.errno == errno.EEXIST:
1188 1188 raise
1189 1189 except AttributeError: # no symlink in os
1190 1190 pass
1191 1191
1192 1192 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
1193 1193 os.write(ld, info)
1194 1194 os.close(ld)
1195 1195
1196 1196 def readlock(pathname):
1197 1197 try:
1198 1198 return os.readlink(pathname)
1199 1199 except OSError as why:
1200 1200 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1201 1201 raise
1202 1202 except AttributeError: # no symlink in os
1203 1203 pass
1204 1204 fp = posixfile(pathname)
1205 1205 r = fp.read()
1206 1206 fp.close()
1207 1207 return r
1208 1208
1209 1209 def fstat(fp):
1210 1210 '''stat file object that may not have fileno method.'''
1211 1211 try:
1212 1212 return os.fstat(fp.fileno())
1213 1213 except AttributeError:
1214 1214 return os.stat(fp.name)
1215 1215
1216 1216 # File system features
1217 1217
1218 1218 def fscasesensitive(path):
1219 1219 """
1220 1220 Return true if the given path is on a case-sensitive filesystem
1221 1221
1222 1222 Requires a path (like /foo/.hg) ending with a foldable final
1223 1223 directory component.
1224 1224 """
1225 1225 s1 = os.lstat(path)
1226 1226 d, b = os.path.split(path)
1227 1227 b2 = b.upper()
1228 1228 if b == b2:
1229 1229 b2 = b.lower()
1230 1230 if b == b2:
1231 1231 return True # no evidence against case sensitivity
1232 1232 p2 = os.path.join(d, b2)
1233 1233 try:
1234 1234 s2 = os.lstat(p2)
1235 1235 if s2 == s1:
1236 1236 return False
1237 1237 return True
1238 1238 except OSError:
1239 1239 return True
1240 1240
1241 1241 try:
1242 1242 import re2
1243 1243 _re2 = None
1244 1244 except ImportError:
1245 1245 _re2 = False
1246 1246
1247 1247 class _re(object):
1248 1248 def _checkre2(self):
1249 1249 global _re2
1250 1250 try:
1251 1251 # check if match works, see issue3964
1252 1252 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1253 1253 except ImportError:
1254 1254 _re2 = False
1255 1255
1256 1256 def compile(self, pat, flags=0):
1257 1257 '''Compile a regular expression, using re2 if possible
1258 1258
1259 1259 For best performance, use only re2-compatible regexp features. The
1260 1260 only flags from the re module that are re2-compatible are
1261 1261 IGNORECASE and MULTILINE.'''
1262 1262 if _re2 is None:
1263 1263 self._checkre2()
1264 1264 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1265 1265 if flags & remod.IGNORECASE:
1266 1266 pat = '(?i)' + pat
1267 1267 if flags & remod.MULTILINE:
1268 1268 pat = '(?m)' + pat
1269 1269 try:
1270 1270 return re2.compile(pat)
1271 1271 except re2.error:
1272 1272 pass
1273 1273 return remod.compile(pat, flags)
1274 1274
1275 1275 @propertycache
1276 1276 def escape(self):
1277 1277 '''Return the version of escape corresponding to self.compile.
1278 1278
1279 1279 This is imperfect because whether re2 or re is used for a particular
1280 1280 function depends on the flags, etc, but it's the best we can do.
1281 1281 '''
1282 1282 global _re2
1283 1283 if _re2 is None:
1284 1284 self._checkre2()
1285 1285 if _re2:
1286 1286 return re2.escape
1287 1287 else:
1288 1288 return remod.escape
1289 1289
1290 1290 re = _re()
1291 1291
1292 1292 _fspathcache = {}
1293 1293 def fspath(name, root):
1294 1294 '''Get name in the case stored in the filesystem
1295 1295
1296 1296 The name should be relative to root, and be normcase-ed for efficiency.
1297 1297
1298 1298 Note that this function is unnecessary, and should not be
1299 1299 called, for case-sensitive filesystems (simply because it's expensive).
1300 1300
1301 1301 The root should be normcase-ed, too.
1302 1302 '''
1303 1303 def _makefspathcacheentry(dir):
1304 1304 return dict((normcase(n), n) for n in os.listdir(dir))
1305 1305
1306 seps = os.sep
1306 seps = pycompat.ossep
1307 1307 if os.altsep:
1308 1308 seps = seps + os.altsep
1309 1309 # Protect backslashes. This gets silly very quickly.
1310 1310 seps = seps.replace('\\', '\\\\')
1311 1311 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1312 1312 dir = os.path.normpath(root)
1313 1313 result = []
1314 1314 for part, sep in pattern.findall(name):
1315 1315 if sep:
1316 1316 result.append(sep)
1317 1317 continue
1318 1318
1319 1319 if dir not in _fspathcache:
1320 1320 _fspathcache[dir] = _makefspathcacheentry(dir)
1321 1321 contents = _fspathcache[dir]
1322 1322
1323 1323 found = contents.get(part)
1324 1324 if not found:
1325 1325 # retry "once per directory" per "dirstate.walk" which
1326 1326 # may take place for each patch of "hg qpush", for example
1327 1327 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1328 1328 found = contents.get(part)
1329 1329
1330 1330 result.append(found or part)
1331 1331 dir = os.path.join(dir, part)
1332 1332
1333 1333 return ''.join(result)
1334 1334
1335 1335 def checknlink(testfile):
1336 1336 '''check whether hardlink count reporting works properly'''
1337 1337
1338 1338 # testfile may be open, so we need a separate file for checking to
1339 1339 # work around issue2543 (or testfile may get lost on Samba shares)
1340 1340 f1 = testfile + ".hgtmp1"
1341 1341 if os.path.lexists(f1):
1342 1342 return False
1343 1343 try:
1344 1344 posixfile(f1, 'w').close()
1345 1345 except IOError:
1346 1346 try:
1347 1347 os.unlink(f1)
1348 1348 except OSError:
1349 1349 pass
1350 1350 return False
1351 1351
1352 1352 f2 = testfile + ".hgtmp2"
1353 1353 fd = None
1354 1354 try:
1355 1355 oslink(f1, f2)
1356 1356 # nlinks() may behave differently for files on Windows shares if
1357 1357 # the file is open.
1358 1358 fd = posixfile(f2)
1359 1359 return nlinks(f2) > 1
1360 1360 except OSError:
1361 1361 return False
1362 1362 finally:
1363 1363 if fd is not None:
1364 1364 fd.close()
1365 1365 for f in (f1, f2):
1366 1366 try:
1367 1367 os.unlink(f)
1368 1368 except OSError:
1369 1369 pass
1370 1370
1371 1371 def endswithsep(path):
1372 1372 '''Check path ends with os.sep or os.altsep.'''
1373 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1373 return (path.endswith(pycompat.ossep)
1374 or os.altsep and path.endswith(os.altsep))
1374 1375
1375 1376 def splitpath(path):
1376 1377 '''Split path by os.sep.
1377 1378 Note that this function does not use os.altsep because this is
1378 1379 an alternative of simple "xxx.split(os.sep)".
1379 1380 It is recommended to use os.path.normpath() before using this
1380 1381 function if need.'''
1381 return path.split(os.sep)
1382 return path.split(pycompat.ossep)
1382 1383
1383 1384 def gui():
1384 1385 '''Are we running in a GUI?'''
1385 1386 if sys.platform == 'darwin':
1386 1387 if 'SSH_CONNECTION' in os.environ:
1387 1388 # handle SSH access to a box where the user is logged in
1388 1389 return False
1389 1390 elif getattr(osutil, 'isgui', None):
1390 1391 # check if a CoreGraphics session is available
1391 1392 return osutil.isgui()
1392 1393 else:
1393 1394 # pure build; use a safe default
1394 1395 return True
1395 1396 else:
1396 1397 return os.name == "nt" or os.environ.get("DISPLAY")
1397 1398
1398 1399 def mktempcopy(name, emptyok=False, createmode=None):
1399 1400 """Create a temporary file with the same contents from name
1400 1401
1401 1402 The permission bits are copied from the original file.
1402 1403
1403 1404 If the temporary file is going to be truncated immediately, you
1404 1405 can use emptyok=True as an optimization.
1405 1406
1406 1407 Returns the name of the temporary file.
1407 1408 """
1408 1409 d, fn = os.path.split(name)
1409 1410 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1410 1411 os.close(fd)
1411 1412 # Temporary files are created with mode 0600, which is usually not
1412 1413 # what we want. If the original file already exists, just copy
1413 1414 # its mode. Otherwise, manually obey umask.
1414 1415 copymode(name, temp, createmode)
1415 1416 if emptyok:
1416 1417 return temp
1417 1418 try:
1418 1419 try:
1419 1420 ifp = posixfile(name, "rb")
1420 1421 except IOError as inst:
1421 1422 if inst.errno == errno.ENOENT:
1422 1423 return temp
1423 1424 if not getattr(inst, 'filename', None):
1424 1425 inst.filename = name
1425 1426 raise
1426 1427 ofp = posixfile(temp, "wb")
1427 1428 for chunk in filechunkiter(ifp):
1428 1429 ofp.write(chunk)
1429 1430 ifp.close()
1430 1431 ofp.close()
1431 1432 except: # re-raises
1432 1433 try: os.unlink(temp)
1433 1434 except OSError: pass
1434 1435 raise
1435 1436 return temp
1436 1437
1437 1438 class filestat(object):
1438 1439 """help to exactly detect change of a file
1439 1440
1440 1441 The 'stat' attribute is the result of 'os.stat()' if the specified 'path'
1441 1442 exists; otherwise it is None. This avoids a preparatory
1442 1443 'exists()' check on the caller's side.
1443 1444 """
1444 1445 def __init__(self, path):
1445 1446 try:
1446 1447 self.stat = os.stat(path)
1447 1448 except OSError as err:
1448 1449 if err.errno != errno.ENOENT:
1449 1450 raise
1450 1451 self.stat = None
1451 1452
1452 1453 __hash__ = object.__hash__
1453 1454
1454 1455 def __eq__(self, old):
1455 1456 try:
1456 1457 # if ambiguity between stat of new and old file is
1457 1458 # avoided, comparison of size, ctime and mtime is enough
1458 1459 # to exactly detect change of a file regardless of platform
1459 1460 return (self.stat.st_size == old.stat.st_size and
1460 1461 self.stat.st_ctime == old.stat.st_ctime and
1461 1462 self.stat.st_mtime == old.stat.st_mtime)
1462 1463 except AttributeError:
1463 1464 return False
1464 1465
1465 1466 def isambig(self, old):
1466 1467 """Examine whether new (= self) stat is ambiguous against old one
1467 1468
1468 1469 "S[N]" below means stat of a file at N-th change:
1469 1470
1470 1471 - S[n-1].ctime < S[n].ctime: can detect change of a file
1471 1472 - S[n-1].ctime == S[n].ctime
1472 1473 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1473 1474 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1474 1475 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1475 1476 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1476 1477
1477 1478 Case (*2) above means that a file was changed twice or more
1478 1479 within the same second (= S[n-1].ctime), so comparison of the
1479 1480 timestamps is ambiguous.
1480 1481
1481 1482 The basic idea to avoid such ambiguity is "advance mtime by 1 sec
1482 1483 if the timestamp is ambiguous".
1483 1484
1484 1485 But advancing mtime only in case (*2) doesn't work as
1485 1486 expected, because naturally advanced S[n].mtime in case (*1)
1486 1487 might be equal to manually advanced S[n-1 or earlier].mtime.
1487 1488
1488 1489 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1489 1490 treated as ambiguous regardless of mtime, to avoid overlooking
1490 1491 changes hidden by collisions between such mtimes.
1491 1492
1492 1493 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1493 1494 S[n].mtime", even if size of a file isn't changed.
1494 1495 """
1495 1496 try:
1496 1497 return (self.stat.st_ctime == old.stat.st_ctime)
1497 1498 except AttributeError:
1498 1499 return False
1499 1500
1500 1501 def avoidambig(self, path, old):
1501 1502 """Change file stat of specified path to avoid ambiguity
1502 1503
1503 1504 'old' should be previous filestat of 'path'.
1504 1505
1505 1506 This skips avoiding ambiguity, if a process doesn't have
1506 1507 appropriate privileges for 'path'.
1507 1508 """
1508 1509 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1509 1510 try:
1510 1511 os.utime(path, (advanced, advanced))
1511 1512 except OSError as inst:
1512 1513 if inst.errno == errno.EPERM:
1513 1514 # utime() on the file created by another user causes EPERM,
1514 1515 # if a process doesn't have appropriate privileges
1515 1516 return
1516 1517 raise
1517 1518
1518 1519 def __ne__(self, other):
1519 1520 return not self == other
1520 1521
1521 1522 class atomictempfile(object):
1522 1523 '''writable file object that atomically updates a file
1523 1524
1524 1525 All writes will go to a temporary copy of the original file. Call
1525 1526 close() when you are done writing, and atomictempfile will rename
1526 1527 the temporary copy to the original name, making the changes
1527 1528 visible. If the object is destroyed without being closed, all your
1528 1529 writes are discarded.
1529 1530
1530 1531 The checkambig argument of the constructor is used with filestat, and is
1531 1532 useful only if the target file is guarded by a lock (e.g. repo.lock
1532 1533 or repo.wlock).
1533 1534 '''
1534 1535 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1535 1536 self.__name = name # permanent name
1536 1537 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1537 1538 createmode=createmode)
1538 1539 self._fp = posixfile(self._tempname, mode)
1539 1540 self._checkambig = checkambig
1540 1541
1541 1542 # delegated methods
1542 1543 self.read = self._fp.read
1543 1544 self.write = self._fp.write
1544 1545 self.seek = self._fp.seek
1545 1546 self.tell = self._fp.tell
1546 1547 self.fileno = self._fp.fileno
1547 1548
1548 1549 def close(self):
1549 1550 if not self._fp.closed:
1550 1551 self._fp.close()
1551 1552 filename = localpath(self.__name)
1552 1553 oldstat = self._checkambig and filestat(filename)
1553 1554 if oldstat and oldstat.stat:
1554 1555 rename(self._tempname, filename)
1555 1556 newstat = filestat(filename)
1556 1557 if newstat.isambig(oldstat):
1557 1558 # stat of changed file is ambiguous to original one
1558 1559 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1559 1560 os.utime(filename, (advanced, advanced))
1560 1561 else:
1561 1562 rename(self._tempname, filename)
1562 1563
1563 1564 def discard(self):
1564 1565 if not self._fp.closed:
1565 1566 try:
1566 1567 os.unlink(self._tempname)
1567 1568 except OSError:
1568 1569 pass
1569 1570 self._fp.close()
1570 1571
1571 1572 def __del__(self):
1572 1573 if safehasattr(self, '_fp'): # constructor actually did something
1573 1574 self.discard()
1574 1575
1575 1576 def __enter__(self):
1576 1577 return self
1577 1578
1578 1579 def __exit__(self, exctype, excvalue, traceback):
1579 1580 if exctype is not None:
1580 1581 self.discard()
1581 1582 else:
1582 1583 self.close()
1583 1584
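# Illustrative usage sketch of atomictempfile (hypothetical file name and
# payload): all writes go to a temporary copy of the file; close() renames
# it over the original, while discard() -- or an exception inside the
# 'with' block -- throws the writes away.
#
#   with atomictempfile('data.bin', 'wb') as fp:
#       fp.write(payload)   # becomes visible only when the block exits cleanly
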
1584 1585 def makedirs(name, mode=None, notindexed=False):
1585 1586 """recursive directory creation with parent mode inheritance
1586 1587
1587 1588 Newly created directories are marked as "not to be indexed by
1588 1589 the content indexing service", if ``notindexed`` is specified
1589 1590 for "write" mode access.
1590 1591 """
1591 1592 try:
1592 1593 makedir(name, notindexed)
1593 1594 except OSError as err:
1594 1595 if err.errno == errno.EEXIST:
1595 1596 return
1596 1597 if err.errno != errno.ENOENT or not name:
1597 1598 raise
1598 1599 parent = os.path.dirname(os.path.abspath(name))
1599 1600 if parent == name:
1600 1601 raise
1601 1602 makedirs(parent, mode, notindexed)
1602 1603 try:
1603 1604 makedir(name, notindexed)
1604 1605 except OSError as err:
1605 1606 # Catch EEXIST to handle races
1606 1607 if err.errno == errno.EEXIST:
1607 1608 return
1608 1609 raise
1609 1610 if mode is not None:
1610 1611 os.chmod(name, mode)
1611 1612
1612 1613 def readfile(path):
1613 1614 with open(path, 'rb') as fp:
1614 1615 return fp.read()
1615 1616
1616 1617 def writefile(path, text):
1617 1618 with open(path, 'wb') as fp:
1618 1619 fp.write(text)
1619 1620
1620 1621 def appendfile(path, text):
1621 1622 with open(path, 'ab') as fp:
1622 1623 fp.write(text)
1623 1624
1624 1625 class chunkbuffer(object):
1625 1626 """Allow arbitrary sized chunks of data to be efficiently read from an
1626 1627 iterator over chunks of arbitrary size."""
1627 1628
1628 1629 def __init__(self, in_iter):
1629 1630 """in_iter is the iterator that's iterating over the input chunks.
1630 1631 targetsize is how big a buffer to try to maintain."""
1631 1632 def splitbig(chunks):
1632 1633 for chunk in chunks:
1633 1634 if len(chunk) > 2**20:
1634 1635 pos = 0
1635 1636 while pos < len(chunk):
1636 1637 end = pos + 2 ** 18
1637 1638 yield chunk[pos:end]
1638 1639 pos = end
1639 1640 else:
1640 1641 yield chunk
1641 1642 self.iter = splitbig(in_iter)
1642 1643 self._queue = collections.deque()
1643 1644 self._chunkoffset = 0
1644 1645
1645 1646 def read(self, l=None):
1646 1647 """Read L bytes of data from the iterator of chunks of data.
1647 1648 Returns less than L bytes if the iterator runs dry.
1648 1649
1649 1650 If size parameter is omitted, read everything"""
1650 1651 if l is None:
1651 1652 return ''.join(self.iter)
1652 1653
1653 1654 left = l
1654 1655 buf = []
1655 1656 queue = self._queue
1656 1657 while left > 0:
1657 1658 # refill the queue
1658 1659 if not queue:
1659 1660 target = 2**18
1660 1661 for chunk in self.iter:
1661 1662 queue.append(chunk)
1662 1663 target -= len(chunk)
1663 1664 if target <= 0:
1664 1665 break
1665 1666 if not queue:
1666 1667 break
1667 1668
1668 1669 # The easy way to do this would be to queue.popleft(), modify the
1669 1670 # chunk (if necessary), then queue.appendleft(). However, for cases
1670 1671 # where we read partial chunk content, this incurs 2 dequeue
1671 1672 # mutations and creates a new str for the remaining chunk in the
1672 1673 # queue. Our code below avoids this overhead.
1673 1674
1674 1675 chunk = queue[0]
1675 1676 chunkl = len(chunk)
1676 1677 offset = self._chunkoffset
1677 1678
1678 1679 # Use full chunk.
1679 1680 if offset == 0 and left >= chunkl:
1680 1681 left -= chunkl
1681 1682 queue.popleft()
1682 1683 buf.append(chunk)
1683 1684 # self._chunkoffset remains at 0.
1684 1685 continue
1685 1686
1686 1687 chunkremaining = chunkl - offset
1687 1688
1688 1689 # Use all of unconsumed part of chunk.
1689 1690 if left >= chunkremaining:
1690 1691 left -= chunkremaining
1691 1692 queue.popleft()
1692 1693 # offset == 0 is enabled by block above, so this won't merely
1693 1694 # copy via ``chunk[0:]``.
1694 1695 buf.append(chunk[offset:])
1695 1696 self._chunkoffset = 0
1696 1697
1697 1698 # Partial chunk needed.
1698 1699 else:
1699 1700 buf.append(chunk[offset:offset + left])
1700 1701 self._chunkoffset += left
1701 1702 left -= chunkremaining
1702 1703
1703 1704 return ''.join(buf)
1704 1705
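# Illustrative sketch of chunkbuffer with hypothetical input: read(l)
# concatenates queued chunks, tracking an offset into the first chunk
# instead of re-slicing the queue on every partial read.
#
#   cb = chunkbuffer(iter(['abc', 'def']))
#   cb.read(4)   # -> 'abcd'  (consumes 'abc' plus one byte of 'def')
#   cb.read(2)   # -> 'ef'
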
1705 1706 def filechunkiter(f, size=131072, limit=None):
1706 1707 """Create a generator that produces the data in the file size
1707 1708 (default 131072) bytes at a time, up to optional limit (default is
1708 1709 to read all data). Chunks may be less than size bytes if the
1709 1710 chunk is the last chunk in the file, or the file is a socket or
1710 1711 some other type of file that sometimes reads less data than is
1711 1712 requested."""
1712 1713 assert size >= 0
1713 1714 assert limit is None or limit >= 0
1714 1715 while True:
1715 1716 if limit is None:
1716 1717 nbytes = size
1717 1718 else:
1718 1719 nbytes = min(limit, size)
1719 1720 s = nbytes and f.read(nbytes)
1720 1721 if not s:
1721 1722 break
1722 1723 if limit:
1723 1724 limit -= len(s)
1724 1725 yield s
1725 1726
1726 1727 def makedate(timestamp=None):
1727 1728 '''Return a unix timestamp (or the current time) as a (unixtime,
1728 1729 offset) tuple based off the local timezone.'''
1729 1730 if timestamp is None:
1730 1731 timestamp = time.time()
1731 1732 if timestamp < 0:
1732 1733 hint = _("check your clock")
1733 1734 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1734 1735 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1735 1736 datetime.datetime.fromtimestamp(timestamp))
1736 1737 tz = delta.days * 86400 + delta.seconds
1737 1738 return timestamp, tz
1738 1739
1739 1740 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1740 1741 """represent a (unixtime, offset) tuple as a localized time.
1741 1742 unixtime is seconds since the epoch, and offset is the time zone's
1742 1743 number of seconds away from UTC.
1743 1744
1744 1745 >>> datestr((0, 0))
1745 1746 'Thu Jan 01 00:00:00 1970 +0000'
1746 1747 >>> datestr((42, 0))
1747 1748 'Thu Jan 01 00:00:42 1970 +0000'
1748 1749 >>> datestr((-42, 0))
1749 1750 'Wed Dec 31 23:59:18 1969 +0000'
1750 1751 >>> datestr((0x7fffffff, 0))
1751 1752 'Tue Jan 19 03:14:07 2038 +0000'
1752 1753 >>> datestr((-0x80000000, 0))
1753 1754 'Fri Dec 13 20:45:52 1901 +0000'
1754 1755 """
1755 1756 t, tz = date or makedate()
1756 1757 if "%1" in format or "%2" in format or "%z" in format:
1757 1758 sign = (tz > 0) and "-" or "+"
1758 1759 minutes = abs(tz) // 60
1759 1760 q, r = divmod(minutes, 60)
1760 1761 format = format.replace("%z", "%1%2")
1761 1762 format = format.replace("%1", "%c%02d" % (sign, q))
1762 1763 format = format.replace("%2", "%02d" % r)
1763 1764 d = t - tz
1764 1765 if d > 0x7fffffff:
1765 1766 d = 0x7fffffff
1766 1767 elif d < -0x80000000:
1767 1768 d = -0x80000000
1768 1769 # Never use time.gmtime() and datetime.datetime.fromtimestamp()
1769 1770 # because they use the gmtime() system call which is buggy on Windows
1770 1771 # for negative values.
1771 1772 t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
1772 1773 s = t.strftime(format)
1773 1774 return s
1774 1775
1775 1776 def shortdate(date=None):
1776 1777 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1777 1778 return datestr(date, format='%Y-%m-%d')
1778 1779
1779 1780 def parsetimezone(s):
1780 1781 """find a trailing timezone, if any, in string, and return a
1781 1782 (offset, remainder) pair"""
1782 1783
1783 1784 if s.endswith("GMT") or s.endswith("UTC"):
1784 1785 return 0, s[:-3].rstrip()
1785 1786
1786 1787 # Unix-style timezones [+-]hhmm
1787 1788 if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
1788 1789 sign = (s[-5] == "+") and 1 or -1
1789 1790 hours = int(s[-4:-2])
1790 1791 minutes = int(s[-2:])
1791 1792 return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
1792 1793
1793 1794 # ISO8601 trailing Z
1794 1795 if s.endswith("Z") and s[-2:-1].isdigit():
1795 1796 return 0, s[:-1]
1796 1797
1797 1798 # ISO8601-style [+-]hh:mm
1798 1799 if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
1799 1800 s[-5:-3].isdigit() and s[-2:].isdigit()):
1800 1801 sign = (s[-6] == "+") and 1 or -1
1801 1802 hours = int(s[-5:-3])
1802 1803 minutes = int(s[-2:])
1803 1804 return -sign * (hours * 60 + minutes) * 60, s[:-6]
1804 1805
1805 1806 return None, s
1806 1807
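# Illustrative sketch of parsetimezone() on hypothetical inputs; the
# returned offset follows the (unixtime, offset) convention of makedate(),
# i.e. seconds away from UTC, positive when the local zone is behind UTC.
#
#   parsetimezone('Fri Jun 23 13:30:00 2006 -0700')
#   # -> (25200, 'Fri Jun 23 13:30:00 2006')
#   parsetimezone('2006-02-01 13:00:30 GMT')
#   # -> (0, '2006-02-01 13:00:30')
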
1807 1808 def strdate(string, format, defaults=[]):
1808 1809 """parse a localized time string and return a (unixtime, offset) tuple.
1809 1810 if the string cannot be parsed, ValueError is raised."""
1810 1811 # NOTE: unixtime = localunixtime + offset
1811 1812 offset, date = parsetimezone(string)
1812 1813
1813 1814 # add missing elements from defaults
1814 1815 usenow = False # default to using biased defaults
1815 1816 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1816 1817 found = [True for p in part if ("%"+p) in format]
1817 1818 if not found:
1818 1819 date += "@" + defaults[part][usenow]
1819 1820 format += "@%" + part[0]
1820 1821 else:
1821 1822 # We've found a specific time element, less specific time
1822 1823 # elements are relative to today
1823 1824 usenow = True
1824 1825
1825 1826 timetuple = time.strptime(date, format)
1826 1827 localunixtime = int(calendar.timegm(timetuple))
1827 1828 if offset is None:
1828 1829 # local timezone
1829 1830 unixtime = int(time.mktime(timetuple))
1830 1831 offset = unixtime - localunixtime
1831 1832 else:
1832 1833 unixtime = localunixtime + offset
1833 1834 return unixtime, offset
1834 1835
1835 1836 def parsedate(date, formats=None, bias=None):
1836 1837 """parse a localized date/time and return a (unixtime, offset) tuple.
1837 1838
1838 1839 The date may be a "unixtime offset" string or in one of the specified
1839 1840 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1840 1841
1841 1842 >>> parsedate(' today ') == parsedate(\
1842 1843 datetime.date.today().strftime('%b %d'))
1843 1844 True
1844 1845 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1845 1846 datetime.timedelta(days=1)\
1846 1847 ).strftime('%b %d'))
1847 1848 True
1848 1849 >>> now, tz = makedate()
1849 1850 >>> strnow, strtz = parsedate('now')
1850 1851 >>> (strnow - now) < 1
1851 1852 True
1852 1853 >>> tz == strtz
1853 1854 True
1854 1855 """
1855 1856 if bias is None:
1856 1857 bias = {}
1857 1858 if not date:
1858 1859 return 0, 0
1859 1860 if isinstance(date, tuple) and len(date) == 2:
1860 1861 return date
1861 1862 if not formats:
1862 1863 formats = defaultdateformats
1863 1864 date = date.strip()
1864 1865
1865 1866 if date == 'now' or date == _('now'):
1866 1867 return makedate()
1867 1868 if date == 'today' or date == _('today'):
1868 1869 date = datetime.date.today().strftime('%b %d')
1869 1870 elif date == 'yesterday' or date == _('yesterday'):
1870 1871 date = (datetime.date.today() -
1871 1872 datetime.timedelta(days=1)).strftime('%b %d')
1872 1873
1873 1874 try:
1874 1875 when, offset = map(int, date.split(' '))
1875 1876 except ValueError:
1876 1877 # fill out defaults
1877 1878 now = makedate()
1878 1879 defaults = {}
1879 1880 for part in ("d", "mb", "yY", "HI", "M", "S"):
1880 1881 # this piece is for rounding the specific end of unknowns
1881 1882 b = bias.get(part)
1882 1883 if b is None:
1883 1884 if part[0] in "HMS":
1884 1885 b = "00"
1885 1886 else:
1886 1887 b = "0"
1887 1888
1888 1889 # this piece is for matching the generic end to today's date
1889 1890 n = datestr(now, "%" + part[0])
1890 1891
1891 1892 defaults[part] = (b, n)
1892 1893
1893 1894 for format in formats:
1894 1895 try:
1895 1896 when, offset = strdate(date, format, defaults)
1896 1897 except (ValueError, OverflowError):
1897 1898 pass
1898 1899 else:
1899 1900 break
1900 1901 else:
1901 1902 raise Abort(_('invalid date: %r') % date)
1902 1903 # validate explicit (probably user-specified) date and
1903 1904 # time zone offset. values must fit in signed 32 bits for
1904 1905 # current 32-bit linux runtimes. timezones go from UTC-12
1905 1906 # to UTC+14
1906 1907 if when < -0x80000000 or when > 0x7fffffff:
1907 1908 raise Abort(_('date exceeds 32 bits: %d') % when)
1908 1909 if offset < -50400 or offset > 43200:
1909 1910 raise Abort(_('impossible time zone offset: %d') % offset)
1910 1911 return when, offset
1911 1912
1912 1913 def matchdate(date):
1913 1914 """Return a function that matches a given date match specifier
1914 1915
1915 1916 Formats include:
1916 1917
1917 1918 '{date}' match a given date to the accuracy provided
1918 1919
1919 1920 '<{date}' on or before a given date
1920 1921
1921 1922 '>{date}' on or after a given date
1922 1923
1923 1924 >>> p1 = parsedate("10:29:59")
1924 1925 >>> p2 = parsedate("10:30:00")
1925 1926 >>> p3 = parsedate("10:30:59")
1926 1927 >>> p4 = parsedate("10:31:00")
1927 1928 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1928 1929 >>> f = matchdate("10:30")
1929 1930 >>> f(p1[0])
1930 1931 False
1931 1932 >>> f(p2[0])
1932 1933 True
1933 1934 >>> f(p3[0])
1934 1935 True
1935 1936 >>> f(p4[0])
1936 1937 False
1937 1938 >>> f(p5[0])
1938 1939 False
1939 1940 """
1940 1941
1941 1942 def lower(date):
1942 1943 d = {'mb': "1", 'd': "1"}
1943 1944 return parsedate(date, extendeddateformats, d)[0]
1944 1945
1945 1946 def upper(date):
1946 1947 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1947 1948 for days in ("31", "30", "29"):
1948 1949 try:
1949 1950 d["d"] = days
1950 1951 return parsedate(date, extendeddateformats, d)[0]
1951 1952 except Abort:
1952 1953 pass
1953 1954 d["d"] = "28"
1954 1955 return parsedate(date, extendeddateformats, d)[0]
1955 1956
1956 1957 date = date.strip()
1957 1958
1958 1959 if not date:
1959 1960 raise Abort(_("dates cannot consist entirely of whitespace"))
1960 1961 elif date[0] == "<":
1961 1962 if not date[1:]:
1962 1963 raise Abort(_("invalid day spec, use '<DATE'"))
1963 1964 when = upper(date[1:])
1964 1965 return lambda x: x <= when
1965 1966 elif date[0] == ">":
1966 1967 if not date[1:]:
1967 1968 raise Abort(_("invalid day spec, use '>DATE'"))
1968 1969 when = lower(date[1:])
1969 1970 return lambda x: x >= when
1970 1971 elif date[0] == "-":
1971 1972 try:
1972 1973 days = int(date[1:])
1973 1974 except ValueError:
1974 1975 raise Abort(_("invalid day spec: %s") % date[1:])
1975 1976 if days < 0:
1976 1977 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1977 1978 % date[1:])
1978 1979 when = makedate()[0] - days * 3600 * 24
1979 1980 return lambda x: x >= when
1980 1981 elif " to " in date:
1981 1982 a, b = date.split(" to ")
1982 1983 start, stop = lower(a), upper(b)
1983 1984 return lambda x: x >= start and x <= stop
1984 1985 else:
1985 1986 start, stop = lower(date), upper(date)
1986 1987 return lambda x: x >= start and x <= stop
1987 1988
1988 1989 def stringmatcher(pattern):
1989 1990 """
1990 1991 accepts a string, possibly starting with 're:' or 'literal:' prefix.
1991 1992 returns the matcher name, pattern, and matcher function.
1992 1993 missing or unknown prefixes are treated as literal matches.
1993 1994
1994 1995 helper for tests:
1995 1996 >>> def test(pattern, *tests):
1996 1997 ... kind, pattern, matcher = stringmatcher(pattern)
1997 1998 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
1998 1999
1999 2000 exact matching (no prefix):
2000 2001 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
2001 2002 ('literal', 'abcdefg', [False, False, True])
2002 2003
2003 2004 regex matching ('re:' prefix)
2004 2005 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
2005 2006 ('re', 'a.+b', [False, False, True])
2006 2007
2007 2008 force exact matches ('literal:' prefix)
2008 2009 >>> test('literal:re:foobar', 'foobar', 're:foobar')
2009 2010 ('literal', 're:foobar', [False, True])
2010 2011
2011 2012 unknown prefixes are ignored and treated as literals
2012 2013 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
2013 2014 ('literal', 'foo:bar', [False, False, True])
2014 2015 """
2015 2016 if pattern.startswith('re:'):
2016 2017 pattern = pattern[3:]
2017 2018 try:
2018 2019 regex = remod.compile(pattern)
2019 2020 except remod.error as e:
2020 2021 raise error.ParseError(_('invalid regular expression: %s')
2021 2022 % e)
2022 2023 return 're', pattern, regex.search
2023 2024 elif pattern.startswith('literal:'):
2024 2025 pattern = pattern[8:]
2025 2026 return 'literal', pattern, pattern.__eq__
2026 2027
2027 2028 def shortuser(user):
2028 2029 """Return a short representation of a user name or email address."""
2029 2030 f = user.find('@')
2030 2031 if f >= 0:
2031 2032 user = user[:f]
2032 2033 f = user.find('<')
2033 2034 if f >= 0:
2034 2035 user = user[f + 1:]
2035 2036 f = user.find(' ')
2036 2037 if f >= 0:
2037 2038 user = user[:f]
2038 2039 f = user.find('.')
2039 2040 if f >= 0:
2040 2041 user = user[:f]
2041 2042 return user
2042 2043
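# Illustrative sketch with hypothetical addresses: shortuser() strips the
# domain, any display name, and anything after the first space or '.':
#
#   shortuser('John Doe <john.doe@example.com>')   # -> 'john'
#   shortuser('jane@example.com')                  # -> 'jane'
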
2043 2044 def emailuser(user):
2044 2045 """Return the user portion of an email address."""
2045 2046 f = user.find('@')
2046 2047 if f >= 0:
2047 2048 user = user[:f]
2048 2049 f = user.find('<')
2049 2050 if f >= 0:
2050 2051 user = user[f + 1:]
2051 2052 return user
2052 2053
2053 2054 def email(author):
2054 2055 '''get email of author.'''
2055 2056 r = author.find('>')
2056 2057 if r == -1:
2057 2058 r = None
2058 2059 return author[author.find('<') + 1:r]
2059 2060
2060 2061 def ellipsis(text, maxlength=400):
2061 2062 """Trim string to at most maxlength (default: 400) columns in display."""
2062 2063 return encoding.trim(text, maxlength, ellipsis='...')
2063 2064
2064 2065 def unitcountfn(*unittable):
2065 2066 '''return a function that renders a readable count of some quantity'''
2066 2067
2067 2068 def go(count):
2068 2069 for multiplier, divisor, format in unittable:
2069 2070 if count >= divisor * multiplier:
2070 2071 return format % (count / float(divisor))
2071 2072 return unittable[-1][2] % count
2072 2073
2073 2074 return go
2074 2075
2075 2076 bytecount = unitcountfn(
2076 2077 (100, 1 << 30, _('%.0f GB')),
2077 2078 (10, 1 << 30, _('%.1f GB')),
2078 2079 (1, 1 << 30, _('%.2f GB')),
2079 2080 (100, 1 << 20, _('%.0f MB')),
2080 2081 (10, 1 << 20, _('%.1f MB')),
2081 2082 (1, 1 << 20, _('%.2f MB')),
2082 2083 (100, 1 << 10, _('%.0f KB')),
2083 2084 (10, 1 << 10, _('%.1f KB')),
2084 2085 (1, 1 << 10, _('%.2f KB')),
2085 2086 (1, 1, _('%.0f bytes')),
2086 2087 )
2087 2088
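# Illustrative sketch of bytecount(): the table entries are tried in order,
# so the printed precision shrinks as the magnitude grows.
#
#   bytecount(42)         # -> '42 bytes'
#   bytecount(2048)       # -> '2.00 KB'
#   bytecount(25 << 20)   # -> '25.0 MB'
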
2088 2089 def uirepr(s):
2089 2090 # Avoid double backslash in Windows path repr()
2090 2091 return repr(s).replace('\\\\', '\\')
2091 2092
2092 2093 # delay import of textwrap
2093 2094 def MBTextWrapper(**kwargs):
2094 2095 class tw(textwrap.TextWrapper):
2095 2096 """
2096 2097 Extend TextWrapper for width-awareness.
2097 2098
2098 2099 Neither the number of 'bytes' in any encoding nor the number of
2099 2100 'characters' is appropriate for calculating terminal columns of a string.
2100 2101
2101 2102 The original TextWrapper implementation uses the built-in 'len()'
2102 2103 directly, so overriding is needed to use the width of each character.
2103 2104
2104 2105 In addition, characters classified as 'ambiguous' width are
2105 2106 treated as wide in East Asian locales, but as narrow elsewhere.
2106 2107
2107 2108 This requires a user decision to determine the width of such characters.
2108 2109 """
2109 2110 def _cutdown(self, ucstr, space_left):
2110 2111 l = 0
2111 2112 colwidth = encoding.ucolwidth
2112 2113 for i in xrange(len(ucstr)):
2113 2114 l += colwidth(ucstr[i])
2114 2115 if space_left < l:
2115 2116 return (ucstr[:i], ucstr[i:])
2116 2117 return ucstr, ''
2117 2118
2118 2119 # overriding of base class
2119 2120 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
2120 2121 space_left = max(width - cur_len, 1)
2121 2122
2122 2123 if self.break_long_words:
2123 2124 cut, res = self._cutdown(reversed_chunks[-1], space_left)
2124 2125 cur_line.append(cut)
2125 2126 reversed_chunks[-1] = res
2126 2127 elif not cur_line:
2127 2128 cur_line.append(reversed_chunks.pop())
2128 2129
2129 2130 # this overriding code is imported from TextWrapper of Python 2.6
2130 2131 # to calculate columns of string by 'encoding.ucolwidth()'
2131 2132 def _wrap_chunks(self, chunks):
2132 2133 colwidth = encoding.ucolwidth
2133 2134
2134 2135 lines = []
2135 2136 if self.width <= 0:
2136 2137 raise ValueError("invalid width %r (must be > 0)" % self.width)
2137 2138
2138 2139 # Arrange in reverse order so items can be efficiently popped
2139 2140 # from a stack of chunks.
2140 2141 chunks.reverse()
2141 2142
2142 2143 while chunks:
2143 2144
2144 2145 # Start the list of chunks that will make up the current line.
2145 2146 # cur_len is just the length of all the chunks in cur_line.
2146 2147 cur_line = []
2147 2148 cur_len = 0
2148 2149
2149 2150 # Figure out which static string will prefix this line.
2150 2151 if lines:
2151 2152 indent = self.subsequent_indent
2152 2153 else:
2153 2154 indent = self.initial_indent
2154 2155
2155 2156 # Maximum width for this line.
2156 2157 width = self.width - len(indent)
2157 2158
2158 2159 # First chunk on line is whitespace -- drop it, unless this
2159 2160 # is the very beginning of the text (i.e. no lines started yet).
2160 2161 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
2161 2162 del chunks[-1]
2162 2163
2163 2164 while chunks:
2164 2165 l = colwidth(chunks[-1])
2165 2166
2166 2167 # Can at least squeeze this chunk onto the current line.
2167 2168 if cur_len + l <= width:
2168 2169 cur_line.append(chunks.pop())
2169 2170 cur_len += l
2170 2171
2171 2172 # Nope, this line is full.
2172 2173 else:
2173 2174 break
2174 2175
2175 2176 # The current line is full, and the next chunk is too big to
2176 2177 # fit on *any* line (not just this one).
2177 2178 if chunks and colwidth(chunks[-1]) > width:
2178 2179 self._handle_long_word(chunks, cur_line, cur_len, width)
2179 2180
2180 2181 # If the last chunk on this line is all whitespace, drop it.
2181 2182 if (self.drop_whitespace and
2182 2183 cur_line and cur_line[-1].strip() == ''):
2183 2184 del cur_line[-1]
2184 2185
2185 2186 # Convert current line back to a string and store it in list
2186 2187 # of all lines (return value).
2187 2188 if cur_line:
2188 2189 lines.append(indent + ''.join(cur_line))
2189 2190
2190 2191 return lines
2191 2192
2192 2193 global MBTextWrapper
2193 2194 MBTextWrapper = tw
2194 2195 return tw(**kwargs)
2195 2196
2196 2197 def wrap(line, width, initindent='', hangindent=''):
2197 2198 maxindent = max(len(hangindent), len(initindent))
2198 2199 if width <= maxindent:
2199 2200 # adjust for weird terminal size
2200 2201 width = max(78, maxindent + 1)
2201 2202 line = line.decode(encoding.encoding, encoding.encodingmode)
2202 2203 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
2203 2204 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
2204 2205 wrapper = MBTextWrapper(width=width,
2205 2206 initial_indent=initindent,
2206 2207 subsequent_indent=hangindent)
2207 2208 return wrapper.fill(line).encode(encoding.encoding)
2208 2209
2209 2210 if (pyplatform.python_implementation() == 'CPython' and
2210 2211 sys.version_info < (3, 0)):
2211 2212 # There is an issue in CPython that some IO methods do not handle EINTR
2212 2213 # correctly. The following table shows what CPython version (and functions)
2213 2214 # are affected (buggy: has the EINTR bug, okay: otherwise):
2214 2215 #
2215 2216 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2216 2217 # --------------------------------------------------
2217 2218 # fp.__iter__ | buggy | buggy | okay
2218 2219 # fp.read* | buggy | okay [1] | okay
2219 2220 #
2220 2221 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2221 2222 #
2222 2223 # Here we work around the EINTR issue for fileobj.__iter__. Other methods
2223 2224 # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
2224 2225 #
2225 2226 # Although we can work around the EINTR issue for fp.__iter__, it is slower:
2226 2227 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2227 2228 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2228 2229 # fp.__iter__ but not other fp.read* methods.
2229 2230 #
2230 2231 # On modern systems like Linux, the "read" syscall cannot be interrupted
2231 2232 # when reading "fast" files like on-disk files. So the EINTR issue only
2232 2233 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2233 2234 # files approximately as "fast" files and use the fast (unsafe) code path,
2234 2235 # to minimize the performance impact.
2235 2236 if sys.version_info >= (2, 7, 4):
2236 2237 # fp.readline deals with EINTR correctly, use it as a workaround.
2237 2238 def _safeiterfile(fp):
2238 2239 return iter(fp.readline, '')
2239 2240 else:
2240 2241 # fp.read* are broken too, manually deal with EINTR in a stupid way.
2241 2242 # note: this may block longer than necessary because of bufsize.
2242 2243 def _safeiterfile(fp, bufsize=4096):
2243 2244 fd = fp.fileno()
2244 2245 line = ''
2245 2246 while True:
2246 2247 try:
2247 2248 buf = os.read(fd, bufsize)
2248 2249 except OSError as ex:
2249 2250 # os.read only raises EINTR before any data is read
2250 2251 if ex.errno == errno.EINTR:
2251 2252 continue
2252 2253 else:
2253 2254 raise
2254 2255 line += buf
2255 2256 if '\n' in buf:
2256 2257 splitted = line.splitlines(True)
2257 2258 line = ''
2258 2259 for l in splitted:
2259 2260 if l[-1] == '\n':
2260 2261 yield l
2261 2262 else:
2262 2263 line = l
2263 2264 if not buf:
2264 2265 break
2265 2266 if line:
2266 2267 yield line
2267 2268
2268 2269 def iterfile(fp):
2269 2270 fastpath = True
2270 2271 if type(fp) is file:
2271 2272 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2272 2273 if fastpath:
2273 2274 return fp
2274 2275 else:
2275 2276 return _safeiterfile(fp)
2276 2277 else:
2277 2278 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2278 2279 def iterfile(fp):
2279 2280 return fp
2280 2281
2281 2282 def iterlines(iterator):
2282 2283 for chunk in iterator:
2283 2284 for line in chunk.splitlines():
2284 2285 yield line
2285 2286
2286 2287 def expandpath(path):
2287 2288 return os.path.expanduser(os.path.expandvars(path))
2288 2289
2289 2290 def hgcmd():
2290 2291 """Return the command used to execute current hg
2291 2292
2292 2293 This is different from hgexecutable() because on Windows we want
2293 2294 to avoid things like batch files that open new shell windows, so we
2294 2295 get either the python call or current executable.
2295 2296 """
2296 2297 if mainfrozen():
2297 2298 if getattr(sys, 'frozen', None) == 'macosx_app':
2298 2299 # Env variable set by py2app
2299 2300 return [os.environ['EXECUTABLEPATH']]
2300 2301 else:
2301 2302 return [sys.executable]
2302 2303 return gethgcmd()
2303 2304
2304 2305 def rundetached(args, condfn):
2305 2306 """Execute the argument list in a detached process.
2306 2307
2307 2308 condfn is a callable which is called repeatedly and should return
2308 2309 True once the child process is known to have started successfully.
2309 2310 At this point, the child process PID is returned. If the child
2310 2311 process fails to start or finishes before condfn() evaluates to
2311 2312 True, return -1.
2312 2313 """
2313 2314 # Windows case is easier because the child process is either
2314 2315 # successfully starting and validating the condition or exiting
2315 2316 # on failure. We just poll on its PID. On Unix, if the child
2316 2317 # process fails to start, it will be left in a zombie state until
2317 2318 # the parent waits on it, which we cannot do since we expect a long
2318 2319 # running process on success. Instead we listen for SIGCHLD telling
2319 2320 # us our child process terminated.
2320 2321 terminated = set()
2321 2322 def handler(signum, frame):
2322 2323 terminated.add(os.wait())
2323 2324 prevhandler = None
2324 2325 SIGCHLD = getattr(signal, 'SIGCHLD', None)
2325 2326 if SIGCHLD is not None:
2326 2327 prevhandler = signal.signal(SIGCHLD, handler)
2327 2328 try:
2328 2329 pid = spawndetached(args)
2329 2330 while not condfn():
2330 2331 if ((pid in terminated or not testpid(pid))
2331 2332 and not condfn()):
2332 2333 return -1
2333 2334 time.sleep(0.1)
2334 2335 return pid
2335 2336 finally:
2336 2337 if prevhandler is not None:
2337 2338 signal.signal(signal.SIGCHLD, prevhandler)
2338 2339
2339 2340 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2340 2341 """Return the result of interpolating items in the mapping into string s.
2341 2342
2342 2343 prefix is a single character string, or a two character string with
2343 2344 a backslash as the first character if the prefix needs to be escaped in
2344 2345 a regular expression.
2345 2346
2346 2347 fn is an optional function that will be applied to the replacement text
2347 2348 just before replacement.
2348 2349
2349 2350 escape_prefix is an optional flag that allows using a doubled prefix
2350 2351 to escape the prefix character itself.
2351 2352 """
2352 2353 fn = fn or (lambda s: s)
2353 2354 patterns = '|'.join(mapping.keys())
2354 2355 if escape_prefix:
2355 2356 patterns += '|' + prefix
2356 2357 if len(prefix) > 1:
2357 2358 prefix_char = prefix[1:]
2358 2359 else:
2359 2360 prefix_char = prefix
2360 2361 mapping[prefix_char] = prefix_char
2361 2362 r = remod.compile(r'%s(%s)' % (prefix, patterns))
2362 2363 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2363 2364
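# Illustrative sketch of interpolate() with a hypothetical mapping: each
# mapping key is matched immediately after the prefix character and
# replaced by its value.
#
#   interpolate('%', {'user': 'alice'}, 'hello %user')   # -> 'hello alice'
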
2364 2365 def getport(port):
2365 2366 """Return the port for a given network service.
2366 2367
2367 2368 If port is an integer, it's returned as is. If it's a string, it's
2368 2369 looked up using socket.getservbyname(). If there's no matching
2369 2370 service, error.Abort is raised.
2370 2371 """
2371 2372 try:
2372 2373 return int(port)
2373 2374 except ValueError:
2374 2375 pass
2375 2376
2376 2377 try:
2377 2378 return socket.getservbyname(port)
2378 2379 except socket.error:
2379 2380 raise Abort(_("no port number associated with service '%s'") % port)
2380 2381
2381 2382 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
2382 2383 '0': False, 'no': False, 'false': False, 'off': False,
2383 2384 'never': False}
2384 2385
2385 2386 def parsebool(s):
2386 2387 """Parse s into a boolean.
2387 2388
2388 2389 If s is not a valid boolean, returns None.
2389 2390 """
2390 2391 return _booleans.get(s.lower(), None)
2391 2392
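# Illustrative sketch of parsebool(): only the spellings listed in
# _booleans are recognized (case-insensitively); anything else yields None
# rather than raising an error.
#
#   parsebool('Yes')     # -> True
#   parsebool('off')     # -> False
#   parsebool('maybe')   # -> None
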
2392 2393 _hextochr = dict((a + b, chr(int(a + b, 16)))
2393 2394 for a in string.hexdigits for b in string.hexdigits)
2394 2395
2395 2396 class url(object):
2396 2397 r"""Reliable URL parser.
2397 2398
2398 2399 This parses URLs and provides attributes for the following
2399 2400 components:
2400 2401
2401 2402 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2402 2403
2403 2404 Missing components are set to None. The only exception is
2404 2405 fragment, which is set to '' if present but empty.
2405 2406
2406 2407 If parsefragment is False, fragment is included in query. If
2407 2408 parsequery is False, query is included in path. If both are
2408 2409 False, both fragment and query are included in path.
2409 2410
2410 2411 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2411 2412
2412 2413 Note that for backward compatibility reasons, bundle URLs do not
2413 2414 take host names. That means 'bundle://../' has a path of '../'.
2414 2415
2415 2416 Examples:
2416 2417
2417 2418 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2418 2419 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2419 2420 >>> url('ssh://[::1]:2200//home/joe/repo')
2420 2421 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2421 2422 >>> url('file:///home/joe/repo')
2422 2423 <url scheme: 'file', path: '/home/joe/repo'>
2423 2424 >>> url('file:///c:/temp/foo/')
2424 2425 <url scheme: 'file', path: 'c:/temp/foo/'>
2425 2426 >>> url('bundle:foo')
2426 2427 <url scheme: 'bundle', path: 'foo'>
2427 2428 >>> url('bundle://../foo')
2428 2429 <url scheme: 'bundle', path: '../foo'>
2429 2430 >>> url(r'c:\foo\bar')
2430 2431 <url path: 'c:\\foo\\bar'>
2431 2432 >>> url(r'\\blah\blah\blah')
2432 2433 <url path: '\\\\blah\\blah\\blah'>
2433 2434 >>> url(r'\\blah\blah\blah#baz')
2434 2435 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2435 2436 >>> url(r'file:///C:\users\me')
2436 2437 <url scheme: 'file', path: 'C:\\users\\me'>
2437 2438
2438 2439 Authentication credentials:
2439 2440
2440 2441 >>> url('ssh://joe:xyz@x/repo')
2441 2442 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2442 2443 >>> url('ssh://joe@x/repo')
2443 2444 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2444 2445
2445 2446 Query strings and fragments:
2446 2447
2447 2448 >>> url('http://host/a?b#c')
2448 2449 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2449 2450 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2450 2451 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2451 2452
2452 2453 Empty path:
2453 2454
2454 2455 >>> url('')
2455 2456 <url path: ''>
2456 2457 >>> url('#a')
2457 2458 <url path: '', fragment: 'a'>
2458 2459 >>> url('http://host/')
2459 2460 <url scheme: 'http', host: 'host', path: ''>
2460 2461 >>> url('http://host/#a')
2461 2462 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2462 2463
2463 2464 Only scheme:
2464 2465
2465 2466 >>> url('http:')
2466 2467 <url scheme: 'http'>
2467 2468 """
2468 2469
2469 2470 _safechars = "!~*'()+"
2470 2471 _safepchars = "/!~*'()+:\\"
2471 2472 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2472 2473
2473 2474 def __init__(self, path, parsequery=True, parsefragment=True):
2474 2475 # We slowly chomp away at path until we have only the path left
2475 2476 self.scheme = self.user = self.passwd = self.host = None
2476 2477 self.port = self.path = self.query = self.fragment = None
2477 2478 self._localpath = True
2478 2479 self._hostport = ''
2479 2480 self._origpath = path
2480 2481
2481 2482 if parsefragment and '#' in path:
2482 2483 path, self.fragment = path.split('#', 1)
2483 2484
2484 2485 # special case for Windows drive letters and UNC paths
2485 2486 if hasdriveletter(path) or path.startswith('\\\\'):
2486 2487 self.path = path
2487 2488 return
2488 2489
2489 2490 # For compatibility reasons, we can't handle bundle paths as
2490 2491 # normal URLs
2491 2492 if path.startswith('bundle:'):
2492 2493 self.scheme = 'bundle'
2493 2494 path = path[7:]
2494 2495 if path.startswith('//'):
2495 2496 path = path[2:]
2496 2497 self.path = path
2497 2498 return
2498 2499
2499 2500 if self._matchscheme(path):
2500 2501 parts = path.split(':', 1)
2501 2502 if parts[0]:
2502 2503 self.scheme, path = parts
2503 2504 self._localpath = False
2504 2505
2505 2506 if not path:
2506 2507 path = None
2507 2508 if self._localpath:
2508 2509 self.path = ''
2509 2510 return
2510 2511 else:
2511 2512 if self._localpath:
2512 2513 self.path = path
2513 2514 return
2514 2515
2515 2516 if parsequery and '?' in path:
2516 2517 path, self.query = path.split('?', 1)
2517 2518 if not path:
2518 2519 path = None
2519 2520 if not self.query:
2520 2521 self.query = None
2521 2522
2522 2523 # // is required to specify a host/authority
2523 2524 if path and path.startswith('//'):
2524 2525 parts = path[2:].split('/', 1)
2525 2526 if len(parts) > 1:
2526 2527 self.host, path = parts
2527 2528 else:
2528 2529 self.host = parts[0]
2529 2530 path = None
2530 2531 if not self.host:
2531 2532 self.host = None
2532 2533 # path of file:///d is /d
2533 2534 # path of file:///d:/ is d:/, not /d:/
2534 2535 if path and not hasdriveletter(path):
2535 2536 path = '/' + path
2536 2537
2537 2538 if self.host and '@' in self.host:
2538 2539 self.user, self.host = self.host.rsplit('@', 1)
2539 2540 if ':' in self.user:
2540 2541 self.user, self.passwd = self.user.split(':', 1)
2541 2542 if not self.host:
2542 2543 self.host = None
2543 2544
2544 2545 # Don't split on colons in IPv6 addresses without ports
2545 2546 if (self.host and ':' in self.host and
2546 2547 not (self.host.startswith('[') and self.host.endswith(']'))):
2547 2548 self._hostport = self.host
2548 2549 self.host, self.port = self.host.rsplit(':', 1)
2549 2550 if not self.host:
2550 2551 self.host = None
2551 2552
2552 2553 if (self.host and self.scheme == 'file' and
2553 2554 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2554 2555 raise Abort(_('file:// URLs can only refer to localhost'))
2555 2556
2556 2557 self.path = path
2557 2558
2558 2559 # leave the query string escaped
2559 2560 for a in ('user', 'passwd', 'host', 'port',
2560 2561 'path', 'fragment'):
2561 2562 v = getattr(self, a)
2562 2563 if v is not None:
2563 2564 setattr(self, a, pycompat.urlunquote(v))
2564 2565
2565 2566 def __repr__(self):
2566 2567 attrs = []
2567 2568 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2568 2569 'query', 'fragment'):
2569 2570 v = getattr(self, a)
2570 2571 if v is not None:
2571 2572 attrs.append('%s: %r' % (a, v))
2572 2573 return '<url %s>' % ', '.join(attrs)
2573 2574
2574 2575 def __str__(self):
2575 2576 r"""Join the URL's components back into a URL string.
2576 2577
2577 2578 Examples:
2578 2579
2579 2580 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2580 2581 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2581 2582 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2582 2583 'http://user:pw@host:80/?foo=bar&baz=42'
2583 2584 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2584 2585 'http://user:pw@host:80/?foo=bar%3dbaz'
2585 2586 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2586 2587 'ssh://user:pw@[::1]:2200//home/joe#'
2587 2588 >>> str(url('http://localhost:80//'))
2588 2589 'http://localhost:80//'
2589 2590 >>> str(url('http://localhost:80/'))
2590 2591 'http://localhost:80/'
2591 2592 >>> str(url('http://localhost:80'))
2592 2593 'http://localhost:80/'
2593 2594 >>> str(url('bundle:foo'))
2594 2595 'bundle:foo'
2595 2596 >>> str(url('bundle://../foo'))
2596 2597 'bundle:../foo'
2597 2598 >>> str(url('path'))
2598 2599 'path'
2599 2600 >>> str(url('file:///tmp/foo/bar'))
2600 2601 'file:///tmp/foo/bar'
2601 2602 >>> str(url('file:///c:/tmp/foo/bar'))
2602 2603 'file:///c:/tmp/foo/bar'
2603 2604 >>> print url(r'bundle:foo\bar')
2604 2605 bundle:foo\bar
2605 2606 >>> print url(r'file:///D:\data\hg')
2606 2607 file:///D:\data\hg
2607 2608 """
2608 2609 if self._localpath:
2609 2610 s = self.path
2610 2611 if self.scheme == 'bundle':
2611 2612 s = 'bundle:' + s
2612 2613 if self.fragment:
2613 2614 s += '#' + self.fragment
2614 2615 return s
2615 2616
2616 2617 s = self.scheme + ':'
2617 2618 if self.user or self.passwd or self.host:
2618 2619 s += '//'
2619 2620 elif self.scheme and (not self.path or self.path.startswith('/')
2620 2621 or hasdriveletter(self.path)):
2621 2622 s += '//'
2622 2623 if hasdriveletter(self.path):
2623 2624 s += '/'
2624 2625 if self.user:
2625 2626 s += urlreq.quote(self.user, safe=self._safechars)
2626 2627 if self.passwd:
2627 2628 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2628 2629 if self.user or self.passwd:
2629 2630 s += '@'
2630 2631 if self.host:
2631 2632 if not (self.host.startswith('[') and self.host.endswith(']')):
2632 2633 s += urlreq.quote(self.host)
2633 2634 else:
2634 2635 s += self.host
2635 2636 if self.port:
2636 2637 s += ':' + urlreq.quote(self.port)
2637 2638 if self.host:
2638 2639 s += '/'
2639 2640 if self.path:
2640 2641 # TODO: similar to the query string, we should not unescape the
2641 2642 # path when we store it, the path might contain '%2f' = '/',
2642 2643 # which we should *not* escape.
2643 2644 s += urlreq.quote(self.path, safe=self._safepchars)
2644 2645 if self.query:
2645 2646 # we store the query in escaped form.
2646 2647 s += '?' + self.query
2647 2648 if self.fragment is not None:
2648 2649 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2649 2650 return s
2650 2651
2651 2652 def authinfo(self):
2652 2653 user, passwd = self.user, self.passwd
2653 2654 try:
2654 2655 self.user, self.passwd = None, None
2655 2656 s = str(self)
2656 2657 finally:
2657 2658 self.user, self.passwd = user, passwd
2658 2659 if not self.user:
2659 2660 return (s, None)
2660 2661 # authinfo[1] is passed to urllib2 password manager, and its
2661 2662 # URIs must not contain credentials. The host is passed in the
2662 2663 # URIs list because Python < 2.4.3 uses only that to search for
2663 2664 # a password.
2664 2665 return (s, (None, (s, self.host),
2665 2666 self.user, self.passwd or ''))
2666 2667
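For illustration, with a hypothetical host, authinfo() returns the URL string with the credentials stripped plus the credentials themselves in the urllib2-style tuple described above:

>>> u = url('http://joe:xyz@example.com/repo')
>>> u.authinfo()
('http://example.com/repo', (None, ('http://example.com/repo', 'example.com'), 'joe', 'xyz'))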
2667 2668 def isabs(self):
2668 2669 if self.scheme and self.scheme != 'file':
2669 2670 return True # remote URL
2670 2671 if hasdriveletter(self.path):
2671 2672 return True # absolute for our purposes - can't be joined()
2672 2673 if self.path.startswith(r'\\'):
2673 2674 return True # Windows UNC path
2674 2675 if self.path.startswith('/'):
2675 2676 return True # POSIX-style
2676 2677 return False
2677 2678
2678 2679 def localpath(self):
2679 2680 if self.scheme == 'file' or self.scheme == 'bundle':
2680 2681 path = self.path or '/'
2681 2682 # For Windows, we need to promote hosts containing drive
2682 2683 # letters to paths with drive letters.
2683 2684 if hasdriveletter(self._hostport):
2684 2685 path = self._hostport + '/' + self.path
2685 2686 elif (self.host is not None and self.path
2686 2687 and not hasdriveletter(path)):
2687 2688 path = '/' + path
2688 2689 return path
2689 2690 return self._origpath
2690 2691
2691 2692 def islocal(self):
2692 2693 '''whether localpath will return something that posixfile can open'''
2693 2694 return (not self.scheme or self.scheme == 'file'
2694 2695 or self.scheme == 'bundle')
2695 2696
2696 2697 def hasscheme(path):
2697 2698 return bool(url(path).scheme)
2698 2699
2699 2700 def hasdriveletter(path):
2700 2701 return path and path[1:2] == ':' and path[0:1].isalpha()
2701 2702
2702 2703 def urllocalpath(path):
2703 2704 return url(path, parsequery=False, parsefragment=False).localpath()
2704 2705
2705 2706 def hidepassword(u):
2706 2707 '''hide user credential in a url string'''
2707 2708 u = url(u)
2708 2709 if u.passwd:
2709 2710 u.passwd = '***'
2710 2711 return str(u)
2711 2712
2712 2713 def removeauth(u):
2713 2714 '''remove all authentication information from a url string'''
2714 2715 u = url(u)
2715 2716 u.user = u.passwd = None
2716 2717 return str(u)
2717 2718
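A sketch of the two helpers above on a hypothetical URL:

>>> hidepassword('http://alice:secret@example.com/repo')
'http://alice:***@example.com/repo'
>>> removeauth('http://alice:secret@example.com/repo')
'http://example.com/repo'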
2718 2719 def isatty(fp):
2719 2720 try:
2720 2721 return fp.isatty()
2721 2722 except AttributeError:
2722 2723 return False
2723 2724
2724 2725 timecount = unitcountfn(
2725 2726 (1, 1e3, _('%.0f s')),
2726 2727 (100, 1, _('%.1f s')),
2727 2728 (10, 1, _('%.2f s')),
2728 2729 (1, 1, _('%.3f s')),
2729 2730 (100, 0.001, _('%.1f ms')),
2730 2731 (10, 0.001, _('%.2f ms')),
2731 2732 (1, 0.001, _('%.3f ms')),
2732 2733 (100, 0.000001, _('%.1f us')),
2733 2734 (10, 0.000001, _('%.2f us')),
2734 2735 (1, 0.000001, _('%.3f us')),
2735 2736 (100, 0.000000001, _('%.1f ns')),
2736 2737 (10, 0.000000001, _('%.2f ns')),
2737 2738 (1, 0.000000001, _('%.3f ns')),
2738 2739 )
2739 2740
2740 2741 _timenesting = [0]
2741 2742
2742 2743 def timed(func):
2743 2744 '''Report the execution time of a function call to stderr.
2744 2745
2745 2746 During development, use as a decorator when you need to measure
2746 2747 the cost of a function, e.g. as follows:
2747 2748
2748 2749 @util.timed
2749 2750 def foo(a, b, c):
2750 2751 pass
2751 2752 '''
2752 2753
2753 2754 def wrapper(*args, **kwargs):
2754 2755 start = time.time()
2755 2756 indent = 2
2756 2757 _timenesting[0] += indent
2757 2758 try:
2758 2759 return func(*args, **kwargs)
2759 2760 finally:
2760 2761 elapsed = time.time() - start
2761 2762 _timenesting[0] -= indent
2762 2763 stderr.write('%s%s: %s\n' %
2763 2764 (' ' * _timenesting[0], func.__name__,
2764 2765 timecount(elapsed)))
2765 2766 return wrapper
2766 2767
2767 2768 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2768 2769 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2769 2770
2770 2771 def sizetoint(s):
2771 2772 '''Convert a space specifier to a byte count.
2772 2773
2773 2774 >>> sizetoint('30')
2774 2775 30
2775 2776 >>> sizetoint('2.2kb')
2776 2777 2252
2777 2778 >>> sizetoint('6M')
2778 2779 6291456
2779 2780 '''
2780 2781 t = s.strip().lower()
2781 2782 try:
2782 2783 for k, u in _sizeunits:
2783 2784 if t.endswith(k):
2784 2785 return int(float(t[:-len(k)]) * u)
2785 2786 return int(t)
2786 2787 except ValueError:
2787 2788 raise error.ParseError(_("couldn't parse size: %s") % s)
2788 2789
2789 2790 class hooks(object):
2790 2791 '''A collection of hook functions that can be used to extend a
2791 2792 function's behavior. Hooks are called in lexicographic order,
2792 2793 based on the names of their sources.'''
2793 2794
2794 2795 def __init__(self):
2795 2796 self._hooks = []
2796 2797
2797 2798 def add(self, source, hook):
2798 2799 self._hooks.append((source, hook))
2799 2800
2800 2801 def __call__(self, *args):
2801 2802 self._hooks.sort(key=lambda x: x[0])
2802 2803 results = []
2803 2804 for source, hook in self._hooks:
2804 2805 results.append(hook(*args))
2805 2806 return results
2806 2807
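A minimal sketch of the hooks class; the source names are arbitrary labels and only determine the call order:

>>> h = hooks()
>>> h.add('second', lambda v: v * 2)
>>> h.add('first', lambda v: v + 1)
>>> h(3)
[4, 6]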
2807 2808 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'):
2808 2809 '''Yields lines for a nicely formatted stacktrace.
2809 2810 Skips the last 'skip' entries.
2810 2811 Each file+linenumber is formatted according to fileline.
2811 2812 Each line is formatted according to line.
2812 2813 If line is None, it yields:
2813 2814 length of longest filepath+line number,
2814 2815 filepath+linenumber,
2815 2816 function
2816 2817
2817 2818 Not to be used in production code but very convenient while developing.
2818 2819 '''
2819 2820 entries = [(fileline % (fn, ln), func)
2820 2821 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2821 2822 if entries:
2822 2823 fnmax = max(len(entry[0]) for entry in entries)
2823 2824 for fnln, func in entries:
2824 2825 if line is None:
2825 2826 yield (fnmax, fnln, func)
2826 2827 else:
2827 2828 yield line % (fnmax, fnln, func)
2828 2829
2829 2830 def debugstacktrace(msg='stacktrace', skip=0, f=stderr, otherf=stdout):
2830 2831 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2831 2832 Skips the last 'skip' entries. By default it will flush stdout first.
2832 2833 It can be used everywhere and intentionally does not require a ui object.
2833 2834 Not to be used in production code but very convenient while developing.
2834 2835 '''
2835 2836 if otherf:
2836 2837 otherf.flush()
2837 2838 f.write('%s at:\n' % msg)
2838 2839 for line in getstackframes(skip + 1):
2839 2840 f.write(line)
2840 2841 f.flush()
2841 2842
2842 2843 class dirs(object):
2843 2844 '''a multiset of directory names from a dirstate or manifest'''
2844 2845
2845 2846 def __init__(self, map, skip=None):
2846 2847 self._dirs = {}
2847 2848 addpath = self.addpath
2848 2849 if safehasattr(map, 'iteritems') and skip is not None:
2849 2850 for f, s in map.iteritems():
2850 2851 if s[0] != skip:
2851 2852 addpath(f)
2852 2853 else:
2853 2854 for f in map:
2854 2855 addpath(f)
2855 2856
2856 2857 def addpath(self, path):
2857 2858 dirs = self._dirs
2858 2859 for base in finddirs(path):
2859 2860 if base in dirs:
2860 2861 dirs[base] += 1
2861 2862 return
2862 2863 dirs[base] = 1
2863 2864
2864 2865 def delpath(self, path):
2865 2866 dirs = self._dirs
2866 2867 for base in finddirs(path):
2867 2868 if dirs[base] > 1:
2868 2869 dirs[base] -= 1
2869 2870 return
2870 2871 del dirs[base]
2871 2872
2872 2873 def __iter__(self):
2873 2874 return self._dirs.iterkeys()
2874 2875
2875 2876 def __contains__(self, d):
2876 2877 return d in self._dirs
2877 2878
2878 2879 if safehasattr(parsers, 'dirs'):
2879 2880 dirs = parsers.dirs
2880 2881
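A sketch of the pure-Python dirs fallback shown above (the C implementation from parsers, when available, replaces it):

>>> d = dirs(['a/b/c', 'a/d'])
>>> sorted(d)
['a', 'a/b']
>>> 'a/b' in d
True
>>> d.delpath('a/b/c')
>>> 'a/b' in d
False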
2881 2882 def finddirs(path):
2882 2883 pos = path.rfind('/')
2883 2884 while pos != -1:
2884 2885 yield path[:pos]
2885 2886 pos = path.rfind('/', 0, pos)
2886 2887
2887 2888 class ctxmanager(object):
2888 2889 '''A context manager for use in 'with' blocks to allow multiple
2889 2890 contexts to be entered at once. This is both safer and more
2890 2891 flexible than contextlib.nested.
2891 2892
2892 2893 Once Mercurial supports Python 2.7+, this will become mostly
2893 2894 unnecessary.
2894 2895 '''
2895 2896
2896 2897 def __init__(self, *args):
2897 2898 '''Accepts a list of no-argument functions that return context
2898 2899 managers. These will be invoked when enter() is called.'''
2899 2900 self._pending = args
2900 2901 self._atexit = []
2901 2902
2902 2903 def __enter__(self):
2903 2904 return self
2904 2905
2905 2906 def enter(self):
2906 2907 '''Create and enter context managers in the order in which they were
2907 2908 passed to the constructor.'''
2908 2909 values = []
2909 2910 for func in self._pending:
2910 2911 obj = func()
2911 2912 values.append(obj.__enter__())
2912 2913 self._atexit.append(obj.__exit__)
2913 2914 del self._pending
2914 2915 return values
2915 2916
2916 2917 def atexit(self, func, *args, **kwargs):
2917 2918 '''Add a function to call when this context manager exits. The
2918 2919 ordering of multiple atexit calls is unspecified, save that
2919 2920 they will happen before any __exit__ functions.'''
2920 2921 def wrapper(exc_type, exc_val, exc_tb):
2921 2922 func(*args, **kwargs)
2922 2923 self._atexit.append(wrapper)
2923 2924 return func
2924 2925
2925 2926 def __exit__(self, exc_type, exc_val, exc_tb):
2926 2927 '''Context managers are exited in the reverse order from which
2927 2928 they were created.'''
2928 2929 received = exc_type is not None
2929 2930 suppressed = False
2930 2931 pending = None
2931 2932 self._atexit.reverse()
2932 2933 for exitfunc in self._atexit:
2933 2934 try:
2934 2935 if exitfunc(exc_type, exc_val, exc_tb):
2935 2936 suppressed = True
2936 2937 exc_type = None
2937 2938 exc_val = None
2938 2939 exc_tb = None
2939 2940 except BaseException:
2940 2941 pending = sys.exc_info()
2941 2942 exc_type, exc_val, exc_tb = pending = sys.exc_info()
2942 2943 del self._atexit
2943 2944 if pending:
2944 2945 raise exc_val
2945 2946 return received and suppressed
2946 2947
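A minimal sketch of ctxmanager usage with hypothetical file names; enter() returns the values produced by each __enter__, and the managers are exited in reverse order when the with block ends:

with ctxmanager(lambda: open('a.txt'), lambda: open('b.txt')) as c:
    fa, fb = c.enter()
    data = fa.read() + fb.read()
# leaving the block closes fb first, then fa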
2947 2948 # compression code
2948 2949
2949 2950 class compressormanager(object):
2950 2951 """Holds registrations of various compression engines.
2951 2952
2952 2953 This class essentially abstracts the differences between compression
2953 2954 engines to allow new compression formats to be added easily, possibly from
2954 2955 extensions.
2955 2956
2956 2957 Compressors are registered against the global instance by calling its
2957 2958 ``register()`` method.
2958 2959 """
2959 2960 def __init__(self):
2960 2961 self._engines = {}
2961 2962 # Bundle spec human name to engine name.
2962 2963 self._bundlenames = {}
2963 2964 # Internal bundle identifier to engine name.
2964 2965 self._bundletypes = {}
2965 2966
2966 2967 def __getitem__(self, key):
2967 2968 return self._engines[key]
2968 2969
2969 2970 def __contains__(self, key):
2970 2971 return key in self._engines
2971 2972
2972 2973 def __iter__(self):
2973 2974 return iter(self._engines.keys())
2974 2975
2975 2976 def register(self, engine):
2976 2977 """Register a compression engine with the manager.
2977 2978
2978 2979 The argument must be a ``compressionengine`` instance.
2979 2980 """
2980 2981 if not isinstance(engine, compressionengine):
2981 2982 raise ValueError(_('argument must be a compressionengine'))
2982 2983
2983 2984 name = engine.name()
2984 2985
2985 2986 if name in self._engines:
2986 2987 raise error.Abort(_('compression engine %s already registered') %
2987 2988 name)
2988 2989
2989 2990 bundleinfo = engine.bundletype()
2990 2991 if bundleinfo:
2991 2992 bundlename, bundletype = bundleinfo
2992 2993
2993 2994 if bundlename in self._bundlenames:
2994 2995 raise error.Abort(_('bundle name %s already registered') %
2995 2996 bundlename)
2996 2997 if bundletype in self._bundletypes:
2997 2998 raise error.Abort(_('bundle type %s already registered by %s') %
2998 2999 (bundletype, self._bundletypes[bundletype]))
2999 3000
3000 3001 # Only register an external-facing name if one was declared.
3001 3002 if bundlename:
3002 3003 self._bundlenames[bundlename] = name
3003 3004
3004 3005 self._bundletypes[bundletype] = name
3005 3006
3006 3007 self._engines[name] = engine
3007 3008
3008 3009 @property
3009 3010 def supportedbundlenames(self):
3010 3011 return set(self._bundlenames.keys())
3011 3012
3012 3013 @property
3013 3014 def supportedbundletypes(self):
3014 3015 return set(self._bundletypes.keys())
3015 3016
3016 3017 def forbundlename(self, bundlename):
3017 3018 """Obtain a compression engine registered to a bundle name.
3018 3019
3019 3020 Will raise KeyError if the bundle type isn't registered.
3020 3021
3021 3022 Will abort if the engine is known but not available.
3022 3023 """
3023 3024 engine = self._engines[self._bundlenames[bundlename]]
3024 3025 if not engine.available():
3025 3026 raise error.Abort(_('compression engine %s could not be loaded') %
3026 3027 engine.name())
3027 3028 return engine
3028 3029
3029 3030 def forbundletype(self, bundletype):
3030 3031 """Obtain a compression engine registered to a bundle type.
3031 3032
3032 3033 Will raise KeyError if the bundle type isn't registered.
3033 3034
3034 3035 Will abort if the engine is known but not available.
3035 3036 """
3036 3037 engine = self._engines[self._bundletypes[bundletype]]
3037 3038 if not engine.available():
3038 3039 raise error.Abort(_('compression engine %s could not be loaded') %
3039 3040 engine.name())
3040 3041 return engine
3041 3042
3042 3043 compengines = compressormanager()
3043 3044
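A sketch of looking up an engine on the global manager by its internal bundle identifier and streaming data through it; 'UN' is the identifier of the no-op engine registered further down:

>>> engine = compengines.forbundletype('UN')
>>> ''.join(engine.compressstream(['some ', 'data']))
'some data'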
3044 3045 class compressionengine(object):
3045 3046 """Base class for compression engines.
3046 3047
3047 3048 Compression engines must implement the interface defined by this class.
3048 3049 """
3049 3050 def name(self):
3050 3051 """Returns the name of the compression engine.
3051 3052
3052 3053 This is the key the engine is registered under.
3053 3054
3054 3055 This method must be implemented.
3055 3056 """
3056 3057 raise NotImplementedError()
3057 3058
3058 3059 def available(self):
3059 3060 """Whether the compression engine is available.
3060 3061
3061 3062 The intent of this method is to allow optional compression engines
3062 3063 that may not be available in all installations (such as engines relying
3063 3064 on C extensions that may not be present).
3064 3065 """
3065 3066 return True
3066 3067
3067 3068 def bundletype(self):
3068 3069 """Describes bundle identifiers for this engine.
3069 3070
3070 3071 If this compression engine isn't supported for bundles, returns None.
3071 3072
3072 3073 If this engine can be used for bundles, returns a 2-tuple of strings of
3073 3074 the user-facing "bundle spec" compression name and an internal
3074 3075 identifier used to denote the compression format within bundles. To
3075 3076 exclude the name from external usage, set the first element to ``None``.
3076 3077
3077 3078 If bundle compression is supported, the class must also implement
3078 3079 ``compressstream`` and ``decompressorreader``.
3079 3080 """
3080 3081 return None
3081 3082
3082 3083 def compressstream(self, it, opts=None):
3083 3084 """Compress an iterator of chunks.
3084 3085
3085 3086 The method receives an iterator (ideally a generator) of chunks of
3086 3087 bytes to be compressed. It returns an iterator (ideally a generator)
3087 3088 of chunks of bytes representing the compressed output.
3088 3089
3089 3090 Optionally accepts an argument defining how to perform compression.
3090 3091 Each engine treats this argument differently.
3091 3092 """
3092 3093 raise NotImplementedError()
3093 3094
3094 3095 def decompressorreader(self, fh):
3095 3096 """Perform decompression on a file object.
3096 3097
3097 3098 Argument is an object with a ``read(size)`` method that returns
3098 3099 compressed data. Return value is an object with a ``read(size)`` method
3099 3100 that returns uncompressed data.
3100 3101 """
3101 3102 raise NotImplementedError()
3102 3103
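A hedged sketch of how an extension might plug in an additional engine; the 'myengine' name and 'MYENG' identifier are hypothetical and not part of Mercurial:

class _myengine(compressionengine):
    def name(self):
        return 'myengine'

    def bundletype(self):
        # None hides the engine from user-facing bundle specs;
        # 'MYENG' is the internal identifier written into bundles.
        return None, 'MYENG'

    def compressstream(self, it, opts=None):
        # pass chunks through unchanged
        return it

    def decompressorreader(self, fh):
        return fh

compengines.register(_myengine())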
3103 3104 class _zlibengine(compressionengine):
3104 3105 def name(self):
3105 3106 return 'zlib'
3106 3107
3107 3108 def bundletype(self):
3108 3109 return 'gzip', 'GZ'
3109 3110
3110 3111 def compressstream(self, it, opts=None):
3111 3112 opts = opts or {}
3112 3113
3113 3114 z = zlib.compressobj(opts.get('level', -1))
3114 3115 for chunk in it:
3115 3116 data = z.compress(chunk)
3116 3117 # Not all calls to compress emit data. It is cheaper to inspect
3117 3118 # here than to feed empty chunks through the generator.
3118 3119 if data:
3119 3120 yield data
3120 3121
3121 3122 yield z.flush()
3122 3123
3123 3124 def decompressorreader(self, fh):
3124 3125 def gen():
3125 3126 d = zlib.decompressobj()
3126 3127 for chunk in filechunkiter(fh):
3127 3128 while chunk:
3128 3129 # Limit output size to limit memory.
3129 3130 yield d.decompress(chunk, 2 ** 18)
3130 3131 chunk = d.unconsumed_tail
3131 3132
3132 3133 return chunkbuffer(gen())
3133 3134
3134 3135 compengines.register(_zlibengine())
3135 3136
3136 3137 class _bz2engine(compressionengine):
3137 3138 def name(self):
3138 3139 return 'bz2'
3139 3140
3140 3141 def bundletype(self):
3141 3142 return 'bzip2', 'BZ'
3142 3143
3143 3144 def compressstream(self, it, opts=None):
3144 3145 opts = opts or {}
3145 3146 z = bz2.BZ2Compressor(opts.get('level', 9))
3146 3147 for chunk in it:
3147 3148 data = z.compress(chunk)
3148 3149 if data:
3149 3150 yield data
3150 3151
3151 3152 yield z.flush()
3152 3153
3153 3154 def decompressorreader(self, fh):
3154 3155 def gen():
3155 3156 d = bz2.BZ2Decompressor()
3156 3157 for chunk in filechunkiter(fh):
3157 3158 yield d.decompress(chunk)
3158 3159
3159 3160 return chunkbuffer(gen())
3160 3161
3161 3162 compengines.register(_bz2engine())
3162 3163
3163 3164 class _truncatedbz2engine(compressionengine):
3164 3165 def name(self):
3165 3166 return 'bz2truncated'
3166 3167
3167 3168 def bundletype(self):
3168 3169 return None, '_truncatedBZ'
3169 3170
3170 3171 # We don't implement compressstream because it is hackily handled elsewhere.
3171 3172
3172 3173 def decompressorreader(self, fh):
3173 3174 def gen():
3174 3175 # The input stream doesn't have the 'BZ' header. So add it back.
3175 3176 d = bz2.BZ2Decompressor()
3176 3177 d.decompress('BZ')
3177 3178 for chunk in filechunkiter(fh):
3178 3179 yield d.decompress(chunk)
3179 3180
3180 3181 return chunkbuffer(gen())
3181 3182
3182 3183 compengines.register(_truncatedbz2engine())
3183 3184
3184 3185 class _noopengine(compressionengine):
3185 3186 def name(self):
3186 3187 return 'none'
3187 3188
3188 3189 def bundletype(self):
3189 3190 return 'none', 'UN'
3190 3191
3191 3192 def compressstream(self, it, opts=None):
3192 3193 return it
3193 3194
3194 3195 def decompressorreader(self, fh):
3195 3196 return fh
3196 3197
3197 3198 compengines.register(_noopengine())
3198 3199
3199 3200 class _zstdengine(compressionengine):
3200 3201 def name(self):
3201 3202 return 'zstd'
3202 3203
3203 3204 @propertycache
3204 3205 def _module(self):
3205 3206 # Not all installs have the zstd module available. So defer importing
3206 3207 # until first access.
3207 3208 try:
3208 3209 from . import zstd
3209 3210 # Force delayed import.
3210 3211 zstd.__version__
3211 3212 return zstd
3212 3213 except ImportError:
3213 3214 return None
3214 3215
3215 3216 def available(self):
3216 3217 return bool(self._module)
3217 3218
3218 3219 def bundletype(self):
3219 3220 return 'zstd', 'ZS'
3220 3221
3221 3222 def compressstream(self, it, opts=None):
3222 3223 opts = opts or {}
3223 3224 # zstd level 3 is almost always significantly faster than zlib
3224 3225 # while providing no worse compression. It strikes a good balance
3225 3226 # between speed and compression.
3226 3227 level = opts.get('level', 3)
3227 3228
3228 3229 zstd = self._module
3229 3230 z = zstd.ZstdCompressor(level=level).compressobj()
3230 3231 for chunk in it:
3231 3232 data = z.compress(chunk)
3232 3233 if data:
3233 3234 yield data
3234 3235
3235 3236 yield z.flush()
3236 3237
3237 3238 def decompressorreader(self, fh):
3238 3239 zstd = self._module
3239 3240 dctx = zstd.ZstdDecompressor()
3240 3241 return chunkbuffer(dctx.read_from(fh))
3241 3242
3242 3243 compengines.register(_zstdengine())
3243 3244
3244 3245 # convenient shortcut
3245 3246 dst = debugstacktrace