add -f/--force to pull, incoming, outgoing, to work on unrelated repo....
Vadim Gelfer
r1959:d53a18f5 default
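This changeset threads the new -f/--force flag into changeset discovery so the affected commands can operate on an unrelated repository (one that shares no common history). Within the hunk below, the visible change is in the bundle command, where repo.findoutgoing gains a force argument taken from the command options. A minimal sketch of the pattern, written as a hypothetical helper (the names repo, other and opts mirror the command functions below; it is illustrative, not part of the changeset):

    def bundle_outgoing(repo, other, opts):
        # Pass the user's --force flag through to outgoing-changeset discovery
        # so it no longer aborts when the two repositories are unrelated.
        o = repo.findoutgoing(other, force=opts['force'])
        return repo.changegroup(o, 'bundle')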
@@ -1,3242 +1,3249 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20
21 21 def filterfiles(filters, files):
22 22 l = [x for x in files if x in filters]
23 23
24 24 for t in filters:
25 25 if t and t[-1] != "/":
26 26 t += "/"
27 27 l += [x for x in files if x.startswith(t)]
28 28 return l
29 29
30 30 def relpath(repo, args):
31 31 cwd = repo.getcwd()
32 32 if cwd:
33 33 return [util.normpath(os.path.join(cwd, x)) for x in args]
34 34 return args
35 35
36 36 def matchpats(repo, pats=[], opts={}, head=''):
37 37 cwd = repo.getcwd()
38 38 if not pats and cwd:
39 39 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
40 40 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
41 41 cwd = ''
42 42 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
43 43 opts.get('exclude'), head)
44 44
45 45 def makewalk(repo, pats, opts, node=None, head=''):
46 46 files, matchfn, anypats = matchpats(repo, pats, opts, head)
47 47 exact = dict(zip(files, files))
48 48 def walk():
49 49 for src, fn in repo.walk(node=node, files=files, match=matchfn):
50 50 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
51 51 return files, matchfn, walk()
52 52
53 53 def walk(repo, pats, opts, node=None, head=''):
54 54 files, matchfn, results = makewalk(repo, pats, opts, node, head)
55 55 for r in results:
56 56 yield r
57 57
58 58 def walkchangerevs(ui, repo, pats, opts):
59 59 '''Iterate over files and the revs they changed in.
60 60
61 61 Callers most commonly need to iterate backwards over the history
62 62 they are interested in. Doing so has awful (quadratic-looking)
63 63 performance, so we use iterators in a "windowed" way.
64 64
65 65 We walk a window of revisions in the desired order. Within the
66 66 window, we first walk forwards to gather data, then in the desired
67 67 order (usually backwards) to display it.
68 68
69 69 This function returns an (iterator, getchange, matchfn) tuple. The
70 70 getchange function returns the changelog entry for a numeric
71 71 revision. The iterator yields 3-tuples. They will be of one of
72 72 the following forms:
73 73
74 74 "window", incrementing, lastrev: stepping through a window,
75 75 positive if walking forwards through revs, last rev in the
76 76 sequence iterated over - use to reset state for the current window
77 77
78 78 "add", rev, fns: out-of-order traversal of the given file names
79 79 fns, which changed during revision rev - use to gather data for
80 80 possible display
81 81
82 82 "iter", rev, None: in-order traversal of the revs earlier iterated
83 83 over with "add" - use to display data'''
84 84
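    # Illustrative values (not part of the original source): increasing_windows(0, 100)
    # yields (start, size) pairs (0, 8), (8, 16), (24, 32), (56, 44); the window size
    # doubles each step up to sizelimit. Walking backwards, increasing_windows(99, -1)
    # yields (99, 8), (91, 16), (75, 32), and so on.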
85 85 def increasing_windows(start, end, windowsize=8, sizelimit=512):
86 86 if start < end:
87 87 while start < end:
88 88 yield start, min(windowsize, end-start)
89 89 start += windowsize
90 90 if windowsize < sizelimit:
91 91 windowsize *= 2
92 92 else:
93 93 while start > end:
94 94 yield start, min(windowsize, start-end-1)
95 95 start -= windowsize
96 96 if windowsize < sizelimit:
97 97 windowsize *= 2
98 98
99 99
100 100 files, matchfn, anypats = matchpats(repo, pats, opts)
101 101
102 102 if repo.changelog.count() == 0:
103 103 return [], False, matchfn
104 104
105 105 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
106 106 wanted = {}
107 107 slowpath = anypats
108 108 fncache = {}
109 109
110 110 chcache = {}
111 111 def getchange(rev):
112 112 ch = chcache.get(rev)
113 113 if ch is None:
114 114 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
115 115 return ch
116 116
117 117 if not slowpath and not files:
118 118 # No files, no patterns. Display all revs.
119 119 wanted = dict(zip(revs, revs))
120 120 if not slowpath:
121 121 # Only files, no patterns. Check the history of each file.
122 122 def filerevgen(filelog):
123 123 for i, window in increasing_windows(filelog.count()-1, -1):
124 124 revs = []
125 125 for j in xrange(i - window, i + 1):
126 126 revs.append(filelog.linkrev(filelog.node(j)))
127 127 revs.reverse()
128 128 for rev in revs:
129 129 yield rev
130 130
131 131 minrev, maxrev = min(revs), max(revs)
132 132 for file_ in files:
133 133 filelog = repo.file(file_)
134 134 # A zero count may be a directory or deleted file, so
135 135 # try to find matching entries on the slow path.
136 136 if filelog.count() == 0:
137 137 slowpath = True
138 138 break
139 139 for rev in filerevgen(filelog):
140 140 if rev <= maxrev:
141 141 if rev < minrev:
142 142 break
143 143 fncache.setdefault(rev, [])
144 144 fncache[rev].append(file_)
145 145 wanted[rev] = 1
146 146 if slowpath:
147 147 # The slow path checks files modified in every changeset.
148 148 def changerevgen():
149 149 for i, window in increasing_windows(repo.changelog.count()-1, -1):
150 150 for j in xrange(i - window, i + 1):
151 151 yield j, getchange(j)[3]
152 152
153 153 for rev, changefiles in changerevgen():
154 154 matches = filter(matchfn, changefiles)
155 155 if matches:
156 156 fncache[rev] = matches
157 157 wanted[rev] = 1
158 158
159 159 def iterate():
160 160 for i, window in increasing_windows(0, len(revs)):
161 161 yield 'window', revs[0] < revs[-1], revs[-1]
162 162 nrevs = [rev for rev in revs[i:i+window]
163 163 if rev in wanted]
164 164 srevs = list(nrevs)
165 165 srevs.sort()
166 166 for rev in srevs:
167 167 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
168 168 yield 'add', rev, fns
169 169 for rev in nrevs:
170 170 yield 'iter', rev, None
171 171 return iterate(), getchange, matchfn
172 172
173 173 revrangesep = ':'
174 174
175 175 def revrange(ui, repo, revs, revlog=None):
176 176 """Yield revisions as strings from a list of revision specifications."""
177 177 if revlog is None:
178 178 revlog = repo.changelog
179 179 revcount = revlog.count()
180 180 def fix(val, defval):
181 181 if not val:
182 182 return defval
183 183 try:
184 184 num = int(val)
185 185 if str(num) != val:
186 186 raise ValueError
187 187 if num < 0:
188 188 num += revcount
189 189 if num < 0:
190 190 num = 0
191 191 elif num >= revcount:
192 192 raise ValueError
193 193 except ValueError:
194 194 try:
195 195 num = repo.changelog.rev(repo.lookup(val))
196 196 except KeyError:
197 197 try:
198 198 num = revlog.rev(revlog.lookup(val))
199 199 except KeyError:
200 200 raise util.Abort(_('invalid revision identifier %s'), val)
201 201 return num
202 202 seen = {}
203 203 for spec in revs:
204 204 if spec.find(revrangesep) >= 0:
205 205 start, end = spec.split(revrangesep, 1)
206 206 start = fix(start, 0)
207 207 end = fix(end, revcount - 1)
208 208 step = start > end and -1 or 1
209 209 for rev in xrange(start, end+step, step):
210 210 if rev in seen:
211 211 continue
212 212 seen[rev] = 1
213 213 yield str(rev)
214 214 else:
215 215 rev = fix(spec, None)
216 216 if rev in seen:
217 217 continue
218 218 seen[rev] = 1
219 219 yield str(rev)
220 220
221 221 def make_filename(repo, r, pat, node=None,
222 222 total=None, seqno=None, revwidth=None, pathname=None):
223 223 node_expander = {
224 224 'H': lambda: hex(node),
225 225 'R': lambda: str(r.rev(node)),
226 226 'h': lambda: short(node),
227 227 }
228 228 expander = {
229 229 '%': lambda: '%',
230 230 'b': lambda: os.path.basename(repo.root),
231 231 }
232 232
233 233 try:
234 234 if node:
235 235 expander.update(node_expander)
236 236 if node and revwidth is not None:
237 237 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
238 238 if total is not None:
239 239 expander['N'] = lambda: str(total)
240 240 if seqno is not None:
241 241 expander['n'] = lambda: str(seqno)
242 242 if total is not None and seqno is not None:
243 243 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
244 244 if pathname is not None:
245 245 expander['s'] = lambda: os.path.basename(pathname)
246 246 expander['d'] = lambda: os.path.dirname(pathname) or '.'
247 247 expander['p'] = lambda: pathname
248 248
249 249 newname = []
250 250 patlen = len(pat)
251 251 i = 0
252 252 while i < patlen:
253 253 c = pat[i]
254 254 if c == '%':
255 255 i += 1
256 256 c = pat[i]
257 257 c = expander[c]()
258 258 newname.append(c)
259 259 i += 1
260 260 return ''.join(newname)
261 261 except KeyError, inst:
262 262 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
263 263 inst.args[0])
264 264
265 265 def make_file(repo, r, pat, node=None,
266 266 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
267 267 if not pat or pat == '-':
268 268 return 'w' in mode and sys.stdout or sys.stdin
269 269 if hasattr(pat, 'write') and 'w' in mode:
270 270 return pat
271 271 if hasattr(pat, 'read') and 'r' in mode:
272 272 return pat
273 273 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
274 274 pathname),
275 275 mode)
276 276
277 277 def write_bundle(cg, filename, compress=True, fh=None):
278 278 if fh is None:
279 279 fh = open(filename, "wb")
280 280
281 281 class nocompress(object):
282 282 def compress(self, x):
283 283 return x
284 284 def flush(self):
285 285 return ""
286 286 try:
287 287 if compress:
288 288 fh.write("HG10")
289 289 z = bz2.BZ2Compressor(9)
290 290 else:
291 291 fh.write("HG11")
292 292 z = nocompress()
293 293 while 1:
294 294 chunk = cg.read(4096)
295 295 if not chunk:
296 296 break
297 297 fh.write(z.compress(chunk))
298 298 fh.write(z.flush())
299 299 except:
300 300 os.unlink(filename)
301 301 raise
302 302
303 303 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
304 304 changes=None, text=False, opts={}):
305 305 if not node1:
306 306 node1 = repo.dirstate.parents()[0]
307 307 # reading the data for node1 early allows it to play nicely
308 308 # with repo.changes and the revlog cache.
309 309 change = repo.changelog.read(node1)
310 310 mmap = repo.manifest.read(change[0])
311 311 date1 = util.datestr(change[2])
312 312
313 313 if not changes:
314 314 changes = repo.changes(node1, node2, files, match=match)
315 315 modified, added, removed, deleted, unknown = changes
316 316 if files:
317 317 modified, added, removed = map(lambda x: filterfiles(files, x),
318 318 (modified, added, removed))
319 319
320 320 if not modified and not added and not removed:
321 321 return
322 322
323 323 if node2:
324 324 change = repo.changelog.read(node2)
325 325 mmap2 = repo.manifest.read(change[0])
326 326 date2 = util.datestr(change[2])
327 327 def read(f):
328 328 return repo.file(f).read(mmap2[f])
329 329 else:
330 330 date2 = util.datestr()
331 331 def read(f):
332 332 return repo.wread(f)
333 333
334 334 if ui.quiet:
335 335 r = None
336 336 else:
337 337 hexfunc = ui.verbose and hex or short
338 338 r = [hexfunc(node) for node in [node1, node2] if node]
339 339
340 340 diffopts = ui.diffopts()
341 341 showfunc = opts.get('show_function') or diffopts['showfunc']
342 342 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
343 343 for f in modified:
344 344 to = None
345 345 if f in mmap:
346 346 to = repo.file(f).read(mmap[f])
347 347 tn = read(f)
348 348 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
349 349 showfunc=showfunc, ignorews=ignorews))
350 350 for f in added:
351 351 to = None
352 352 tn = read(f)
353 353 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
354 354 showfunc=showfunc, ignorews=ignorews))
355 355 for f in removed:
356 356 to = repo.file(f).read(mmap[f])
357 357 tn = None
358 358 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
359 359 showfunc=showfunc, ignorews=ignorews))
360 360
361 361 def trimuser(ui, name, rev, revcache):
362 362 """trim the name of the user who committed a change"""
363 363 user = revcache.get(rev)
364 364 if user is None:
365 365 user = revcache[rev] = ui.shortuser(name)
366 366 return user
367 367
368 368 class changeset_templater(object):
369 369 '''use templater module to format changeset information.'''
370 370
371 371 def __init__(self, ui, repo, mapfile):
372 372 self.t = templater.templater(mapfile, templater.common_filters,
373 373 cache={'parent': '{rev}:{node|short} ',
374 374 'manifest': '{rev}:{node|short}'})
375 375 self.ui = ui
376 376 self.repo = repo
377 377
378 378 def use_template(self, t):
379 379 '''set template string to use'''
380 380 self.t.cache['changeset'] = t
381 381
382 382 def write(self, thing):
383 383 '''write expanded template.
384 384 uses in-order recursive traversal of iterators.'''
385 385 for t in thing:
386 386 if hasattr(t, '__iter__'):
387 387 self.write(t)
388 388 else:
389 389 self.ui.write(t)
390 390
391 391 def show(self, rev=0, changenode=None, brinfo=None):
392 392 '''show a single changeset or file revision'''
393 393 log = self.repo.changelog
394 394 if changenode is None:
395 395 changenode = log.node(rev)
396 396 elif not rev:
397 397 rev = log.rev(changenode)
398 398
399 399 changes = log.read(changenode)
400 400
401 401 def showlist(name, values, plural=None, **args):
402 402 '''expand set of values.
403 403 name is name of key in template map.
404 404 values is list of strings or dicts.
405 405 plural is plural of name, if not simply name + 's'.
406 406
407 407 expansion works like this, given name 'foo'.
408 408
409 409 if values is empty, expand 'no_foos'.
410 410
411 411 if 'foo' not in template map, return values as a string,
412 412 joined by space.
413 413
414 414 expand 'start_foos'.
415 415
416 416 for each value, expand 'foo'. if 'last_foo' in template
417 417 map, expand it instead of 'foo' for last key.
418 418
419 419 expand 'end_foos'.
420 420 '''
421 421 if plural: names = plural
422 422 else: names = name + 's'
423 423 if not values:
424 424 noname = 'no_' + names
425 425 if noname in self.t:
426 426 yield self.t(noname, **args)
427 427 return
428 428 if name not in self.t:
429 429 if isinstance(values[0], str):
430 430 yield ' '.join(values)
431 431 else:
432 432 for v in values:
433 433 yield dict(v, **args)
434 434 return
435 435 startname = 'start_' + names
436 436 if startname in self.t:
437 437 yield self.t(startname, **args)
438 438 vargs = args.copy()
439 439 def one(v, tag=name):
440 440 try:
441 441 vargs.update(v)
442 442 except (AttributeError, ValueError):
443 443 try:
444 444 for a, b in v:
445 445 vargs[a] = b
446 446 except ValueError:
447 447 vargs[name] = v
448 448 return self.t(tag, **vargs)
449 449 lastname = 'last_' + name
450 450 if lastname in self.t:
451 451 last = values.pop()
452 452 else:
453 453 last = None
454 454 for v in values:
455 455 yield one(v)
456 456 if last is not None:
457 457 yield one(last, tag=lastname)
458 458 endname = 'end_' + names
459 459 if endname in self.t:
460 460 yield self.t(endname, **args)
461 461
462 462 if brinfo:
463 463 def showbranches(**args):
464 464 if changenode in brinfo:
465 465 for x in showlist('branch', brinfo[changenode],
466 466 plural='branches', **args):
467 467 yield x
468 468 else:
469 469 showbranches = ''
470 470
471 471 if self.ui.debugflag:
472 472 def showmanifest(**args):
473 473 args = args.copy()
474 474 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
475 475 node=hex(changes[0])))
476 476 yield self.t('manifest', **args)
477 477 else:
478 478 showmanifest = ''
479 479
480 480 def showparents(**args):
481 481 parents = [[('rev', log.rev(p)), ('node', hex(p))]
482 482 for p in log.parents(changenode)
483 483 if self.ui.debugflag or p != nullid]
484 484 if (not self.ui.debugflag and len(parents) == 1 and
485 485 parents[0][0][1] == rev - 1):
486 486 return
487 487 for x in showlist('parent', parents, **args):
488 488 yield x
489 489
490 490 def showtags(**args):
491 491 for x in showlist('tag', self.repo.nodetags(changenode), **args):
492 492 yield x
493 493
494 494 if self.ui.debugflag:
495 495 files = self.repo.changes(log.parents(changenode)[0], changenode)
496 496 def showfiles(**args):
497 497 for x in showlist('file', files[0], **args): yield x
498 498 def showadds(**args):
499 499 for x in showlist('file_add', files[1], **args): yield x
500 500 def showdels(**args):
501 501 for x in showlist('file_del', files[2], **args): yield x
502 502 else:
503 503 def showfiles(**args):
504 504 for x in showlist('file', changes[3], **args): yield x
505 505 showadds = ''
506 506 showdels = ''
507 507
508 508 props = {
509 509 'author': changes[1],
510 510 'branches': showbranches,
511 511 'date': changes[2],
512 512 'desc': changes[4],
513 513 'file_adds': showadds,
514 514 'file_dels': showdels,
515 515 'files': showfiles,
516 516 'manifest': showmanifest,
517 517 'node': hex(changenode),
518 518 'parents': showparents,
519 519 'rev': rev,
520 520 'tags': showtags,
521 521 }
522 522
523 523 try:
524 524 if self.ui.debugflag and 'changeset_debug' in self.t:
525 525 key = 'changeset_debug'
526 526 elif self.ui.quiet and 'changeset_quiet' in self.t:
527 527 key = 'changeset_quiet'
528 528 elif self.ui.verbose and 'changeset_verbose' in self.t:
529 529 key = 'changeset_verbose'
530 530 else:
531 531 key = 'changeset'
532 532 self.write(self.t(key, **props))
533 533 except KeyError, inst:
534 534 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
535 535 inst.args[0]))
536 536 except SyntaxError, inst:
537 537 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
538 538
539 539 class changeset_printer(object):
540 540 '''show changeset information when templating not requested.'''
541 541
542 542 def __init__(self, ui, repo):
543 543 self.ui = ui
544 544 self.repo = repo
545 545
546 546 def show(self, rev=0, changenode=None, brinfo=None):
547 547 '''show a single changeset or file revision'''
548 548 log = self.repo.changelog
549 549 if changenode is None:
550 550 changenode = log.node(rev)
551 551 elif not rev:
552 552 rev = log.rev(changenode)
553 553
554 554 if self.ui.quiet:
555 555 self.ui.write("%d:%s\n" % (rev, short(changenode)))
556 556 return
557 557
558 558 changes = log.read(changenode)
559 559 date = util.datestr(changes[2])
560 560
561 561 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
562 562 for p in log.parents(changenode)
563 563 if self.ui.debugflag or p != nullid]
564 564 if (not self.ui.debugflag and len(parents) == 1 and
565 565 parents[0][0] == rev-1):
566 566 parents = []
567 567
568 568 if self.ui.verbose:
569 569 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
570 570 else:
571 571 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
572 572
573 573 for tag in self.repo.nodetags(changenode):
574 574 self.ui.status(_("tag: %s\n") % tag)
575 575 for parent in parents:
576 576 self.ui.write(_("parent: %d:%s\n") % parent)
577 577
578 578 if brinfo and changenode in brinfo:
579 579 br = brinfo[changenode]
580 580 self.ui.write(_("branch: %s\n") % " ".join(br))
581 581
582 582 self.ui.debug(_("manifest: %d:%s\n") %
583 583 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
584 584 self.ui.status(_("user: %s\n") % changes[1])
585 585 self.ui.status(_("date: %s\n") % date)
586 586
587 587 if self.ui.debugflag:
588 588 files = self.repo.changes(log.parents(changenode)[0], changenode)
589 589 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
590 590 files):
591 591 if value:
592 592 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
593 593 else:
594 594 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
595 595
596 596 description = changes[4].strip()
597 597 if description:
598 598 if self.ui.verbose:
599 599 self.ui.status(_("description:\n"))
600 600 self.ui.status(description)
601 601 self.ui.status("\n\n")
602 602 else:
603 603 self.ui.status(_("summary: %s\n") %
604 604 description.splitlines()[0])
605 605 self.ui.status("\n")
606 606
607 607 def show_changeset(ui, repo, opts):
608 608 '''show one changeset. uses template or regular display. caller
609 609 can pass in 'style' and 'template' options in opts.'''
610 610
611 611 tmpl = opts.get('template')
612 612 if tmpl:
613 613 tmpl = templater.parsestring(tmpl, quoted=False)
614 614 else:
615 615 tmpl = ui.config('ui', 'logtemplate')
616 616 if tmpl: tmpl = templater.parsestring(tmpl)
617 617 mapfile = opts.get('style') or ui.config('ui', 'style')
618 618 if tmpl or mapfile:
619 619 if mapfile:
620 620 if not os.path.isfile(mapfile):
621 621 mapname = templater.templatepath('map-cmdline.' + mapfile)
622 622 if not mapname: mapname = templater.templatepath(mapfile)
623 623 if mapname: mapfile = mapname
624 624 try:
625 625 t = changeset_templater(ui, repo, mapfile)
626 626 except SyntaxError, inst:
627 627 raise util.Abort(inst.args[0])
628 628 if tmpl: t.use_template(tmpl)
629 629 return t
630 630 return changeset_printer(ui, repo)
631 631
632 632 def show_version(ui):
633 633 """output version and copyright information"""
634 634 ui.write(_("Mercurial Distributed SCM (version %s)\n")
635 635 % version.get_version())
636 636 ui.status(_(
637 637 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
638 638 "This is free software; see the source for copying conditions. "
639 639 "There is NO\nwarranty; "
640 640 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
641 641 ))
642 642
643 643 def help_(ui, cmd=None, with_version=False):
644 644 """show help for a given command or all commands"""
645 645 option_lists = []
646 646 if cmd and cmd != 'shortlist':
647 647 if with_version:
648 648 show_version(ui)
649 649 ui.write('\n')
650 650 aliases, i = find(cmd)
651 651 # synopsis
652 652 ui.write("%s\n\n" % i[2])
653 653
654 654 # description
655 655 doc = i[0].__doc__
656 656 if not doc:
657 657 doc = _("(No help text available)")
658 658 if ui.quiet:
659 659 doc = doc.splitlines(0)[0]
660 660 ui.write("%s\n" % doc.rstrip())
661 661
662 662 if not ui.quiet:
663 663 # aliases
664 664 if len(aliases) > 1:
665 665 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
666 666
667 667 # options
668 668 if i[1]:
669 669 option_lists.append(("options", i[1]))
670 670
671 671 else:
672 672 # program name
673 673 if ui.verbose or with_version:
674 674 show_version(ui)
675 675 else:
676 676 ui.status(_("Mercurial Distributed SCM\n"))
677 677 ui.status('\n')
678 678
679 679 # list of commands
680 680 if cmd == "shortlist":
681 681 ui.status(_('basic commands (use "hg help" '
682 682 'for the full list or option "-v" for details):\n\n'))
683 683 elif ui.verbose:
684 684 ui.status(_('list of commands:\n\n'))
685 685 else:
686 686 ui.status(_('list of commands (use "hg help -v" '
687 687 'to show aliases and global options):\n\n'))
688 688
689 689 h = {}
690 690 cmds = {}
691 691 for c, e in table.items():
692 692 f = c.split("|")[0]
693 693 if cmd == "shortlist" and not f.startswith("^"):
694 694 continue
695 695 f = f.lstrip("^")
696 696 if not ui.debugflag and f.startswith("debug"):
697 697 continue
698 698 doc = e[0].__doc__
699 699 if not doc:
700 700 doc = _("(No help text available)")
701 701 h[f] = doc.splitlines(0)[0].rstrip()
702 702 cmds[f] = c.lstrip("^")
703 703
704 704 fns = h.keys()
705 705 fns.sort()
706 706 m = max(map(len, fns))
707 707 for f in fns:
708 708 if ui.verbose:
709 709 commands = cmds[f].replace("|",", ")
710 710 ui.write(" %s:\n %s\n"%(commands, h[f]))
711 711 else:
712 712 ui.write(' %-*s %s\n' % (m, f, h[f]))
713 713
714 714 # global options
715 715 if ui.verbose:
716 716 option_lists.append(("global options", globalopts))
717 717
718 718 # list all option lists
719 719 opt_output = []
720 720 for title, options in option_lists:
721 721 opt_output.append(("\n%s:\n" % title, None))
722 722 for shortopt, longopt, default, desc in options:
723 723 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
724 724 longopt and " --%s" % longopt),
725 725 "%s%s" % (desc,
726 726 default
727 727 and _(" (default: %s)") % default
728 728 or "")))
729 729
730 730 if opt_output:
731 731 opts_len = max([len(line[0]) for line in opt_output if line[1]])
732 732 for first, second in opt_output:
733 733 if second:
734 734 ui.write(" %-*s %s\n" % (opts_len, first, second))
735 735 else:
736 736 ui.write("%s\n" % first)
737 737
738 738 # Commands start here, listed alphabetically
739 739
740 740 def add(ui, repo, *pats, **opts):
741 741 """add the specified files on the next commit
742 742
743 743 Schedule files to be version controlled and added to the repository.
744 744
745 745 The files will be added to the repository at the next commit.
746 746
747 747 If no names are given, add all files in the repository.
748 748 """
749 749
750 750 names = []
751 751 for src, abs, rel, exact in walk(repo, pats, opts):
752 752 if exact:
753 753 if ui.verbose:
754 754 ui.status(_('adding %s\n') % rel)
755 755 names.append(abs)
756 756 elif repo.dirstate.state(abs) == '?':
757 757 ui.status(_('adding %s\n') % rel)
758 758 names.append(abs)
759 759 repo.add(names)
760 760
761 761 def addremove(ui, repo, *pats, **opts):
762 762 """add all new files, delete all missing files
763 763
764 764 Add all new files and remove all missing files from the repository.
765 765
766 766 New files are ignored if they match any of the patterns in .hgignore. As
767 767 with add, these changes take effect at the next commit.
768 768 """
769 769 return addremove_lock(ui, repo, pats, opts)
770 770
771 771 def addremove_lock(ui, repo, pats, opts, wlock=None):
772 772 add, remove = [], []
773 773 for src, abs, rel, exact in walk(repo, pats, opts):
774 774 if src == 'f' and repo.dirstate.state(abs) == '?':
775 775 add.append(abs)
776 776 if ui.verbose or not exact:
777 777 ui.status(_('adding %s\n') % ((pats and rel) or abs))
778 778 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
779 779 remove.append(abs)
780 780 if ui.verbose or not exact:
781 781 ui.status(_('removing %s\n') % ((pats and rel) or abs))
782 782 repo.add(add, wlock=wlock)
783 783 repo.remove(remove, wlock=wlock)
784 784
785 785 def annotate(ui, repo, *pats, **opts):
786 786 """show changeset information per file line
787 787
788 788 List changes in files, showing the revision id responsible for each line
789 789
790 790 This command is useful to discover who made a change or when a change took
791 791 place.
792 792
793 793 Without the -a option, annotate will avoid processing files it
794 794 detects as binary. With -a, annotate will generate an annotation
795 795 anyway, probably with undesirable results.
796 796 """
797 797 def getnode(rev):
798 798 return short(repo.changelog.node(rev))
799 799
800 800 ucache = {}
801 801 def getname(rev):
802 802 cl = repo.changelog.read(repo.changelog.node(rev))
803 803 return trimuser(ui, cl[1], rev, ucache)
804 804
805 805 dcache = {}
806 806 def getdate(rev):
807 807 datestr = dcache.get(rev)
808 808 if datestr is None:
809 809 cl = repo.changelog.read(repo.changelog.node(rev))
810 810 datestr = dcache[rev] = util.datestr(cl[2])
811 811 return datestr
812 812
813 813 if not pats:
814 814 raise util.Abort(_('at least one file name or pattern required'))
815 815
816 816 opmap = [['user', getname], ['number', str], ['changeset', getnode],
817 817 ['date', getdate]]
818 818 if not opts['user'] and not opts['changeset'] and not opts['date']:
819 819 opts['number'] = 1
820 820
821 821 if opts['rev']:
822 822 node = repo.changelog.lookup(opts['rev'])
823 823 else:
824 824 node = repo.dirstate.parents()[0]
825 825 change = repo.changelog.read(node)
826 826 mmap = repo.manifest.read(change[0])
827 827
828 828 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
829 829 f = repo.file(abs)
830 830 if not opts['text'] and util.binary(f.read(mmap[abs])):
831 831 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
832 832 continue
833 833
834 834 lines = f.annotate(mmap[abs])
835 835 pieces = []
836 836
837 837 for o, f in opmap:
838 838 if opts[o]:
839 839 l = [f(n) for n, dummy in lines]
840 840 if l:
841 841 m = max(map(len, l))
842 842 pieces.append(["%*s" % (m, x) for x in l])
843 843
844 844 if pieces:
845 845 for p, l in zip(zip(*pieces), lines):
846 846 ui.write("%s: %s" % (" ".join(p), l[1]))
847 847
848 848 def bundle(ui, repo, fname, dest="default-push", **opts):
849 849 """create a changegroup file
850 850
851 851 Generate a compressed changegroup file collecting all changesets
852 852 not found in the other repository.
853 853
854 854 This file can then be transferred using conventional means and
855 855 applied to another repository with the unbundle command. This is
856 856 useful when native push and pull are not available or when
857 857 exporting an entire repository is undesirable. The standard file
858 858 extension is ".hg".
859 859
860 860 Unlike import/export, this exactly preserves all changeset
861 861 contents including permissions, rename data, and revision history.
862 862 """
863 863 dest = ui.expandpath(dest)
864 864 other = hg.repository(ui, dest)
865 o = repo.findoutgoing(other)
865 o = repo.findoutgoing(other, force=opts['force'])
866 866 cg = repo.changegroup(o, 'bundle')
867 867 write_bundle(cg, fname)
868 868
869 869 def cat(ui, repo, file1, *pats, **opts):
870 870 """output the latest or given revisions of files
871 871
872 872 Print the specified files as they were at the given revision.
873 873 If no revision is given then the tip is used.
874 874
875 875 Output may be to a file, in which case the name of the file is
876 876 given using a format string. The formatting rules are the same as
877 877 for the export command, with the following additions:
878 878
879 879 %s basename of file being printed
880 880 %d dirname of file being printed, or '.' if in repo root
881 881 %p root-relative path name of file being printed
882 882 """
883 883 mf = {}
884 884 rev = opts['rev']
885 885 if rev:
886 886 node = repo.lookup(rev)
887 887 else:
888 888 node = repo.changelog.tip()
889 889 change = repo.changelog.read(node)
890 890 mf = repo.manifest.read(change[0])
891 891 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
892 892 r = repo.file(abs)
893 893 n = mf[abs]
894 894 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
895 895 fp.write(r.read(n))
896 896
897 897 def clone(ui, source, dest=None, **opts):
898 898 """make a copy of an existing repository
899 899
900 900 Create a copy of an existing repository in a new directory.
901 901
902 902 If no destination directory name is specified, it defaults to the
903 903 basename of the source.
904 904
905 905 The location of the source is added to the new repository's
906 906 .hg/hgrc file, as the default to be used for future pulls.
907 907
908 908 For efficiency, hardlinks are used for cloning whenever the source
909 909 and destination are on the same filesystem. Some filesystems,
910 910 such as AFS, implement hardlinking incorrectly, but do not report
911 911 errors. In these cases, use the --pull option to avoid
912 912 hardlinking.
913 913
914 914 See pull for valid source format details.
915 915 """
916 916 if dest is None:
917 917 dest = os.path.basename(os.path.normpath(source))
918 918
919 919 if os.path.exists(dest):
920 920 raise util.Abort(_("destination '%s' already exists"), dest)
921 921
922 922 dest = os.path.realpath(dest)
923 923
924 924 class Dircleanup(object):
925 925 def __init__(self, dir_):
926 926 self.rmtree = shutil.rmtree
927 927 self.dir_ = dir_
928 928 os.mkdir(dir_)
929 929 def close(self):
930 930 self.dir_ = None
931 931 def __del__(self):
932 932 if self.dir_:
933 933 self.rmtree(self.dir_, True)
934 934
935 935 if opts['ssh']:
936 936 ui.setconfig("ui", "ssh", opts['ssh'])
937 937 if opts['remotecmd']:
938 938 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
939 939
940 940 source = ui.expandpath(source)
941 941
942 942 d = Dircleanup(dest)
943 943 abspath = source
944 944 other = hg.repository(ui, source)
945 945
946 946 copy = False
947 947 if other.dev() != -1:
948 948 abspath = os.path.abspath(source)
949 949 if not opts['pull'] and not opts['rev']:
950 950 copy = True
951 951
952 952 if copy:
953 953 try:
954 954 # we use a lock here because if we race with commit, we
955 955 # can end up with extra data in the cloned revlogs that's
956 956 # not pointed to by changesets, thus causing verify to
957 957 # fail
958 958 l1 = other.lock()
959 959 except lock.LockException:
960 960 copy = False
961 961
962 962 if copy:
963 963 # we lock here to avoid premature writing to the target
964 964 os.mkdir(os.path.join(dest, ".hg"))
965 965 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
966 966
967 967 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
968 968 for f in files.split():
969 969 src = os.path.join(source, ".hg", f)
970 970 dst = os.path.join(dest, ".hg", f)
971 971 try:
972 972 util.copyfiles(src, dst)
973 973 except OSError, inst:
974 974 if inst.errno != errno.ENOENT:
975 975 raise
976 976
977 977 repo = hg.repository(ui, dest)
978 978
979 979 else:
980 980 revs = None
981 981 if opts['rev']:
982 982 if not other.local():
983 983 error = _("clone -r not supported yet for remote repositories.")
984 984 raise util.Abort(error)
985 985 else:
986 986 revs = [other.lookup(rev) for rev in opts['rev']]
987 987 repo = hg.repository(ui, dest, create=1)
988 988 repo.pull(other, heads = revs)
989 989
990 990 f = repo.opener("hgrc", "w", text=True)
991 991 f.write("[paths]\n")
992 992 f.write("default = %s\n" % abspath)
993 993 f.close()
994 994
995 995 if not opts['noupdate']:
996 996 update(repo.ui, repo)
997 997
998 998 d.close()
999 999
1000 1000 def commit(ui, repo, *pats, **opts):
1001 1001 """commit the specified files or all outstanding changes
1002 1002
1003 1003 Commit changes to the given files into the repository.
1004 1004
1005 1005 If a list of files is omitted, all changes reported by "hg status"
1006 1006 will be committed.
1007 1007
1008 1008 The HGEDITOR or EDITOR environment variables are used to start an
1009 1009 editor to add a commit comment.
1010 1010 """
1011 1011 message = opts['message']
1012 1012 logfile = opts['logfile']
1013 1013
1014 1014 if message and logfile:
1015 1015 raise util.Abort(_('options --message and --logfile are mutually '
1016 1016 'exclusive'))
1017 1017 if not message and logfile:
1018 1018 try:
1019 1019 if logfile == '-':
1020 1020 message = sys.stdin.read()
1021 1021 else:
1022 1022 message = open(logfile).read()
1023 1023 except IOError, inst:
1024 1024 raise util.Abort(_("can't read commit message '%s': %s") %
1025 1025 (logfile, inst.strerror))
1026 1026
1027 1027 if opts['addremove']:
1028 1028 addremove(ui, repo, *pats, **opts)
1029 1029 fns, match, anypats = matchpats(repo, pats, opts)
1030 1030 if pats:
1031 1031 modified, added, removed, deleted, unknown = (
1032 1032 repo.changes(files=fns, match=match))
1033 1033 files = modified + added + removed
1034 1034 else:
1035 1035 files = []
1036 1036 try:
1037 1037 repo.commit(files, message, opts['user'], opts['date'], match)
1038 1038 except ValueError, inst:
1039 1039 raise util.Abort(str(inst))
1040 1040
1041 1041 def docopy(ui, repo, pats, opts, wlock):
1042 1042 # called with the repo lock held
1043 1043 cwd = repo.getcwd()
1044 1044 errors = 0
1045 1045 copied = []
1046 1046 targets = {}
1047 1047
1048 1048 def okaytocopy(abs, rel, exact):
1049 1049 reasons = {'?': _('is not managed'),
1050 1050 'a': _('has been marked for add'),
1051 1051 'r': _('has been marked for remove')}
1052 1052 state = repo.dirstate.state(abs)
1053 1053 reason = reasons.get(state)
1054 1054 if reason:
1055 1055 if state == 'a':
1056 1056 origsrc = repo.dirstate.copied(abs)
1057 1057 if origsrc is not None:
1058 1058 return origsrc
1059 1059 if exact:
1060 1060 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1061 1061 else:
1062 1062 return abs
1063 1063
1064 1064 def copy(origsrc, abssrc, relsrc, target, exact):
1065 1065 abstarget = util.canonpath(repo.root, cwd, target)
1066 1066 reltarget = util.pathto(cwd, abstarget)
1067 1067 prevsrc = targets.get(abstarget)
1068 1068 if prevsrc is not None:
1069 1069 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1070 1070 (reltarget, abssrc, prevsrc))
1071 1071 return
1072 1072 if (not opts['after'] and os.path.exists(reltarget) or
1073 1073 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1074 1074 if not opts['force']:
1075 1075 ui.warn(_('%s: not overwriting - file exists\n') %
1076 1076 reltarget)
1077 1077 return
1078 1078 if not opts['after']:
1079 1079 os.unlink(reltarget)
1080 1080 if opts['after']:
1081 1081 if not os.path.exists(reltarget):
1082 1082 return
1083 1083 else:
1084 1084 targetdir = os.path.dirname(reltarget) or '.'
1085 1085 if not os.path.isdir(targetdir):
1086 1086 os.makedirs(targetdir)
1087 1087 try:
1088 1088 restore = repo.dirstate.state(abstarget) == 'r'
1089 1089 if restore:
1090 1090 repo.undelete([abstarget], wlock)
1091 1091 try:
1092 1092 shutil.copyfile(relsrc, reltarget)
1093 1093 shutil.copymode(relsrc, reltarget)
1094 1094 restore = False
1095 1095 finally:
1096 1096 if restore:
1097 1097 repo.remove([abstarget], wlock)
1098 1098 except shutil.Error, inst:
1099 1099 raise util.Abort(str(inst))
1100 1100 except IOError, inst:
1101 1101 if inst.errno == errno.ENOENT:
1102 1102 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1103 1103 else:
1104 1104 ui.warn(_('%s: cannot copy - %s\n') %
1105 1105 (relsrc, inst.strerror))
1106 1106 errors += 1
1107 1107 return
1108 1108 if ui.verbose or not exact:
1109 1109 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1110 1110 targets[abstarget] = abssrc
1111 1111 if abstarget != origsrc:
1112 1112 repo.copy(origsrc, abstarget, wlock)
1113 1113 copied.append((abssrc, relsrc, exact))
1114 1114
1115 1115 def targetpathfn(pat, dest, srcs):
1116 1116 if os.path.isdir(pat):
1117 1117 abspfx = util.canonpath(repo.root, cwd, pat)
1118 1118 if destdirexists:
1119 1119 striplen = len(os.path.split(abspfx)[0])
1120 1120 else:
1121 1121 striplen = len(abspfx)
1122 1122 if striplen:
1123 1123 striplen += len(os.sep)
1124 1124 res = lambda p: os.path.join(dest, p[striplen:])
1125 1125 elif destdirexists:
1126 1126 res = lambda p: os.path.join(dest, os.path.basename(p))
1127 1127 else:
1128 1128 res = lambda p: dest
1129 1129 return res
1130 1130
1131 1131 def targetpathafterfn(pat, dest, srcs):
1132 1132 if util.patkind(pat, None)[0]:
1133 1133 # a mercurial pattern
1134 1134 res = lambda p: os.path.join(dest, os.path.basename(p))
1135 1135 else:
1136 1136 abspfx = util.canonpath(repo.root, cwd, pat)
1137 1137 if len(abspfx) < len(srcs[0][0]):
1138 1138 # A directory. Either the target path contains the last
1139 1139 # component of the source path or it does not.
1140 1140 def evalpath(striplen):
1141 1141 score = 0
1142 1142 for s in srcs:
1143 1143 t = os.path.join(dest, s[0][striplen:])
1144 1144 if os.path.exists(t):
1145 1145 score += 1
1146 1146 return score
1147 1147
1148 1148 striplen = len(abspfx)
1149 1149 if striplen:
1150 1150 striplen += len(os.sep)
1151 1151 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1152 1152 score = evalpath(striplen)
1153 1153 striplen1 = len(os.path.split(abspfx)[0])
1154 1154 if striplen1:
1155 1155 striplen1 += len(os.sep)
1156 1156 if evalpath(striplen1) > score:
1157 1157 striplen = striplen1
1158 1158 res = lambda p: os.path.join(dest, p[striplen:])
1159 1159 else:
1160 1160 # a file
1161 1161 if destdirexists:
1162 1162 res = lambda p: os.path.join(dest, os.path.basename(p))
1163 1163 else:
1164 1164 res = lambda p: dest
1165 1165 return res
1166 1166
1167 1167
1168 1168 pats = list(pats)
1169 1169 if not pats:
1170 1170 raise util.Abort(_('no source or destination specified'))
1171 1171 if len(pats) == 1:
1172 1172 raise util.Abort(_('no destination specified'))
1173 1173 dest = pats.pop()
1174 1174 destdirexists = os.path.isdir(dest)
1175 1175 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1176 1176 raise util.Abort(_('with multiple sources, destination must be an '
1177 1177 'existing directory'))
1178 1178 if opts['after']:
1179 1179 tfn = targetpathafterfn
1180 1180 else:
1181 1181 tfn = targetpathfn
1182 1182 copylist = []
1183 1183 for pat in pats:
1184 1184 srcs = []
1185 1185 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1186 1186 origsrc = okaytocopy(abssrc, relsrc, exact)
1187 1187 if origsrc:
1188 1188 srcs.append((origsrc, abssrc, relsrc, exact))
1189 1189 if not srcs:
1190 1190 continue
1191 1191 copylist.append((tfn(pat, dest, srcs), srcs))
1192 1192 if not copylist:
1193 1193 raise util.Abort(_('no files to copy'))
1194 1194
1195 1195 for targetpath, srcs in copylist:
1196 1196 for origsrc, abssrc, relsrc, exact in srcs:
1197 1197 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1198 1198
1199 1199 if errors:
1200 1200 ui.warn(_('(consider using --after)\n'))
1201 1201 return errors, copied
1202 1202
1203 1203 def copy(ui, repo, *pats, **opts):
1204 1204 """mark files as copied for the next commit
1205 1205
1206 1206 Mark dest as having copies of source files. If dest is a
1207 1207 directory, copies are put in that directory. If dest is a file,
1208 1208 there can only be one source.
1209 1209
1210 1210 By default, this command copies the contents of files as they
1211 1211 stand in the working directory. If invoked with --after, the
1212 1212 operation is recorded, but no copying is performed.
1213 1213
1214 1214 This command takes effect in the next commit.
1215 1215
1216 1216 NOTE: This command should be treated as experimental. While it
1217 1217 should properly record copied files, this information is not yet
1218 1218 fully used by merge, nor fully reported by log.
1219 1219 """
1220 1220 try:
1221 1221 wlock = repo.wlock(0)
1222 1222 errs, copied = docopy(ui, repo, pats, opts, wlock)
1223 1223 except lock.LockHeld, inst:
1224 1224 ui.warn(_("repository lock held by %s\n") % inst.args[0])
1225 1225 errs = 1
1226 1226 return errs
1227 1227
1228 1228 def debugancestor(ui, index, rev1, rev2):
1229 1229 """find the ancestor revision of two revisions in a given index"""
1230 1230 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "")
1231 1231 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1232 1232 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1233 1233
1234 1234 def debugcomplete(ui, cmd):
1235 1235 """returns the completion list associated with the given command"""
1236 1236 clist = findpossible(cmd).keys()
1237 1237 clist.sort()
1238 1238 ui.write("%s\n" % " ".join(clist))
1239 1239
1240 1240 def debugrebuildstate(ui, repo, rev=None):
1241 1241 """rebuild the dirstate as it would look for the given revision"""
1242 1242 if not rev:
1243 1243 rev = repo.changelog.tip()
1244 1244 else:
1245 1245 rev = repo.lookup(rev)
1246 1246 change = repo.changelog.read(rev)
1247 1247 n = change[0]
1248 1248 files = repo.manifest.readflags(n)
1249 1249 wlock = repo.wlock()
1250 1250 repo.dirstate.rebuild(rev, files.iteritems())
1251 1251
1252 1252 def debugcheckstate(ui, repo):
1253 1253 """validate the correctness of the current dirstate"""
1254 1254 parent1, parent2 = repo.dirstate.parents()
1255 1255 repo.dirstate.read()
1256 1256 dc = repo.dirstate.map
1257 1257 keys = dc.keys()
1258 1258 keys.sort()
1259 1259 m1n = repo.changelog.read(parent1)[0]
1260 1260 m2n = repo.changelog.read(parent2)[0]
1261 1261 m1 = repo.manifest.read(m1n)
1262 1262 m2 = repo.manifest.read(m2n)
1263 1263 errors = 0
1264 1264 for f in dc:
1265 1265 state = repo.dirstate.state(f)
1266 1266 if state in "nr" and f not in m1:
1267 1267 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1268 1268 errors += 1
1269 1269 if state in "a" and f in m1:
1270 1270 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1271 1271 errors += 1
1272 1272 if state in "m" and f not in m1 and f not in m2:
1273 1273 ui.warn(_("%s in state %s, but not in either manifest\n") %
1274 1274 (f, state))
1275 1275 errors += 1
1276 1276 for f in m1:
1277 1277 state = repo.dirstate.state(f)
1278 1278 if state not in "nrm":
1279 1279 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1280 1280 errors += 1
1281 1281 if errors:
1282 1282 error = _(".hg/dirstate inconsistent with current parent's manifest")
1283 1283 raise util.Abort(error)
1284 1284
1285 1285 def debugconfig(ui, repo):
1286 1286 """show combined config settings from all hgrc files"""
1287 1287 for section, name, value in ui.walkconfig():
1288 1288 ui.write('%s.%s=%s\n' % (section, name, value))
1289 1289
1290 1290 def debugsetparents(ui, repo, rev1, rev2=None):
1291 1291 """manually set the parents of the current working directory
1292 1292
1293 1293 This is useful for writing repository conversion tools, but should
1294 1294 be used with care.
1295 1295 """
1296 1296
1297 1297 if not rev2:
1298 1298 rev2 = hex(nullid)
1299 1299
1300 1300 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1301 1301
1302 1302 def debugstate(ui, repo):
1303 1303 """show the contents of the current dirstate"""
1304 1304 repo.dirstate.read()
1305 1305 dc = repo.dirstate.map
1306 1306 keys = dc.keys()
1307 1307 keys.sort()
1308 1308 for file_ in keys:
1309 1309 ui.write("%c %3o %10d %s %s\n"
1310 1310 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1311 1311 time.strftime("%x %X",
1312 1312 time.localtime(dc[file_][3])), file_))
1313 1313 for f in repo.dirstate.copies:
1314 1314 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1315 1315
1316 1316 def debugdata(ui, file_, rev):
1317 1317 """dump the contents of a data file revision"""
1318 1318 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1319 1319 file_[:-2] + ".i", file_)
1320 1320 try:
1321 1321 ui.write(r.revision(r.lookup(rev)))
1322 1322 except KeyError:
1323 1323 raise util.Abort(_('invalid revision identifier %s'), rev)
1324 1324
1325 1325 def debugindex(ui, file_):
1326 1326 """dump the contents of an index file"""
1327 1327 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1328 1328 ui.write(" rev offset length base linkrev" +
1329 1329 " nodeid p1 p2\n")
1330 1330 for i in range(r.count()):
1331 1331 e = r.index[i]
1332 1332 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1333 1333 i, e[0], e[1], e[2], e[3],
1334 1334 short(e[6]), short(e[4]), short(e[5])))
1335 1335
1336 1336 def debugindexdot(ui, file_):
1337 1337 """dump an index DAG as a .dot file"""
1338 1338 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1339 1339 ui.write("digraph G {\n")
1340 1340 for i in range(r.count()):
1341 1341 e = r.index[i]
1342 1342 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1343 1343 if e[5] != nullid:
1344 1344 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1345 1345 ui.write("}\n")
1346 1346
1347 1347 def debugrename(ui, repo, file, rev=None):
1348 1348 """dump rename information"""
1349 1349 r = repo.file(relpath(repo, [file])[0])
1350 1350 if rev:
1351 1351 try:
1352 1352 # assume all revision numbers are for changesets
1353 1353 n = repo.lookup(rev)
1354 1354 change = repo.changelog.read(n)
1355 1355 m = repo.manifest.read(change[0])
1356 1356 n = m[relpath(repo, [file])[0]]
1357 1357 except (hg.RepoError, KeyError):
1358 1358 n = r.lookup(rev)
1359 1359 else:
1360 1360 n = r.tip()
1361 1361 m = r.renamed(n)
1362 1362 if m:
1363 1363 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1364 1364 else:
1365 1365 ui.write(_("not renamed\n"))
1366 1366
1367 1367 def debugwalk(ui, repo, *pats, **opts):
1368 1368 """show how files match on given patterns"""
1369 1369 items = list(walk(repo, pats, opts))
1370 1370 if not items:
1371 1371 return
1372 1372 fmt = '%%s %%-%ds %%-%ds %%s' % (
1373 1373 max([len(abs) for (src, abs, rel, exact) in items]),
1374 1374 max([len(rel) for (src, abs, rel, exact) in items]))
1375 1375 for src, abs, rel, exact in items:
1376 1376 line = fmt % (src, abs, rel, exact and 'exact' or '')
1377 1377 ui.write("%s\n" % line.rstrip())
1378 1378
1379 1379 def diff(ui, repo, *pats, **opts):
1380 1380 """diff repository (or selected files)
1381 1381
1382 1382 Show differences between revisions for the specified files.
1383 1383
1384 1384 Differences between files are shown using the unified diff format.
1385 1385
1386 1386 When two revision arguments are given, then changes are shown
1387 1387 between those revisions. If only one revision is specified then
1388 1388 that revision is compared to the working directory, and, when no
1389 1389 revisions are specified, the working directory files are compared
1390 1390 to its parent.
1391 1391
1392 1392 Without the -a option, diff will avoid generating diffs of files
1393 1393 it detects as binary. With -a, diff will generate a diff anyway,
1394 1394 probably with undesirable results.
1395 1395 """
1396 1396 node1, node2 = None, None
1397 1397 revs = [repo.lookup(x) for x in opts['rev']]
1398 1398
1399 1399 if len(revs) > 0:
1400 1400 node1 = revs[0]
1401 1401 if len(revs) > 1:
1402 1402 node2 = revs[1]
1403 1403 if len(revs) > 2:
1404 1404 raise util.Abort(_("too many revisions to diff"))
1405 1405
1406 1406 fns, matchfn, anypats = matchpats(repo, pats, opts)
1407 1407
1408 1408 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1409 1409 text=opts['text'], opts=opts)
1410 1410
1411 1411 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1412 1412 node = repo.lookup(changeset)
1413 1413 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1414 1414 if opts['switch_parent']:
1415 1415 parents.reverse()
1416 1416 prev = (parents and parents[0]) or nullid
1417 1417 change = repo.changelog.read(node)
1418 1418
1419 1419 fp = make_file(repo, repo.changelog, opts['output'],
1420 1420 node=node, total=total, seqno=seqno,
1421 1421 revwidth=revwidth)
1422 1422 if fp != sys.stdout:
1423 1423 ui.note("%s\n" % fp.name)
1424 1424
1425 1425 fp.write("# HG changeset patch\n")
1426 1426 fp.write("# User %s\n" % change[1])
1427 1427 fp.write("# Node ID %s\n" % hex(node))
1428 1428 fp.write("# Parent %s\n" % hex(prev))
1429 1429 if len(parents) > 1:
1430 1430 fp.write("# Parent %s\n" % hex(parents[1]))
1431 1431 fp.write(change[4].rstrip())
1432 1432 fp.write("\n\n")
1433 1433
1434 1434 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1435 1435 if fp != sys.stdout:
1436 1436 fp.close()
1437 1437
1438 1438 def export(ui, repo, *changesets, **opts):
1439 1439 """dump the header and diffs for one or more changesets
1440 1440
1441 1441 Print the changeset header and diffs for one or more revisions.
1442 1442
1443 1443 The information shown in the changeset header is: author,
1444 1444 changeset hash, parent and commit comment.
1445 1445
1446 1446 Output may be to a file, in which case the name of the file is
1447 1447 given using a format string. The formatting rules are as follows:
1448 1448
1449 1449 %% literal "%" character
1450 1450 %H changeset hash (40 bytes of hexadecimal)
1451 1451 %N number of patches being generated
1452 1452 %R changeset revision number
1453 1453 %b basename of the exporting repository
1454 1454 %h short-form changeset hash (12 bytes of hexadecimal)
1455 1455 %n zero-padded sequence number, starting at 1
1456 1456 %r zero-padded changeset revision number
1457 1457
1458 1458 Without the -a option, export will avoid generating diffs of files
1459 1459 it detects as binary. With -a, export will generate a diff anyway,
1460 1460 probably with undesirable results.
1461 1461
1462 1462 With the --switch-parent option, the diff will be against the second
1463 1463 parent. It can be useful to review a merge.
1464 1464 """
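    # Illustrative example (hypothetical file names; assumes the -o/--output option
    # maps to opts['output'] as in make_filename above):
    #   hg export -o '%n-of-%N-r%R.patch' 100 101
    # would write '1-of-2-r100.patch' and '2-of-2-r101.patch'.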
1465 1465 if not changesets:
1466 1466 raise util.Abort(_("export requires at least one changeset"))
1467 1467 seqno = 0
1468 1468 revs = list(revrange(ui, repo, changesets))
1469 1469 total = len(revs)
1470 1470 revwidth = max(map(len, revs))
1471 1471 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1472 1472 ui.note(msg)
1473 1473 for cset in revs:
1474 1474 seqno += 1
1475 1475 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1476 1476
1477 1477 def forget(ui, repo, *pats, **opts):
1478 1478 """don't add the specified files on the next commit
1479 1479
1480 1480 Undo an 'hg add' scheduled for the next commit.
1481 1481 """
1482 1482 forget = []
1483 1483 for src, abs, rel, exact in walk(repo, pats, opts):
1484 1484 if repo.dirstate.state(abs) == 'a':
1485 1485 forget.append(abs)
1486 1486 if ui.verbose or not exact:
1487 1487 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1488 1488 repo.forget(forget)
1489 1489
1490 1490 def grep(ui, repo, pattern, *pats, **opts):
1491 1491 """search for a pattern in specified files and revisions
1492 1492
1493 1493 Search revisions of files for a regular expression.
1494 1494
1495 1495 This command behaves differently than Unix grep. It only accepts
1496 1496 Python/Perl regexps. It searches repository history, not the
1497 1497 working directory. It always prints the revision number in which
1498 1498 a match appears.
1499 1499
1500 1500 By default, grep only prints output for the first revision of a
1501 1501 file in which it finds a match. To get it to print every revision
1502 1502 that contains a change in match status ("-" for a match that
1503 1503 becomes a non-match, or "+" for a non-match that becomes a match),
1504 1504 use the --all flag.
1505 1505 """
1506 1506 reflags = 0
1507 1507 if opts['ignore_case']:
1508 1508 reflags |= re.I
1509 1509 regexp = re.compile(pattern, reflags)
1510 1510 sep, eol = ':', '\n'
1511 1511 if opts['print0']:
1512 1512 sep = eol = '\0'
1513 1513
1514 1514 fcache = {}
1515 1515 def getfile(fn):
1516 1516 if fn not in fcache:
1517 1517 fcache[fn] = repo.file(fn)
1518 1518 return fcache[fn]
1519 1519
1520 1520 def matchlines(body):
1521 1521 begin = 0
1522 1522 linenum = 0
1523 1523 while True:
1524 1524 match = regexp.search(body, begin)
1525 1525 if not match:
1526 1526 break
1527 1527 mstart, mend = match.span()
1528 1528 linenum += body.count('\n', begin, mstart) + 1
1529 1529 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1530 1530 lend = body.find('\n', mend)
1531 1531 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1532 1532 begin = lend + 1
1533 1533
1534 1534 class linestate(object):
1535 1535 def __init__(self, line, linenum, colstart, colend):
1536 1536 self.line = line
1537 1537 self.linenum = linenum
1538 1538 self.colstart = colstart
1539 1539 self.colend = colend
1540 1540 def __eq__(self, other):
1541 1541 return self.line == other.line
1542 1542 def __hash__(self):
1543 1543 return hash(self.line)
1544 1544
1545 1545 matches = {}
1546 1546 def grepbody(fn, rev, body):
1547 1547 matches[rev].setdefault(fn, {})
1548 1548 m = matches[rev][fn]
1549 1549 for lnum, cstart, cend, line in matchlines(body):
1550 1550 s = linestate(line, lnum, cstart, cend)
1551 1551 m[s] = s
1552 1552
1553 1553 # FIXME: prev isn't used, why ?
1554 1554 prev = {}
1555 1555 ucache = {}
1556 1556 def display(fn, rev, states, prevstates):
1557 1557 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1558 1558 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1559 1559 counts = {'-': 0, '+': 0}
1560 1560 filerevmatches = {}
1561 1561 for l in diff:
1562 1562 if incrementing or not opts['all']:
1563 1563 change = ((l in prevstates) and '-') or '+'
1564 1564 r = rev
1565 1565 else:
1566 1566 change = ((l in states) and '-') or '+'
1567 1567 r = prev[fn]
1568 1568 cols = [fn, str(rev)]
1569 1569 if opts['line_number']:
1570 1570 cols.append(str(l.linenum))
1571 1571 if opts['all']:
1572 1572 cols.append(change)
1573 1573 if opts['user']:
1574 1574 cols.append(trimuser(ui, getchange(rev)[1], rev,
1575 1575 ucache))
1576 1576 if opts['files_with_matches']:
1577 1577 c = (fn, rev)
1578 1578 if c in filerevmatches:
1579 1579 continue
1580 1580 filerevmatches[c] = 1
1581 1581 else:
1582 1582 cols.append(l.line)
1583 1583 ui.write(sep.join(cols), eol)
1584 1584 counts[change] += 1
1585 1585 return counts['+'], counts['-']
1586 1586
1587 1587 fstate = {}
1588 1588 skip = {}
1589 1589 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1590 1590 count = 0
1591 1591 incrementing = False
1592 1592 for st, rev, fns in changeiter:
1593 1593 if st == 'window':
1594 1594 incrementing = rev
1595 1595 matches.clear()
1596 1596 elif st == 'add':
1597 1597 change = repo.changelog.read(repo.lookup(str(rev)))
1598 1598 mf = repo.manifest.read(change[0])
1599 1599 matches[rev] = {}
1600 1600 for fn in fns:
1601 1601 if fn in skip:
1602 1602 continue
1603 1603 fstate.setdefault(fn, {})
1604 1604 try:
1605 1605 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1606 1606 except KeyError:
1607 1607 pass
1608 1608 elif st == 'iter':
1609 1609 states = matches[rev].items()
1610 1610 states.sort()
1611 1611 for fn, m in states:
1612 1612 if fn in skip:
1613 1613 continue
1614 1614 if incrementing or not opts['all'] or fstate[fn]:
1615 1615 pos, neg = display(fn, rev, m, fstate[fn])
1616 1616 count += pos + neg
1617 1617 if pos and not opts['all']:
1618 1618 skip[fn] = True
1619 1619 fstate[fn] = m
1620 1620 prev[fn] = rev
1621 1621
1622 1622 if not incrementing:
1623 1623 fstate = fstate.items()
1624 1624 fstate.sort()
1625 1625 for fn, state in fstate:
1626 1626 if fn in skip:
1627 1627 continue
1628 1628 display(fn, rev, {}, state)
1629 1629 return (count == 0 and 1) or 0
1630 1630
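# Illustrative usage (file names and pattern are hypothetical):
#   hg grep -n 'TODO' hgext/somefile.py    # first matching revision, with line numbers
#   hg grep --all -u 'demandload'          # every change in match status, with the committer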
1631 1631 def heads(ui, repo, **opts):
1632 1632 """show current repository heads
1633 1633
1634 1634 Show all repository head changesets.
1635 1635
1636 1636 Repository "heads" are changesets that don't have children
1637 1637 changesets. They are where development generally takes place and
1638 1638 are the usual targets for update and merge operations.
1639 1639 """
1640 1640 if opts['rev']:
1641 1641 heads = repo.heads(repo.lookup(opts['rev']))
1642 1642 else:
1643 1643 heads = repo.heads()
1644 1644 br = None
1645 1645 if opts['branches']:
1646 1646 br = repo.branchlookup(heads)
1647 1647 displayer = show_changeset(ui, repo, opts)
1648 1648 for n in heads:
1649 1649 displayer.show(changenode=n, brinfo=br)
1650 1650
1651 1651 def identify(ui, repo):
1652 1652 """print information about the working copy
1653 1653
1654 1654 Print a short summary of the current state of the repo.
1655 1655
1656 1656 This summary identifies the repository state using one or two parent
1657 1657 hash identifiers, followed by a "+" if there are uncommitted changes
1658 1658 in the working directory, followed by a list of tags for this revision.
1659 1659 """
1660 1660 parents = [p for p in repo.dirstate.parents() if p != nullid]
1661 1661 if not parents:
1662 1662 ui.write(_("unknown\n"))
1663 1663 return
1664 1664
1665 1665 hexfunc = ui.verbose and hex or short
1666 1666 modified, added, removed, deleted, unknown = repo.changes()
1667 1667 output = ["%s%s" %
1668 1668 ('+'.join([hexfunc(parent) for parent in parents]),
1669 1669 (modified or added or removed or deleted) and "+" or "")]
1670 1670
1671 1671 if not ui.quiet:
1672 1672 # multiple tags for a single parent separated by '/'
1673 1673 parenttags = ['/'.join(tags)
1674 1674 for tags in map(repo.nodetags, parents) if tags]
1675 1675 # tags for multiple parents separated by ' + '
1676 1676 if parenttags:
1677 1677 output.append(' + '.join(parenttags))
1678 1678
1679 1679 ui.write("%s\n" % ' '.join(output))
1680 1680
1681 1681 def import_(ui, repo, patch1, *patches, **opts):
1682 1682 """import an ordered set of patches
1683 1683
1684 1684 Import a list of patches and commit them individually.
1685 1685
1686 1686 If there are outstanding changes in the working directory, import
1687 1687 will abort unless given the -f flag.
1688 1688
1689 1689 If a patch looks like a mail message (its first line starts with
1690 1690 "From " or looks like an RFC822 header), it will not be applied
1691 1691 unless the -f option is used. The importer neither parses nor
1692 1692 discards mail headers, so use -f only to override the "mailness"
1693 1693 safety check, not to import a real mail message.
1694 1694 """
1695 1695 patches = (patch1,) + patches
1696 1696
1697 1697 if not opts['force']:
1698 1698 modified, added, removed, deleted, unknown = repo.changes()
1699 1699 if modified or added or removed or deleted:
1700 1700 raise util.Abort(_("outstanding uncommitted changes"))
1701 1701
1702 1702 d = opts["base"]
1703 1703 strip = opts["strip"]
1704 1704
1705 1705 mailre = re.compile(r'(?:From |[\w-]+:)')
1706 1706
1707 1707 # attempt to detect the start of a patch
1708 1708 # (this heuristic is borrowed from quilt)
1709 1709 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1710 1710 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1711 1711 '(---|\*\*\*)[ \t])')
1712 1712
1713 1713 for patch in patches:
1714 1714 ui.status(_("applying %s\n") % patch)
1715 1715 pf = os.path.join(d, patch)
1716 1716
1717 1717 message = []
1718 1718 user = None
1719 1719 hgpatch = False
1720 1720 for line in file(pf):
1721 1721 line = line.rstrip()
1722 1722 if (not message and not hgpatch and
1723 1723 mailre.match(line) and not opts['force']):
1724 1724 if len(line) > 35:
1725 1725 line = line[:32] + '...'
1726 1726 raise util.Abort(_('first line looks like a '
1727 1727 'mail header: ') + line)
1728 1728 if diffre.match(line):
1729 1729 break
1730 1730 elif hgpatch:
1731 1731 # parse values when importing the result of an hg export
1732 1732 if line.startswith("# User "):
1733 1733 user = line[7:]
1734 1734 ui.debug(_('User: %s\n') % user)
1735 1735 elif not line.startswith("# ") and line:
1736 1736 message.append(line)
1737 1737 hgpatch = False
1738 1738 elif line == '# HG changeset patch':
1739 1739 hgpatch = True
1740 1740 message = [] # We may have collected garbage
1741 1741 else:
1742 1742 message.append(line)
1743 1743
1744 1744 # make sure message isn't empty
1745 1745 if not message:
1746 1746 message = _("imported patch %s\n") % patch
1747 1747 else:
1748 1748 message = "%s\n" % '\n'.join(message)
1749 1749 ui.debug(_('message:\n%s\n') % message)
1750 1750
1751 1751 files = util.patch(strip, pf, ui)
1752 1752
1753 1753 if len(files) > 0:
1754 1754 addremove(ui, repo, *files)
1755 1755 repo.commit(files, message, user)
1756 1756
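# Each patch is located relative to --base, applied with util.patch() at the
# given --strip level, the files it touches are picked up via addremove(),
# and one commit is created per patch using the user and message parsed from
# an '# HG changeset patch' header when one is present.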
1757 1757 def incoming(ui, repo, source="default", **opts):
1758 1758 """show new changesets found in source
1759 1759
1760 1760 Show new changesets found in the specified repo or the default
1761 1761 pull repo. These are the changesets that would be pulled if a pull
1762 1762 was requested.
1763 1763
1764 1764 For remote repositories, using --bundle avoids downloading the changesets
1765 1765 twice if the incoming is followed by a pull.
1766 1766 """
1767 1767 source = ui.expandpath(source)
1768 1768 other = hg.repository(ui, source)
1769 incoming = repo.findincoming(other)
1769 incoming = repo.findincoming(other, force=opts["force"])
1770 1770 if not incoming:
1771 1771 return
1772 1772
1773 1773 cleanup = None
1774 1774 if not other.local() or opts["bundle"]:
1775 1775 # create an uncompressed bundle
1776 1776 if not opts["bundle"]:
1777 1777 # create a temporary bundle
1778 1778 fd, fname = tempfile.mkstemp(suffix=".hg",
1779 1779 prefix="tmp-hg-incoming")
1780 1780 f = os.fdopen(fd, "wb")
1781 1781 cleanup = fname
1782 1782 else:
1783 1783 fname = opts["bundle"]
1784 1784 f = open(fname, "wb")
1785 1785
1786 1786 cg = other.changegroup(incoming, "incoming")
1787 1787 write_bundle(cg, fname, compress=other.local(), fh=f)
1788 1788 f.close()
1789 1789 if not other.local():
1790 1790 # use a bundlerepo
1791 1791 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1792 1792
1793 1793 o = other.changelog.nodesbetween(incoming)[0]
1794 1794 if opts['newest_first']:
1795 1795 o.reverse()
1796 1796 displayer = show_changeset(ui, other, opts)
1797 1797 for n in o:
1798 1798 parents = [p for p in other.changelog.parents(n) if p != nullid]
1799 1799 if opts['no_merges'] and len(parents) == 2:
1800 1800 continue
1801 1801 displayer.show(changenode=n)
1802 1802 if opts['patch']:
1803 1803 prev = (parents and parents[0]) or nullid
1804 1804 dodiff(ui, ui, other, prev, n)
1805 1805 ui.write("\n")
1806 1806
1807 1807 if cleanup:
1808 1808 os.unlink(cleanup)
1809 1809
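# For a non-local source (or when --bundle is given) the incoming changegroup
# is written to a bundle file, a temporary one unless --bundle names a
# destination, and a bundlerepository layered over the local repo is used to
# display the changesets; any temporary file is deleted afterwards.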
1810 1810 def init(ui, dest="."):
1811 1811 """create a new repository in the given directory
1812 1812
1813 1813 Initialize a new repository in the given directory. If the given
1814 1814 directory does not exist, it is created.
1815 1815
1816 1816 If no directory is given, the current directory is used.
1817 1817 """
1818 1818 if not os.path.exists(dest):
1819 1819 os.mkdir(dest)
1820 1820 hg.repository(ui, dest, create=1)
1821 1821
1822 1822 def locate(ui, repo, *pats, **opts):
1823 1823 """locate files matching specific patterns
1824 1824
1825 1825 Print all files under Mercurial control whose names match the
1826 1826 given patterns.
1827 1827
1828 1828 This command searches the current directory and its
1829 1829 subdirectories. To search an entire repository, move to the root
1830 1830 of the repository.
1831 1831
1832 1832 If no patterns are given to match, this command prints all file
1833 1833 names.
1834 1834
1835 1835 If you want to feed the output of this command into the "xargs"
1836 1836 command, use the "-0" option to both this command and "xargs".
1837 1837 This will avoid the problem of "xargs" treating single filenames
1838 1838 that contain white space as multiple filenames.
1839 1839 """
1840 1840 end = opts['print0'] and '\0' or '\n'
1841 1841 rev = opts['rev']
1842 1842 if rev:
1843 1843 node = repo.lookup(rev)
1844 1844 else:
1845 1845 node = None
1846 1846
1847 1847 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1848 1848 head='(?:.*/|)'):
1849 1849 if not node and repo.dirstate.state(abs) == '?':
1850 1850 continue
1851 1851 if opts['fullpath']:
1852 1852 ui.write(os.path.join(repo.root, abs), end)
1853 1853 else:
1854 1854 ui.write(((pats and rel) or abs), end)
1855 1855
1856 1856 def log(ui, repo, *pats, **opts):
1857 1857 """show revision history of entire repository or files
1858 1858
1859 1859 Print the revision history of the specified files or the entire project.
1860 1860
1861 1861 By default this command outputs: changeset id and hash, tags,
1862 1862 non-trivial parents, user, date and time, and a summary for each
1863 1863 commit. When the -v/--verbose switch is used, the list of changed
1864 1864 files and full commit message is shown.
1865 1865 """
1866 1866 class dui(object):
1867 1867 # Implement and delegate some ui protocol. Save hunks of
1868 1868 # output for later display in the desired order.
1869 1869 def __init__(self, ui):
1870 1870 self.ui = ui
1871 1871 self.hunk = {}
1872 1872 def bump(self, rev):
1873 1873 self.rev = rev
1874 1874 self.hunk[rev] = []
1875 1875 def note(self, *args):
1876 1876 if self.verbose:
1877 1877 self.write(*args)
1878 1878 def status(self, *args):
1879 1879 if not self.quiet:
1880 1880 self.write(*args)
1881 1881 def write(self, *args):
1882 1882 self.hunk[self.rev].append(args)
1883 1883 def debug(self, *args):
1884 1884 if self.debugflag:
1885 1885 self.write(*args)
1886 1886 def __getattr__(self, key):
1887 1887 return getattr(self.ui, key)
1888 1888
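# dui buffers every write() into self.hunk[rev] (a fresh list is started by
# bump()), so output produced while gathering data for a window can be
# replayed in the requested display order when the 'iter' events arrive.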
1889 1889 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1890 1890
1891 1891 if opts['limit']:
1892 1892 try:
1893 1893 limit = int(opts['limit'])
1894 1894 except ValueError:
1895 1895 raise util.Abort(_('limit must be a positive integer'))
1896 1896 if limit <= 0: raise util.Abort(_('limit must be positive'))
1897 1897 else:
1898 1898 limit = sys.maxint
1899 1899 count = 0
1900 1900
1901 1901 displayer = show_changeset(ui, repo, opts)
1902 1902 for st, rev, fns in changeiter:
1903 1903 if st == 'window':
1904 1904 du = dui(ui)
1905 1905 displayer.ui = du
1906 1906 elif st == 'add':
1907 1907 du.bump(rev)
1908 1908 changenode = repo.changelog.node(rev)
1909 1909 parents = [p for p in repo.changelog.parents(changenode)
1910 1910 if p != nullid]
1911 1911 if opts['no_merges'] and len(parents) == 2:
1912 1912 continue
1913 1913 if opts['only_merges'] and len(parents) != 2:
1914 1914 continue
1915 1915
1916 1916 if opts['keyword']:
1917 1917 changes = getchange(rev)
1918 1918 miss = 0
1919 1919 for k in [kw.lower() for kw in opts['keyword']]:
1920 1920 if not (k in changes[1].lower() or
1921 1921 k in changes[4].lower() or
1922 1922 k in " ".join(changes[3][:20]).lower()):
1923 1923 miss = 1
1924 1924 break
1925 1925 if miss:
1926 1926 continue
1927 1927
1928 1928 br = None
1929 1929 if opts['branches']:
1930 1930 br = repo.branchlookup([repo.changelog.node(rev)])
1931 1931
1932 1932 displayer.show(rev, brinfo=br)
1933 1933 if opts['patch']:
1934 1934 prev = (parents and parents[0]) or nullid
1935 1935 dodiff(du, du, repo, prev, changenode, match=matchfn)
1936 1936 du.write("\n\n")
1937 1937 elif st == 'iter':
1938 1938 if count == limit: break
1939 1939 if du.hunk[rev]:
1940 1940 count += 1
1941 1941 for args in du.hunk[rev]:
1942 1942 ui.write(*args)
1943 1943
1944 1944 def manifest(ui, repo, rev=None):
1945 1945 """output the latest or given revision of the project manifest
1946 1946
1947 1947 Print a list of version controlled files for the given revision.
1948 1948
1949 1949 The manifest is the list of files being version controlled. If no revision
1950 1950 is given then the tip is used.
1951 1951 """
1952 1952 if rev:
1953 1953 try:
1954 1954 # assume all revision numbers are for changesets
1955 1955 n = repo.lookup(rev)
1956 1956 change = repo.changelog.read(n)
1957 1957 n = change[0]
1958 1958 except hg.RepoError:
1959 1959 n = repo.manifest.lookup(rev)
1960 1960 else:
1961 1961 n = repo.manifest.tip()
1962 1962 m = repo.manifest.read(n)
1963 1963 mf = repo.manifest.readflags(n)
1964 1964 files = m.keys()
1965 1965 files.sort()
1966 1966
1967 1967 for f in files:
1968 1968 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1969 1969
1970 1970 def outgoing(ui, repo, dest="default-push", **opts):
1971 1971 """show changesets not found in destination
1972 1972
1973 1973 Show changesets not found in the specified destination repo or the
1974 1974 default push repo. These are the changesets that would be pushed
1975 1975 if a push was requested.
1976 1976
1977 1977 See pull for valid source format details.
1978 1978 """
1979 1979 dest = ui.expandpath(dest)
1980 1980 other = hg.repository(ui, dest)
1981 o = repo.findoutgoing(other)
1981 o = repo.findoutgoing(other, force=opts['force'])
1982 1982 o = repo.changelog.nodesbetween(o)[0]
1983 1983 if opts['newest_first']:
1984 1984 o.reverse()
1985 1985 displayer = show_changeset(ui, repo, opts)
1986 1986 for n in o:
1987 1987 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1988 1988 if opts['no_merges'] and len(parents) == 2:
1989 1989 continue
1990 1990 displayer.show(changenode=n)
1991 1991 if opts['patch']:
1992 1992 prev = (parents and parents[0]) or nullid
1993 1993 dodiff(ui, ui, repo, prev, n)
1994 1994 ui.write("\n")
1995 1995
1996 1996 def parents(ui, repo, rev=None, branches=None, **opts):
1997 1997 """show the parents of the working dir or revision
1998 1998
1999 1999 Print the working directory's parent revisions.
2000 2000 """
2001 2001 if rev:
2002 2002 p = repo.changelog.parents(repo.lookup(rev))
2003 2003 else:
2004 2004 p = repo.dirstate.parents()
2005 2005
2006 2006 br = None
2007 2007 if branches is not None:
2008 2008 br = repo.branchlookup(p)
2009 2009 displayer = show_changeset(ui, repo, opts)
2010 2010 for n in p:
2011 2011 if n != nullid:
2012 2012 displayer.show(changenode=n, brinfo=br)
2013 2013
2014 2014 def paths(ui, repo, search=None):
2015 2015 """show definition of symbolic path names
2016 2016
2017 2017 Show definition of symbolic path name NAME. If no name is given, show
2018 2018 definition of available names.
2019 2019
2020 2020 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2021 2021 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2022 2022 """
2023 2023 if search:
2024 2024 for name, path in ui.configitems("paths"):
2025 2025 if name == search:
2026 2026 ui.write("%s\n" % path)
2027 2027 return
2028 2028 ui.warn(_("not found!\n"))
2029 2029 return 1
2030 2030 else:
2031 2031 for name, path in ui.configitems("paths"):
2032 2032 ui.write("%s = %s\n" % (name, path))
2033 2033
2034 2034 def pull(ui, repo, source="default", **opts):
2035 2035 """pull changes from the specified source
2036 2036
2037 2037 Pull changes from a remote repository to a local one.
2038 2038
2039 2039 This finds all changes from the repository at the specified path
2040 2040 or URL and adds them to the local repository. By default, this
2041 2041 does not update the copy of the project in the working directory.
2042 2042
2043 2043 Valid URLs are of the form:
2044 2044
2045 2045 local/filesystem/path
2046 2046 http://[user@]host[:port][/path]
2047 2047 https://[user@]host[:port][/path]
2048 2048 ssh://[user@]host[:port][/path]
2049 2049
2050 2050 SSH requires an accessible shell account on the destination machine
2051 2051 and a copy of hg in the remote path. With SSH, paths are relative
2052 2052 to the remote user's home directory by default; use two slashes at
2053 2053 the start of a path to specify it as relative to the filesystem root.
2054 2054 """
2055 2055 source = ui.expandpath(source)
2056 2056 ui.status(_('pulling from %s\n') % (source))
2057 2057
2058 2058 if opts['ssh']:
2059 2059 ui.setconfig("ui", "ssh", opts['ssh'])
2060 2060 if opts['remotecmd']:
2061 2061 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2062 2062
2063 2063 other = hg.repository(ui, source)
2064 2064 revs = None
2065 2065 if opts['rev'] and not other.local():
2066 2066 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2067 2067 elif opts['rev']:
2068 2068 revs = [other.lookup(rev) for rev in opts['rev']]
2069 r = repo.pull(other, heads=revs)
2069 r = repo.pull(other, heads=revs, force=opts['force'])
2070 2070 if not r:
2071 2071 if opts['update']:
2072 2072 return update(ui, repo)
2073 2073 else:
2074 2074 ui.status(_("(run 'hg update' to get a working copy)\n"))
2075 2075
2076 2076 return r
2077 2077
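# Illustrative usage (paths and URLs are hypothetical):
#   hg pull -u http://example.com/hg/repo    # pull, then update the working directory
#   hg pull -r tip ../local-clone            # -r currently works only for local sources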
2078 2078 def push(ui, repo, dest="default-push", **opts):
2079 2079 """push changes to the specified destination
2080 2080
2081 2081 Push changes from the local repository to the given destination.
2082 2082
2083 2083 This is the symmetrical operation for pull. It helps to move
2084 2084 changes from the current repository to a different one. If the
2085 2085 destination is local this is identical to a pull in that directory
2086 2086 from the current one.
2087 2087
2088 2088 By default, push will refuse to run if it detects the result would
2089 2089 increase the number of remote heads. This generally indicates that
2090 2090 the client has forgotten to sync and merge before pushing.
2091 2091
2092 2092 Valid URLs are of the form:
2093 2093
2094 2094 local/filesystem/path
2095 2095 ssh://[user@]host[:port][/path]
2096 2096
2097 2097 SSH requires an accessible shell account on the destination
2098 2098 machine and a copy of hg in the remote path.
2099 2099 """
2100 2100 dest = ui.expandpath(dest)
2101 2101 ui.status('pushing to %s\n' % (dest))
2102 2102
2103 2103 if opts['ssh']:
2104 2104 ui.setconfig("ui", "ssh", opts['ssh'])
2105 2105 if opts['remotecmd']:
2106 2106 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2107 2107
2108 2108 other = hg.repository(ui, dest)
2109 2109 revs = None
2110 2110 if opts['rev']:
2111 2111 revs = [repo.lookup(rev) for rev in opts['rev']]
2112 2112 r = repo.push(other, opts['force'], revs=revs)
2113 2113 return r
2114 2114
2115 2115 def rawcommit(ui, repo, *flist, **rc):
2116 2116 """raw commit interface (DEPRECATED)
2117 2117
2118 2118 (DEPRECATED)
2119 2119 Low-level commit, for use in helper scripts.
2120 2120
2121 2121 This command is not intended to be used by normal users, as it is
2122 2122 primarily useful for importing from other SCMs.
2123 2123
2124 2124 This command is now deprecated and will be removed in a future
2125 2125 release; please use debugsetparents and commit instead.
2126 2126 """
2127 2127
2128 2128 ui.warn(_("(the rawcommit command is deprecated)\n"))
2129 2129
2130 2130 message = rc['message']
2131 2131 if not message and rc['logfile']:
2132 2132 try:
2133 2133 message = open(rc['logfile']).read()
2134 2134 except IOError:
2135 2135 pass
2136 2136 if not message and not rc['logfile']:
2137 2137 raise util.Abort(_("missing commit message"))
2138 2138
2139 2139 files = relpath(repo, list(flist))
2140 2140 if rc['files']:
2141 2141 files += open(rc['files']).read().splitlines()
2142 2142
2143 2143 rc['parent'] = map(repo.lookup, rc['parent'])
2144 2144
2145 2145 try:
2146 2146 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2147 2147 except ValueError, inst:
2148 2148 raise util.Abort(str(inst))
2149 2149
2150 2150 def recover(ui, repo):
2151 2151 """roll back an interrupted transaction
2152 2152
2153 2153 Recover from an interrupted commit or pull.
2154 2154
2155 2155 This command tries to fix the repository status after an interrupted
2156 2156 operation. It should only be necessary when Mercurial suggests it.
2157 2157 """
2158 2158 if repo.recover():
2159 2159 return repo.verify()
2160 2160 return False
2161 2161
2162 2162 def remove(ui, repo, pat, *pats, **opts):
2163 2163 """remove the specified files on the next commit
2164 2164
2165 2165 Schedule the indicated files for removal from the repository.
2166 2166
2167 2167 This command schedules the files to be removed at the next commit.
2168 2168 This only removes files from the current branch, not from the
2169 2169 entire project history. If the files still exist in the working
2170 2170 directory, they will be deleted from it.
2171 2171 """
2172 2172 names = []
2173 2173 def okaytoremove(abs, rel, exact):
2174 2174 modified, added, removed, deleted, unknown = repo.changes(files=[abs])
2175 2175 reason = None
2176 2176 if modified and not opts['force']:
2177 2177 reason = _('is modified')
2178 2178 elif added:
2179 2179 reason = _('has been marked for add')
2180 2180 elif unknown:
2181 2181 reason = _('is not managed')
2182 2182 if reason:
2183 2183 if exact:
2184 2184 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2185 2185 else:
2186 2186 return True
2187 2187 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
2188 2188 if okaytoremove(abs, rel, exact):
2189 2189 if ui.verbose or not exact:
2190 2190 ui.status(_('removing %s\n') % rel)
2191 2191 names.append(abs)
2192 2192 repo.remove(names, unlink=True)
2193 2193
2194 2194 def rename(ui, repo, *pats, **opts):
2195 2195 """rename files; equivalent of copy + remove
2196 2196
2197 2197 Mark dest as copies of sources; mark sources for deletion. If
2198 2198 dest is a directory, copies are put in that directory. If dest is
2199 2199 a file, there can only be one source.
2200 2200
2201 2201 By default, this command copies the contents of files as they
2202 2202 stand in the working directory. If invoked with --after, the
2203 2203 operation is recorded, but no copying is performed.
2204 2204
2205 2205 This command takes effect in the next commit.
2206 2206
2207 2207 NOTE: This command should be treated as experimental. While it
2208 2208 should properly record renamed files, this information is not yet
2209 2209 fully used by merge, nor fully reported by log.
2210 2210 """
2211 2211 try:
2212 2212 wlock = repo.wlock(0)
2213 2213 errs, copied = docopy(ui, repo, pats, opts, wlock)
2214 2214 names = []
2215 2215 for abs, rel, exact in copied:
2216 2216 if ui.verbose or not exact:
2217 2217 ui.status(_('removing %s\n') % rel)
2218 2218 names.append(abs)
2219 2219 repo.remove(names, True, wlock)
2220 2220 except lock.LockHeld, inst:
2221 2221 ui.warn(_("repository lock held by %s\n") % inst.args[0])
2222 2222 errs = 1
2223 2223 return errs
2224 2224
2225 2225 def revert(ui, repo, *pats, **opts):
2226 2226 """revert modified files or dirs back to their unmodified states
2227 2227
2228 2228 In its default mode, it reverts any uncommitted modifications made
2229 2229 to the named files or directories. This restores the contents of
2230 2230 the affected files to an unmodified state.
2231 2231
2232 2232 Using the -r option, it reverts the given files or directories to
2233 2233 their state as of an earlier revision. This can be helpful to "roll
2234 2234 back" some or all of a change that should not have been committed.
2235 2235
2236 2236 Revert modifies the working directory. It does not commit any
2237 2237 changes, or change the parent of the current working directory.
2238 2238
2239 2239 If a file has been deleted, it is recreated. If the executable
2240 2240 mode of a file was changed, it is reset.
2241 2241
2242 2242 If names are given, all files matching the names are reverted.
2243 2243
2244 2244 If no arguments are given, all files in the repository are reverted.
2245 2245 """
2246 2246 node = opts['rev'] and repo.lookup(opts['rev']) or \
2247 2247 repo.dirstate.parents()[0]
2248 2248
2249 2249 files, choose, anypats = matchpats(repo, pats, opts)
2250 2250 modified, added, removed, deleted, unknown = repo.changes(match=choose)
2251 2251 repo.forget(added)
2252 2252 repo.undelete(removed)
2253 2253
2254 2254 return repo.update(node, False, True, choose, False)
2255 2255
2256 2256 def root(ui, repo):
2257 2257 """print the root (top) of the current working dir
2258 2258
2259 2259 Print the root directory of the current repository.
2260 2260 """
2261 2261 ui.write(repo.root + "\n")
2262 2262
2263 2263 def serve(ui, repo, **opts):
2264 2264 """export the repository via HTTP
2265 2265
2266 2266 Start a local HTTP repository browser and pull server.
2267 2267
2268 2268 By default, the server logs accesses to stdout and errors to
2269 2269 stderr. Use the "-A" and "-E" options to log to files.
2270 2270 """
2271 2271
2272 2272 if opts["stdio"]:
2273 2273 fin, fout = sys.stdin, sys.stdout
2274 2274 sys.stdout = sys.stderr
2275 2275
2276 2276 # Prevent insertion/deletion of CRs
2277 2277 util.set_binary(fin)
2278 2278 util.set_binary(fout)
2279 2279
2280 2280 def getarg():
2281 2281 argline = fin.readline()[:-1]
2282 2282 arg, l = argline.split()
2283 2283 val = fin.read(int(l))
2284 2284 return arg, val
2285 2285 def respond(v):
2286 2286 fout.write("%d\n" % len(v))
2287 2287 fout.write(v)
2288 2288 fout.flush()
2289 2289
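# The --stdio protocol is line oriented: the client sends a command name on
# its own line; getarg() reads an 'argname length' line followed by exactly
# that many bytes of argument data, and respond() answers with the payload
# length on one line followed by the payload itself.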
2290 2290 lock = None
2291 2291
2292 2292 while 1:
2293 2293 cmd = fin.readline()[:-1]
2294 2294 if cmd == '':
2295 2295 return
2296 2296 if cmd == "heads":
2297 2297 h = repo.heads()
2298 2298 respond(" ".join(map(hex, h)) + "\n")
2299 2299 if cmd == "lock":
2300 2300 lock = repo.lock()
2301 2301 respond("")
2302 2302 if cmd == "unlock":
2303 2303 if lock:
2304 2304 lock.release()
2305 2305 lock = None
2306 2306 respond("")
2307 2307 elif cmd == "branches":
2308 2308 arg, nodes = getarg()
2309 2309 nodes = map(bin, nodes.split(" "))
2310 2310 r = []
2311 2311 for b in repo.branches(nodes):
2312 2312 r.append(" ".join(map(hex, b)) + "\n")
2313 2313 respond("".join(r))
2314 2314 elif cmd == "between":
2315 2315 arg, pairs = getarg()
2316 2316 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2317 2317 r = []
2318 2318 for b in repo.between(pairs):
2319 2319 r.append(" ".join(map(hex, b)) + "\n")
2320 2320 respond("".join(r))
2321 2321 elif cmd == "changegroup":
2322 2322 nodes = []
2323 2323 arg, roots = getarg()
2324 2324 nodes = map(bin, roots.split(" "))
2325 2325
2326 2326 cg = repo.changegroup(nodes, 'serve')
2327 2327 while 1:
2328 2328 d = cg.read(4096)
2329 2329 if not d:
2330 2330 break
2331 2331 fout.write(d)
2332 2332
2333 2333 fout.flush()
2334 2334
2335 2335 elif cmd == "addchangegroup":
2336 2336 if not lock:
2337 2337 respond("not locked")
2338 2338 continue
2339 2339 respond("")
2340 2340
2341 2341 r = repo.addchangegroup(fin)
2342 2342 respond("")
2343 2343
2344 2344 optlist = "name templates style address port ipv6 accesslog errorlog"
2345 2345 for o in optlist.split():
2346 2346 if opts[o]:
2347 2347 ui.setconfig("web", o, opts[o])
2348 2348
2349 2349 if opts['daemon'] and not opts['daemon_pipefds']:
2350 2350 rfd, wfd = os.pipe()
2351 2351 args = sys.argv[:]
2352 2352 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2353 2353 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2354 2354 args[0], args)
2355 2355 os.close(wfd)
2356 2356 os.read(rfd, 1)
2357 2357 os._exit(0)
2358 2358
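# Daemonization note: with --daemon the process re-runs itself with
# --daemon-pipefds, and the parent blocks on the read end of the pipe until
# the child writes a byte (after the listening socket and pid file are set
# up), so the parent only exits once the server is actually ready.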
2359 2359 try:
2360 2360 httpd = hgweb.create_server(repo)
2361 2361 except socket.error, inst:
2362 2362 raise util.Abort(_('cannot start server: ') + inst.args[1])
2363 2363
2364 2364 if ui.verbose:
2365 2365 addr, port = httpd.socket.getsockname()
2366 2366 if addr == '0.0.0.0':
2367 2367 addr = socket.gethostname()
2368 2368 else:
2369 2369 try:
2370 2370 addr = socket.gethostbyaddr(addr)[0]
2371 2371 except socket.error:
2372 2372 pass
2373 2373 if port != 80:
2374 2374 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2375 2375 else:
2376 2376 ui.status(_('listening at http://%s/\n') % addr)
2377 2377
2378 2378 if opts['pid_file']:
2379 2379 fp = open(opts['pid_file'], 'w')
2380 2380 fp.write(str(os.getpid()))
2381 2381 fp.close()
2382 2382
2383 2383 if opts['daemon_pipefds']:
2384 2384 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2385 2385 os.close(rfd)
2386 2386 os.write(wfd, 'y')
2387 2387 os.close(wfd)
2388 2388 sys.stdout.flush()
2389 2389 sys.stderr.flush()
2390 2390 fd = os.open(util.nulldev, os.O_RDWR)
2391 2391 if fd != 0: os.dup2(fd, 0)
2392 2392 if fd != 1: os.dup2(fd, 1)
2393 2393 if fd != 2: os.dup2(fd, 2)
2394 2394 if fd not in (0, 1, 2): os.close(fd)
2395 2395
2396 2396 httpd.serve_forever()
2397 2397
2398 2398 def status(ui, repo, *pats, **opts):
2399 2399 """show changed files in the working directory
2400 2400
2401 2401 Show changed files in the repository. If names are
2402 2402 given, only files that match are shown.
2403 2403
2404 2404 The codes used to show the status of files are:
2405 2405 M = modified
2406 2406 A = added
2407 2407 R = removed
2408 2408 ! = deleted, but still tracked
2409 2409 ? = not tracked
2410 2410 """
2411 2411
2412 2412 files, matchfn, anypats = matchpats(repo, pats, opts)
2413 2413 cwd = (pats and repo.getcwd()) or ''
2414 2414 modified, added, removed, deleted, unknown = [
2415 2415 [util.pathto(cwd, x) for x in n]
2416 2416 for n in repo.changes(files=files, match=matchfn)]
2417 2417
2418 2418 changetypes = [(_('modified'), 'M', modified),
2419 2419 (_('added'), 'A', added),
2420 2420 (_('removed'), 'R', removed),
2421 2421 (_('deleted'), '!', deleted),
2422 2422 (_('unknown'), '?', unknown)]
2423 2423
2424 2424 end = opts['print0'] and '\0' or '\n'
2425 2425
2426 2426 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2427 2427 or changetypes):
2428 2428 if opts['no_status']:
2429 2429 format = "%%s%s" % end
2430 2430 else:
2431 2431 format = "%s %%s%s" % (char, end)
2432 2432
2433 2433 for f in changes:
2434 2434 ui.write(format % f)
2435 2435
2436 2436 def tag(ui, repo, name, rev_=None, **opts):
2437 2437 """add a tag for the current tip or a given revision
2438 2438
2439 2439 Name a particular revision using <name>.
2440 2440
2441 2441 Tags are used to name particular revisions of the repository and are
2442 2442 very useful to compare different revision, to go back to significant
2443 2443 earlier versions or to mark branch points as releases, etc.
2444 2444
2445 2445 If no revision is given, the tip is used.
2446 2446
2447 2447 To facilitate version control, distribution, and merging of tags,
2448 2448 they are stored as a file named ".hgtags" which is managed
2449 2449 similarly to other project files and can be hand-edited if
2450 2450 necessary. The file '.hg/localtags' is used for local tags (not
2451 2451 shared among repositories).
2452 2452 """
2453 2453 if name == "tip":
2454 2454 raise util.Abort(_("the name 'tip' is reserved"))
2455 2455 if rev_ is not None:
2456 2456 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2457 2457 "please use 'hg tag [-r REV] NAME' instead\n"))
2458 2458 if opts['rev']:
2459 2459 raise util.Abort(_("use only one form to specify the revision"))
2460 2460 if opts['rev']:
2461 2461 rev_ = opts['rev']
2462 2462 if rev_:
2463 2463 r = hex(repo.lookup(rev_))
2464 2464 else:
2465 2465 r = hex(repo.changelog.tip())
2466 2466
2467 2467 disallowed = (revrangesep, '\r', '\n')
2468 2468 for c in disallowed:
2469 2469 if name.find(c) >= 0:
2470 2470 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2471 2471
2472 2472 repo.hook('pretag', throw=True, node=r, tag=name,
2473 2473 local=int(not not opts['local']))
2474 2474
2475 2475 if opts['local']:
2476 2476 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2477 2477 repo.hook('tag', node=r, tag=name, local=1)
2478 2478 return
2479 2479
2480 2480 for x in repo.changes():
2481 2481 if ".hgtags" in x:
2482 2482 raise util.Abort(_("working copy of .hgtags is changed "
2483 2483 "(please commit .hgtags manually)"))
2484 2484
2485 2485 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2486 2486 if repo.dirstate.state(".hgtags") == '?':
2487 2487 repo.add([".hgtags"])
2488 2488
2489 2489 message = (opts['message'] or
2490 2490 _("Added tag %s for changeset %s") % (name, r))
2491 2491 try:
2492 2492 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2493 2493 repo.hook('tag', node=r, tag=name, local=0)
2494 2494 except ValueError, inst:
2495 2495 raise util.Abort(str(inst))
2496 2496
2497 2497 def tags(ui, repo):
2498 2498 """list repository tags
2499 2499
2500 2500 List the repository tags.
2501 2501
2502 2502 This lists both regular and local tags.
2503 2503 """
2504 2504
2505 2505 l = repo.tagslist()
2506 2506 l.reverse()
2507 2507 for t, n in l:
2508 2508 try:
2509 2509 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2510 2510 except KeyError:
2511 2511 r = " ?:?"
2512 2512 ui.write("%-30s %s\n" % (t, r))
2513 2513
2514 2514 def tip(ui, repo, **opts):
2515 2515 """show the tip revision
2516 2516
2517 2517 Show the tip revision.
2518 2518 """
2519 2519 n = repo.changelog.tip()
2520 2520 br = None
2521 2521 if opts['branches']:
2522 2522 br = repo.branchlookup([n])
2523 2523 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2524 2524 if opts['patch']:
2525 2525 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2526 2526
2527 2527 def unbundle(ui, repo, fname, **opts):
2528 2528 """apply a changegroup file
2529 2529
2530 2530 Apply a compressed changegroup file generated by the bundle
2531 2531 command.
2532 2532 """
2533 2533 f = urllib.urlopen(fname)
2534 2534
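# The four-byte header selects how the stream is handled below: 'HG10'
# changegroups are decompressed with bz2, 'HG11' ones are read verbatim,
# and anything else is rejected as not a Mercurial bundle.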
2535 2535 header = f.read(4)
2536 2536 if header == "HG10":
2537 2537 def generator(f):
2538 2538 zd = bz2.BZ2Decompressor()
2539 2539 for chunk in f:
2540 2540 yield zd.decompress(chunk)
2541 2541 elif header == "HG11":
2542 2542 def generator(f):
2543 2543 for chunk in f:
2544 2544 yield chunk
2545 2545 else:
2546 2546 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2547 2547 gen = generator(util.filechunkiter(f, 4096))
2548 2548 if repo.addchangegroup(util.chunkbuffer(gen)):
2549 2549 return 1
2550 2550
2551 2551 if opts['update']:
2552 2552 return update(ui, repo)
2553 2553 else:
2554 2554 ui.status(_("(run 'hg update' to get a working copy)\n"))
2555 2555
2556 2556 def undo(ui, repo):
2557 2557 """undo the last commit or pull
2558 2558
2559 2559 Roll back the last pull or commit transaction on the
2560 2560 repository, restoring the project to its earlier state.
2561 2561
2562 2562 This command should be used with care. There is only one level of
2563 2563 undo and there is no redo.
2564 2564
2565 2565 This command is not intended for use on public repositories. Once
2566 2566 a change is visible for pull by other users, undoing it locally is
2567 2567 ineffective.
2568 2568 """
2569 2569 repo.undo()
2570 2570
2571 2571 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2572 2572 branch=None, **opts):
2573 2573 """update or merge working directory
2574 2574
2575 2575 Update the working directory to the specified revision.
2576 2576
2577 2577 If there are no outstanding changes in the working directory and
2578 2578 there is a linear relationship between the current version and the
2579 2579 requested version, the result is the requested version.
2580 2580
2581 2581 Otherwise the result is a merge between the contents of the
2582 2582 current working directory and the requested version. Files that
2583 2583 changed between either parent are marked as changed for the next
2584 2584 commit and a commit must be performed before any further updates
2585 2585 are allowed.
2586 2586
2587 2587 By default, update will refuse to run if doing so would require
2588 2588 merging or discarding local changes.
2589 2589 """
2590 2590 if branch:
2591 2591 br = repo.branchlookup(branch=branch)
2592 2592 found = []
2593 2593 for x in br:
2594 2594 if branch in br[x]:
2595 2595 found.append(x)
2596 2596 if len(found) > 1:
2597 2597 ui.warn(_("Found multiple heads for %s\n") % branch)
2598 2598 for x in found:
2599 2599 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2600 2600 return 1
2601 2601 if len(found) == 1:
2602 2602 node = found[0]
2603 2603 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2604 2604 else:
2605 2605 ui.warn(_("branch %s not found\n") % (branch))
2606 2606 return 1
2607 2607 else:
2608 2608 node = node and repo.lookup(node) or repo.changelog.tip()
2609 2609 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2610 2610
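# With -b/--branch the name is resolved via repo.branchlookup(): several
# matching heads are listed and the update aborts, exactly one match becomes
# the target, and an unknown branch is reported as not found.  Without -b the
# requested revision (default: tip) is looked up directly.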
2611 2611 def verify(ui, repo):
2612 2612 """verify the integrity of the repository
2613 2613
2614 2614 Verify the integrity of the current repository.
2615 2615
2616 2616 This will perform an extensive check of the repository's
2617 2617 integrity, validating the hashes and checksums of each entry in
2618 2618 the changelog, manifest, and tracked files, as well as the
2619 2619 integrity of their crosslinks and indices.
2620 2620 """
2621 2621 return repo.verify()
2622 2622
2623 2623 # Command options and aliases are listed here, alphabetically
2624 2624
2625 2625 table = {
2626 2626 "^add":
2627 2627 (add,
2628 2628 [('I', 'include', [], _('include names matching the given patterns')),
2629 2629 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2630 2630 _('hg add [OPTION]... [FILE]...')),
2631 2631 "addremove":
2632 2632 (addremove,
2633 2633 [('I', 'include', [], _('include names matching the given patterns')),
2634 2634 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2635 2635 _('hg addremove [OPTION]... [FILE]...')),
2636 2636 "^annotate":
2637 2637 (annotate,
2638 2638 [('r', 'rev', '', _('annotate the specified revision')),
2639 2639 ('a', 'text', None, _('treat all files as text')),
2640 2640 ('u', 'user', None, _('list the author')),
2641 2641 ('d', 'date', None, _('list the date')),
2642 2642 ('n', 'number', None, _('list the revision number (default)')),
2643 2643 ('c', 'changeset', None, _('list the changeset')),
2644 2644 ('I', 'include', [], _('include names matching the given patterns')),
2645 2645 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2646 2646 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2647 2647 "bundle":
2648 2648 (bundle,
2649 [],
2649 [('f', 'force', None,
2650 _('run even when remote repository is unrelated'))],
2650 2651 _('hg bundle FILE DEST')),
2651 2652 "cat":
2652 2653 (cat,
2653 2654 [('o', 'output', '', _('print output to file with formatted name')),
2654 2655 ('r', 'rev', '', _('print the given revision')),
2655 2656 ('I', 'include', [], _('include names matching the given patterns')),
2656 2657 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2657 2658 _('hg cat [OPTION]... FILE...')),
2658 2659 "^clone":
2659 2660 (clone,
2660 2661 [('U', 'noupdate', None, _('do not update the new working directory')),
2661 2662 ('r', 'rev', [],
2662 2663 _('a changeset you would like to have after cloning')),
2663 2664 ('', 'pull', None, _('use pull protocol to copy metadata')),
2664 2665 ('e', 'ssh', '', _('specify ssh command to use')),
2665 2666 ('', 'remotecmd', '',
2666 2667 _('specify hg command to run on the remote side'))],
2667 2668 _('hg clone [OPTION]... SOURCE [DEST]')),
2668 2669 "^commit|ci":
2669 2670 (commit,
2670 2671 [('A', 'addremove', None, _('run addremove during commit')),
2671 2672 ('m', 'message', '', _('use <text> as commit message')),
2672 2673 ('l', 'logfile', '', _('read the commit message from <file>')),
2673 2674 ('d', 'date', '', _('record datecode as commit date')),
2674 2675 ('u', 'user', '', _('record user as committer')),
2675 2676 ('I', 'include', [], _('include names matching the given patterns')),
2676 2677 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2677 2678 _('hg commit [OPTION]... [FILE]...')),
2678 2679 "copy|cp":
2679 2680 (copy,
2680 2681 [('A', 'after', None, _('record a copy that has already occurred')),
2681 2682 ('f', 'force', None,
2682 2683 _('forcibly copy over an existing managed file')),
2683 2684 ('I', 'include', [], _('include names matching the given patterns')),
2684 2685 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2685 2686 _('hg copy [OPTION]... [SOURCE]... DEST')),
2686 2687 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2687 2688 "debugcomplete": (debugcomplete, [], _('debugcomplete CMD')),
2688 2689 "debugrebuildstate":
2689 2690 (debugrebuildstate,
2690 2691 [('r', 'rev', '', _('revision to rebuild to'))],
2691 2692 _('debugrebuildstate [-r REV] [REV]')),
2692 2693 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2693 2694 "debugconfig": (debugconfig, [], _('debugconfig')),
2694 2695 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2695 2696 "debugstate": (debugstate, [], _('debugstate')),
2696 2697 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2697 2698 "debugindex": (debugindex, [], _('debugindex FILE')),
2698 2699 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2699 2700 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2700 2701 "debugwalk":
2701 2702 (debugwalk,
2702 2703 [('I', 'include', [], _('include names matching the given patterns')),
2703 2704 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2704 2705 _('debugwalk [OPTION]... [FILE]...')),
2705 2706 "^diff":
2706 2707 (diff,
2707 2708 [('r', 'rev', [], _('revision')),
2708 2709 ('a', 'text', None, _('treat all files as text')),
2709 2710 ('p', 'show-function', None,
2710 2711 _('show which function each change is in')),
2711 2712 ('w', 'ignore-all-space', None,
2712 2713 _('ignore white space when comparing lines')),
2713 2714 ('I', 'include', [], _('include names matching the given patterns')),
2714 2715 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2715 2716 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2716 2717 "^export":
2717 2718 (export,
2718 2719 [('o', 'output', '', _('print output to file with formatted name')),
2719 2720 ('a', 'text', None, _('treat all files as text')),
2720 2721 ('', 'switch-parent', None, _('diff against the second parent'))],
2721 2722 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2722 2723 "forget":
2723 2724 (forget,
2724 2725 [('I', 'include', [], _('include names matching the given patterns')),
2725 2726 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2726 2727 _('hg forget [OPTION]... FILE...')),
2727 2728 "grep":
2728 2729 (grep,
2729 2730 [('0', 'print0', None, _('end fields with NUL')),
2730 2731 ('', 'all', None, _('print all revisions that match')),
2731 2732 ('i', 'ignore-case', None, _('ignore case when matching')),
2732 2733 ('l', 'files-with-matches', None,
2733 2734 _('print only filenames and revs that match')),
2734 2735 ('n', 'line-number', None, _('print matching line numbers')),
2735 2736 ('r', 'rev', [], _('search in given revision range')),
2736 2737 ('u', 'user', None, _('print user who committed change')),
2737 2738 ('I', 'include', [], _('include names matching the given patterns')),
2738 2739 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2739 2740 _('hg grep [OPTION]... PATTERN [FILE]...')),
2740 2741 "heads":
2741 2742 (heads,
2742 2743 [('b', 'branches', None, _('show branches')),
2743 2744 ('', 'style', '', _('display using template map file')),
2744 2745 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2745 2746 ('', 'template', '', _('display with template'))],
2746 2747 _('hg heads [-b] [-r <rev>]')),
2747 2748 "help": (help_, [], _('hg help [COMMAND]')),
2748 2749 "identify|id": (identify, [], _('hg identify')),
2749 2750 "import|patch":
2750 2751 (import_,
2751 2752 [('p', 'strip', 1,
2752 2753 _('directory strip option for patch. This has the same\n') +
2753 2754 _('meaning as the corresponding patch option')),
2754 2755 ('b', 'base', '', _('base path')),
2755 2756 ('f', 'force', None,
2756 2757 _('skip check for outstanding uncommitted changes'))],
2757 2758 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2758 2759 "incoming|in": (incoming,
2759 2760 [('M', 'no-merges', None, _('do not show merges')),
2761 ('f', 'force', None,
2762 _('run even when remote repository is unrelated')),
2760 2763 ('', 'style', '', _('display using template map file')),
2761 2764 ('n', 'newest-first', None, _('show newest record first')),
2762 2765 ('', 'bundle', '', _('file to store the bundles into')),
2763 2766 ('p', 'patch', None, _('show patch')),
2764 2767 ('', 'template', '', _('display with template'))],
2765 2768 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2766 2769 "^init": (init, [], _('hg init [DEST]')),
2767 2770 "locate":
2768 2771 (locate,
2769 2772 [('r', 'rev', '', _('search the repository as it stood at rev')),
2770 2773 ('0', 'print0', None,
2771 2774 _('end filenames with NUL, for use with xargs')),
2772 2775 ('f', 'fullpath', None,
2773 2776 _('print complete paths from the filesystem root')),
2774 2777 ('I', 'include', [], _('include names matching the given patterns')),
2775 2778 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2776 2779 _('hg locate [OPTION]... [PATTERN]...')),
2777 2780 "^log|history":
2778 2781 (log,
2779 2782 [('b', 'branches', None, _('show branches')),
2780 2783 ('k', 'keyword', [], _('search for a keyword')),
2781 2784 ('l', 'limit', '', _('limit number of changes displayed')),
2782 2785 ('r', 'rev', [], _('show the specified revision or range')),
2783 2786 ('M', 'no-merges', None, _('do not show merges')),
2784 2787 ('', 'style', '', _('display using template map file')),
2785 2788 ('m', 'only-merges', None, _('show only merges')),
2786 2789 ('p', 'patch', None, _('show patch')),
2787 2790 ('', 'template', '', _('display with template')),
2788 2791 ('I', 'include', [], _('include names matching the given patterns')),
2789 2792 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2790 2793 _('hg log [OPTION]... [FILE]')),
2791 2794 "manifest": (manifest, [], _('hg manifest [REV]')),
2792 2795 "outgoing|out": (outgoing,
2793 2796 [('M', 'no-merges', None, _('do not show merges')),
2797 ('f', 'force', None,
2798 _('run even when remote repository is unrelated')),
2794 2799 ('p', 'patch', None, _('show patch')),
2795 2800 ('', 'style', '', _('display using template map file')),
2796 2801 ('n', 'newest-first', None, _('show newest record first')),
2797 2802 ('', 'template', '', _('display with template'))],
2798 2803 _('hg outgoing [-M] [-p] [-n] [DEST]')),
2799 2804 "^parents":
2800 2805 (parents,
2801 2806 [('b', 'branches', None, _('show branches')),
2802 2807 ('', 'style', '', _('display using template map file')),
2803 2808 ('', 'template', '', _('display with template'))],
2804 2809 _('hg parents [-b] [REV]')),
2805 2810 "paths": (paths, [], _('hg paths [NAME]')),
2806 2811 "^pull":
2807 2812 (pull,
2808 2813 [('u', 'update', None,
2809 2814 _('update the working directory to tip after pull')),
2810 2815 ('e', 'ssh', '', _('specify ssh command to use')),
2816 ('f', 'force', None,
2817 _('run even when remote repository is unrelated')),
2811 2818 ('r', 'rev', [], _('a specific revision you would like to pull')),
2812 2819 ('', 'remotecmd', '',
2813 2820 _('specify hg command to run on the remote side'))],
2814 2821 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
2815 2822 "^push":
2816 2823 (push,
2817 2824 [('f', 'force', None, _('force push')),
2818 2825 ('e', 'ssh', '', _('specify ssh command to use')),
2819 2826 ('r', 'rev', [], _('a specific revision you would like to push')),
2820 2827 ('', 'remotecmd', '',
2821 2828 _('specify hg command to run on the remote side'))],
2822 2829 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
2823 2830 "debugrawcommit|rawcommit":
2824 2831 (rawcommit,
2825 2832 [('p', 'parent', [], _('parent')),
2826 2833 ('d', 'date', '', _('date code')),
2827 2834 ('u', 'user', '', _('user')),
2828 2835 ('F', 'files', '', _('file list')),
2829 2836 ('m', 'message', '', _('commit message')),
2830 2837 ('l', 'logfile', '', _('commit message file'))],
2831 2838 _('hg debugrawcommit [OPTION]... [FILE]...')),
2832 2839 "recover": (recover, [], _('hg recover')),
2833 2840 "^remove|rm":
2834 2841 (remove,
2835 2842 [('f', 'force', None, _('remove file even if modified')),
2836 2843 ('I', 'include', [], _('include names matching the given patterns')),
2837 2844 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2838 2845 _('hg remove [OPTION]... FILE...')),
2839 2846 "rename|mv":
2840 2847 (rename,
2841 2848 [('A', 'after', None, _('record a rename that has already occurred')),
2842 2849 ('f', 'force', None,
2843 2850 _('forcibly copy over an existing managed file')),
2844 2851 ('I', 'include', [], _('include names matching the given patterns')),
2845 2852 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2846 2853 _('hg rename [OPTION]... SOURCE... DEST')),
2847 2854 "^revert":
2848 2855 (revert,
2849 2856 [('r', 'rev', '', _('revision to revert to')),
2850 2857 ('I', 'include', [], _('include names matching the given patterns')),
2851 2858 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2852 2859 _('hg revert [-r REV] [NAME]...')),
2853 2860 "root": (root, [], _('hg root')),
2854 2861 "^serve":
2855 2862 (serve,
2856 2863 [('A', 'accesslog', '', _('name of access log file to write to')),
2857 2864 ('d', 'daemon', None, _('run server in background')),
2858 2865 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2859 2866 ('E', 'errorlog', '', _('name of error log file to write to')),
2860 2867 ('p', 'port', 0, _('port to use (default: 8000)')),
2861 2868 ('a', 'address', '', _('address to use')),
2862 2869 ('n', 'name', '',
2863 2870 _('name to show in web pages (default: working dir)')),
2864 2871 ('', 'pid-file', '', _('name of file to write process ID to')),
2865 2872 ('', 'stdio', None, _('for remote clients')),
2866 2873 ('t', 'templates', '', _('web templates to use')),
2867 2874 ('', 'style', '', _('template style to use')),
2868 2875 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2869 2876 _('hg serve [OPTION]...')),
2870 2877 "^status|st":
2871 2878 (status,
2872 2879 [('m', 'modified', None, _('show only modified files')),
2873 2880 ('a', 'added', None, _('show only added files')),
2874 2881 ('r', 'removed', None, _('show only removed files')),
2875 2882 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2876 2883 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2877 2884 ('n', 'no-status', None, _('hide status prefix')),
2878 2885 ('0', 'print0', None,
2879 2886 _('end filenames with NUL, for use with xargs')),
2880 2887 ('I', 'include', [], _('include names matching the given patterns')),
2881 2888 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2882 2889 _('hg status [OPTION]... [FILE]...')),
2883 2890 "tag":
2884 2891 (tag,
2885 2892 [('l', 'local', None, _('make the tag local')),
2886 2893 ('m', 'message', '', _('message for tag commit log entry')),
2887 2894 ('d', 'date', '', _('record datecode as commit date')),
2888 2895 ('u', 'user', '', _('record user as committer')),
2889 2896 ('r', 'rev', '', _('revision to tag'))],
2890 2897 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2891 2898 "tags": (tags, [], _('hg tags')),
2892 2899 "tip":
2893 2900 (tip,
2894 2901 [('b', 'branches', None, _('show branches')),
2895 2902 ('', 'style', '', _('display using template map file')),
2896 2903 ('p', 'patch', None, _('show patch')),
2897 2904 ('', 'template', '', _('display with template'))],
2898 2905 _('hg tip [-b] [-p]')),
2899 2906 "unbundle":
2900 2907 (unbundle,
2901 2908 [('u', 'update', None,
2902 2909 _('update the working directory to tip after unbundle'))],
2903 2910 _('hg unbundle [-u] FILE')),
2904 2911 "undo": (undo, [], _('hg undo')),
2905 2912 "^update|up|checkout|co":
2906 2913 (update,
2907 2914 [('b', 'branch', '', _('checkout the head of a specific branch')),
2908 2915 ('', 'style', '', _('display using template map file')),
2909 2916 ('m', 'merge', None, _('allow merging of branches')),
2910 2917 ('C', 'clean', None, _('overwrite locally modified files')),
2911 2918 ('f', 'force', None, _('force a merge with outstanding changes')),
2912 2919 ('', 'template', '', _('display with template'))],
2913 2920 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2914 2921 "verify": (verify, [], _('hg verify')),
2915 2922 "version": (show_version, [], _('hg version')),
2916 2923 }
2917 2924
2918 2925 globalopts = [
2919 2926 ('R', 'repository', '',
2920 2927 _('repository root directory or symbolic path name')),
2921 2928 ('', 'cwd', '', _('change working directory')),
2922 2929 ('y', 'noninteractive', None,
2923 2930 _('do not prompt, assume \'yes\' for any required answers')),
2924 2931 ('q', 'quiet', None, _('suppress output')),
2925 2932 ('v', 'verbose', None, _('enable additional output')),
2926 2933 ('', 'debug', None, _('enable debugging output')),
2927 2934 ('', 'debugger', None, _('start debugger')),
2928 2935 ('', 'traceback', None, _('print traceback on exception')),
2929 2936 ('', 'time', None, _('time how long the command takes')),
2930 2937 ('', 'profile', None, _('print command execution profile')),
2931 2938 ('', 'version', None, _('output version information and exit')),
2932 2939 ('h', 'help', None, _('display help and exit')),
2933 2940 ]
2934 2941
2935 2942 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2936 2943 " debugindex debugindexdot")
2937 2944 optionalrepo = ("paths debugconfig")
2938 2945
2939 2946 def findpossible(cmd):
2940 2947 """
2941 2948 Return cmd -> (aliases, command table entry)
2942 2949 for each matching command
2943 2950 """
2944 2951 choice = {}
2945 2952 debugchoice = {}
2946 2953 for e in table.keys():
2947 2954 aliases = e.lstrip("^").split("|")
2948 2955 if cmd in aliases:
2949 2956 choice[cmd] = (aliases, table[e])
2950 2957 continue
2951 2958 for a in aliases:
2952 2959 if a.startswith(cmd):
2953 2960 if aliases[0].startswith("debug"):
2954 2961 debugchoice[a] = (aliases, table[e])
2955 2962 else:
2956 2963 choice[a] = (aliases, table[e])
2957 2964 break
2958 2965
2959 2966 if not choice and debugchoice:
2960 2967 choice = debugchoice
2961 2968
2962 2969 return choice
2963 2970
2964 2971 def find(cmd):
2965 2972 """Return (aliases, command table entry) for command string."""
2966 2973 choice = findpossible(cmd)
2967 2974
2968 2975 if choice.has_key(cmd):
2969 2976 return choice[cmd]
2970 2977
2971 2978 if len(choice) > 1:
2972 2979 clist = choice.keys()
2973 2980 clist.sort()
2974 2981 raise AmbiguousCommand(cmd, clist)
2975 2982
2976 2983 if choice:
2977 2984 return choice.values()[0]
2978 2985
2979 2986 raise UnknownCommand(cmd)
2980 2987
2981 2988 class SignalInterrupt(Exception):
2982 2989 """Exception raised on SIGTERM and SIGHUP."""
2983 2990
2984 2991 def catchterm(*args):
2985 2992 raise SignalInterrupt
2986 2993
2987 2994 def run():
2988 2995 sys.exit(dispatch(sys.argv[1:]))
2989 2996
2990 2997 class ParseError(Exception):
2991 2998 """Exception raised on errors in parsing the command line."""
2992 2999
2993 3000 def parse(ui, args):
2994 3001 options = {}
2995 3002 cmdoptions = {}
2996 3003
2997 3004 try:
2998 3005 args = fancyopts.fancyopts(args, globalopts, options)
2999 3006 except fancyopts.getopt.GetoptError, inst:
3000 3007 raise ParseError(None, inst)
3001 3008
3002 3009 if args:
3003 3010 cmd, args = args[0], args[1:]
3004 3011 aliases, i = find(cmd)
3005 3012 cmd = aliases[0]
3006 3013 defaults = ui.config("defaults", cmd)
3007 3014 if defaults:
3008 3015 args = defaults.split() + args
3009 3016 c = list(i[1])
3010 3017 else:
3011 3018 cmd = None
3012 3019 c = []
3013 3020
3014 3021 # combine global options into local
3015 3022 for o in globalopts:
3016 3023 c.append((o[0], o[1], options[o[1]], o[3]))
3017 3024
3018 3025 try:
3019 3026 args = fancyopts.fancyopts(args, c, cmdoptions)
3020 3027 except fancyopts.getopt.GetoptError, inst:
3021 3028 raise ParseError(cmd, inst)
3022 3029
3023 3030 # separate global options back out
3024 3031 for o in globalopts:
3025 3032 n = o[1]
3026 3033 options[n] = cmdoptions[n]
3027 3034 del cmdoptions[n]
3028 3035
3029 3036 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3030 3037
3031 3038 def dispatch(args):
3032 3039 signal.signal(signal.SIGTERM, catchterm)
3033 3040 try:
3034 3041 signal.signal(signal.SIGHUP, catchterm)
3035 3042 except AttributeError:
3036 3043 pass
3037 3044
3038 3045 try:
3039 3046 u = ui.ui()
3040 3047 except util.Abort, inst:
3041 3048 sys.stderr.write(_("abort: %s\n") % inst)
3042 3049 sys.exit(1)
3043 3050
3044 3051 external = []
3045 3052 for x in u.extensions():
3046 3053 def on_exception(exc, inst):
3047 3054 u.warn(_("*** failed to import extension %s\n") % x[1])
3048 3055 u.warn("%s\n" % inst)
3049 3056 if "--traceback" in sys.argv[1:]:
3050 3057 traceback.print_exc()
3051 3058 if x[1]:
3052 3059 try:
3053 3060 mod = imp.load_source(x[0], x[1])
3054 3061 except Exception, inst:
3055 3062 on_exception(Exception, inst)
3056 3063 continue
3057 3064 else:
3058 3065 def importh(name):
3059 3066 mod = __import__(name)
3060 3067 components = name.split('.')
3061 3068 for comp in components[1:]:
3062 3069 mod = getattr(mod, comp)
3063 3070 return mod
3064 3071 try:
3065 3072 try:
3066 3073 mod = importh("hgext." + x[0])
3067 3074 except ImportError:
3068 3075 mod = importh(x[0])
3069 3076 except Exception, inst:
3070 3077 on_exception(Exception, inst)
3071 3078 continue
3072 3079
3073 3080 external.append(mod)
3074 3081 for x in external:
3075 3082 cmdtable = getattr(x, 'cmdtable', {})
3076 3083 for t in cmdtable:
3077 3084 if t in table:
3078 3085 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3079 3086 table.update(cmdtable)
3080 3087
3081 3088 try:
3082 3089 cmd, func, args, options, cmdoptions = parse(u, args)
3083 3090 if options["time"]:
3084 3091 def get_times():
3085 3092 t = os.times()
3086 3093 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3087 3094 t = (t[0], t[1], t[2], t[3], time.clock())
3088 3095 return t
3089 3096 s = get_times()
3090 3097 def print_time():
3091 3098 t = get_times()
3092 3099 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3093 3100 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3094 3101 atexit.register(print_time)
3095 3102
3096 3103 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3097 3104 not options["noninteractive"])
3098 3105
3099 3106 # enter the debugger before command execution
3100 3107 if options['debugger']:
3101 3108 pdb.set_trace()
3102 3109
3103 3110 try:
3104 3111 if options['cwd']:
3105 3112 try:
3106 3113 os.chdir(options['cwd'])
3107 3114 except OSError, inst:
3108 3115 raise util.Abort('%s: %s' %
3109 3116 (options['cwd'], inst.strerror))
3110 3117
3111 3118 path = u.expandpath(options["repository"]) or ""
3112 3119 repo = path and hg.repository(u, path=path) or None
3113 3120
3114 3121 if options['help']:
3115 3122 help_(u, cmd, options['version'])
3116 3123 sys.exit(0)
3117 3124 elif options['version']:
3118 3125 show_version(u)
3119 3126 sys.exit(0)
3120 3127 elif not cmd:
3121 3128 help_(u, 'shortlist')
3122 3129 sys.exit(0)
3123 3130
3124 3131 if cmd not in norepo.split():
3125 3132 try:
3126 3133 if not repo:
3127 3134 repo = hg.repository(u, path=path)
3128 3135 u = repo.ui
3129 3136 for x in external:
3130 3137 if hasattr(x, 'reposetup'):
3131 3138 x.reposetup(u, repo)
3132 3139 except hg.RepoError:
3133 3140 if cmd not in optionalrepo.split():
3134 3141 raise
3135 3142 d = lambda: func(u, repo, *args, **cmdoptions)
3136 3143 else:
3137 3144 d = lambda: func(u, *args, **cmdoptions)
3138 3145
3139 3146 try:
3140 3147 if options['profile']:
3141 3148 import hotshot, hotshot.stats
3142 3149 prof = hotshot.Profile("hg.prof")
3143 3150 try:
3144 3151 try:
3145 3152 return prof.runcall(d)
3146 3153 except:
3147 3154 try:
3148 3155 u.warn(_('exception raised - generating '
3149 3156 'profile anyway\n'))
3150 3157 except:
3151 3158 pass
3152 3159 raise
3153 3160 finally:
3154 3161 prof.close()
3155 3162 stats = hotshot.stats.load("hg.prof")
3156 3163 stats.strip_dirs()
3157 3164 stats.sort_stats('time', 'calls')
3158 3165 stats.print_stats(40)
3159 3166 else:
3160 3167 return d()
3161 3168 finally:
3162 3169 u.flush()
3163 3170 except:
3164 3171 # enter the debugger when we hit an exception
3165 3172 if options['debugger']:
3166 3173 pdb.post_mortem(sys.exc_info()[2])
3167 3174 if options['traceback']:
3168 3175 traceback.print_exc()
3169 3176 raise
3170 3177 except ParseError, inst:
3171 3178 if inst.args[0]:
3172 3179 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3173 3180 help_(u, inst.args[0])
3174 3181 else:
3175 3182 u.warn(_("hg: %s\n") % inst.args[1])
3176 3183 help_(u, 'shortlist')
3177 3184 sys.exit(-1)
3178 3185 except AmbiguousCommand, inst:
3179 3186 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3180 3187 (inst.args[0], " ".join(inst.args[1])))
3181 3188 sys.exit(1)
3182 3189 except UnknownCommand, inst:
3183 3190 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3184 3191 help_(u, 'shortlist')
3185 3192 sys.exit(1)
3186 3193 except hg.RepoError, inst:
3187 3194 u.warn(_("abort: "), inst, "!\n")
3188 3195 except revlog.RevlogError, inst:
3189 3196 u.warn(_("abort: "), inst, "!\n")
3190 3197 except SignalInterrupt:
3191 3198 u.warn(_("killed!\n"))
3192 3199 except KeyboardInterrupt:
3193 3200 try:
3194 3201 u.warn(_("interrupted!\n"))
3195 3202 except IOError, inst:
3196 3203 if inst.errno == errno.EPIPE:
3197 3204 if u.debugflag:
3198 3205 u.warn(_("\nbroken pipe\n"))
3199 3206 else:
3200 3207 raise
3201 3208 except IOError, inst:
3202 3209 if hasattr(inst, "code"):
3203 3210 u.warn(_("abort: %s\n") % inst)
3204 3211 elif hasattr(inst, "reason"):
3205 3212 u.warn(_("abort: error: %s\n") % inst.reason[1])
3206 3213 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3207 3214 if u.debugflag:
3208 3215 u.warn(_("broken pipe\n"))
3209 3216 elif getattr(inst, "strerror", None):
3210 3217 if getattr(inst, "filename", None):
3211 3218 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3212 3219 else:
3213 3220 u.warn(_("abort: %s\n") % inst.strerror)
3214 3221 else:
3215 3222 raise
3216 3223 except OSError, inst:
3217 3224 if hasattr(inst, "filename"):
3218 3225 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3219 3226 else:
3220 3227 u.warn(_("abort: %s\n") % inst.strerror)
3221 3228 except util.Abort, inst:
3222 3229 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3223 3230 sys.exit(1)
3224 3231 except TypeError, inst:
3225 3232 # was this an argument error?
3226 3233 tb = traceback.extract_tb(sys.exc_info()[2])
3227 3234 if len(tb) > 2: # no
3228 3235 raise
3229 3236 u.debug(inst, "\n")
3230 3237 u.warn(_("%s: invalid arguments\n") % cmd)
3231 3238 help_(u, cmd)
3232 3239 except SystemExit:
3233 3240 # don't catch this in the catch-all below
3234 3241 raise
3235 3242 except:
3236 3243 u.warn(_("** unknown exception encountered, details follow\n"))
3237 3244 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3238 3245 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3239 3246 % version.get_version())
3240 3247 raise
3241 3248
3242 3249 sys.exit(-1)
@@ -1,1907 +1,1910
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 14
15 15 class localrepository(object):
16 16 def __del__(self):
17 17 self.transhandle = None
18 18 def __init__(self, parentui, path=None, create=0):
19 19 if not path:
20 20 p = os.getcwd()
21 21 while not os.path.isdir(os.path.join(p, ".hg")):
22 22 oldp = p
23 23 p = os.path.dirname(p)
24 24 if p == oldp:
25 25 raise repo.RepoError(_("no repo found"))
26 26 path = p
27 27 self.path = os.path.join(path, ".hg")
28 28
29 29 if not create and not os.path.isdir(self.path):
30 30 raise repo.RepoError(_("repository %s not found") % path)
31 31
32 32 self.root = os.path.abspath(path)
33 33 self.ui = ui.ui(parentui=parentui)
34 34 self.opener = util.opener(self.path)
35 35 self.wopener = util.opener(self.root)
36 36 self.manifest = manifest.manifest(self.opener)
37 37 self.changelog = changelog.changelog(self.opener)
38 38 self.tagscache = None
39 39 self.nodetagscache = None
40 40 self.encodepats = None
41 41 self.decodepats = None
42 42 self.transhandle = None
43 43
44 44 if create:
45 45 os.mkdir(self.path)
46 46 os.mkdir(self.join("data"))
47 47
48 48 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
49 49 try:
50 50 self.ui.readconfig(self.join("hgrc"), self.root)
51 51 except IOError:
52 52 pass
53 53
54 54 def hook(self, name, throw=False, **args):
55 55 def runhook(name, cmd):
56 56 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
57 57 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
58 58 r = util.system(cmd, environ=env, cwd=self.root)
59 59 if r:
60 60 desc, r = util.explain_exit(r)
61 61 if throw:
62 62 raise util.Abort(_('%s hook %s') % (name, desc))
63 63 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
64 64 return False
65 65 return True
66 66
67 67 r = True
68 68 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
69 69 if hname.split(".", 1)[0] == name and cmd]
70 70 hooks.sort()
71 71 for hname, cmd in hooks:
72 72 r = runhook(hname, cmd) and r
73 73 return r
74 74
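hook() above is driven purely by configuration: every [hooks] entry whose key, up to an optional '.suffix', equals the hook name is run in sorted key order, and the keyword arguments are exported to the command as HG_* environment variables. A small sketch of just the selection and environment building, with invented hook commands (it collects what would run rather than spawning anything):

    def select_hooks(config_hooks, name, **args):
        # mirror the matching above: compare the part before '.', keep sorted
        # key order, and expose the arguments as HG_* variables
        env = dict([('HG_' + k.upper(), v) for k, v in args.items()])
        keys = sorted(k for k in config_hooks if k.split('.', 1)[0] == name)
        return [(k, config_hooks[k], env) for k in keys]

    hooks = {'commit': 'echo committed', 'commit.notify': 'notify-someone',
             'changegroup': 'rebuild-index'}
    for hname, cmd, env in select_hooks(hooks, 'commit', node='abc1', parent1='def2'):
        print((hname, cmd, sorted(env.keys())))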
75 75 def tags(self):
76 76 '''return a mapping of tag to node'''
77 77 if not self.tagscache:
78 78 self.tagscache = {}
79 79 def addtag(self, k, n):
80 80 try:
81 81 bin_n = bin(n)
82 82 except TypeError:
83 83 bin_n = ''
84 84 self.tagscache[k.strip()] = bin_n
85 85
86 86 try:
87 87 # read each head of the tags file, ending with the tip
88 88 # and add each tag found to the map, with "newer" ones
89 89 # taking precedence
90 90 fl = self.file(".hgtags")
91 91 h = fl.heads()
92 92 h.reverse()
93 93 for r in h:
94 94 for l in fl.read(r).splitlines():
95 95 if l:
96 96 n, k = l.split(" ", 1)
97 97 addtag(self, k, n)
98 98 except KeyError:
99 99 pass
100 100
101 101 try:
102 102 f = self.opener("localtags")
103 103 for l in f:
104 104 n, k = l.split(" ", 1)
105 105 addtag(self, k, n)
106 106 except IOError:
107 107 pass
108 108
109 109 self.tagscache['tip'] = self.changelog.tip()
110 110
111 111 return self.tagscache
112 112
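The precedence in tags() is purely insertion order: the heads of .hgtags are read ending with the tip so later entries win, localtags then override anything from .hgtags, and 'tip' itself is always written last. A tiny sketch of that layering with invented node/tag pairs (the real code also converts the node through bin()):

    tagscache = {}
    # lines from .hgtags are "node tag"; heads are read ending with the tip,
    # so a later line for the same tag simply overwrites the earlier one
    for line in ["aa11 release-1.0", "bb22 release-1.0"]:
        n, k = line.split(" ", 1)
        tagscache[k.strip()] = n
    # localtags entries override anything that came from .hgtags ...
    tagscache["release-1.0"] = "cc33"
    # ... and 'tip' is always set last from the changelog
    tagscache["tip"] = "dd44"
    print(tagscache)   # {'release-1.0': 'cc33', 'tip': 'dd44'}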
113 113 def tagslist(self):
114 114 '''return a list of tags ordered by revision'''
115 115 l = []
116 116 for t, n in self.tags().items():
117 117 try:
118 118 r = self.changelog.rev(n)
119 119 except:
120 120 r = -2 # sort to the beginning of the list if unknown
121 121 l.append((r, t, n))
122 122 l.sort()
123 123 return [(t, n) for r, t, n in l]
124 124
125 125 def nodetags(self, node):
126 126 '''return the tags associated with a node'''
127 127 if not self.nodetagscache:
128 128 self.nodetagscache = {}
129 129 for t, n in self.tags().items():
130 130 self.nodetagscache.setdefault(n, []).append(t)
131 131 return self.nodetagscache.get(node, [])
132 132
133 133 def lookup(self, key):
134 134 try:
135 135 return self.tags()[key]
136 136 except KeyError:
137 137 try:
138 138 return self.changelog.lookup(key)
139 139 except:
140 140 raise repo.RepoError(_("unknown revision '%s'") % key)
141 141
142 142 def dev(self):
143 143 return os.stat(self.path).st_dev
144 144
145 145 def local(self):
146 146 return True
147 147
148 148 def join(self, f):
149 149 return os.path.join(self.path, f)
150 150
151 151 def wjoin(self, f):
152 152 return os.path.join(self.root, f)
153 153
154 154 def file(self, f):
155 155 if f[0] == '/':
156 156 f = f[1:]
157 157 return filelog.filelog(self.opener, f)
158 158
159 159 def getcwd(self):
160 160 return self.dirstate.getcwd()
161 161
162 162 def wfile(self, f, mode='r'):
163 163 return self.wopener(f, mode)
164 164
165 165 def wread(self, filename):
166 166 if self.encodepats == None:
167 167 l = []
168 168 for pat, cmd in self.ui.configitems("encode"):
169 169 mf = util.matcher(self.root, "", [pat], [], [])[1]
170 170 l.append((mf, cmd))
171 171 self.encodepats = l
172 172
173 173 data = self.wopener(filename, 'r').read()
174 174
175 175 for mf, cmd in self.encodepats:
176 176 if mf(filename):
177 177 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
178 178 data = util.filter(data, cmd)
179 179 break
180 180
181 181 return data
182 182
183 183 def wwrite(self, filename, data, fd=None):
184 184 if self.decodepats == None:
185 185 l = []
186 186 for pat, cmd in self.ui.configitems("decode"):
187 187 mf = util.matcher(self.root, "", [pat], [], [])[1]
188 188 l.append((mf, cmd))
189 189 self.decodepats = l
190 190
191 191 for mf, cmd in self.decodepats:
192 192 if mf(filename):
193 193 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
194 194 data = util.filter(data, cmd)
195 195 break
196 196
197 197 if fd:
198 198 return fd.write(data)
199 199 return self.wopener(filename, 'w').write(data)
200 200
201 201 def transaction(self):
202 202 tr = self.transhandle
203 203 if tr != None and tr.running():
204 204 return tr.nest()
205 205
206 206 # save dirstate for undo
207 207 try:
208 208 ds = self.opener("dirstate").read()
209 209 except IOError:
210 210 ds = ""
211 211 self.opener("journal.dirstate", "w").write(ds)
212 212
213 213 tr = transaction.transaction(self.ui.warn, self.opener,
214 214 self.join("journal"),
215 215 aftertrans(self.path))
216 216 self.transhandle = tr
217 217 return tr
218 218
219 219 def recover(self):
220 220 l = self.lock()
221 221 if os.path.exists(self.join("journal")):
222 222 self.ui.status(_("rolling back interrupted transaction\n"))
223 223 transaction.rollback(self.opener, self.join("journal"))
224 224 self.reload()
225 225 return True
226 226 else:
227 227 self.ui.warn(_("no interrupted transaction available\n"))
228 228 return False
229 229
230 230 def undo(self, wlock=None):
231 231 if not wlock:
232 232 wlock = self.wlock()
233 233 l = self.lock()
234 234 if os.path.exists(self.join("undo")):
235 235 self.ui.status(_("rolling back last transaction\n"))
236 236 transaction.rollback(self.opener, self.join("undo"))
237 237 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
238 238 self.reload()
239 239 self.wreload()
240 240 else:
241 241 self.ui.warn(_("no undo information available\n"))
242 242
243 243 def wreload(self):
244 244 self.dirstate.read()
245 245
246 246 def reload(self):
247 247 self.changelog.load()
248 248 self.manifest.load()
249 249 self.tagscache = None
250 250 self.nodetagscache = None
251 251
252 252 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
253 253 try:
254 254 l = lock.lock(self.join(lockname), 0, releasefn)
255 255 except lock.LockHeld, inst:
256 256 if not wait:
257 257 raise inst
258 258 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
259 259 try:
260 260 # default to 600 seconds timeout
261 261 l = lock.lock(self.join(lockname),
262 262 int(self.ui.config("ui", "timeout") or 600),
263 263 releasefn)
264 264 except lock.LockHeld, inst:
265 265 raise util.Abort(_("timeout while waiting for "
266 266 "lock held by %s") % inst.args[0])
267 267 if acquirefn:
268 268 acquirefn()
269 269 return l
270 270
271 271 def lock(self, wait=1):
272 272 return self.do_lock("lock", wait, acquirefn=self.reload)
273 273
274 274 def wlock(self, wait=1):
275 275 return self.do_lock("wlock", wait,
276 276 self.dirstate.write,
277 277 self.wreload)
278 278
279 279 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
280 280 "determine whether a new filenode is needed"
281 281 fp1 = manifest1.get(filename, nullid)
282 282 fp2 = manifest2.get(filename, nullid)
283 283
284 284 if fp2 != nullid:
285 285 # is one parent an ancestor of the other?
286 286 fpa = filelog.ancestor(fp1, fp2)
287 287 if fpa == fp1:
288 288 fp1, fp2 = fp2, nullid
289 289 elif fpa == fp2:
290 290 fp2 = nullid
291 291
292 292 # is the file unmodified from the parent? report existing entry
293 293 if fp2 == nullid and text == filelog.read(fp1):
294 294 return (fp1, None, None)
295 295
296 296 return (None, fp1, fp2)
297 297
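checkfilemerge() collapses the two candidate parents before deciding anything: if one is an ancestor of the other it is dropped, and if the surviving parent already holds exactly this text the existing filenode is reused instead of creating a new one. A standalone sketch of the same decision, with a stub ancestor function and invented node names standing in for the filelog:

    nullid = "\0" * 20

    def check(text, fp1, fp2, ancestor, read):
        if fp2 != nullid:
            fpa = ancestor(fp1, fp2)
            if fpa == fp1:            # fp1 is an ancestor of fp2: keep fp2 only
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:          # fp2 is an ancestor of fp1: keep fp1 only
                fp2 = nullid
        if fp2 == nullid and text == read(fp1):
            return (fp1, None, None)  # unchanged: reuse the existing filenode
        return (None, fp1, fp2)       # changed: new filenode with these parents

    # 'new' descends from 'old' and the working text equals what 'new' holds,
    # so no new filenode is needed.
    print(check("data", "old", "new", lambda a, b: "old", {"new": "data"}.get))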
298 298 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
299 299 orig_parent = self.dirstate.parents()[0] or nullid
300 300 p1 = p1 or self.dirstate.parents()[0] or nullid
301 301 p2 = p2 or self.dirstate.parents()[1] or nullid
302 302 c1 = self.changelog.read(p1)
303 303 c2 = self.changelog.read(p2)
304 304 m1 = self.manifest.read(c1[0])
305 305 mf1 = self.manifest.readflags(c1[0])
306 306 m2 = self.manifest.read(c2[0])
307 307 changed = []
308 308
309 309 if orig_parent == p1:
310 310 update_dirstate = 1
311 311 else:
312 312 update_dirstate = 0
313 313
314 314 if not wlock:
315 315 wlock = self.wlock()
316 316 l = self.lock()
317 317 tr = self.transaction()
318 318 mm = m1.copy()
319 319 mfm = mf1.copy()
320 320 linkrev = self.changelog.count()
321 321 for f in files:
322 322 try:
323 323 t = self.wread(f)
324 324 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
325 325 r = self.file(f)
326 326 mfm[f] = tm
327 327
328 328 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
329 329 if entry:
330 330 mm[f] = entry
331 331 continue
332 332
333 333 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
334 334 changed.append(f)
335 335 if update_dirstate:
336 336 self.dirstate.update([f], "n")
337 337 except IOError:
338 338 try:
339 339 del mm[f]
340 340 del mfm[f]
341 341 if update_dirstate:
342 342 self.dirstate.forget([f])
343 343 except:
344 344 # deleted from p2?
345 345 pass
346 346
347 347 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
348 348 user = user or self.ui.username()
349 349 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
350 350 tr.close()
351 351 if update_dirstate:
352 352 self.dirstate.setparents(n, nullid)
353 353
354 354 def commit(self, files=None, text="", user=None, date=None,
355 355 match=util.always, force=False, lock=None, wlock=None):
356 356 commit = []
357 357 remove = []
358 358 changed = []
359 359
360 360 if files:
361 361 for f in files:
362 362 s = self.dirstate.state(f)
363 363 if s in 'nmai':
364 364 commit.append(f)
365 365 elif s == 'r':
366 366 remove.append(f)
367 367 else:
368 368 self.ui.warn(_("%s not tracked!\n") % f)
369 369 else:
370 370 modified, added, removed, deleted, unknown = self.changes(match=match)
371 371 commit = modified + added
372 372 remove = removed
373 373
374 374 p1, p2 = self.dirstate.parents()
375 375 c1 = self.changelog.read(p1)
376 376 c2 = self.changelog.read(p2)
377 377 m1 = self.manifest.read(c1[0])
378 378 mf1 = self.manifest.readflags(c1[0])
379 379 m2 = self.manifest.read(c2[0])
380 380
381 381 if not commit and not remove and not force and p2 == nullid:
382 382 self.ui.status(_("nothing changed\n"))
383 383 return None
384 384
385 385 xp1 = hex(p1)
386 386 if p2 == nullid: xp2 = ''
387 387 else: xp2 = hex(p2)
388 388
389 389 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
390 390
391 391 if not wlock:
392 392 wlock = self.wlock()
393 393 if not lock:
394 394 lock = self.lock()
395 395 tr = self.transaction()
396 396
397 397 # check in files
398 398 new = {}
399 399 linkrev = self.changelog.count()
400 400 commit.sort()
401 401 for f in commit:
402 402 self.ui.note(f + "\n")
403 403 try:
404 404 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
405 405 t = self.wread(f)
406 406 except IOError:
407 407 self.ui.warn(_("trouble committing %s!\n") % f)
408 408 raise
409 409
410 410 r = self.file(f)
411 411
412 412 meta = {}
413 413 cp = self.dirstate.copied(f)
414 414 if cp:
415 415 meta["copy"] = cp
416 416 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
417 417 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
418 418 fp1, fp2 = nullid, nullid
419 419 else:
420 420 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
421 421 if entry:
422 422 new[f] = entry
423 423 continue
424 424
425 425 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
426 426 # remember what we've added so that we can later calculate
427 427 # the files to pull from a set of changesets
428 428 changed.append(f)
429 429
430 430 # update manifest
431 431 m1 = m1.copy()
432 432 m1.update(new)
433 433 for f in remove:
434 434 if f in m1:
435 435 del m1[f]
436 436 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
437 437 (new, remove))
438 438
439 439 # add changeset
440 440 new = new.keys()
441 441 new.sort()
442 442
443 443 if not text:
444 444 edittext = [""]
445 445 if p2 != nullid:
446 446 edittext.append("HG: branch merge")
447 447 edittext.extend(["HG: changed %s" % f for f in changed])
448 448 edittext.extend(["HG: removed %s" % f for f in remove])
449 449 if not changed and not remove:
450 450 edittext.append("HG: no files changed")
451 451 edittext.append("")
452 452 # run editor in the repository root
453 453 olddir = os.getcwd()
454 454 os.chdir(self.root)
455 455 edittext = self.ui.edit("\n".join(edittext))
456 456 os.chdir(olddir)
457 457 if not edittext.rstrip():
458 458 return None
459 459 text = edittext
460 460
461 461 user = user or self.ui.username()
462 462 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
463 463 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
464 464 parent2=xp2)
465 465 tr.close()
466 466
467 467 self.dirstate.setparents(n)
468 468 self.dirstate.update(new, "n")
469 469 self.dirstate.forget(remove)
470 470
471 471 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
472 472 return n
473 473
474 474 def walk(self, node=None, files=[], match=util.always):
475 475 if node:
476 476 fdict = dict.fromkeys(files)
477 477 for fn in self.manifest.read(self.changelog.read(node)[0]):
478 478 fdict.pop(fn, None)
479 479 if match(fn):
480 480 yield 'm', fn
481 481 for fn in fdict:
482 482 self.ui.warn(_('%s: No such file in rev %s\n') % (
483 483 util.pathto(self.getcwd(), fn), short(node)))
484 484 else:
485 485 for src, fn in self.dirstate.walk(files, match):
486 486 yield src, fn
487 487
488 488 def changes(self, node1=None, node2=None, files=[], match=util.always,
489 489 wlock=None):
490 490 """return changes between two nodes or node and working directory
491 491
492 492 If node1 is None, use the first dirstate parent instead.
493 493 If node2 is None, compare node1 with working directory.
494 494 """
495 495
496 496 def fcmp(fn, mf):
497 497 t1 = self.wread(fn)
498 498 t2 = self.file(fn).read(mf.get(fn, nullid))
499 499 return cmp(t1, t2)
500 500
501 501 def mfmatches(node):
502 502 change = self.changelog.read(node)
503 503 mf = dict(self.manifest.read(change[0]))
504 504 for fn in mf.keys():
505 505 if not match(fn):
506 506 del mf[fn]
507 507 return mf
508 508
509 509 if node1:
510 510 # read the manifest from node1 before the manifest from node2,
511 511 # so that we'll hit the manifest cache if we're going through
512 512 # all the revisions in parent->child order.
513 513 mf1 = mfmatches(node1)
514 514
515 515 # are we comparing the working directory?
516 516 if not node2:
517 517 if not wlock:
518 518 try:
519 519 wlock = self.wlock(wait=0)
520 520 except lock.LockException:
521 521 wlock = None
522 522 lookup, modified, added, removed, deleted, unknown = (
523 523 self.dirstate.changes(files, match))
524 524
525 525 # are we comparing working dir against its parent?
526 526 if not node1:
527 527 if lookup:
528 528 # do a full compare of any files that might have changed
529 529 mf2 = mfmatches(self.dirstate.parents()[0])
530 530 for f in lookup:
531 531 if fcmp(f, mf2):
532 532 modified.append(f)
533 533 elif wlock is not None:
534 534 self.dirstate.update([f], "n")
535 535 else:
536 536 # we are comparing working dir against non-parent
537 537 # generate a pseudo-manifest for the working dir
538 538 mf2 = mfmatches(self.dirstate.parents()[0])
539 539 for f in lookup + modified + added:
540 540 mf2[f] = ""
541 541 for f in removed:
542 542 if f in mf2:
543 543 del mf2[f]
544 544 else:
545 545 # we are comparing two revisions
546 546 deleted, unknown = [], []
547 547 mf2 = mfmatches(node2)
548 548
549 549 if node1:
550 550 # flush lists from dirstate before comparing manifests
551 551 modified, added = [], []
552 552
553 553 for fn in mf2:
554 554 if mf1.has_key(fn):
555 555 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
556 556 modified.append(fn)
557 557 del mf1[fn]
558 558 else:
559 559 added.append(fn)
560 560
561 561 removed = mf1.keys()
562 562
563 563 # sort and return results:
564 564 for l in modified, added, removed, deleted, unknown:
565 565 l.sort()
566 566 return (modified, added, removed, deleted, unknown)
567 567
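When two revisions are compared, the loop above is a plain manifest diff: a file in both manifests with differing entries is modified, a file only in the newer manifest is added, and whatever is left of the older manifest afterwards is removed. A minimal sketch on two plain dicts with invented file names and node strings:

    mf1 = {"a.txt": "n1", "b.txt": "n2", "gone.txt": "n3"}   # older revision
    mf2 = {"a.txt": "n1", "b.txt": "n9", "new.txt": "n4"}    # newer revision

    mf1 = dict(mf1)          # the loop consumes mf1, so work on a copy
    modified, added = [], []
    for fn in mf2:
        if fn in mf1:
            if mf1[fn] != mf2[fn]:
                modified.append(fn)
            del mf1[fn]
        else:
            added.append(fn)
    removed = list(mf1)      # whatever was never matched against mf2

    for l in (modified, added, removed):
        l.sort()
    print((modified, added, removed))   # (['b.txt'], ['new.txt'], ['gone.txt'])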
568 568 def add(self, list, wlock=None):
569 569 if not wlock:
570 570 wlock = self.wlock()
571 571 for f in list:
572 572 p = self.wjoin(f)
573 573 if not os.path.exists(p):
574 574 self.ui.warn(_("%s does not exist!\n") % f)
575 575 elif not os.path.isfile(p):
576 576 self.ui.warn(_("%s not added: only files supported currently\n")
577 577 % f)
578 578 elif self.dirstate.state(f) in 'an':
579 579 self.ui.warn(_("%s already tracked!\n") % f)
580 580 else:
581 581 self.dirstate.update([f], "a")
582 582
583 583 def forget(self, list, wlock=None):
584 584 if not wlock:
585 585 wlock = self.wlock()
586 586 for f in list:
587 587 if self.dirstate.state(f) not in 'ai':
588 588 self.ui.warn(_("%s not added!\n") % f)
589 589 else:
590 590 self.dirstate.forget([f])
591 591
592 592 def remove(self, list, unlink=False, wlock=None):
593 593 if unlink:
594 594 for f in list:
595 595 try:
596 596 util.unlink(self.wjoin(f))
597 597 except OSError, inst:
598 598 if inst.errno != errno.ENOENT:
599 599 raise
600 600 if not wlock:
601 601 wlock = self.wlock()
602 602 for f in list:
603 603 p = self.wjoin(f)
604 604 if os.path.exists(p):
605 605 self.ui.warn(_("%s still exists!\n") % f)
606 606 elif self.dirstate.state(f) == 'a':
607 607 self.dirstate.forget([f])
608 608 elif f not in self.dirstate:
609 609 self.ui.warn(_("%s not tracked!\n") % f)
610 610 else:
611 611 self.dirstate.update([f], "r")
612 612
613 613 def undelete(self, list, wlock=None):
614 614 p = self.dirstate.parents()[0]
615 615 mn = self.changelog.read(p)[0]
616 616 mf = self.manifest.readflags(mn)
617 617 m = self.manifest.read(mn)
618 618 if not wlock:
619 619 wlock = self.wlock()
620 620 for f in list:
621 621 if self.dirstate.state(f) not in "r":
622 622 self.ui.warn("%s not removed!\n" % f)
623 623 else:
624 624 t = self.file(f).read(m[f])
625 625 self.wwrite(f, t)
626 626 util.set_exec(self.wjoin(f), mf[f])
627 627 self.dirstate.update([f], "n")
628 628
629 629 def copy(self, source, dest, wlock=None):
630 630 p = self.wjoin(dest)
631 631 if not os.path.exists(p):
632 632 self.ui.warn(_("%s does not exist!\n") % dest)
633 633 elif not os.path.isfile(p):
634 634 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
635 635 else:
636 636 if not wlock:
637 637 wlock = self.wlock()
638 638 if self.dirstate.state(dest) == '?':
639 639 self.dirstate.update([dest], "a")
640 640 self.dirstate.copy(source, dest)
641 641
642 642 def heads(self, start=None):
643 643 heads = self.changelog.heads(start)
644 644 # sort the output in rev descending order
645 645 heads = [(-self.changelog.rev(h), h) for h in heads]
646 646 heads.sort()
647 647 return [n for (r, n) in heads]
648 648
649 649 # branchlookup returns a dict giving a list of branches for
650 650 # each head. A branch is defined as the tag of a node or
651 651 # the branch of the node's parents. If a node has multiple
652 652 # branch tags, tags are eliminated if they are visible from other
653 653 # branch tags.
654 654 #
655 655 # So, for this graph: a->b->c->d->e
656 656 # \ /
657 657 # aa -----/
658 658 # a has tag 2.6.12
659 659 # d has tag 2.6.13
660 660 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
661 661 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
662 662 # from the list.
663 663 #
664 664 # It is possible that more than one head will have the same branch tag.
665 665 # callers need to check the result for multiple heads under the same
666 666 # branch tag if that is a problem for them (ie checkout of a specific
667 667 # branch).
668 668 #
669 669 # passing in a specific branch will limit the depth of the search
670 670 # through the parents. It won't limit the branches returned in the
671 671 # result though.
672 672 def branchlookup(self, heads=None, branch=None):
673 673 if not heads:
674 674 heads = self.heads()
675 675 headt = [ h for h in heads ]
676 676 chlog = self.changelog
677 677 branches = {}
678 678 merges = []
679 679 seenmerge = {}
680 680
681 681 # traverse the tree once for each head, recording in the branches
682 682 # dict which tags are visible from this head. The branches
683 683 # dict also records which tags are visible from each tag
684 684 # while we traverse.
685 685 while headt or merges:
686 686 if merges:
687 687 n, found = merges.pop()
688 688 visit = [n]
689 689 else:
690 690 h = headt.pop()
691 691 visit = [h]
692 692 found = [h]
693 693 seen = {}
694 694 while visit:
695 695 n = visit.pop()
696 696 if n in seen:
697 697 continue
698 698 pp = chlog.parents(n)
699 699 tags = self.nodetags(n)
700 700 if tags:
701 701 for x in tags:
702 702 if x == 'tip':
703 703 continue
704 704 for f in found:
705 705 branches.setdefault(f, {})[n] = 1
706 706 branches.setdefault(n, {})[n] = 1
707 707 break
708 708 if n not in found:
709 709 found.append(n)
710 710 if branch in tags:
711 711 continue
712 712 seen[n] = 1
713 713 if pp[1] != nullid and n not in seenmerge:
714 714 merges.append((pp[1], [x for x in found]))
715 715 seenmerge[n] = 1
716 716 if pp[0] != nullid:
717 717 visit.append(pp[0])
718 718 # traverse the branches dict, eliminating branch tags from each
719 719 # head that are visible from another branch tag for that head.
720 720 out = {}
721 721 viscache = {}
722 722 for h in heads:
723 723 def visible(node):
724 724 if node in viscache:
725 725 return viscache[node]
726 726 ret = {}
727 727 visit = [node]
728 728 while visit:
729 729 x = visit.pop()
730 730 if x in viscache:
731 731 ret.update(viscache[x])
732 732 elif x not in ret:
733 733 ret[x] = 1
734 734 if x in branches:
735 735 visit[len(visit):] = branches[x].keys()
736 736 viscache[node] = ret
737 737 return ret
738 738 if h not in branches:
739 739 continue
740 740 # O(n^2), but somewhat limited. This only searches the
741 741 # tags visible from a specific head, not all the tags in the
742 742 # whole repo.
743 743 for b in branches[h]:
744 744 vis = False
745 745 for bb in branches[h].keys():
746 746 if b != bb:
747 747 if b in visible(bb):
748 748 vis = True
749 749 break
750 750 if not vis:
751 751 l = out.setdefault(h, [])
752 752 l[len(l):] = self.nodetags(b)
753 753 return out
754 754
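The elimination rule described in the comment block above comes down to reachability: a candidate branch tag on a head is dropped when its node can be reached from another tagged node the same head also sees. A standalone sketch of just that rule on the toy graph from the comment (this is not the caching code above, only the same idea written naively):

    # parent pointers for: a -> b -> c -> d -> e, with aa branching off a and
    # merging back in at e; 'a' carries tag 2.6.12, 'd' carries tag 2.6.13
    parents = {'b': ['a'], 'c': ['b'], 'd': ['c'], 'aa': ['a'], 'e': ['d', 'aa']}

    def ancestors(node):
        seen, stack = {}, [node]
        while stack:
            n = stack.pop()
            if n not in seen:
                seen[n] = 1
                stack.extend(parents.get(n, []))
        return seen

    def prune(tagged_nodes):
        keep = []
        for t in tagged_nodes:
            others = [o for o in tagged_nodes if o != t]
            if not [o for o in others if t in ancestors(o)]:
                keep.append(t)
        return keep

    print(prune(['a', 'd']))   # ['d'] - 2.6.12 ('a') is visible from 2.6.13 ('d')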
755 755 def branches(self, nodes):
756 756 if not nodes:
757 757 nodes = [self.changelog.tip()]
758 758 b = []
759 759 for n in nodes:
760 760 t = n
761 761 while n:
762 762 p = self.changelog.parents(n)
763 763 if p[1] != nullid or p[0] == nullid:
764 764 b.append((t, n, p[0], p[1]))
765 765 break
766 766 n = p[0]
767 767 return b
768 768
769 769 def between(self, pairs):
770 770 r = []
771 771
772 772 for top, bottom in pairs:
773 773 n, l, i = top, [], 0
774 774 f = 1
775 775
776 776 while n != bottom:
777 777 p = self.changelog.parents(n)[0]
778 778 if i == f:
779 779 l.append(n)
780 780 f = f * 2
781 781 n = p
782 782 i += 1
783 783
784 784 r.append(l)
785 785
786 786 return r
787 787
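between() records nodes at exponentially growing distances (1, 2, 4, 8, ...) while walking from a head down to a known base, which is what lets the incoming search below narrow a long unknown branch in a logarithmic number of round trips. The doubling is easiest to see on a plain numbered chain; a standalone sketch, not the wire-protocol call:

    def sample(top, bottom):
        # parent of revision n is simply n - 1 in this linear toy history
        n, l, i, f = top, [], 0, 1
        while n != bottom:
            p = n - 1
            if i == f:          # record a node every time the distance doubles
                l.append(n)
                f *= 2
            n = p
            i += 1
        return l

    print(sample(100, 0))   # [99, 98, 96, 92, 84, 68, 36]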
788 def findincoming(self, remote, base=None, heads=None):
788 def findincoming(self, remote, base=None, heads=None, force=False):
789 789 m = self.changelog.nodemap
790 790 search = []
791 791 fetch = {}
792 792 seen = {}
793 793 seenbranch = {}
794 794 if base == None:
795 795 base = {}
796 796
797 797 # assume we're closer to the tip than the root
798 798 # and start by examining the heads
799 799 self.ui.status(_("searching for changes\n"))
800 800
801 801 if not heads:
802 802 heads = remote.heads()
803 803
804 804 unknown = []
805 805 for h in heads:
806 806 if h not in m:
807 807 unknown.append(h)
808 808 else:
809 809 base[h] = 1
810 810
811 811 if not unknown:
812 812 return []
813 813
814 814 rep = {}
815 815 reqcnt = 0
816 816
817 817 # search through remote branches
818 818 # a 'branch' here is a linear segment of history, with four parts:
819 819 # head, root, first parent, second parent
820 820 # (a branch always has two parents (or none) by definition)
821 821 unknown = remote.branches(unknown)
822 822 while unknown:
823 823 r = []
824 824 while unknown:
825 825 n = unknown.pop(0)
826 826 if n[0] in seen:
827 827 continue
828 828
829 829 self.ui.debug(_("examining %s:%s\n")
830 830 % (short(n[0]), short(n[1])))
831 831 if n[0] == nullid:
832 832 break
833 833 if n in seenbranch:
834 834 self.ui.debug(_("branch already found\n"))
835 835 continue
836 836 if n[1] and n[1] in m: # do we know the base?
837 837 self.ui.debug(_("found incomplete branch %s:%s\n")
838 838 % (short(n[0]), short(n[1])))
839 839 search.append(n) # schedule branch range for scanning
840 840 seenbranch[n] = 1
841 841 else:
842 842 if n[1] not in seen and n[1] not in fetch:
843 843 if n[2] in m and n[3] in m:
844 844 self.ui.debug(_("found new changeset %s\n") %
845 845 short(n[1]))
846 846 fetch[n[1]] = 1 # earliest unknown
847 847 base[n[2]] = 1 # latest known
848 848 continue
849 849
850 850 for a in n[2:4]:
851 851 if a not in rep:
852 852 r.append(a)
853 853 rep[a] = 1
854 854
855 855 seen[n[0]] = 1
856 856
857 857 if r:
858 858 reqcnt += 1
859 859 self.ui.debug(_("request %d: %s\n") %
860 860 (reqcnt, " ".join(map(short, r))))
861 861 for p in range(0, len(r), 10):
862 862 for b in remote.branches(r[p:p+10]):
863 863 self.ui.debug(_("received %s:%s\n") %
864 864 (short(b[0]), short(b[1])))
865 865 if b[0] in m:
866 866 self.ui.debug(_("found base node %s\n")
867 867 % short(b[0]))
868 868 base[b[0]] = 1
869 869 elif b[0] not in seen:
870 870 unknown.append(b)
871 871
872 872 # do binary search on the branches we found
873 873 while search:
874 874 n = search.pop(0)
875 875 reqcnt += 1
876 876 l = remote.between([(n[0], n[1])])[0]
877 877 l.append(n[1])
878 878 p = n[0]
879 879 f = 1
880 880 for i in l:
881 881 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
882 882 if i in m:
883 883 if f <= 2:
884 884 self.ui.debug(_("found new branch changeset %s\n") %
885 885 short(p))
886 886 fetch[p] = 1
887 887 base[i] = 1
888 888 else:
889 889 self.ui.debug(_("narrowed branch search to %s:%s\n")
890 890 % (short(p), short(i)))
891 891 search.append((p, i))
892 892 break
893 893 p, f = i, f * 2
894 894
895 895 # sanity check our fetch list
896 896 for f in fetch.keys():
897 897 if f in m:
898 898 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
899 899
900 900 if base.keys() == [nullid]:
901 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
901 if force:
902 self.ui.warn(_("warning: repository is unrelated\n"))
903 else:
904 raise util.Abort(_("repository is unrelated"))
902 905
903 906 self.ui.note(_("found new changesets starting at ") +
904 907 " ".join([short(f) for f in fetch]) + "\n")
905 908
906 909 self.ui.debug(_("%d total queries\n") % reqcnt)
907 910
908 911 return fetch.keys()
909 912
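The hunk above carries the behavioural change: an empty set of common ancestors (only nullid) used to produce just a warning, and it now aborts unless the caller passed force=True, with the message no longer worded for pull alone since this path is shared. A small sketch of the new decision, with Abort standing in for util.Abort:

    nullid = "\0" * 20

    class Abort(Exception):
        pass

    def check_related(base, force, warn):
        # base maps every common node found during the search; if the only
        # thing in common is the null revision, the repositories are unrelated
        if list(base.keys()) == [nullid]:
            if force:
                warn("warning: repository is unrelated\n")
            else:
                raise Abort("repository is unrelated")

    check_related({nullid: 1}, force=True, warn=lambda m: None)   # warns, proceeds
    try:
        check_related({nullid: 1}, force=False, warn=lambda m: None)
    except Abort:
        print("aborted: force is required to work on an unrelated repository")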
910 def findoutgoing(self, remote, base=None, heads=None):
913 def findoutgoing(self, remote, base=None, heads=None, force=False):
911 914 if base == None:
912 915 base = {}
913 self.findincoming(remote, base, heads)
916 self.findincoming(remote, base, heads, force=force)
914 917
915 918 self.ui.debug(_("common changesets up to ")
916 919 + " ".join(map(short, base.keys())) + "\n")
917 920
918 921 remain = dict.fromkeys(self.changelog.nodemap)
919 922
920 923 # prune everything remote has from the tree
921 924 del remain[nullid]
922 925 remove = base.keys()
923 926 while remove:
924 927 n = remove.pop(0)
925 928 if n in remain:
926 929 del remain[n]
927 930 for p in self.changelog.parents(n):
928 931 remove.append(p)
929 932
930 933 # find every node whose parents have been pruned
931 934 subset = []
932 935 for n in remain:
933 936 p1, p2 = self.changelog.parents(n)
934 937 if p1 not in remain and p2 not in remain:
935 938 subset.append(n)
936 939
937 940 # this is the set of all roots we have to push
938 941 return subset
939 942
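findoutgoing() turns the common bases into the set of local roots the remote lacks: everything reachable from a base is struck from the full node set, and whatever remains with no surviving parent is a root that has to be pushed. The same pruning on a toy graph with invented node names:

    nullid = None
    # local history: nullid -> r0 -> r1 -> r2, plus a local-only branch r1 -> l1 -> l2
    parents = {'r0': [nullid], 'r1': ['r0'], 'r2': ['r1'], 'l1': ['r1'], 'l2': ['l1']}
    base = ['r2']            # the remote is known to have r2 (and so its ancestors)

    remain = dict.fromkeys(parents)
    remove = list(base)
    while remove:            # prune everything reachable from the common bases
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            remove.extend(p for p in parents[n] if p is not nullid)

    roots = [n for n in remain
             if not [p for p in parents[n] if p in remain]]
    print(sorted(remain))    # ['l1', 'l2'] - the changesets the remote lacks
    print(roots)             # ['l1'] - the single root we have to push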
940 def pull(self, remote, heads=None):
943 def pull(self, remote, heads=None, force=False):
941 944 l = self.lock()
942 945
943 946 # if we have an empty repo, fetch everything
944 947 if self.changelog.tip() == nullid:
945 948 self.ui.status(_("requesting all changes\n"))
946 949 fetch = [nullid]
947 950 else:
948 fetch = self.findincoming(remote)
951 fetch = self.findincoming(remote, force=force)
949 952
950 953 if not fetch:
951 954 self.ui.status(_("no changes found\n"))
952 955 return 1
953 956
954 957 if heads is None:
955 958 cg = remote.changegroup(fetch, 'pull')
956 959 else:
957 960 cg = remote.changegroupsubset(fetch, heads, 'pull')
958 961 return self.addchangegroup(cg)
959 962
960 963 def push(self, remote, force=False, revs=None):
961 964 lock = remote.lock()
962 965
963 966 base = {}
964 967 heads = remote.heads()
965 inc = self.findincoming(remote, base, heads)
968 inc = self.findincoming(remote, base, heads, force=force)
966 969 if not force and inc:
967 970 self.ui.warn(_("abort: unsynced remote changes!\n"))
968 971 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
969 972 return 1
970 973
971 974 update = self.findoutgoing(remote, base)
972 975 if revs is not None:
973 976 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
974 977 else:
975 978 bases, heads = update, self.changelog.heads()
976 979
977 980 if not bases:
978 981 self.ui.status(_("no changes found\n"))
979 982 return 1
980 983 elif not force:
981 984 if len(bases) < len(heads):
982 985 self.ui.warn(_("abort: push creates new remote branches!\n"))
983 986 self.ui.status(_("(did you forget to merge?"
984 987 " use push -f to force)\n"))
985 988 return 1
986 989
987 990 if revs is None:
988 991 cg = self.changegroup(update, 'push')
989 992 else:
990 993 cg = self.changegroupsubset(update, revs, 'push')
991 994 return remote.addchangegroup(cg)
992 995
993 996 def changegroupsubset(self, bases, heads, source):
994 997 """This function generates a changegroup consisting of all the nodes
995 998 that are descendants of any of the bases, and ancestors of any of
996 999 the heads.
997 1000
998 1001 It is fairly complex as determining which filenodes and which
999 1002 manifest nodes need to be included for the changeset to be complete
1000 1003 is non-trivial.
1001 1004
1002 1005 Another wrinkle is doing the reverse, figuring out which changeset in
1003 1006 the changegroup a particular filenode or manifestnode belongs to."""
1004 1007
1005 1008 self.hook('preoutgoing', throw=True, source=source)
1006 1009
1007 1010 # Set up some initial variables
1008 1011 # Make it easy to refer to self.changelog
1009 1012 cl = self.changelog
1010 1013 # msng is short for missing - compute the list of changesets in this
1011 1014 # changegroup.
1012 1015 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1013 1016 # Some bases may turn out to be superfluous, and some heads may be
1014 1017 # too. nodesbetween will return the minimal set of bases and heads
1015 1018 # necessary to re-create the changegroup.
1016 1019
1017 1020 # Known heads are the list of heads that it is assumed the recipient
1018 1021 # of this changegroup will know about.
1019 1022 knownheads = {}
1020 1023 # We assume that all parents of bases are known heads.
1021 1024 for n in bases:
1022 1025 for p in cl.parents(n):
1023 1026 if p != nullid:
1024 1027 knownheads[p] = 1
1025 1028 knownheads = knownheads.keys()
1026 1029 if knownheads:
1027 1030 # Now that we know what heads are known, we can compute which
1028 1031 # changesets are known. The recipient must know about all
1029 1032 # changesets required to reach the known heads from the null
1030 1033 # changeset.
1031 1034 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1032 1035 junk = None
1033 1036 # Transform the list into an ersatz set.
1034 1037 has_cl_set = dict.fromkeys(has_cl_set)
1035 1038 else:
1036 1039 # If there were no known heads, the recipient cannot be assumed to
1037 1040 # know about any changesets.
1038 1041 has_cl_set = {}
1039 1042
1040 1043 # Make it easy to refer to self.manifest
1041 1044 mnfst = self.manifest
1042 1045 # We don't know which manifests are missing yet
1043 1046 msng_mnfst_set = {}
1044 1047 # Nor do we know which filenodes are missing.
1045 1048 msng_filenode_set = {}
1046 1049
1047 1050 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1048 1051 junk = None
1049 1052
1050 1053 # A changeset always belongs to itself, so the changenode lookup
1051 1054 # function for a changenode is identity.
1052 1055 def identity(x):
1053 1056 return x
1054 1057
1055 1058 # A function generating function. Sets up an environment for the
1056 1059 # inner function.
1057 1060 def cmp_by_rev_func(revlog):
1058 1061 # Compare two nodes by their revision number in the environment's
1059 1062 # revision history. Since the revision number both represents the
1060 1063 # most efficient order to read the nodes in, and represents a
1061 1064 # topological sorting of the nodes, this function is often useful.
1062 1065 def cmp_by_rev(a, b):
1063 1066 return cmp(revlog.rev(a), revlog.rev(b))
1064 1067 return cmp_by_rev
1065 1068
1066 1069 # If we determine that a particular file or manifest node must be a
1067 1070 # node that the recipient of the changegroup will already have, we can
1068 1071 # also assume the recipient will have all the parents. This function
1069 1072 # prunes them from the set of missing nodes.
1070 1073 def prune_parents(revlog, hasset, msngset):
1071 1074 haslst = hasset.keys()
1072 1075 haslst.sort(cmp_by_rev_func(revlog))
1073 1076 for node in haslst:
1074 1077 parentlst = [p for p in revlog.parents(node) if p != nullid]
1075 1078 while parentlst:
1076 1079 n = parentlst.pop()
1077 1080 if n not in hasset:
1078 1081 hasset[n] = 1
1079 1082 p = [p for p in revlog.parents(n) if p != nullid]
1080 1083 parentlst.extend(p)
1081 1084 for n in hasset:
1082 1085 msngset.pop(n, None)
1083 1086
1084 1087 # This is a function generating function used to set up an environment
1085 1088 # for the inner function to execute in.
1086 1089 def manifest_and_file_collector(changedfileset):
1087 1090 # This is an information gathering function that gathers
1088 1091 # information from each changeset node that goes out as part of
1089 1092 # the changegroup. The information gathered is a list of which
1090 1093 # manifest nodes are potentially required (the recipient may
1091 1094 # already have them) and total list of all files which were
1092 1095 # changed in any changeset in the changegroup.
1093 1096 #
1094 1097 # We also remember the first changenode we saw any manifest
1095 1098 # referenced by so we can later determine which changenode 'owns'
1096 1099 # the manifest.
1097 1100 def collect_manifests_and_files(clnode):
1098 1101 c = cl.read(clnode)
1099 1102 for f in c[3]:
1100 1103 # This is to make sure we only have one instance of each
1101 1104 # filename string for each filename.
1102 1105 changedfileset.setdefault(f, f)
1103 1106 msng_mnfst_set.setdefault(c[0], clnode)
1104 1107 return collect_manifests_and_files
1105 1108
1106 1109 # Figure out which manifest nodes (of the ones we think might be part
1107 1110 # of the changegroup) the recipient must know about and remove them
1108 1111 # from the changegroup.
1109 1112 def prune_manifests():
1110 1113 has_mnfst_set = {}
1111 1114 for n in msng_mnfst_set:
1112 1115 # If a 'missing' manifest thinks it belongs to a changenode
1113 1116 # the recipient is assumed to have, obviously the recipient
1114 1117 # must have that manifest.
1115 1118 linknode = cl.node(mnfst.linkrev(n))
1116 1119 if linknode in has_cl_set:
1117 1120 has_mnfst_set[n] = 1
1118 1121 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1119 1122
1120 1123 # Use the information collected in collect_manifests_and_files to say
1121 1124 # which changenode any manifestnode belongs to.
1122 1125 def lookup_manifest_link(mnfstnode):
1123 1126 return msng_mnfst_set[mnfstnode]
1124 1127
1125 1128 # A function generating function that sets up the initial environment
1126 1129 # for the inner function.
1127 1130 def filenode_collector(changedfiles):
1128 1131 next_rev = [0]
1129 1132 # This gathers information from each manifestnode included in the
1130 1133 # changegroup about which filenodes the manifest node references
1131 1134 # so we can include those in the changegroup too.
1132 1135 #
1133 1136 # It also remembers which changenode each filenode belongs to. It
1134 1137 # does this by assuming that a filenode belongs to the changenode
1135 1138 # that the first manifest referencing it belongs to.
1136 1139 def collect_msng_filenodes(mnfstnode):
1137 1140 r = mnfst.rev(mnfstnode)
1138 1141 if r == next_rev[0]:
1139 1142 # If the last rev we looked at was the one just previous,
1140 1143 # we only need to see a diff.
1141 1144 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1142 1145 # For each line in the delta
1143 1146 for dline in delta.splitlines():
1144 1147 # get the filename and filenode for that line
1145 1148 f, fnode = dline.split('\0')
1146 1149 fnode = bin(fnode[:40])
1147 1150 f = changedfiles.get(f, None)
1148 1151 # And if the file is in the list of files we care
1149 1152 # about.
1150 1153 if f is not None:
1151 1154 # Get the changenode this manifest belongs to
1152 1155 clnode = msng_mnfst_set[mnfstnode]
1153 1156 # Create the set of filenodes for the file if
1154 1157 # there isn't one already.
1155 1158 ndset = msng_filenode_set.setdefault(f, {})
1156 1159 # And set the filenode's changelog node to the
1157 1160 # manifest's if it hasn't been set already.
1158 1161 ndset.setdefault(fnode, clnode)
1159 1162 else:
1160 1163 # Otherwise we need a full manifest.
1161 1164 m = mnfst.read(mnfstnode)
1162 1165 # For every file we care about.
1163 1166 for f in changedfiles:
1164 1167 fnode = m.get(f, None)
1165 1168 # If it's in the manifest
1166 1169 if fnode is not None:
1167 1170 # See comments above.
1168 1171 clnode = msng_mnfst_set[mnfstnode]
1169 1172 ndset = msng_filenode_set.setdefault(f, {})
1170 1173 ndset.setdefault(fnode, clnode)
1171 1174 # Remember the revision we hope to see next.
1172 1175 next_rev[0] = r + 1
1173 1176 return collect_msng_filenodes
1174 1177
1175 1178 # We have a list of filenodes we think we need for a file, so let's remove
1176 1179 # all those we know the recipient must have.
1177 1180 def prune_filenodes(f, filerevlog):
1178 1181 msngset = msng_filenode_set[f]
1179 1182 hasset = {}
1180 1183 # If a 'missing' filenode thinks it belongs to a changenode we
1181 1184 # assume the recipient must have, then the recipient must have
1182 1185 # that filenode.
1183 1186 for n in msngset:
1184 1187 clnode = cl.node(filerevlog.linkrev(n))
1185 1188 if clnode in has_cl_set:
1186 1189 hasset[n] = 1
1187 1190 prune_parents(filerevlog, hasset, msngset)
1188 1191
1189 1192 # A function generating function that sets up a context for the
1190 1193 # inner function.
1191 1194 def lookup_filenode_link_func(fname):
1192 1195 msngset = msng_filenode_set[fname]
1193 1196 # Lookup the changenode the filenode belongs to.
1194 1197 def lookup_filenode_link(fnode):
1195 1198 return msngset[fnode]
1196 1199 return lookup_filenode_link
1197 1200
1198 1201 # Now that we have all these utility functions to help out and
1199 1202 # logically divide up the task, generate the group.
1200 1203 def gengroup():
1201 1204 # The set of changed files starts empty.
1202 1205 changedfiles = {}
1203 1206 # Create a changenode group generator that will call our functions
1204 1207 # back to lookup the owning changenode and collect information.
1205 1208 group = cl.group(msng_cl_lst, identity,
1206 1209 manifest_and_file_collector(changedfiles))
1207 1210 for chnk in group:
1208 1211 yield chnk
1209 1212
1210 1213 # The list of manifests has been collected by the generator
1211 1214 # calling our functions back.
1212 1215 prune_manifests()
1213 1216 msng_mnfst_lst = msng_mnfst_set.keys()
1214 1217 # Sort the manifestnodes by revision number.
1215 1218 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1216 1219 # Create a generator for the manifestnodes that calls our lookup
1217 1220 # and data collection functions back.
1218 1221 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1219 1222 filenode_collector(changedfiles))
1220 1223 for chnk in group:
1221 1224 yield chnk
1222 1225
1223 1226 # These are no longer needed, dereference and toss the memory for
1224 1227 # them.
1225 1228 msng_mnfst_lst = None
1226 1229 msng_mnfst_set.clear()
1227 1230
1228 1231 changedfiles = changedfiles.keys()
1229 1232 changedfiles.sort()
1230 1233 # Go through all our files in order sorted by name.
1231 1234 for fname in changedfiles:
1232 1235 filerevlog = self.file(fname)
1233 1236 # Toss out the filenodes that the recipient isn't really
1234 1237 # missing.
1235 1238 if msng_filenode_set.has_key(fname):
1236 1239 prune_filenodes(fname, filerevlog)
1237 1240 msng_filenode_lst = msng_filenode_set[fname].keys()
1238 1241 else:
1239 1242 msng_filenode_lst = []
1240 1243 # If any filenodes are left, generate the group for them,
1241 1244 # otherwise don't bother.
1242 1245 if len(msng_filenode_lst) > 0:
1243 1246 yield struct.pack(">l", len(fname) + 4) + fname
1244 1247 # Sort the filenodes by their revision #
1245 1248 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1246 1249 # Create a group generator and only pass in a changenode
1247 1250 # lookup function as we need to collect no information
1248 1251 # from filenodes.
1249 1252 group = filerevlog.group(msng_filenode_lst,
1250 1253 lookup_filenode_link_func(fname))
1251 1254 for chnk in group:
1252 1255 yield chnk
1253 1256 if msng_filenode_set.has_key(fname):
1254 1257 # Don't need this anymore, toss it to free memory.
1255 1258 del msng_filenode_set[fname]
1256 1259 # Signal that no more groups are left.
1257 1260 yield struct.pack(">l", 0)
1258 1261
1259 1262 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1260 1263
1261 1264 return util.chunkbuffer(gengroup())
1262 1265
1263 1266 def changegroup(self, basenodes, source):
1264 1267 """Generate a changegroup of all nodes that we have that a recipient
1265 1268 doesn't.
1266 1269
1267 1270 This is much easier than the previous function as we can assume that
1268 1271 the recipient has any changenode we aren't sending them."""
1269 1272
1270 1273 self.hook('preoutgoing', throw=True, source=source)
1271 1274
1272 1275 cl = self.changelog
1273 1276 nodes = cl.nodesbetween(basenodes, None)[0]
1274 1277 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1275 1278
1276 1279 def identity(x):
1277 1280 return x
1278 1281
1279 1282 def gennodelst(revlog):
1280 1283 for r in xrange(0, revlog.count()):
1281 1284 n = revlog.node(r)
1282 1285 if revlog.linkrev(n) in revset:
1283 1286 yield n
1284 1287
1285 1288 def changed_file_collector(changedfileset):
1286 1289 def collect_changed_files(clnode):
1287 1290 c = cl.read(clnode)
1288 1291 for fname in c[3]:
1289 1292 changedfileset[fname] = 1
1290 1293 return collect_changed_files
1291 1294
1292 1295 def lookuprevlink_func(revlog):
1293 1296 def lookuprevlink(n):
1294 1297 return cl.node(revlog.linkrev(n))
1295 1298 return lookuprevlink
1296 1299
1297 1300 def gengroup():
1298 1301 # construct a list of all changed files
1299 1302 changedfiles = {}
1300 1303
1301 1304 for chnk in cl.group(nodes, identity,
1302 1305 changed_file_collector(changedfiles)):
1303 1306 yield chnk
1304 1307 changedfiles = changedfiles.keys()
1305 1308 changedfiles.sort()
1306 1309
1307 1310 mnfst = self.manifest
1308 1311 nodeiter = gennodelst(mnfst)
1309 1312 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1310 1313 yield chnk
1311 1314
1312 1315 for fname in changedfiles:
1313 1316 filerevlog = self.file(fname)
1314 1317 nodeiter = gennodelst(filerevlog)
1315 1318 nodeiter = list(nodeiter)
1316 1319 if nodeiter:
1317 1320 yield struct.pack(">l", len(fname) + 4) + fname
1318 1321 lookup = lookuprevlink_func(filerevlog)
1319 1322 for chnk in filerevlog.group(nodeiter, lookup):
1320 1323 yield chnk
1321 1324
1322 1325 yield struct.pack(">l", 0)
1323 1326 self.hook('outgoing', node=hex(nodes[0]), source=source)
1324 1327
1325 1328 return util.chunkbuffer(gengroup())
1326 1329
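The generators above and addchangegroup() below agree on one small framing convention: every chunk is preceded by a 4-byte big-endian length that counts the length field itself, and a length of 4 or less terminates the group. A standalone round-trip of that framing with arbitrary payloads:

    import struct

    def frame(chunks):
        out = []
        for c in chunks:
            out.append(struct.pack(">l", len(c) + 4) + c)
        out.append(struct.pack(">l", 0))          # zero-length chunk ends the group
        return b"".join(out)

    def unframe(data):
        pos, chunks = 0, []
        while pos < len(data):
            l = struct.unpack(">l", data[pos:pos + 4])[0]
            if l <= 4:                            # terminator, mirrors getchunk()
                break
            chunks.append(data[pos + 4:pos + l])
            pos += l
        return chunks

    print(unframe(frame([b"first chunk", b"second chunk"])))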
1327 1330 def addchangegroup(self, source):
1328 1331
1329 1332 def getchunk():
1330 1333 d = source.read(4)
1331 1334 if not d:
1332 1335 return ""
1333 1336 l = struct.unpack(">l", d)[0]
1334 1337 if l <= 4:
1335 1338 return ""
1336 1339 d = source.read(l - 4)
1337 1340 if len(d) < l - 4:
1338 1341 raise repo.RepoError(_("premature EOF reading chunk"
1339 1342 " (got %d bytes, expected %d)")
1340 1343 % (len(d), l - 4))
1341 1344 return d
1342 1345
1343 1346 def getgroup():
1344 1347 while 1:
1345 1348 c = getchunk()
1346 1349 if not c:
1347 1350 break
1348 1351 yield c
1349 1352
1350 1353 def csmap(x):
1351 1354 self.ui.debug(_("add changeset %s\n") % short(x))
1352 1355 return self.changelog.count()
1353 1356
1354 1357 def revmap(x):
1355 1358 return self.changelog.rev(x)
1356 1359
1357 1360 if not source:
1358 1361 return
1359 1362
1360 1363 self.hook('prechangegroup', throw=True)
1361 1364
1362 1365 changesets = files = revisions = 0
1363 1366
1364 1367 tr = self.transaction()
1365 1368
1366 1369 oldheads = len(self.changelog.heads())
1367 1370
1368 1371 # pull off the changeset group
1369 1372 self.ui.status(_("adding changesets\n"))
1370 1373 co = self.changelog.tip()
1371 1374 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1372 1375 cnr, cor = map(self.changelog.rev, (cn, co))
1373 1376 if cn == nullid:
1374 1377 cnr = cor
1375 1378 changesets = cnr - cor
1376 1379
1377 1380 # pull off the manifest group
1378 1381 self.ui.status(_("adding manifests\n"))
1379 1382 mm = self.manifest.tip()
1380 1383 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1381 1384
1382 1385 # process the files
1383 1386 self.ui.status(_("adding file changes\n"))
1384 1387 while 1:
1385 1388 f = getchunk()
1386 1389 if not f:
1387 1390 break
1388 1391 self.ui.debug(_("adding %s revisions\n") % f)
1389 1392 fl = self.file(f)
1390 1393 o = fl.count()
1391 1394 n = fl.addgroup(getgroup(), revmap, tr)
1392 1395 revisions += fl.count() - o
1393 1396 files += 1
1394 1397
1395 1398 newheads = len(self.changelog.heads())
1396 1399 heads = ""
1397 1400 if oldheads and newheads > oldheads:
1398 1401 heads = _(" (+%d heads)") % (newheads - oldheads)
1399 1402
1400 1403 self.ui.status(_("added %d changesets"
1401 1404 " with %d changes to %d files%s\n")
1402 1405 % (changesets, revisions, files, heads))
1403 1406
1404 1407 self.hook('pretxnchangegroup', throw=True,
1405 1408 node=hex(self.changelog.node(cor+1)))
1406 1409
1407 1410 tr.close()
1408 1411
1409 1412 if changesets > 0:
1410 1413 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1411 1414
1412 1415 for i in range(cor + 1, cnr + 1):
1413 1416 self.hook("incoming", node=hex(self.changelog.node(i)))
1414 1417
1415 1418 def update(self, node, allow=False, force=False, choose=None,
1416 1419 moddirstate=True, forcemerge=False, wlock=None):
1417 1420 pl = self.dirstate.parents()
1418 1421 if not force and pl[1] != nullid:
1419 1422 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1420 1423 return 1
1421 1424
1422 1425 err = False
1423 1426
1424 1427 p1, p2 = pl[0], node
1425 1428 pa = self.changelog.ancestor(p1, p2)
1426 1429 m1n = self.changelog.read(p1)[0]
1427 1430 m2n = self.changelog.read(p2)[0]
1428 1431 man = self.manifest.ancestor(m1n, m2n)
1429 1432 m1 = self.manifest.read(m1n)
1430 1433 mf1 = self.manifest.readflags(m1n)
1431 1434 m2 = self.manifest.read(m2n).copy()
1432 1435 mf2 = self.manifest.readflags(m2n)
1433 1436 ma = self.manifest.read(man)
1434 1437 mfa = self.manifest.readflags(man)
1435 1438
1436 1439 modified, added, removed, deleted, unknown = self.changes()
1437 1440
1438 1441 # is this a jump, or a merge? i.e. is there a linear path
1439 1442 # from p1 to p2?
1440 1443 linear_path = (pa == p1 or pa == p2)
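# For example, updating from a revision to one of its descendants (or
# ancestors) makes the common ancestor equal to one of the two revisions,
# so the path is linear and no merge is needed; updating between two
# sibling heads does not, and requires a branch merge.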
1441 1444
1442 1445 if allow and linear_path:
1443 1446 raise util.Abort(_("there is nothing to merge, "
1444 1447 "just use 'hg update'"))
1445 1448 if allow and not forcemerge:
1446 1449 if modified or added or removed:
1447 1450 raise util.Abort(_("outstanding uncommitted changes"))
1448 1451 if not forcemerge and not force:
1449 1452 for f in unknown:
1450 1453 if f in m2:
1451 1454 t1 = self.wread(f)
1452 1455 t2 = self.file(f).read(m2[f])
1453 1456 if cmp(t1, t2) != 0:
1454 1457 raise util.Abort(_("'%s' already exists in the working"
1455 1458 " dir and differs from remote") % f)
1456 1459
1457 1460 # resolve the manifest to determine which files
1458 1461 # we care about merging
1459 1462 self.ui.note(_("resolving manifests\n"))
1460 1463 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1461 1464 (force, allow, moddirstate, linear_path))
1462 1465 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1463 1466 (short(man), short(m1n), short(m2n)))
1464 1467
1465 1468 merge = {}
1466 1469 get = {}
1467 1470 remove = []
1468 1471
1469 1472 # construct a working dir manifest
1470 1473 mw = m1.copy()
1471 1474 mfw = mf1.copy()
1472 1475 umap = dict.fromkeys(unknown)
1473 1476
1474 1477 for f in added + modified + unknown:
1475 1478 mw[f] = ""
1476 1479 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1477 1480
1478 1481 if moddirstate and not wlock:
1479 1482 wlock = self.wlock()
1480 1483
1481 1484 for f in deleted + removed:
1482 1485 if f in mw:
1483 1486 del mw[f]
1484 1487
1485 1488 # If we're jumping between revisions (as opposed to merging),
1486 1489 # and if neither the working directory nor the target rev has
1487 1490 # the file, then we need to remove it from the dirstate, to
1488 1491 # prevent the dirstate from listing the file when it is no
1489 1492 # longer in the manifest.
1490 1493 if moddirstate and linear_path and f not in m2:
1491 1494 self.dirstate.forget((f,))
1492 1495
1493 1496 # Compare manifests
1494 1497 for f, n in mw.iteritems():
1495 1498 if choose and not choose(f):
1496 1499 continue
1497 1500 if f in m2:
1498 1501 s = 0
1499 1502
1500 1503 # is the wfile new since m1, and match m2?
1501 1504 if f not in m1:
1502 1505 t1 = self.wread(f)
1503 1506 t2 = self.file(f).read(m2[f])
1504 1507 if cmp(t1, t2) == 0:
1505 1508 n = m2[f]
1506 1509 del t1, t2
1507 1510
1508 1511 # are files different?
1509 1512 if n != m2[f]:
1510 1513 a = ma.get(f, nullid)
1511 1514 # are both different from the ancestor?
1512 1515 if n != a and m2[f] != a:
1513 1516 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1514 1517 # merge executable bits
1515 1518 # "if we changed or they changed, change in merge"
1516 1519 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1517 1520 mode = ((a^b) | (a^c)) ^ a
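# The expression keeps the ancestor's exec bit unless one side changed it
# (and if both sides changed a single bit, they necessarily agree):
#   a (ancestor)  b (working)  c (remote)   ((a^b)|(a^c))^a
#        0             0            0              0
#        0             1            0              1
#        0             0            1              1
#        0             1            1              1
# and symmetrically with a = 1.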
1518 1521 merge[f] = (m1.get(f, nullid), m2[f], mode)
1519 1522 s = 1
1520 1523 # are we clobbering?
1521 1524 # is remote's version newer?
1522 1525 # or are we going back in time?
1523 1526 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1524 1527 self.ui.debug(_(" remote %s is newer, get\n") % f)
1525 1528 get[f] = m2[f]
1526 1529 s = 1
1527 1530 elif f in umap:
1528 1531 # this unknown file is the same as the checkout
1529 1532 get[f] = m2[f]
1530 1533
1531 1534 if not s and mfw[f] != mf2[f]:
1532 1535 if force:
1533 1536 self.ui.debug(_(" updating permissions for %s\n") % f)
1534 1537 util.set_exec(self.wjoin(f), mf2[f])
1535 1538 else:
1536 1539 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1537 1540 mode = ((a^b) | (a^c)) ^ a
1538 1541 if mode != b:
1539 1542 self.ui.debug(_(" updating permissions for %s\n")
1540 1543 % f)
1541 1544 util.set_exec(self.wjoin(f), mode)
1542 1545 del m2[f]
1543 1546 elif f in ma:
1544 1547 if n != ma[f]:
1545 1548 r = _("d")
1546 1549 if not force and (linear_path or allow):
1547 1550 r = self.ui.prompt(
1548 1551 (_(" local changed %s which remote deleted\n") % f) +
1549 1552 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1550 1553 if r == _("d"):
1551 1554 remove.append(f)
1552 1555 else:
1553 1556 self.ui.debug(_("other deleted %s\n") % f)
1554 1557 remove.append(f) # other deleted it
1555 1558 else:
1556 1559 # file is created on branch or in working directory
1557 1560 if force and f not in umap:
1558 1561 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1559 1562 remove.append(f)
1560 1563 elif n == m1.get(f, nullid): # same as parent
1561 1564 if p2 == pa: # going backwards?
1562 1565 self.ui.debug(_("remote deleted %s\n") % f)
1563 1566 remove.append(f)
1564 1567 else:
1565 1568 self.ui.debug(_("local modified %s, keeping\n") % f)
1566 1569 else:
1567 1570 self.ui.debug(_("working dir created %s, keeping\n") % f)
1568 1571
1569 1572 for f, n in m2.iteritems():
1570 1573 if choose and not choose(f):
1571 1574 continue
1572 1575 if f[0] == "/":
1573 1576 continue
1574 1577 if f in ma and n != ma[f]:
1575 1578 r = _("k")
1576 1579 if not force and (linear_path or allow):
1577 1580 r = self.ui.prompt(
1578 1581 (_("remote changed %s which local deleted\n") % f) +
1579 1582 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1580 1583 if r == _("k"):
1581 1584 get[f] = n
1582 1585 elif f not in ma:
1583 1586 self.ui.debug(_("remote created %s\n") % f)
1584 1587 get[f] = n
1585 1588 else:
1586 1589 if force or p2 == pa: # going backwards?
1587 1590 self.ui.debug(_("local deleted %s, recreating\n") % f)
1588 1591 get[f] = n
1589 1592 else:
1590 1593 self.ui.debug(_("local deleted %s\n") % f)
1591 1594
1592 1595 del mw, m1, m2, ma
1593 1596
1594 1597 if force:
1595 1598 for f in merge:
1596 1599 get[f] = merge[f][1]
1597 1600 merge = {}
1598 1601
1599 1602 if linear_path or force:
1600 1603 # we don't need to do any magic, just jump to the new rev
1601 1604 branch_merge = False
1602 1605 p1, p2 = p2, nullid
1603 1606 else:
1604 1607 if not allow:
1605 1608 self.ui.status(_("this update spans a branch"
1606 1609 " affecting the following files:\n"))
1607 1610 fl = merge.keys() + get.keys()
1608 1611 fl.sort()
1609 1612 for f in fl:
1610 1613 cf = ""
1611 1614 if f in merge:
1612 1615 cf = _(" (resolve)")
1613 1616 self.ui.status(" %s%s\n" % (f, cf))
1614 1617 self.ui.warn(_("aborting update spanning branches!\n"))
1615 1618 self.ui.status(_("(use update -m to merge across branches"
1616 1619 " or -C to lose changes)\n"))
1617 1620 return 1
1618 1621 branch_merge = True
1619 1622
1620 1623 # get the files we don't need to change
1621 1624 files = get.keys()
1622 1625 files.sort()
1623 1626 for f in files:
1624 1627 if f[0] == "/":
1625 1628 continue
1626 1629 self.ui.note(_("getting %s\n") % f)
1627 1630 t = self.file(f).read(get[f])
1628 1631 self.wwrite(f, t)
1629 1632 util.set_exec(self.wjoin(f), mf2[f])
1630 1633 if moddirstate:
1631 1634 if branch_merge:
1632 1635 self.dirstate.update([f], 'n', st_mtime=-1)
1633 1636 else:
1634 1637 self.dirstate.update([f], 'n')
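# A hedged reading of the -1: during a branch merge the fetched contents
# match the second parent rather than the recorded first parent, so the
# timestamp is left unset to force a real content comparison later and
# keep the file reported as modified.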
1635 1638
1636 1639 # merge the tricky bits
1637 1640 failedmerge = []
1638 1641 files = merge.keys()
1639 1642 files.sort()
1640 1643 xp1 = hex(p1)
1641 1644 xp2 = hex(p2)
1642 1645 for f in files:
1643 1646 self.ui.status(_("merging %s\n") % f)
1644 1647 my, other, flag = merge[f]
1645 1648 ret = self.merge3(f, my, other, xp1, xp2)
1646 1649 if ret:
1647 1650 err = True
1648 1651 failedmerge.append(f)
1649 1652 util.set_exec(self.wjoin(f), flag)
1650 1653 if moddirstate:
1651 1654 if branch_merge:
1652 1655 # We've done a branch merge, mark this file as merged
1653 1656 # so that we properly record the merger later
1654 1657 self.dirstate.update([f], 'm')
1655 1658 else:
1656 1659 # We've update-merged a locally modified file, so
1657 1660 # we set the dirstate to emulate a normal checkout
1658 1661 # of that file some time in the past. Thus our
1659 1662 # merge will appear as a normal local file
1660 1663 # modification.
1661 1664 f_len = len(self.file(f).read(other))
1662 1665 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1663 1666
1664 1667 remove.sort()
1665 1668 for f in remove:
1666 1669 self.ui.note(_("removing %s\n") % f)
1667 1670 util.audit_path(f)
1668 1671 try:
1669 1672 util.unlink(self.wjoin(f))
1670 1673 except OSError, inst:
1671 1674 if inst.errno != errno.ENOENT:
1672 1675 self.ui.warn(_("update failed to remove %s: %s!\n") %
1673 1676 (f, inst.strerror))
1674 1677 if moddirstate:
1675 1678 if branch_merge:
1676 1679 self.dirstate.update(remove, 'r')
1677 1680 else:
1678 1681 self.dirstate.forget(remove)
1679 1682
1680 1683 if moddirstate:
1681 1684 self.dirstate.setparents(p1, p2)
1682 1685
1683 1686 stat = ((len(get), _("updated")),
1684 1687 (len(merge) - len(failedmerge), _("merged")),
1685 1688 (len(remove), _("removed")),
1686 1689 (len(failedmerge), _("unresolved")))
1687 1690 note = ", ".join([_("%d files %s") % s for s in stat])
1688 1691 self.ui.note("%s\n" % note)
1689 1692 if moddirstate and branch_merge:
1690 1693 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1691 1694
1692 1695 return err
1693 1696
1694 1697 def merge3(self, fn, my, other, p1, p2):
1695 1698 """perform a 3-way merge in the working directory"""
1696 1699
1697 1700 def temp(prefix, node):
1698 1701 pre = "%s~%s." % (os.path.basename(fn), prefix)
1699 1702 (fd, name) = tempfile.mkstemp("", pre)
1700 1703 f = os.fdopen(fd, "wb")
1701 1704 self.wwrite(fn, fl.read(node), f)
1702 1705 f.close()
1703 1706 return name
1704 1707
1705 1708 fl = self.file(fn)
1706 1709 base = fl.ancestor(my, other)
1707 1710 a = self.wjoin(fn)
1708 1711 b = temp("base", base)
1709 1712 c = temp("other", other)
1710 1713
1711 1714 self.ui.note(_("resolving %s\n") % fn)
1712 1715 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1713 1716 (fn, short(my), short(other), short(base)))
1714 1717
1715 1718 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1716 1719 or "hgmerge")
1717 1720 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1718 1721 environ={'HG_FILE': fn,
1719 1722 'HG_MY_NODE': p1,
1720 1723 'HG_OTHER_NODE': p2,
1721 1724 'HG_FILE_MY_NODE': hex(my),
1722 1725 'HG_FILE_OTHER_NODE': hex(other),
1723 1726 'HG_FILE_BASE_NODE': hex(base)})
1724 1727 if r:
1725 1728 self.ui.warn(_("merging %s failed!\n") % fn)
1726 1729
1727 1730 os.unlink(b)
1728 1731 os.unlink(c)
1729 1732 return r
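# The external merge command is chosen in order: the HGMERGE environment
# variable, then ui.merge from the configuration, then a program named
# "hgmerge"; it is invoked as <cmd> <local> <base> <other> with the HG_*
# variables above in its environment. An illustrative configuration (the
# tool path is an assumption):
#
#     [ui]
#     merge = /usr/local/bin/my-merge-tool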
1730 1733
1731 1734 def verify(self):
1732 1735 filelinkrevs = {}
1733 1736 filenodes = {}
1734 1737 changesets = revisions = files = 0
1735 1738 errors = [0]
1736 1739 neededmanifests = {}
1737 1740
1738 1741 def err(msg):
1739 1742 self.ui.warn(msg + "\n")
1740 1743 errors[0] += 1
1741 1744
1742 1745 def checksize(obj, name):
1743 1746 d = obj.checksize()
1744 1747 if d[0]:
1745 1748 err(_("%s data length off by %d bytes") % (name, d[0]))
1746 1749 if d[1]:
1747 1750 err(_("%s index contains %d extra bytes") % (name, d[1]))
1748 1751
1749 1752 seen = {}
1750 1753 self.ui.status(_("checking changesets\n"))
1751 1754 checksize(self.changelog, "changelog")
1752 1755
1753 1756 for i in range(self.changelog.count()):
1754 1757 changesets += 1
1755 1758 n = self.changelog.node(i)
1756 1759 l = self.changelog.linkrev(n)
1757 1760 if l != i:
1758 1761 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1759 1762 if n in seen:
1760 1763 err(_("duplicate changeset at revision %d") % i)
1761 1764 seen[n] = 1
1762 1765
1763 1766 for p in self.changelog.parents(n):
1764 1767 if p not in self.changelog.nodemap:
1765 1768 err(_("changeset %s has unknown parent %s") %
1766 1769 (short(n), short(p)))
1767 1770 try:
1768 1771 changes = self.changelog.read(n)
1769 1772 except KeyboardInterrupt:
1770 1773 self.ui.warn(_("interrupted"))
1771 1774 raise
1772 1775 except Exception, inst:
1773 1776 err(_("unpacking changeset %s: %s") % (short(n), inst))
1774 1777 continue
1775 1778
1776 1779 neededmanifests[changes[0]] = n
1777 1780
1778 1781 for f in changes[3]:
1779 1782 filelinkrevs.setdefault(f, []).append(i)
1780 1783
1781 1784 seen = {}
1782 1785 self.ui.status(_("checking manifests\n"))
1783 1786 checksize(self.manifest, "manifest")
1784 1787
1785 1788 for i in range(self.manifest.count()):
1786 1789 n = self.manifest.node(i)
1787 1790 l = self.manifest.linkrev(n)
1788 1791
1789 1792 if l < 0 or l >= self.changelog.count():
1790 1793 err(_("bad manifest link (%d) at revision %d") % (l, i))
1791 1794
1792 1795 if n in neededmanifests:
1793 1796 del neededmanifests[n]
1794 1797
1795 1798 if n in seen:
1796 1799 err(_("duplicate manifest at revision %d") % i)
1797 1800
1798 1801 seen[n] = 1
1799 1802
1800 1803 for p in self.manifest.parents(n):
1801 1804 if p not in self.manifest.nodemap:
1802 1805 err(_("manifest %s has unknown parent %s") %
1803 1806 (short(n), short(p)))
1804 1807
1805 1808 try:
1806 1809 delta = mdiff.patchtext(self.manifest.delta(n))
1807 1810 except KeyboardInterrupt:
1808 1811 self.ui.warn(_("interrupted"))
1809 1812 raise
1810 1813 except Exception, inst:
1811 1814 err(_("unpacking manifest %s: %s") % (short(n), inst))
1812 1815 continue
1813 1816
1814 1817 try:
1815 1818 ff = [ l.split('\0') for l in delta.splitlines() ]
1816 1819 for f, fn in ff:
1817 1820 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1818 1821 except (ValueError, TypeError), inst:
1819 1822 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1820 1823
1821 1824 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1822 1825
1823 1826 for m, c in neededmanifests.items():
1824 1827 err(_("Changeset %s refers to unknown manifest %s") %
1825 1828 (short(m), short(c)))
1826 1829 del neededmanifests
1827 1830
1828 1831 for f in filenodes:
1829 1832 if f not in filelinkrevs:
1830 1833 err(_("file %s in manifest but not in changesets") % f)
1831 1834
1832 1835 for f in filelinkrevs:
1833 1836 if f not in filenodes:
1834 1837 err(_("file %s in changeset but not in manifest") % f)
1835 1838
1836 1839 self.ui.status(_("checking files\n"))
1837 1840 ff = filenodes.keys()
1838 1841 ff.sort()
1839 1842 for f in ff:
1840 1843 if f == "/dev/null":
1841 1844 continue
1842 1845 files += 1
1843 1846 if not f:
1844 1847 err(_("file without name in manifest %s") % short(n))
1845 1848 continue
1846 1849 fl = self.file(f)
1847 1850 checksize(fl, f)
1848 1851
1849 1852 nodes = {nullid: 1}
1850 1853 seen = {}
1851 1854 for i in range(fl.count()):
1852 1855 revisions += 1
1853 1856 n = fl.node(i)
1854 1857
1855 1858 if n in seen:
1856 1859 err(_("%s: duplicate revision %d") % (f, i))
1857 1860 if n not in filenodes[f]:
1858 1861 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1859 1862 else:
1860 1863 del filenodes[f][n]
1861 1864
1862 1865 flr = fl.linkrev(n)
1863 1866 if flr not in filelinkrevs.get(f, []):
1864 1867 err(_("%s:%s points to unexpected changeset %d")
1865 1868 % (f, short(n), flr))
1866 1869 else:
1867 1870 filelinkrevs[f].remove(flr)
1868 1871
1869 1872 # verify contents
1870 1873 try:
1871 1874 t = fl.read(n)
1872 1875 except KeyboardInterrupt:
1873 1876 self.ui.warn(_("interrupted"))
1874 1877 raise
1875 1878 except Exception, inst:
1876 1879 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1877 1880
1878 1881 # verify parents
1879 1882 (p1, p2) = fl.parents(n)
1880 1883 if p1 not in nodes:
1881 1884 err(_("file %s:%s unknown parent 1 %s") %
1882 1885 (f, short(n), short(p1)))
1883 1886 if p2 not in nodes:
1884 1887 err(_("file %s:%s unknown parent 2 %s") %
1885 1888 (f, short(n), short(p2)))
1886 1889 nodes[n] = 1
1887 1890
1888 1891 # cross-check
1889 1892 for node in filenodes[f]:
1890 1893 err(_("node %s in manifests not in %s") % (hex(node), f))
1891 1894
1892 1895 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1893 1896 (files, changesets, revisions))
1894 1897
1895 1898 if errors[0]:
1896 1899 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1897 1900 return 1
1898 1901
1899 1902 # used to avoid circular references so destructors work
1900 1903 def aftertrans(base):
1901 1904 p = base
1902 1905 def a():
1903 1906 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1904 1907 util.rename(os.path.join(p, "journal.dirstate"),
1905 1908 os.path.join(p, "undo.dirstate"))
1906 1909 return a
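# A hedged usage sketch (the wiring is an assumption; it is not shown
# here): the returned closure is handed to the transaction as its
# post-close callback, so that a successfully closed transaction leaves
# "undo" and "undo.dirstate" behind for a later undo/rollback, e.g.:
#
#     tr = transaction.transaction(self.ui.warn, self.opener,
#                                  self.join("journal"),
#                                  aftertrans(self.path))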
1907 1910