del transaction before lock before wlock...
Alexis S. L. Carvalho
r5053:47a8ea1e default
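
The change reorders the cleanup in queue.apply() so that the transaction is dropped before the store lock, which is dropped before the working-directory lock. A minimal sketch of that pattern, mirroring the hunk below (the wrapper name and the do_apply callback are illustrative, not part of mq; the rationale assumes CPython's reference-counting finalization, where del releases each object as soon as its name is dropped):

    def apply_with_cleanup(repo, do_apply):
        # Sketch of the lock/transaction ordering used by queue.apply().
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()      # working-directory lock (outermost)
            lock = repo.lock()        # store lock
            tr = repo.transaction()   # journal transaction (innermost)
            ret = do_apply()
            tr.close()
            return ret
        finally:
            # Deleting the names in this order finalizes the transaction
            # while both locks are still held, then releases the store
            # lock, then the working-directory lock (innermost first).
            del tr, lock, wlock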
@@ -1,2250 +1,2250 @@
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 # Patch names look like unix file names.
40 40 # They must be joinable with the queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 print >> fp, i
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head)
341 341 self.strip(repo, n, update=False, backup='strip')
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 348 if n == None:
349 349 raise util.Abort(_("repo commit failed"))
350 350 try:
351 351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 352 except:
353 353 raise util.Abort(_("unable to read %s") % patch)
354 354
355 355 patchf = self.opener(patch, "w")
356 356 if comments:
357 357 comments = "\n".join(comments) + '\n\n'
358 358 patchf.write(comments)
359 359 self.printdiff(repo, head, n, fp=patchf)
360 360 patchf.close()
361 361 self.removeundo(repo)
362 362 return (0, n)
363 363
364 364 def qparents(self, repo, rev=None):
365 365 if rev is None:
366 366 (p1, p2) = repo.dirstate.parents()
367 367 if p2 == revlog.nullid:
368 368 return p1
369 369 if len(self.applied) == 0:
370 370 return None
371 371 return revlog.bin(self.applied[-1].rev)
372 372 pp = repo.changelog.parents(rev)
373 373 if pp[1] != revlog.nullid:
374 374 arevs = [ x.rev for x in self.applied ]
375 375 p0 = revlog.hex(pp[0])
376 376 p1 = revlog.hex(pp[1])
377 377 if p0 in arevs:
378 378 return pp[0]
379 379 if p1 in arevs:
380 380 return pp[1]
381 381 return pp[0]
382 382
383 383 def mergepatch(self, repo, mergeq, series):
384 384 if len(self.applied) == 0:
385 385 # each of the patches merged in will have two parents. This
386 386 # can confuse the qrefresh, qdiff, and strip code because it
387 387 # needs to know which parent is actually in the patch queue.
388 388 # so, we insert a merge marker with only one parent. This way
389 389 # the first patch in the queue is never a merge patch
390 390 #
391 391 pname = ".hg.patches.merge.marker"
392 392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 393 self.removeundo(repo)
394 394 self.applied.append(statusentry(revlog.hex(n), pname))
395 395 self.applied_dirty = 1
396 396
397 397 head = self.qparents(repo)
398 398
399 399 for patch in series:
400 400 patch = mergeq.lookup(patch, strict=True)
401 401 if not patch:
402 402 self.ui.warn("patch %s does not exist\n" % patch)
403 403 return (1, None)
404 404 pushable, reason = self.pushable(patch)
405 405 if not pushable:
406 406 self.explain_pushable(patch, all_patches=True)
407 407 continue
408 408 info = mergeq.isapplied(patch)
409 409 if not info:
410 410 self.ui.warn("patch %s is not applied\n" % patch)
411 411 return (1, None)
412 412 rev = revlog.bin(info[1])
413 413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 414 if head:
415 415 self.applied.append(statusentry(revlog.hex(head), patch))
416 416 self.applied_dirty = 1
417 417 if err:
418 418 return (err, head)
419 419 self.save_dirty()
420 420 return (0, head)
421 421
422 422 def patch(self, repo, patchfile):
423 423 '''Apply patchfile to the working directory.
424 424 patchfile: file name of patch'''
425 425 files = {}
426 426 try:
427 427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 428 files=files)
429 429 except Exception, inst:
430 430 self.ui.note(str(inst) + '\n')
431 431 if not self.ui.verbose:
432 432 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 433 return (False, files, False)
434 434
435 435 return (True, files, fuzz)
436 436
437 437 def apply(self, repo, series, list=False, update_status=True,
438 438 strict=False, patchdir=None, merge=None, all_files={}):
439 439 wlock = lock = tr = None
440 440 try:
441 441 wlock = repo.wlock()
442 442 lock = repo.lock()
443 443 tr = repo.transaction()
444 444 try:
445 445 ret = self._apply(repo, series, list, update_status,
446 446 strict, patchdir, merge, all_files=all_files)
447 447 tr.close()
448 448 self.save_dirty()
449 449 return ret
450 450 except:
451 451 try:
452 452 tr.abort()
453 453 finally:
454 454 repo.invalidate()
455 455 repo.dirstate.invalidate()
456 456 raise
457 457 finally:
458 del lock, wlock, tr
458 del tr, lock, wlock
459 459
460 460 def _apply(self, repo, series, list=False, update_status=True,
461 461 strict=False, patchdir=None, merge=None, all_files={}):
462 462 # TODO unify with commands.py
463 463 if not patchdir:
464 464 patchdir = self.path
465 465 err = 0
466 466 n = None
467 467 for patchname in series:
468 468 pushable, reason = self.pushable(patchname)
469 469 if not pushable:
470 470 self.explain_pushable(patchname, all_patches=True)
471 471 continue
472 472 self.ui.warn("applying %s\n" % patchname)
473 473 pf = os.path.join(patchdir, patchname)
474 474
475 475 try:
476 476 message, comments, user, date, patchfound = self.readheaders(patchname)
477 477 except:
478 478 self.ui.warn("Unable to read %s\n" % patchname)
479 479 err = 1
480 480 break
481 481
482 482 if not message:
483 483 message = "imported patch %s\n" % patchname
484 484 else:
485 485 if list:
486 486 message.append("\nimported patch %s" % patchname)
487 487 message = '\n'.join(message)
488 488
489 489 (patcherr, files, fuzz) = self.patch(repo, pf)
490 490 all_files.update(files)
491 491 patcherr = not patcherr
492 492
493 493 if merge and files:
494 494 # Mark as removed/merged and update dirstate parent info
495 495 removed = []
496 496 merged = []
497 497 for f in files:
498 498 if os.path.exists(repo.wjoin(f)):
499 499 merged.append(f)
500 500 else:
501 501 removed.append(f)
502 502 for f in removed:
503 503 repo.dirstate.remove(f)
504 504 for f in merged:
505 505 repo.dirstate.merge(f)
506 506 p1, p2 = repo.dirstate.parents()
507 507 repo.dirstate.setparents(p1, merge)
508 508 files = patch.updatedir(self.ui, repo, files)
509 509 n = repo.commit(files, message, user, date, force=1)
510 510
511 511 if n == None:
512 512 raise util.Abort(_("repo commit failed"))
513 513
514 514 if update_status:
515 515 self.applied.append(statusentry(revlog.hex(n), patchname))
516 516
517 517 if patcherr:
518 518 if not patchfound:
519 519 self.ui.warn("patch %s is empty\n" % patchname)
520 520 err = 0
521 521 else:
522 522 self.ui.warn("patch failed, rejects left in working dir\n")
523 523 err = 1
524 524 break
525 525
526 526 if fuzz and strict:
527 527 self.ui.warn("fuzz found when applying patch, stopping\n")
528 528 err = 1
529 529 break
530 530 self.removeundo(repo)
531 531 return (err, n)
532 532
533 533 def delete(self, repo, patches, opts):
534 534 if not patches and not opts.get('rev'):
535 535 raise util.Abort(_('qdelete requires at least one revision or '
536 536 'patch name'))
537 537
538 538 realpatches = []
539 539 for patch in patches:
540 540 patch = self.lookup(patch, strict=True)
541 541 info = self.isapplied(patch)
542 542 if info:
543 543 raise util.Abort(_("cannot delete applied patch %s") % patch)
544 544 if patch not in self.series:
545 545 raise util.Abort(_("patch %s not in series file") % patch)
546 546 realpatches.append(patch)
547 547
548 548 appliedbase = 0
549 549 if opts.get('rev'):
550 550 if not self.applied:
551 551 raise util.Abort(_('no patches applied'))
552 552 revs = cmdutil.revrange(repo, opts['rev'])
553 553 if len(revs) > 1 and revs[0] > revs[1]:
554 554 revs.reverse()
555 555 for rev in revs:
556 556 if appliedbase >= len(self.applied):
557 557 raise util.Abort(_("revision %d is not managed") % rev)
558 558
559 559 base = revlog.bin(self.applied[appliedbase].rev)
560 560 node = repo.changelog.node(rev)
561 561 if node != base:
562 562 raise util.Abort(_("cannot delete revision %d above "
563 563 "applied patches") % rev)
564 564 realpatches.append(self.applied[appliedbase].name)
565 565 appliedbase += 1
566 566
567 567 if not opts.get('keep'):
568 568 r = self.qrepo()
569 569 if r:
570 570 r.remove(realpatches, True)
571 571 else:
572 572 for p in realpatches:
573 573 os.unlink(self.join(p))
574 574
575 575 if appliedbase:
576 576 del self.applied[:appliedbase]
577 577 self.applied_dirty = 1
578 578 indices = [self.find_series(p) for p in realpatches]
579 579 indices.sort()
580 580 for i in indices[-1::-1]:
581 581 del self.full_series[i]
582 582 self.parse_series()
583 583 self.series_dirty = 1
584 584
585 585 def check_toppatch(self, repo):
586 586 if len(self.applied) > 0:
587 587 top = revlog.bin(self.applied[-1].rev)
588 588 pp = repo.dirstate.parents()
589 589 if top not in pp:
590 590 raise util.Abort(_("queue top not at same revision as working directory"))
591 591 return top
592 592 return None
593 593 def check_localchanges(self, repo, force=False, refresh=True):
594 594 m, a, r, d = repo.status()[:4]
595 595 if m or a or r or d:
596 596 if not force:
597 597 if refresh:
598 598 raise util.Abort(_("local changes found, refresh first"))
599 599 else:
600 600 raise util.Abort(_("local changes found"))
601 601 return m, a, r, d
602 602
603 603 def new(self, repo, patch, *pats, **opts):
604 604 msg = opts.get('msg')
605 605 force = opts.get('force')
606 606 if os.path.exists(self.join(patch)):
607 607 raise util.Abort(_('patch "%s" already exists') % patch)
608 608 if opts.get('include') or opts.get('exclude') or pats:
609 609 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
610 610 m, a, r, d = repo.status(files=fns, match=match)[:4]
611 611 else:
612 612 m, a, r, d = self.check_localchanges(repo, force)
613 613 commitfiles = m + a + r
614 614 self.check_toppatch(repo)
615 615 wlock = repo.wlock()
616 616 try:
617 617 insert = self.full_series_end()
618 618 if msg:
619 619 n = repo.commit(commitfiles, msg, force=True)
620 620 else:
621 621 n = repo.commit(commitfiles, "[mq]: %s" % patch, force=True)
622 622 if n == None:
623 623 raise util.Abort(_("repo commit failed"))
624 624 self.full_series[insert:insert] = [patch]
625 625 self.applied.append(statusentry(revlog.hex(n), patch))
626 626 self.parse_series()
627 627 self.series_dirty = 1
628 628 self.applied_dirty = 1
629 629 p = self.opener(patch, "w")
630 630 if msg:
631 631 msg = msg + "\n"
632 632 p.write(msg)
633 633 p.close()
634 634 wlock = None
635 635 r = self.qrepo()
636 636 if r: r.add([patch])
637 637 if commitfiles:
638 638 self.refresh(repo, short=True, git=opts.get('git'))
639 639 self.removeundo(repo)
640 640 finally:
641 641 del wlock
642 642
643 643 def strip(self, repo, rev, update=True, backup="all"):
644 644 wlock = lock = None
645 645 try:
646 646 wlock = repo.wlock()
647 647 lock = repo.lock()
648 648
649 649 if update:
650 650 self.check_localchanges(repo, refresh=False)
651 651 urev = self.qparents(repo, rev)
652 652 hg.clean(repo, urev)
653 653 repo.dirstate.write()
654 654
655 655 self.removeundo(repo)
656 656 repair.strip(self.ui, repo, rev, backup)
657 657 finally:
658 658 del lock, wlock
659 659
660 660 def isapplied(self, patch):
661 661 """returns (index, rev, patch)"""
662 662 for i in xrange(len(self.applied)):
663 663 a = self.applied[i]
664 664 if a.name == patch:
665 665 return (i, a.rev, a.name)
666 666 return None
667 667
668 668 # if the exact patch name does not exist, we try a few
669 669 # variations. If strict is passed, we try only #1
670 670 #
671 671 # 1) a number to indicate an offset in the series file
672 672 # 2) a unique substring of the patch name was given
673 673 # 3) patchname[-+]num to indicate an offset in the series file
674 674 def lookup(self, patch, strict=False):
675 675 patch = patch and str(patch)
676 676
677 677 def partial_name(s):
678 678 if s in self.series:
679 679 return s
680 680 matches = [x for x in self.series if s in x]
681 681 if len(matches) > 1:
682 682 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
683 683 for m in matches:
684 684 self.ui.warn(' %s\n' % m)
685 685 return None
686 686 if matches:
687 687 return matches[0]
688 688 if len(self.series) > 0 and len(self.applied) > 0:
689 689 if s == 'qtip':
690 690 return self.series[self.series_end(True)-1]
691 691 if s == 'qbase':
692 692 return self.series[0]
693 693 return None
694 694 if patch == None:
695 695 return None
696 696
697 697 # we don't want to return a partial match until we make
698 698 # sure the file name passed in does not exist (checked below)
699 699 res = partial_name(patch)
700 700 if res and res == patch:
701 701 return res
702 702
703 703 if not os.path.isfile(self.join(patch)):
704 704 try:
705 705 sno = int(patch)
706 706 except(ValueError, OverflowError):
707 707 pass
708 708 else:
709 709 if sno < len(self.series):
710 710 return self.series[sno]
711 711 if not strict:
712 712 # return any partial match made above
713 713 if res:
714 714 return res
715 715 minus = patch.rfind('-')
716 716 if minus >= 0:
717 717 res = partial_name(patch[:minus])
718 718 if res:
719 719 i = self.series.index(res)
720 720 try:
721 721 off = int(patch[minus+1:] or 1)
722 722 except(ValueError, OverflowError):
723 723 pass
724 724 else:
725 725 if i - off >= 0:
726 726 return self.series[i - off]
727 727 plus = patch.rfind('+')
728 728 if plus >= 0:
729 729 res = partial_name(patch[:plus])
730 730 if res:
731 731 i = self.series.index(res)
732 732 try:
733 733 off = int(patch[plus+1:] or 1)
734 734 except(ValueError, OverflowError):
735 735 pass
736 736 else:
737 737 if i + off < len(self.series):
738 738 return self.series[i + off]
739 739 raise util.Abort(_("patch %s not in series") % patch)
740 740
741 741 def push(self, repo, patch=None, force=False, list=False,
742 742 mergeq=None):
743 743 wlock = repo.wlock()
744 744 try:
745 745 patch = self.lookup(patch)
746 746 # Suppose our series file is: A B C and the current 'top'
747 747 # patch is B. qpush C should be performed (moving forward)
748 748 # qpush B is a NOP (no change) qpush A is an error (can't
749 749 # go backwards with qpush)
750 750 if patch:
751 751 info = self.isapplied(patch)
752 752 if info:
753 753 if info[0] < len(self.applied) - 1:
754 754 raise util.Abort(
755 755 _("cannot push to a previous patch: %s") % patch)
756 756 if info[0] < len(self.series) - 1:
757 757 self.ui.warn(
758 758 _('qpush: %s is already at the top\n') % patch)
759 759 else:
760 760 self.ui.warn(_('all patches are currently applied\n'))
761 761 return
762 762
763 763 # Following the above example, starting at 'top' of B:
764 764 # qpush should be performed (pushes C), but a subsequent
765 765 # qpush without an argument is an error (nothing to
766 766 # apply). This allows a loop of "...while hg qpush..." to
767 767 # work as it detects an error when done
768 768 if self.series_end() == len(self.series):
769 769 self.ui.warn(_('patch series already fully applied\n'))
770 770 return 1
771 771 if not force:
772 772 self.check_localchanges(repo)
773 773
774 774 self.applied_dirty = 1;
775 775 start = self.series_end()
776 776 if start > 0:
777 777 self.check_toppatch(repo)
778 778 if not patch:
779 779 patch = self.series[start]
780 780 end = start + 1
781 781 else:
782 782 end = self.series.index(patch, start) + 1
783 783 s = self.series[start:end]
784 784 all_files = {}
785 785 try:
786 786 if mergeq:
787 787 ret = self.mergepatch(repo, mergeq, s)
788 788 else:
789 789 ret = self.apply(repo, s, list, all_files=all_files)
790 790 except:
791 791 self.ui.warn(_('cleaning up working directory...'))
792 792 node = repo.dirstate.parents()[0]
793 793 hg.revert(repo, node, None)
794 794 unknown = repo.status()[4]
795 795 # only remove unknown files that we know we touched or
796 796 # created while patching
797 797 for f in unknown:
798 798 if f in all_files:
799 799 util.unlink(repo.wjoin(f))
800 800 self.ui.warn(_('done\n'))
801 801 raise
802 802 top = self.applied[-1].name
803 803 if ret[0]:
804 804 self.ui.write(
805 805 "Errors during apply, please fix and refresh %s\n" % top)
806 806 else:
807 807 self.ui.write("Now at: %s\n" % top)
808 808 return ret[0]
809 809 finally:
810 810 del wlock
811 811
812 812 def pop(self, repo, patch=None, force=False, update=True, all=False):
813 813 def getfile(f, rev):
814 814 t = repo.file(f).read(rev)
815 815 repo.wfile(f, "w").write(t)
816 816
817 817 wlock = repo.wlock()
818 818 try:
819 819 if patch:
820 820 # index, rev, patch
821 821 info = self.isapplied(patch)
822 822 if not info:
823 823 patch = self.lookup(patch)
824 824 info = self.isapplied(patch)
825 825 if not info:
826 826 raise util.Abort(_("patch %s is not applied") % patch)
827 827
828 828 if len(self.applied) == 0:
829 829 # Allow qpop -a to work repeatedly,
830 830 # but not qpop without an argument
831 831 self.ui.warn(_("no patches applied\n"))
832 832 return not all
833 833
834 834 if not update:
835 835 parents = repo.dirstate.parents()
836 836 rr = [ revlog.bin(x.rev) for x in self.applied ]
837 837 for p in parents:
838 838 if p in rr:
839 839 self.ui.warn("qpop: forcing dirstate update\n")
840 840 update = True
841 841
842 842 if not force and update:
843 843 self.check_localchanges(repo)
844 844
845 845 self.applied_dirty = 1;
846 846 end = len(self.applied)
847 847 if not patch:
848 848 if all:
849 849 popi = 0
850 850 else:
851 851 popi = len(self.applied) - 1
852 852 else:
853 853 popi = info[0] + 1
854 854 if popi >= end:
855 855 self.ui.warn("qpop: %s is already at the top\n" % patch)
856 856 return
857 857 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
858 858
859 859 start = info[0]
860 860 rev = revlog.bin(info[1])
861 861
862 862 # we know there are no local changes, so we can make a simplified
863 863 # form of hg.update.
864 864 if update:
865 865 top = self.check_toppatch(repo)
866 866 qp = self.qparents(repo, rev)
867 867 changes = repo.changelog.read(qp)
868 868 mmap = repo.manifest.read(changes[0])
869 869 m, a, r, d, u = repo.status(qp, top)[:5]
870 870 if d:
871 871 raise util.Abort("deletions found between repo revs")
872 872 for f in m:
873 873 getfile(f, mmap[f])
874 874 for f in r:
875 875 getfile(f, mmap[f])
876 876 util.set_exec(repo.wjoin(f), mmap.execf(f))
877 877 for f in m + r:
878 878 repo.dirstate.normal(f)
879 879 for f in a:
880 880 try:
881 881 os.unlink(repo.wjoin(f))
882 882 except OSError, e:
883 883 if e.errno != errno.ENOENT:
884 884 raise
885 885 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
886 886 except: pass
887 887 repo.dirstate.forget(f)
888 888 repo.dirstate.setparents(qp, revlog.nullid)
889 889 self.strip(repo, rev, update=False, backup='strip')
890 890 del self.applied[start:end]
891 891 if len(self.applied):
892 892 self.ui.write("Now at: %s\n" % self.applied[-1].name)
893 893 else:
894 894 self.ui.write("Patch queue now empty\n")
895 895 finally:
896 896 del wlock
897 897
898 898 def diff(self, repo, pats, opts):
899 899 top = self.check_toppatch(repo)
900 900 if not top:
901 901 self.ui.write("No patches applied\n")
902 902 return
903 903 qp = self.qparents(repo, top)
904 904 if opts.get('git'):
905 905 self.diffopts().git = True
906 906 self.printdiff(repo, qp, files=pats, opts=opts)
907 907
908 908 def refresh(self, repo, pats=None, **opts):
909 909 if len(self.applied) == 0:
910 910 self.ui.write("No patches applied\n")
911 911 return 1
912 912 wlock = repo.wlock()
913 913 try:
914 914 self.check_toppatch(repo)
915 915 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
916 916 top = revlog.bin(top)
917 917 cparents = repo.changelog.parents(top)
918 918 patchparent = self.qparents(repo, top)
919 919 message, comments, user, date, patchfound = self.readheaders(patchfn)
920 920
921 921 patchf = self.opener(patchfn, 'r+')
922 922
923 923 # if the patch was a git patch, refresh it as a git patch
924 924 for line in patchf:
925 925 if line.startswith('diff --git'):
926 926 self.diffopts().git = True
927 927 break
928 928 patchf.seek(0)
929 929 patchf.truncate()
930 930
931 931 msg = opts.get('msg', '').rstrip()
932 932 if msg:
933 933 if comments:
934 934 # Remove existing message.
935 935 ci = 0
936 936 subj = None
937 937 for mi in xrange(len(message)):
938 938 if comments[ci].lower().startswith('subject: '):
939 939 subj = comments[ci][9:]
940 940 while message[mi] != comments[ci] and message[mi] != subj:
941 941 ci += 1
942 942 del comments[ci]
943 943 comments.append(msg)
944 944 if comments:
945 945 comments = "\n".join(comments) + '\n\n'
946 946 patchf.write(comments)
947 947
948 948 if opts.get('git'):
949 949 self.diffopts().git = True
950 950 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
951 951 tip = repo.changelog.tip()
952 952 if top == tip:
953 953 # if the top of our patch queue is also the tip, there is an
954 954 # optimization here. We update the dirstate in place and strip
955 955 # off the tip commit. Then just commit the current directory
956 956 # tree. We can also send repo.commit the list of files
957 957 # changed to speed up the diff
958 958 #
959 959 # in short mode, we only diff the files included in the
960 960 # patch already
961 961 #
962 962 # this should really read:
963 963 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
964 964 # but we do it backwards to take advantage of manifest/chlog
965 965 # caching against the next repo.status call
966 966 #
967 967 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
968 968 changes = repo.changelog.read(tip)
969 969 man = repo.manifest.read(changes[0])
970 970 aaa = aa[:]
971 971 if opts.get('short'):
972 972 filelist = mm + aa + dd
973 973 match = dict.fromkeys(filelist).__contains__
974 974 else:
975 975 filelist = None
976 976 match = util.always
977 977 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
978 978
979 979 # we might end up with files that were added between
980 980 # tip and the dirstate parent, but then changed in the
981 981 # local dirstate. in this case, we want them to only
982 982 # show up in the added section
983 983 for x in m:
984 984 if x not in aa:
985 985 mm.append(x)
986 986 # we might end up with files added by the local dirstate that
987 987 # were deleted by the patch. In this case, they should only
988 988 # show up in the changed section.
989 989 for x in a:
990 990 if x in dd:
991 991 del dd[dd.index(x)]
992 992 mm.append(x)
993 993 else:
994 994 aa.append(x)
995 995 # make sure any files deleted in the local dirstate
996 996 # are not in the add or change column of the patch
997 997 forget = []
998 998 for x in d + r:
999 999 if x in aa:
1000 1000 del aa[aa.index(x)]
1001 1001 forget.append(x)
1002 1002 continue
1003 1003 elif x in mm:
1004 1004 del mm[mm.index(x)]
1005 1005 dd.append(x)
1006 1006
1007 1007 m = util.unique(mm)
1008 1008 r = util.unique(dd)
1009 1009 a = util.unique(aa)
1010 1010 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1011 1011 filelist = util.unique(c[0] + c[1] + c[2])
1012 1012 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1013 1013 fp=patchf, changes=c, opts=self.diffopts())
1014 1014 patchf.close()
1015 1015
1016 1016 repo.dirstate.setparents(*cparents)
1017 1017 copies = {}
1018 1018 for dst in a:
1019 1019 src = repo.dirstate.copied(dst)
1020 1020 if src is None:
1021 1021 continue
1022 1022 copies.setdefault(src, []).append(dst)
1023 1023 repo.dirstate.add(dst)
1024 1024 # remember the copies between patchparent and tip
1025 1025 # this may be slow, so don't do it if we're not tracking copies
1026 1026 if self.diffopts().git:
1027 1027 for dst in aaa:
1028 1028 f = repo.file(dst)
1029 1029 src = f.renamed(man[dst])
1030 1030 if src:
1031 1031 copies[src[0]] = copies.get(dst, [])
1032 1032 if dst in a:
1033 1033 copies[src[0]].append(dst)
1034 1034 # we can't copy a file created by the patch itself
1035 1035 if dst in copies:
1036 1036 del copies[dst]
1037 1037 for src, dsts in copies.iteritems():
1038 1038 for dst in dsts:
1039 1039 repo.dirstate.copy(src, dst)
1040 1040 for f in r:
1041 1041 repo.dirstate.remove(f)
1042 1042 # if the patch excludes a modified file, mark that
1043 1043 # file with mtime=0 so status can see it.
1044 1044 mm = []
1045 1045 for i in xrange(len(m)-1, -1, -1):
1046 1046 if not matchfn(m[i]):
1047 1047 mm.append(m[i])
1048 1048 del m[i]
1049 1049 for f in m:
1050 1050 repo.dirstate.normal(f)
1051 1051 for f in mm:
1052 1052 repo.dirstate.normaldirty(f)
1053 1053 for f in forget:
1054 1054 repo.dirstate.forget(f)
1055 1055
1056 1056 if not msg:
1057 1057 if not message:
1058 1058 message = "[mq]: %s\n" % patchfn
1059 1059 else:
1060 1060 message = "\n".join(message)
1061 1061 else:
1062 1062 message = msg
1063 1063
1064 1064 self.strip(repo, top, update=False,
1065 1065 backup='strip')
1066 1066 n = repo.commit(filelist, message, changes[1], match=matchfn,
1067 1067 force=1)
1068 1068 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1069 1069 self.applied_dirty = 1
1070 1070 self.removeundo(repo)
1071 1071 else:
1072 1072 self.printdiff(repo, patchparent, fp=patchf)
1073 1073 patchf.close()
1074 1074 added = repo.status()[1]
1075 1075 for a in added:
1076 1076 f = repo.wjoin(a)
1077 1077 try:
1078 1078 os.unlink(f)
1079 1079 except OSError, e:
1080 1080 if e.errno != errno.ENOENT:
1081 1081 raise
1082 1082 try: os.removedirs(os.path.dirname(f))
1083 1083 except: pass
1084 1084 # forget the file copies in the dirstate
1085 1085 # push should readd the files later on
1086 1086 repo.dirstate.forget(a)
1087 1087 self.pop(repo, force=True)
1088 1088 self.push(repo, force=True)
1089 1089 finally:
1090 1090 del wlock
1091 1091
1092 1092 def init(self, repo, create=False):
1093 1093 if not create and os.path.isdir(self.path):
1094 1094 raise util.Abort(_("patch queue directory already exists"))
1095 1095 try:
1096 1096 os.mkdir(self.path)
1097 1097 except OSError, inst:
1098 1098 if inst.errno != errno.EEXIST or not create:
1099 1099 raise
1100 1100 if create:
1101 1101 return self.qrepo(create=True)
1102 1102
1103 1103 def unapplied(self, repo, patch=None):
1104 1104 if patch and patch not in self.series:
1105 1105 raise util.Abort(_("patch %s is not in series file") % patch)
1106 1106 if not patch:
1107 1107 start = self.series_end()
1108 1108 else:
1109 1109 start = self.series.index(patch) + 1
1110 1110 unapplied = []
1111 1111 for i in xrange(start, len(self.series)):
1112 1112 pushable, reason = self.pushable(i)
1113 1113 if pushable:
1114 1114 unapplied.append((i, self.series[i]))
1115 1115 self.explain_pushable(i)
1116 1116 return unapplied
1117 1117
1118 1118 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1119 1119 summary=False):
1120 1120 def displayname(patchname):
1121 1121 if summary:
1122 1122 msg = self.readheaders(patchname)[0]
1123 1123 msg = msg and ': ' + msg[0] or ': '
1124 1124 else:
1125 1125 msg = ''
1126 1126 return '%s%s' % (patchname, msg)
1127 1127
1128 1128 applied = dict.fromkeys([p.name for p in self.applied])
1129 1129 if length is None:
1130 1130 length = len(self.series) - start
1131 1131 if not missing:
1132 1132 for i in xrange(start, start+length):
1133 1133 patch = self.series[i]
1134 1134 if patch in applied:
1135 1135 stat = 'A'
1136 1136 elif self.pushable(i)[0]:
1137 1137 stat = 'U'
1138 1138 else:
1139 1139 stat = 'G'
1140 1140 pfx = ''
1141 1141 if self.ui.verbose:
1142 1142 pfx = '%d %s ' % (i, stat)
1143 1143 elif status and status != stat:
1144 1144 continue
1145 1145 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1146 1146 else:
1147 1147 msng_list = []
1148 1148 for root, dirs, files in os.walk(self.path):
1149 1149 d = root[len(self.path) + 1:]
1150 1150 for f in files:
1151 1151 fl = os.path.join(d, f)
1152 1152 if (fl not in self.series and
1153 1153 fl not in (self.status_path, self.series_path,
1154 1154 self.guards_path)
1155 1155 and not fl.startswith('.')):
1156 1156 msng_list.append(fl)
1157 1157 msng_list.sort()
1158 1158 for x in msng_list:
1159 1159 pfx = self.ui.verbose and ('D ') or ''
1160 1160 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1161 1161
1162 1162 def issaveline(self, l):
1163 1163 if l.name == '.hg.patches.save.line':
1164 1164 return True
1165 1165
1166 1166 def qrepo(self, create=False):
1167 1167 if create or os.path.isdir(self.join(".hg")):
1168 1168 return hg.repository(self.ui, path=self.path, create=create)
1169 1169
1170 1170 def restore(self, repo, rev, delete=None, qupdate=None):
1171 1171 c = repo.changelog.read(rev)
1172 1172 desc = c[4].strip()
1173 1173 lines = desc.splitlines()
1174 1174 i = 0
1175 1175 datastart = None
1176 1176 series = []
1177 1177 applied = []
1178 1178 qpp = None
1179 1179 for i in xrange(0, len(lines)):
1180 1180 if lines[i] == 'Patch Data:':
1181 1181 datastart = i + 1
1182 1182 elif lines[i].startswith('Dirstate:'):
1183 1183 l = lines[i].rstrip()
1184 1184 l = l[10:].split(' ')
1185 1185 qpp = [ hg.bin(x) for x in l ]
1186 1186 elif datastart != None:
1187 1187 l = lines[i].rstrip()
1188 1188 se = statusentry(l)
1189 1189 file_ = se.name
1190 1190 if se.rev:
1191 1191 applied.append(se)
1192 1192 else:
1193 1193 series.append(file_)
1194 1194 if datastart == None:
1195 1195 self.ui.warn("No saved patch data found\n")
1196 1196 return 1
1197 1197 self.ui.warn("restoring status: %s\n" % lines[0])
1198 1198 self.full_series = series
1199 1199 self.applied = applied
1200 1200 self.parse_series()
1201 1201 self.series_dirty = 1
1202 1202 self.applied_dirty = 1
1203 1203 heads = repo.changelog.heads()
1204 1204 if delete:
1205 1205 if rev not in heads:
1206 1206 self.ui.warn("save entry has children, leaving it alone\n")
1207 1207 else:
1208 1208 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1209 1209 pp = repo.dirstate.parents()
1210 1210 if rev in pp:
1211 1211 update = True
1212 1212 else:
1213 1213 update = False
1214 1214 self.strip(repo, rev, update=update, backup='strip')
1215 1215 if qpp:
1216 1216 self.ui.warn("saved queue repository parents: %s %s\n" %
1217 1217 (hg.short(qpp[0]), hg.short(qpp[1])))
1218 1218 if qupdate:
1219 1219 print "queue directory updating"
1220 1220 r = self.qrepo()
1221 1221 if not r:
1222 1222 self.ui.warn("Unable to load queue repository\n")
1223 1223 return 1
1224 1224 hg.clean(r, qpp[0])
1225 1225
1226 1226 def save(self, repo, msg=None):
1227 1227 if len(self.applied) == 0:
1228 1228 self.ui.warn("save: no patches applied, exiting\n")
1229 1229 return 1
1230 1230 if self.issaveline(self.applied[-1]):
1231 1231 self.ui.warn("status is already saved\n")
1232 1232 return 1
1233 1233
1234 1234 ar = [ ':' + x for x in self.full_series ]
1235 1235 if not msg:
1236 1236 msg = "hg patches saved state"
1237 1237 else:
1238 1238 msg = "hg patches: " + msg.rstrip('\r\n')
1239 1239 r = self.qrepo()
1240 1240 if r:
1241 1241 pp = r.dirstate.parents()
1242 1242 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1243 1243 msg += "\n\nPatch Data:\n"
1244 1244 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1245 1245 "\n".join(ar) + '\n' or "")
1246 1246 n = repo.commit(None, text, user=None, force=1)
1247 1247 if not n:
1248 1248 self.ui.warn("repo commit failed\n")
1249 1249 return 1
1250 1250 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1251 1251 self.applied_dirty = 1
1252 1252 self.removeundo(repo)
1253 1253
1254 1254 def full_series_end(self):
1255 1255 if len(self.applied) > 0:
1256 1256 p = self.applied[-1].name
1257 1257 end = self.find_series(p)
1258 1258 if end == None:
1259 1259 return len(self.full_series)
1260 1260 return end + 1
1261 1261 return 0
1262 1262
1263 1263 def series_end(self, all_patches=False):
1264 1264 """If all_patches is False, return the index of the next pushable patch
1265 1265 in the series, or the series length. If all_patches is True, return the
1266 1266 index of the first patch past the last applied one.
1267 1267 """
1268 1268 end = 0
1269 1269 def next(start):
1270 1270 if all_patches:
1271 1271 return start
1272 1272 i = start
1273 1273 while i < len(self.series):
1274 1274 p, reason = self.pushable(i)
1275 1275 if p:
1276 1276 break
1277 1277 self.explain_pushable(i)
1278 1278 i += 1
1279 1279 return i
1280 1280 if len(self.applied) > 0:
1281 1281 p = self.applied[-1].name
1282 1282 try:
1283 1283 end = self.series.index(p)
1284 1284 except ValueError:
1285 1285 return 0
1286 1286 return next(end + 1)
1287 1287 return next(end)
1288 1288
1289 1289 def appliedname(self, index):
1290 1290 pname = self.applied[index].name
1291 1291 if not self.ui.verbose:
1292 1292 p = pname
1293 1293 else:
1294 1294 p = str(self.series.index(pname)) + " " + pname
1295 1295 return p
1296 1296
1297 1297 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1298 1298 force=None, git=False):
1299 1299 def checkseries(patchname):
1300 1300 if patchname in self.series:
1301 1301 raise util.Abort(_('patch %s is already in the series file')
1302 1302 % patchname)
1303 1303 def checkfile(patchname):
1304 1304 if not force and os.path.exists(self.join(patchname)):
1305 1305 raise util.Abort(_('patch "%s" already exists')
1306 1306 % patchname)
1307 1307
1308 1308 if rev:
1309 1309 if files:
1310 1310 raise util.Abort(_('option "-r" not valid when importing '
1311 1311 'files'))
1312 1312 rev = cmdutil.revrange(repo, rev)
1313 1313 rev.sort(lambda x, y: cmp(y, x))
1314 1314 if (len(files) > 1 or len(rev) > 1) and patchname:
1315 1315 raise util.Abort(_('option "-n" not valid when importing multiple '
1316 1316 'patches'))
1317 1317 i = 0
1318 1318 added = []
1319 1319 if rev:
1320 1320 # If mq patches are applied, we can only import revisions
1321 1321 # that form a linear path to qbase.
1322 1322 # Otherwise, they should form a linear path to a head.
1323 1323 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1324 1324 if len(heads) > 1:
1325 1325 raise util.Abort(_('revision %d is the root of more than one '
1326 1326 'branch') % rev[-1])
1327 1327 if self.applied:
1328 1328 base = revlog.hex(repo.changelog.node(rev[0]))
1329 1329 if base in [n.rev for n in self.applied]:
1330 1330 raise util.Abort(_('revision %d is already managed')
1331 1331 % rev[0])
1332 1332 if heads != [revlog.bin(self.applied[-1].rev)]:
1333 1333 raise util.Abort(_('revision %d is not the parent of '
1334 1334 'the queue') % rev[0])
1335 1335 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1336 1336 lastparent = repo.changelog.parentrevs(base)[0]
1337 1337 else:
1338 1338 if heads != [repo.changelog.node(rev[0])]:
1339 1339 raise util.Abort(_('revision %d has unmanaged children')
1340 1340 % rev[0])
1341 1341 lastparent = None
1342 1342
1343 1343 if git:
1344 1344 self.diffopts().git = True
1345 1345
1346 1346 for r in rev:
1347 1347 p1, p2 = repo.changelog.parentrevs(r)
1348 1348 n = repo.changelog.node(r)
1349 1349 if p2 != revlog.nullrev:
1350 1350 raise util.Abort(_('cannot import merge revision %d') % r)
1351 1351 if lastparent and lastparent != r:
1352 1352 raise util.Abort(_('revision %d is not the parent of %d')
1353 1353 % (r, lastparent))
1354 1354 lastparent = p1
1355 1355
1356 1356 if not patchname:
1357 1357 patchname = normname('%d.diff' % r)
1358 1358 checkseries(patchname)
1359 1359 checkfile(patchname)
1360 1360 self.full_series.insert(0, patchname)
1361 1361
1362 1362 patchf = self.opener(patchname, "w")
1363 1363 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1364 1364 patchf.close()
1365 1365
1366 1366 se = statusentry(revlog.hex(n), patchname)
1367 1367 self.applied.insert(0, se)
1368 1368
1369 1369 added.append(patchname)
1370 1370 patchname = None
1371 1371 self.parse_series()
1372 1372 self.applied_dirty = 1
1373 1373
1374 1374 for filename in files:
1375 1375 if existing:
1376 1376 if filename == '-':
1377 1377 raise util.Abort(_('-e is incompatible with import from -'))
1378 1378 if not patchname:
1379 1379 patchname = normname(filename)
1380 1380 if not os.path.isfile(self.join(patchname)):
1381 1381 raise util.Abort(_("patch %s does not exist") % patchname)
1382 1382 else:
1383 1383 try:
1384 1384 if filename == '-':
1385 1385 if not patchname:
1386 1386 raise util.Abort(_('need --name to import a patch from -'))
1387 1387 text = sys.stdin.read()
1388 1388 else:
1389 1389 text = file(filename).read()
1390 1390 except IOError:
1391 1391 raise util.Abort(_("unable to read %s") % patchname)
1392 1392 if not patchname:
1393 1393 patchname = normname(os.path.basename(filename))
1394 1394 checkfile(patchname)
1395 1395 patchf = self.opener(patchname, "w")
1396 1396 patchf.write(text)
1397 1397 checkseries(patchname)
1398 1398 index = self.full_series_end() + i
1399 1399 self.full_series[index:index] = [patchname]
1400 1400 self.parse_series()
1401 1401 self.ui.warn("adding %s to series file\n" % patchname)
1402 1402 i += 1
1403 1403 added.append(patchname)
1404 1404 patchname = None
1405 1405 self.series_dirty = 1
1406 1406 qrepo = self.qrepo()
1407 1407 if qrepo:
1408 1408 qrepo.add(added)
1409 1409
1410 1410 def delete(ui, repo, *patches, **opts):
1411 1411 """remove patches from queue
1412 1412
1413 1413 The patches must not be applied, unless they are arguments to
1414 1414 the --rev parameter. At least one patch or revision is required.
1415 1415
1416 1416 With --rev, mq will stop managing the named revisions (converting
1417 1417 them to regular mercurial changesets). The patches must be applied
1418 1418 and at the base of the stack. This option is useful when the patches
1419 1419 have been applied upstream.
1420 1420
1421 1421 With --keep, the patch files are preserved in the patch directory."""
1422 1422 q = repo.mq
1423 1423 q.delete(repo, patches, opts)
1424 1424 q.save_dirty()
1425 1425 return 0
1426 1426
1427 1427 def applied(ui, repo, patch=None, **opts):
1428 1428 """print the patches already applied"""
1429 1429 q = repo.mq
1430 1430 if patch:
1431 1431 if patch not in q.series:
1432 1432 raise util.Abort(_("patch %s is not in series file") % patch)
1433 1433 end = q.series.index(patch) + 1
1434 1434 else:
1435 1435 end = q.series_end(True)
1436 1436 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1437 1437
1438 1438 def unapplied(ui, repo, patch=None, **opts):
1439 1439 """print the patches not yet applied"""
1440 1440 q = repo.mq
1441 1441 if patch:
1442 1442 if patch not in q.series:
1443 1443 raise util.Abort(_("patch %s is not in series file") % patch)
1444 1444 start = q.series.index(patch) + 1
1445 1445 else:
1446 1446 start = q.series_end(True)
1447 1447 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1448 1448
1449 1449 def qimport(ui, repo, *filename, **opts):
1450 1450 """import a patch
1451 1451
1452 1452 The patch will have the same name as its source file unless you
1453 1453 give it a new one with --name.
1454 1454
1455 1455 You can register an existing patch inside the patch directory
1456 1456 with the --existing flag.
1457 1457
1458 1458 With --force, an existing patch of the same name will be overwritten.
1459 1459
1460 1460 An existing changeset may be placed under mq control with --rev
1461 1461 (e.g. qimport --rev tip -n patch will place tip under mq control).
1462 1462 With --git, patches imported with --rev will use the git diff
1463 1463 format.
1464 1464 """
1465 1465 q = repo.mq
1466 1466 q.qimport(repo, filename, patchname=opts['name'],
1467 1467 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1468 1468 git=opts['git'])
1469 1469 q.save_dirty()
1470 1470 return 0
1471 1471
1472 1472 def init(ui, repo, **opts):
1473 1473 """init a new queue repository
1474 1474
1475 1475 The queue repository is unversioned by default. If -c is
1476 1476 specified, qinit will create a separate nested repository
1477 1477 for patches (qinit -c may also be run later to convert
1478 1478 an unversioned patch repository into a versioned one).
1479 1479 You can use qcommit to commit changes to this queue repository."""
1480 1480 q = repo.mq
1481 1481 r = q.init(repo, create=opts['create_repo'])
1482 1482 q.save_dirty()
1483 1483 if r:
1484 1484 if not os.path.exists(r.wjoin('.hgignore')):
1485 1485 fp = r.wopener('.hgignore', 'w')
1486 1486 fp.write('syntax: glob\n')
1487 1487 fp.write('status\n')
1488 1488 fp.write('guards\n')
1489 1489 fp.close()
1490 1490 if not os.path.exists(r.wjoin('series')):
1491 1491 r.wopener('series', 'w').close()
1492 1492 r.add(['.hgignore', 'series'])
1493 1493 commands.add(ui, r)
1494 1494 return 0
1495 1495
1496 1496 def clone(ui, source, dest=None, **opts):
1497 1497 '''clone main and patch repository at same time
1498 1498
1499 1499 If source is local, destination will have no patches applied. If
1500 1500 source is remote, this command cannot check whether patches are
1501 1501 applied in source, so it cannot guarantee that patches are not
1502 1502 applied in destination. If you clone a remote repository, make
1503 1503 sure beforehand that it has no patches applied.
1504 1504
1505 1505 Source patch repository is looked for in <src>/.hg/patches by
1506 1506 default. Use -p <url> to change.
1507 1507
1508 1508 The patch directory must be a nested mercurial repository, as
1509 1509 would be created by qinit -c.
1510 1510 '''
1511 1511 cmdutil.setremoteconfig(ui, opts)
1512 1512 if dest is None:
1513 1513 dest = hg.defaultdest(source)
1514 1514 sr = hg.repository(ui, ui.expandpath(source))
1515 1515 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1516 1516 try:
1517 1517 pr = hg.repository(ui, patchdir)
1518 1518 except hg.RepoError:
1519 1519 raise util.Abort(_('versioned patch repository not found'
1520 1520 ' (see qinit -c)'))
1521 1521 qbase, destrev = None, None
1522 1522 if sr.local():
1523 1523 if sr.mq.applied:
1524 1524 qbase = revlog.bin(sr.mq.applied[0].rev)
1525 1525 if not hg.islocal(dest):
1526 1526 heads = dict.fromkeys(sr.heads())
1527 1527 for h in sr.heads(qbase):
1528 1528 del heads[h]
1529 1529 destrev = heads.keys()
1530 1530 destrev.append(sr.changelog.parents(qbase)[0])
1531 1531 ui.note(_('cloning main repo\n'))
1532 1532 sr, dr = hg.clone(ui, sr.url(), dest,
1533 1533 pull=opts['pull'],
1534 1534 rev=destrev,
1535 1535 update=False,
1536 1536 stream=opts['uncompressed'])
1537 1537 ui.note(_('cloning patch repo\n'))
1538 1538 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1539 1539 dr.url() + '/.hg/patches',
1540 1540 pull=opts['pull'],
1541 1541 update=not opts['noupdate'],
1542 1542 stream=opts['uncompressed'])
1543 1543 if dr.local():
1544 1544 if qbase:
1545 1545 ui.note(_('stripping applied patches from destination repo\n'))
1546 1546 dr.mq.strip(dr, qbase, update=False, backup=None)
1547 1547 if not opts['noupdate']:
1548 1548 ui.note(_('updating destination repo\n'))
1549 1549 hg.update(dr, dr.changelog.tip())
1550 1550
1551 1551 def commit(ui, repo, *pats, **opts):
1552 1552 """commit changes in the queue repository"""
1553 1553 q = repo.mq
1554 1554 r = q.qrepo()
1555 1555 if not r: raise util.Abort('no queue repository')
1556 1556 commands.commit(r.ui, r, *pats, **opts)
1557 1557
1558 1558 def series(ui, repo, **opts):
1559 1559 """print the entire series file"""
1560 1560 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1561 1561 return 0
1562 1562
1563 1563 def top(ui, repo, **opts):
1564 1564 """print the name of the current patch"""
1565 1565 q = repo.mq
1566 1566 t = q.applied and q.series_end(True) or 0
1567 1567 if t:
1568 1568 return q.qseries(repo, start=t-1, length=1, status='A',
1569 1569 summary=opts.get('summary'))
1570 1570 else:
1571 1571 ui.write("No patches applied\n")
1572 1572 return 1
1573 1573
1574 1574 def next(ui, repo, **opts):
1575 1575 """print the name of the next patch"""
1576 1576 q = repo.mq
1577 1577 end = q.series_end()
1578 1578 if end == len(q.series):
1579 1579 ui.write("All patches applied\n")
1580 1580 return 1
1581 1581 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1582 1582
1583 1583 def prev(ui, repo, **opts):
1584 1584 """print the name of the previous patch"""
1585 1585 q = repo.mq
1586 1586 l = len(q.applied)
1587 1587 if l == 1:
1588 1588 ui.write("Only one patch applied\n")
1589 1589 return 1
1590 1590 if not l:
1591 1591 ui.write("No patches applied\n")
1592 1592 return 1
1593 1593 return q.qseries(repo, start=l-2, length=1, status='A',
1594 1594 summary=opts.get('summary'))
1595 1595
1596 1596 def new(ui, repo, patch, *args, **opts):
1597 1597 """create a new patch
1598 1598
1599 1599 qnew creates a new patch on top of the currently-applied patch
1600 1600 (if any). It will refuse to run if there are any outstanding
1601 1601 changes unless -f is specified, in which case the patch will
1602 1602 be initialised with them. You may also use -I, -X, and/or a list of
1603 1603 files after the patch name to add only changes to matching files
1604 1604 to the new patch, leaving the rest as uncommitted modifications.
1605 1605
1606 1606 -e, -m or -l set the patch header as well as the commit message.
1607 1607 If none is specified, the patch header is empty and the
1608 1608 commit message is '[mq]: PATCH'"""
1609 1609 q = repo.mq
1610 1610 message = cmdutil.logmessage(opts)
1611 1611 if opts['edit']:
1612 1612 message = ui.edit(message, ui.username())
1613 1613 opts['msg'] = message
1614 1614 q.new(repo, patch, *args, **opts)
1615 1615 q.save_dirty()
1616 1616 return 0
1617 1617
1618 1618 def refresh(ui, repo, *pats, **opts):
1619 1619 """update the current patch
1620 1620
1621 1621 If any file patterns are provided, the refreshed patch will contain only
1622 1622 the modifications that match those patterns; the remaining modifications
1623 1623 will remain in the working directory.
1624 1624
1625 1625 hg add/remove/copy/rename work as usual, though you might want to use
1626 1626 git-style patches (--git or [diff] git=1) to track copies and renames.
1627 1627 """
1628 1628 q = repo.mq
1629 1629 message = cmdutil.logmessage(opts)
1630 1630 if opts['edit']:
1631 1631 if message:
1632 1632 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1633 1633 patch = q.applied[-1].name
1634 1634 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1635 1635 message = ui.edit('\n'.join(message), user or ui.username())
1636 1636 ret = q.refresh(repo, pats, msg=message, **opts)
1637 1637 q.save_dirty()
1638 1638 return ret
1639 1639
1640 1640 def diff(ui, repo, *pats, **opts):
1641 1641 """diff of the current patch"""
1642 1642 repo.mq.diff(repo, pats, opts)
1643 1643 return 0
1644 1644
1645 1645 def fold(ui, repo, *files, **opts):
1646 1646 """fold the named patches into the current patch
1647 1647
1648 1648 Patches must not yet be applied. Each patch will be successively
1649 1649 applied to the current patch in the order given. If all the
1650 1650 patches apply successfully, the current patch will be refreshed
1651 1651 with the new cumulative patch, and the folded patches will
1652 1652 be deleted. With -k/--keep, the folded patch files will not
1653 1653 be removed afterwards.
1654 1654
1655 1655 The header for each folded patch will be concatenated with
1656 1656 the current patch header, separated by a line of '* * *'."""
1657 1657
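# Example usage (illustrative patch names): "hg qfold -e bugfix1.patch bugfix2.patch"
# applies both unapplied patches onto the current top patch, concatenates their
# headers, and opens an editor on the combined message before deleting the files.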
1658 1658 q = repo.mq
1659 1659
1660 1660 if not files:
1661 1661 raise util.Abort(_('qfold requires at least one patch name'))
1662 1662 if not q.check_toppatch(repo):
1663 1663 raise util.Abort(_('No patches applied'))
1664 1664
1665 1665 message = cmdutil.logmessage(opts)
1666 1666 if opts['edit']:
1667 1667 if message:
1668 1668 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1669 1669
1670 1670 parent = q.lookup('qtip')
1671 1671 patches = []
1672 1672 messages = []
1673 1673 for f in files:
1674 1674 p = q.lookup(f)
1675 1675 if p in patches or p == parent:
1676 1676 ui.warn(_('Skipping already folded patch %s\n') % p)
1677 1677 if q.isapplied(p):
1678 1678 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1679 1679 patches.append(p)
1680 1680
1681 1681 for p in patches:
1682 1682 if not message:
1683 1683 messages.append(q.readheaders(p)[0])
1684 1684 pf = q.join(p)
1685 1685 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1686 1686 if not patchsuccess:
1687 1687 raise util.Abort(_('Error folding patch %s') % p)
1688 1688 patch.updatedir(ui, repo, files)
1689 1689
1690 1690 if not message:
1691 1691 message, comments, user = q.readheaders(parent)[0:3]
1692 1692 for msg in messages:
1693 1693 message.append('* * *')
1694 1694 message.extend(msg)
1695 1695 message = '\n'.join(message)
1696 1696
1697 1697 if opts['edit']:
1698 1698 message = ui.edit(message, user or ui.username())
1699 1699
1700 1700 q.refresh(repo, msg=message)
1701 1701 q.delete(repo, patches, opts)
1702 1702 q.save_dirty()
1703 1703
1704 1704 def goto(ui, repo, patch, **opts):
1705 1705 '''push or pop patches until named patch is at top of stack'''
1706 1706 q = repo.mq
1707 1707 patch = q.lookup(patch)
1708 1708 if q.isapplied(patch):
1709 1709 ret = q.pop(repo, patch, force=opts['force'])
1710 1710 else:
1711 1711 ret = q.push(repo, patch, force=opts['force'])
1712 1712 q.save_dirty()
1713 1713 return ret
1714 1714
1715 1715 def guard(ui, repo, *args, **opts):
1716 1716 '''set or print guards for a patch
1717 1717
1718 1718 Guards control whether a patch can be pushed. A patch with no
1719 1719 guards is always pushed. A patch with a positive guard ("+foo") is
1720 1720 pushed only if the qselect command has activated it. A patch with
1721 1721 a negative guard ("-foo") is never pushed if the qselect command
1722 1722 has activated it.
1723 1723
1724 1724 With no arguments, print the currently active guards.
1725 1725 With arguments, set guards for the named patch.
1726 1726
1727 1727 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1728 1728 hg will not interpret "-foo" as an option):
1729 1729 hg qguard -- -foo
1730 1730
1731 1731 To set guards on another patch:
1732 1732 hg qguard -- other.patch +2.6.17 -stable
1733 1733 '''
1734 1734 def status(idx):
1735 1735 guards = q.series_guards[idx] or ['unguarded']
1736 1736 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1737 1737 q = repo.mq
1738 1738 patch = None
1739 1739 args = list(args)
1740 1740 if opts['list']:
1741 1741 if args or opts['none']:
1742 1742 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1743 1743 for i in xrange(len(q.series)):
1744 1744 status(i)
1745 1745 return
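# If no patch name is given, or the first argument already looks like a guard
# ("+foo"/"-foo"), the guards are applied to the topmost applied patch;
# otherwise the first argument names the patch to operate on.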
1746 1746 if not args or args[0][0:1] in '-+':
1747 1747 if not q.applied:
1748 1748 raise util.Abort(_('no patches applied'))
1749 1749 patch = q.applied[-1].name
1750 1750 if patch is None and args[0][0:1] not in '-+':
1751 1751 patch = args.pop(0)
1752 1752 if patch is None:
1753 1753 raise util.Abort(_('no patch to work with'))
1754 1754 if args or opts['none']:
1755 1755 idx = q.find_series(patch)
1756 1756 if idx is None:
1757 1757 raise util.Abort(_('no patch named %s') % patch)
1758 1758 q.set_guards(idx, args)
1759 1759 q.save_dirty()
1760 1760 else:
1761 1761 status(q.series.index(q.lookup(patch)))
1762 1762
1763 1763 def header(ui, repo, patch=None):
1764 1764 """Print the header of the topmost or specified patch"""
1765 1765 q = repo.mq
1766 1766
1767 1767 if patch:
1768 1768 patch = q.lookup(patch)
1769 1769 else:
1770 1770 if not q.applied:
1771 1771 ui.write('No patches applied\n')
1772 1772 return 1
1773 1773 patch = q.lookup('qtip')
1774 1774 message = repo.mq.readheaders(patch)[0]
1775 1775
1776 1776 ui.write('\n'.join(message) + '\n')
1777 1777
1778 1778 def lastsavename(path):
1779 1779 (directory, base) = os.path.split(path)
1780 1780 names = os.listdir(directory)
1781 1781 namere = re.compile("%s.([0-9]+)" % base)
1782 1782 maxindex = None
1783 1783 maxname = None
1784 1784 for f in names:
1785 1785 m = namere.match(f)
1786 1786 if m:
1787 1787 index = int(m.group(1))
1788 1788 if maxindex is None or index > maxindex:
1789 1789 maxindex = index
1790 1790 maxname = f
1791 1791 if maxname:
1792 1792 return (os.path.join(directory, maxname), maxindex)
1793 1793 return (None, None)
1794 1794
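# lastsavename/savename manage numbered backups of the patch directory:
# lastsavename finds the highest existing "<path>.N" and savename returns the
# next free name, e.g. ".hg/patches.3" when ".hg/patches.2" already exists
# (illustrative paths).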
1795 1795 def savename(path):
1796 1796 (last, index) = lastsavename(path)
1797 1797 if last is None:
1798 1798 index = 0
1799 1799 newpath = path + ".%d" % (index + 1)
1800 1800 return newpath
1801 1801
1802 1802 def push(ui, repo, patch=None, **opts):
1803 1803 """push the next patch onto the stack"""
1804 1804 q = repo.mq
1805 1805 mergeq = None
1806 1806
1807 1807 if opts['all']:
1808 1808 if not q.series:
1809 1809 ui.warn(_('no patches in series\n'))
1810 1810 return 0
1811 1811 patch = q.series[-1]
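# With -m/--merge, changes are merged in from a previously saved queue
# (see qsave): either the queue named by -n/--name or the most recent
# "<path>.N" backup found by lastsavename.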
1812 1812 if opts['merge']:
1813 1813 if opts['name']:
1814 1814 newpath = opts['name']
1815 1815 else:
1816 1816 newpath, i = lastsavename(q.path)
1817 1817 if not newpath:
1818 1818 ui.warn("no saved queues found, please use -n\n")
1819 1819 return 1
1820 1820 mergeq = queue(ui, repo.join(""), newpath)
1821 1821 ui.warn("merging with queue at: %s\n" % mergeq.path)
1822 1822 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1823 1823 mergeq=mergeq)
1824 1824 return ret
1825 1825
1826 1826 def pop(ui, repo, patch=None, **opts):
1827 1827 """pop the current patch off the stack"""
1828 1828 localupdate = True
1829 1829 if opts['name']:
1830 1830 q = queue(ui, repo.join(""), repo.join(opts['name']))
1831 1831 ui.warn('using patch queue: %s\n' % q.path)
1832 1832 localupdate = False
1833 1833 else:
1834 1834 q = repo.mq
1835 1835 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1836 1836 all=opts['all'])
1837 1837 q.save_dirty()
1838 1838 return ret
1839 1839
1840 1840 def rename(ui, repo, patch, name=None, **opts):
1841 1841 """rename a patch
1842 1842
1843 1843 With one argument, renames the current patch to PATCH1.
1844 1844 With two arguments, renames PATCH1 to PATCH2."""
1845 1845
1846 1846 q = repo.mq
1847 1847
1848 1848 if not name:
1849 1849 name = patch
1850 1850 patch = None
1851 1851
1852 1852 if patch:
1853 1853 patch = q.lookup(patch)
1854 1854 else:
1855 1855 if not q.applied:
1856 1856 ui.write(_('No patches applied\n'))
1857 1857 return
1858 1858 patch = q.lookup('qtip')
1859 1859 absdest = q.join(name)
1860 1860 if os.path.isdir(absdest):
1861 1861 name = normname(os.path.join(name, os.path.basename(patch)))
1862 1862 absdest = q.join(name)
1863 1863 if os.path.exists(absdest):
1864 1864 raise util.Abort(_('%s already exists') % absdest)
1865 1865
1866 1866 if name in q.series:
1867 1867 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1868 1868
1869 1869 if ui.verbose:
1870 1870 ui.write('Renaming %s to %s\n' % (patch, name))
1871 1871 i = q.find_series(patch)
1872 1872 guards = q.guard_re.findall(q.full_series[i])
1873 1873 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1874 1874 q.parse_series()
1875 1875 q.series_dirty = 1
1876 1876
1877 1877 info = q.isapplied(patch)
1878 1878 if info:
1879 1879 q.applied[info[0]] = statusentry(info[1], name)
1880 1880 q.applied_dirty = 1
1881 1881
1882 1882 util.rename(q.join(patch), absdest)
1883 1883 r = q.qrepo()
1884 1884 if r:
1885 1885 wlock = r.wlock()
1886 1886 try:
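# record the rename in the queue repository: resurrect the target name if it
# was previously removed, then copy and remove so history follows the rename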
1887 1887 if r.dirstate[name] == 'r':
1888 1888 r.undelete([name])
1889 1889 r.copy(patch, name)
1890 1890 r.remove([patch], False)
1891 1891 finally:
1892 1892 del wlock
1893 1893
1894 1894 q.save_dirty()
1895 1895
1896 1896 def restore(ui, repo, rev, **opts):
1897 1897 """restore the queue state saved by a rev"""
1898 1898 rev = repo.lookup(rev)
1899 1899 q = repo.mq
1900 1900 q.restore(repo, rev, delete=opts['delete'],
1901 1901 qupdate=opts['update'])
1902 1902 q.save_dirty()
1903 1903 return 0
1904 1904
1905 1905 def save(ui, repo, **opts):
1906 1906 """save current queue state"""
1907 1907 q = repo.mq
1908 1908 message = cmdutil.logmessage(opts)
1909 1909 ret = q.save(repo, msg=message)
1910 1910 if ret:
1911 1911 return ret
1912 1912 q.save_dirty()
1913 1913 if opts['copy']:
1914 1914 path = q.path
1915 1915 if opts['name']:
1916 1916 newpath = os.path.join(q.basepath, opts['name'])
1917 1917 if os.path.exists(newpath):
1918 1918 if not os.path.isdir(newpath):
1919 1919 raise util.Abort(_('destination %s exists and is not '
1920 1920 'a directory') % newpath)
1921 1921 if not opts['force']:
1922 1922 raise util.Abort(_('destination %s exists, '
1923 1923 'use -f to force') % newpath)
1924 1924 else:
1925 1925 newpath = savename(path)
1926 1926 ui.warn("copy %s to %s\n" % (path, newpath))
1927 1927 util.copyfiles(path, newpath)
1928 1928 if opts['empty']:
1929 1929 try:
1930 1930 os.unlink(q.join(q.status_path))
1931 1931 except OSError:
1932 1932 pass
1933 1933 return 0
1934 1934
1935 1935 def strip(ui, repo, rev, **opts):
1936 1936 """strip a revision and all later revs on the same branch"""
1937 1937 rev = repo.lookup(rev)
1938 1938 backup = 'all'
1939 1939 if opts['backup']:
1940 1940 backup = 'strip'
1941 1941 elif opts['nobackup']:
1942 1942 backup = 'none'
1943 1943 update = repo.dirstate.parents()[0] != revlog.nullid
1944 1944 repo.mq.strip(repo, rev, backup=backup, update=update)
1945 1945 return 0
1946 1946
1947 1947 def select(ui, repo, *args, **opts):
1948 1948 '''set or print guarded patches to push
1949 1949
1950 1950 Use the qguard command to set or print guards on a patch, then use
1951 1951 qselect to tell mq which guards to use. A patch will be pushed if it
1952 1952 has no guards or any positive guards match the currently selected guard,
1953 1953 but will not be pushed if any negative guards match the current guard.
1954 1954 For example:
1955 1955
1956 1956 qguard foo.patch -stable (negative guard)
1957 1957 qguard bar.patch +stable (positive guard)
1958 1958 qselect stable
1959 1959
1960 1960 This activates the "stable" guard. mq will skip foo.patch (because
1961 1961 it has a negative match) but push bar.patch (because it
1962 1962 has a positive match).
1963 1963
1964 1964 With no arguments, prints the currently active guards.
1965 1965 With arguments, sets the active guards.
1966 1966
1967 1967 Use -n/--none to deactivate guards (no other arguments needed).
1968 1968 When no guards are active, patches with positive guards are skipped
1969 1969 and patches with negative guards are pushed.
1970 1970
1971 1971 qselect can change the guards on applied patches. It does not pop
1972 1972 guarded patches by default. Use --pop to pop back to the last applied
1973 1973 patch that is not guarded. Use --reapply (which implies --pop) to push
1974 1974 back to the current patch afterwards, but skip guarded patches.
1975 1975
1976 1976 Use -s/--series to print a list of all guards in the series file (no
1977 1977 other arguments needed). Use -v for more information.'''
1978 1978
1979 1979 q = repo.mq
1980 1980 guards = q.active()
1981 1981 if args or opts['none']:
1982 1982 old_unapplied = q.unapplied(repo)
1983 1983 old_guarded = [i for i in xrange(len(q.applied)) if
1984 1984 not q.pushable(i)[0]]
1985 1985 q.set_active(args)
1986 1986 q.save_dirty()
1987 1987 if not args:
1988 1988 ui.status(_('guards deactivated\n'))
1989 1989 if not opts['pop'] and not opts['reapply']:
1990 1990 unapplied = q.unapplied(repo)
1991 1991 guarded = [i for i in xrange(len(q.applied))
1992 1992 if not q.pushable(i)[0]]
1993 1993 if len(unapplied) != len(old_unapplied):
1994 1994 ui.status(_('number of unguarded, unapplied patches has '
1995 1995 'changed from %d to %d\n') %
1996 1996 (len(old_unapplied), len(unapplied)))
1997 1997 if len(guarded) != len(old_guarded):
1998 1998 ui.status(_('number of guarded, applied patches has changed '
1999 1999 'from %d to %d\n') %
2000 2000 (len(old_guarded), len(guarded)))
2001 2001 elif opts['series']:
2002 2002 guards = {}
2003 2003 noguards = 0
2004 2004 for gs in q.series_guards:
2005 2005 if not gs:
2006 2006 noguards += 1
2007 2007 for g in gs:
2008 2008 guards.setdefault(g, 0)
2009 2009 guards[g] += 1
2010 2010 if ui.verbose:
2011 2011 guards['NONE'] = noguards
2012 2012 guards = guards.items()
2013 2013 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2014 2014 if guards:
2015 2015 ui.note(_('guards in series file:\n'))
2016 2016 for guard, count in guards:
2017 2017 ui.note('%2d ' % count)
2018 2018 ui.write(guard, '\n')
2019 2019 else:
2020 2020 ui.note(_('no guards in series file\n'))
2021 2021 else:
2022 2022 if guards:
2023 2023 ui.note(_('active guards:\n'))
2024 2024 for g in guards:
2025 2025 ui.write(g, '\n')
2026 2026 else:
2027 2027 ui.write(_('no active guards\n'))
2028 2028 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2029 2029 popped = False
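# --pop/--reapply: pop down to just below the first applied patch that is now
# guarded, then (for --reapply) push back up to the previously applied patch,
# skipping anything the new guard selection excludes.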
2030 2030 if opts['pop'] or opts['reapply']:
2031 2031 for i in xrange(len(q.applied)):
2032 2032 pushable, reason = q.pushable(i)
2033 2033 if not pushable:
2034 2034 ui.status(_('popping guarded patches\n'))
2035 2035 popped = True
2036 2036 if i == 0:
2037 2037 q.pop(repo, all=True)
2038 2038 else:
2039 2039 q.pop(repo, i-1)
2040 2040 break
2041 2041 if popped:
2042 2042 try:
2043 2043 if reapply:
2044 2044 ui.status(_('reapplying unguarded patches\n'))
2045 2045 q.push(repo, reapply)
2046 2046 finally:
2047 2047 q.save_dirty()
2048 2048
2049 2049 def reposetup(ui, repo):
2050 2050 class mqrepo(repo.__class__):
2051 2051 def abort_if_wdir_patched(self, errmsg, force=False):
2052 2052 if self.mq.applied and not force:
2053 2053 parent = revlog.hex(self.dirstate.parents()[0])
2054 2054 if parent in [s.rev for s in self.mq.applied]:
2055 2055 raise util.Abort(errmsg)
2056 2056
2057 2057 def commit(self, *args, **opts):
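# force may be passed positionally (sixth argument) or as a keyword;
# accept either before checking for an applied patch at the working parent.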
2058 2058 if len(args) >= 6:
2059 2059 force = args[5]
2060 2060 else:
2061 2061 force = opts.get('force')
2062 2062 self.abort_if_wdir_patched(
2063 2063 _('cannot commit over an applied mq patch'),
2064 2064 force)
2065 2065
2066 2066 return super(mqrepo, self).commit(*args, **opts)
2067 2067
2068 2068 def push(self, remote, force=False, revs=None):
2069 2069 if self.mq.applied and not force and not revs:
2070 2070 raise util.Abort(_('source has mq patches applied'))
2071 2071 return super(mqrepo, self).push(remote, force, revs)
2072 2072
2073 2073 def tags(self):
2074 2074 if self.tagscache:
2075 2075 return self.tagscache
2076 2076
2077 2077 tagscache = super(mqrepo, self).tags()
2078 2078
2079 2079 q = self.mq
2080 2080 if not q.applied:
2081 2081 return tagscache
2082 2082
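# Expose each applied patch as a tag, plus the convenience tags qtip
# (last applied), qbase (first applied) and qparent (parent of qbase).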
2083 2083 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2084 2084 mqtags.append((mqtags[-1][0], 'qtip'))
2085 2085 mqtags.append((mqtags[0][0], 'qbase'))
2086 2086 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2087 2087 for patch in mqtags:
2088 2088 if patch[1] in tagscache:
2089 2089 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2090 2090 else:
2091 2091 tagscache[patch[1]] = patch[0]
2092 2092
2093 2093 return tagscache
2094 2094
2095 2095 def _branchtags(self):
2096 2096 q = self.mq
2097 2097 if not q.applied:
2098 2098 return super(mqrepo, self)._branchtags()
2099 2099
2100 2100 self.branchcache = {} # avoid recursion in changectx
2101 2101 cl = self.changelog
2102 2102 partial, last, lrev = self._readbranchcache()
2103 2103
2104 2104 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2105 2105 start = lrev + 1
2106 2106 if start < qbase:
2107 2107 # update the cache (excluding the patches) and save it
2108 2108 self._updatebranchcache(partial, lrev+1, qbase)
2109 2109 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2110 2110 start = qbase
2111 2111 # if start = qbase, the cache is as updated as it should be.
2112 2112 # if start > qbase, the cache includes (part of) the patches.
2113 2113 # we might as well use it, but we won't save it.
2114 2114
2115 2115 # update the cache up to the tip
2116 2116 self._updatebranchcache(partial, start, cl.count())
2117 2117
2118 2118 return partial
2119 2119
2120 2120 if repo.local():
2121 2121 repo.__class__ = mqrepo
2122 2122 repo.mq = queue(ui, repo.join(""))
2123 2123
2124 2124 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2125 2125
2126 2126 cmdtable = {
2127 2127 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2128 2128 "qclone":
2129 2129 (clone,
2130 2130 [('', 'pull', None, _('use pull protocol to copy metadata')),
2131 2131 ('U', 'noupdate', None, _('do not update the new working directories')),
2132 2132 ('', 'uncompressed', None,
2133 2133 _('use uncompressed transfer (fast over LAN)')),
2134 2134 ('e', 'ssh', '', _('specify ssh command to use')),
2135 2135 ('p', 'patches', '', _('location of source patch repo')),
2136 2136 ('', 'remotecmd', '',
2137 2137 _('specify hg command to run on the remote side'))],
2138 2138 _('hg qclone [OPTION]... SOURCE [DEST]')),
2139 2139 "qcommit|qci":
2140 2140 (commit,
2141 2141 commands.table["^commit|ci"][1],
2142 2142 _('hg qcommit [OPTION]... [FILE]...')),
2143 2143 "^qdiff":
2144 2144 (diff,
2145 2145 [('g', 'git', None, _('use git extended diff format')),
2146 2146 ('I', 'include', [], _('include names matching the given patterns')),
2147 2147 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2148 2148 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2149 2149 "qdelete|qremove|qrm":
2150 2150 (delete,
2151 2151 [('k', 'keep', None, _('keep patch file')),
2152 2152 ('r', 'rev', [], _('stop managing a revision'))],
2153 2153 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2154 2154 'qfold':
2155 2155 (fold,
2156 2156 [('e', 'edit', None, _('edit patch header')),
2157 2157 ('k', 'keep', None, _('keep folded patch files')),
2158 2158 ] + commands.commitopts,
2159 2159 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2160 2160 'qgoto':
2161 2161 (goto,
2162 2162 [('f', 'force', None, _('overwrite any local changes'))],
2163 2163 _('hg qgoto [OPTION]... PATCH')),
2164 2164 'qguard':
2165 2165 (guard,
2166 2166 [('l', 'list', None, _('list all patches and guards')),
2167 2167 ('n', 'none', None, _('drop all guards'))],
2168 2168 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2169 2169 'qheader': (header, [], _('hg qheader [PATCH]')),
2170 2170 "^qimport":
2171 2171 (qimport,
2172 2172 [('e', 'existing', None, _('import file in patch dir')),
2173 2173 ('n', 'name', '', _('patch file name')),
2174 2174 ('f', 'force', None, _('overwrite existing files')),
2175 2175 ('r', 'rev', [], _('place existing revisions under mq control')),
2176 2176 ('g', 'git', None, _('use git extended diff format'))],
2177 2177 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2178 2178 "^qinit":
2179 2179 (init,
2180 2180 [('c', 'create-repo', None, _('create queue repository'))],
2181 2181 _('hg qinit [-c]')),
2182 2182 "qnew":
2183 2183 (new,
2184 2184 [('e', 'edit', None, _('edit commit message')),
2185 2185 ('f', 'force', None, _('import uncommitted changes into patch')),
2186 2186 ('g', 'git', None, _('use git extended diff format')),
2187 2187 ('I', 'include', [], _('include names matching the given patterns')),
2188 2188 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2189 2189 ] + commands.commitopts,
2190 2190 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2191 2191 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2192 2192 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2193 2193 "^qpop":
2194 2194 (pop,
2195 2195 [('a', 'all', None, _('pop all patches')),
2196 2196 ('n', 'name', '', _('queue name to pop')),
2197 2197 ('f', 'force', None, _('forget any local changes'))],
2198 2198 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2199 2199 "^qpush":
2200 2200 (push,
2201 2201 [('f', 'force', None, _('apply if the patch has rejects')),
2202 2202 ('l', 'list', None, _('list patch name in commit text')),
2203 2203 ('a', 'all', None, _('apply all patches')),
2204 2204 ('m', 'merge', None, _('merge from another queue')),
2205 2205 ('n', 'name', '', _('merge queue name'))],
2206 2206 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2207 2207 "^qrefresh":
2208 2208 (refresh,
2209 2209 [('e', 'edit', None, _('edit commit message')),
2210 2210 ('g', 'git', None, _('use git extended diff format')),
2211 2211 ('s', 'short', None, _('refresh only files already in the patch')),
2212 2212 ('I', 'include', [], _('include names matching the given patterns')),
2213 2213 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2214 2214 ] + commands.commitopts,
2215 2215 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2216 2216 'qrename|qmv':
2217 2217 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2218 2218 "qrestore":
2219 2219 (restore,
2220 2220 [('d', 'delete', None, _('delete save entry')),
2221 2221 ('u', 'update', None, _('update queue working dir'))],
2222 2222 _('hg qrestore [-d] [-u] REV')),
2223 2223 "qsave":
2224 2224 (save,
2225 2225 [('c', 'copy', None, _('copy patch directory')),
2226 2226 ('n', 'name', '', _('copy directory name')),
2227 2227 ('e', 'empty', None, _('clear queue status file')),
2228 2228 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2229 2229 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2230 2230 "qselect":
2231 2231 (select,
2232 2232 [('n', 'none', None, _('disable all guards')),
2233 2233 ('s', 'series', None, _('list all guards in series file')),
2234 2234 ('', 'pop', None, _('pop to before first guarded applied patch')),
2235 2235 ('', 'reapply', None, _('pop, then reapply patches'))],
2236 2236 _('hg qselect [OPTION]... [GUARD]...')),
2237 2237 "qseries":
2238 2238 (series,
2239 2239 [('m', 'missing', None, _('print patches not in series')),
2240 2240 ] + seriesopts,
2241 2241 _('hg qseries [-ms]')),
2242 2242 "^strip":
2243 2243 (strip,
2244 2244 [('f', 'force', None, _('force multi-head removal')),
2245 2245 ('b', 'backup', None, _('bundle unrelated changesets')),
2246 2246 ('n', 'nobackup', None, _('no backups'))],
2247 2247 _('hg strip [-f] [-b] [-n] REV')),
2248 2248 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2249 2249 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2250 2250 }
@@ -1,3141 +1,3141 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import os, re, sys, urllib
12 12 import ui, hg, util, revlog, bundlerepo, extensions
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import errno, version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 # Commands start here, listed alphabetically
18 18
19 19 def add(ui, repo, *pats, **opts):
20 20 """add the specified files on the next commit
21 21
22 22 Schedule files to be version controlled and added to the repository.
23 23
24 24 The files will be added to the repository at the next commit. To
25 25 undo an add before that, see hg revert.
26 26
27 27 If no names are given, add all files in the repository.
28 28 """
29 29
30 30 names = []
31 31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 32 if exact:
33 33 if ui.verbose:
34 34 ui.status(_('adding %s\n') % rel)
35 35 names.append(abs)
36 36 elif abs not in repo.dirstate:
37 37 ui.status(_('adding %s\n') % rel)
38 38 names.append(abs)
39 39 if not opts.get('dry_run'):
40 40 repo.add(names)
41 41
42 42 def addremove(ui, repo, *pats, **opts):
43 43 """add all new files, delete all missing files
44 44
45 45 Add all new files and remove all missing files from the repository.
46 46
47 47 New files are ignored if they match any of the patterns in .hgignore. As
48 48 with add, these changes take effect at the next commit.
49 49
50 50 Use the -s option to detect renamed files. With a parameter > 0,
51 51 this compares every removed file with every added file and records
52 52 those similar enough as renames. This option takes a percentage
53 53 between 0 (disabled) and 100 (files must be identical) as its
54 54 parameter. Detecting renamed files this way can be expensive.
55 55 """
56 56 try:
57 57 sim = float(opts.get('similarity') or 0)
58 58 except ValueError:
59 59 raise util.Abort(_('similarity must be a number'))
60 60 if sim < 0 or sim > 100:
61 61 raise util.Abort(_('similarity must be between 0 and 100'))
62 62 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
63 63
64 64 def annotate(ui, repo, *pats, **opts):
65 65 """show changeset information per file line
66 66
67 67 List changes in files, showing the revision id responsible for each line
68 68
69 69 This command is useful to discover who did a change or when a change took
70 70 place.
71 71
72 72 Without the -a option, annotate will avoid processing files it
73 73 detects as binary. With -a, annotate will generate an annotation
74 74 anyway, probably with undesirable results.
75 75 """
76 76 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
77 77
78 78 if not pats:
79 79 raise util.Abort(_('at least one file name or pattern required'))
80 80
81 81 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
82 82 ('number', lambda x: str(x[0].rev())),
83 83 ('changeset', lambda x: short(x[0].node())),
84 84 ('date', getdate),
85 85 ('follow', lambda x: x[0].path()),
86 86 ]
87 87
88 88 if (not opts['user'] and not opts['changeset'] and not opts['date']
89 89 and not opts['follow']):
90 90 opts['number'] = 1
91 91
92 92 linenumber = opts.get('line_number') is not None
93 93 if (linenumber and (not opts['changeset']) and (not opts['number'])):
94 94 raise util.Abort(_('at least one of -n/-c is required for -l'))
95 95
96 96 funcmap = [func for op, func in opmap if opts.get(op)]
97 97 if linenumber:
98 98 lastfunc = funcmap[-1]
99 99 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
100 100
101 101 ctx = repo.changectx(opts['rev'])
102 102
103 103 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
104 104 node=ctx.node()):
105 105 fctx = ctx.filectx(abs)
106 106 if not opts['text'] and util.binary(fctx.data()):
107 107 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
108 108 continue
109 109
110 110 lines = fctx.annotate(follow=opts.get('follow'),
111 111 linenumber=linenumber)
112 112 pieces = []
113 113
114 114 for f in funcmap:
115 115 l = [f(n) for n, dummy in lines]
116 116 if l:
117 117 m = max(map(len, l))
118 118 pieces.append(["%*s" % (m, x) for x in l])
119 119
120 120 if pieces:
121 121 for p, l in zip(zip(*pieces), lines):
122 122 ui.write("%s: %s" % (" ".join(p), l[1]))
123 123
124 124 def archive(ui, repo, dest, **opts):
125 125 '''create unversioned archive of a repository revision
126 126
127 127 By default, the revision used is the parent of the working
128 128 directory; use "-r" to specify a different revision.
129 129
130 130 To specify the type of archive to create, use "-t". Valid
131 131 types are:
132 132
133 133 "files" (default): a directory full of files
134 134 "tar": tar archive, uncompressed
135 135 "tbz2": tar archive, compressed using bzip2
136 136 "tgz": tar archive, compressed using gzip
137 137 "uzip": zip archive, uncompressed
138 138 "zip": zip archive, compressed using deflate
139 139
140 140 The exact name of the destination archive or directory is given
141 141 using a format string; see "hg help export" for details.
142 142
143 143 Each member added to an archive file has a directory prefix
144 144 prepended. Use "-p" to specify a format string for the prefix.
145 145 The default is the basename of the archive, with suffixes removed.
146 146 '''
147 147
148 148 ctx = repo.changectx(opts['rev'])
149 149 if not ctx:
150 150 raise util.Abort(_('repository has no revisions'))
151 151 node = ctx.node()
152 152 dest = cmdutil.make_filename(repo, dest, node)
153 153 if os.path.realpath(dest) == repo.root:
154 154 raise util.Abort(_('repository root cannot be destination'))
155 155 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
156 156 kind = opts.get('type') or 'files'
157 157 prefix = opts['prefix']
158 158 if dest == '-':
159 159 if kind == 'files':
160 160 raise util.Abort(_('cannot archive plain files to stdout'))
161 161 dest = sys.stdout
162 162 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
163 163 prefix = cmdutil.make_filename(repo, prefix, node)
164 164 archival.archive(repo, dest, node, kind, not opts['no_decode'],
165 165 matchfn, prefix)
166 166
167 167 def backout(ui, repo, node=None, rev=None, **opts):
168 168 '''reverse effect of earlier changeset
169 169
170 170 Commit the backed out changes as a new changeset. The new
171 171 changeset is a child of the backed out changeset.
172 172
173 173 If you back out a changeset other than the tip, a new head is
174 174 created. This head is the parent of the working directory. If
175 175 you back out an old changeset, your working directory will appear
176 176 old after the backout. You should merge the backout changeset
177 177 with another head.
178 178
179 179 The --merge option remembers the parent of the working directory
180 180 before starting the backout, then merges the new head with that
181 181 changeset afterwards. This saves you from doing the merge by
182 182 hand. The result of this merge is not committed, as for a normal
183 183 merge.'''
184 184 if rev and node:
185 185 raise util.Abort(_("please specify just one revision"))
186 186
187 187 if not rev:
188 188 rev = node
189 189
190 190 if not rev:
191 191 raise util.Abort(_("please specify a revision to backout"))
192 192
193 193 cmdutil.bail_if_changed(repo)
194 194 op1, op2 = repo.dirstate.parents()
195 195 if op2 != nullid:
196 196 raise util.Abort(_('outstanding uncommitted merge'))
197 197 node = repo.lookup(rev)
198 198 p1, p2 = repo.changelog.parents(node)
199 199 if p1 == nullid:
200 200 raise util.Abort(_('cannot back out a change with no parents'))
201 201 if p2 != nullid:
202 202 if not opts['parent']:
203 203 raise util.Abort(_('cannot back out a merge changeset without '
204 204 '--parent'))
205 205 p = repo.lookup(opts['parent'])
206 206 if p not in (p1, p2):
207 207 raise util.Abort(_('%s is not a parent of %s') %
208 208 (short(p), short(node)))
209 209 parent = p
210 210 else:
211 211 if opts['parent']:
212 212 raise util.Abort(_('cannot use --parent on non-merge changeset'))
213 213 parent = p1
214 214 hg.clean(repo, node, show_stats=False)
215 215 revert_opts = opts.copy()
216 216 revert_opts['date'] = None
217 217 revert_opts['all'] = True
218 218 revert_opts['rev'] = hex(parent)
219 219 revert(ui, repo, **revert_opts)
220 220 commit_opts = opts.copy()
221 221 commit_opts['addremove'] = False
222 222 if not commit_opts['message'] and not commit_opts['logfile']:
223 223 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
224 224 commit_opts['force_editor'] = True
225 225 commit(ui, repo, **commit_opts)
226 226 def nice(node):
227 227 return '%d:%s' % (repo.changelog.rev(node), short(node))
228 228 ui.status(_('changeset %s backs out changeset %s\n') %
229 229 (nice(repo.changelog.tip()), nice(node)))
230 230 if op1 != node:
231 231 if opts['merge']:
232 232 ui.status(_('merging with changeset %s\n') % nice(op1))
233 233 hg.merge(repo, hex(op1))
234 234 else:
235 235 ui.status(_('the backout changeset is a new head - '
236 236 'do not forget to merge\n'))
237 237 ui.status(_('(use "backout --merge" '
238 238 'if you want to auto-merge)\n'))
239 239
240 240 def branch(ui, repo, label=None, **opts):
241 241 """set or show the current branch name
242 242
243 243 With no argument, show the current branch name. With one argument,
244 244 set the working directory branch name (the branch does not exist in
245 245 the repository until the next commit).
246 246
247 247 Unless --force is specified, branch will not let you set a
248 248 branch name that shadows an existing branch.
249 249 """
250 250
251 251 if label:
252 252 if not opts.get('force') and label in repo.branchtags():
253 253 if label not in [p.branch() for p in repo.workingctx().parents()]:
254 254 raise util.Abort(_('a branch of the same name already exists'
255 255 ' (use --force to override)'))
256 256 repo.dirstate.setbranch(util.fromlocal(label))
257 257 ui.status(_('marked working directory as branch %s\n') % label)
258 258 else:
259 259 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
260 260
261 261 def branches(ui, repo, active=False):
262 262 """list repository named branches
263 263
264 264 List the repository's named branches, indicating which ones are
265 265 inactive. If active is specified, only show active branches.
266 266
267 267 A branch is considered active if it contains unmerged heads.
268 268 """
269 269 b = repo.branchtags()
270 270 heads = dict.fromkeys(repo.heads(), 1)
271 271 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
272 272 l.sort()
273 273 l.reverse()
274 274 for ishead, r, n, t in l:
275 275 if active and not ishead:
276 276 # If we're only displaying active branches, abort the loop on
277 277 # encountering the first inactive head
278 278 break
279 279 else:
280 280 hexfunc = ui.debugflag and hex or short
281 281 if ui.quiet:
282 282 ui.write("%s\n" % t)
283 283 else:
284 284 spaces = " " * (30 - util.locallen(t))
285 285 # The code only gets here if inactive branches are being
286 286 # displayed or the branch is active.
287 287 isinactive = ((not ishead) and " (inactive)") or ''
288 288 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
289 289
290 290 def bundle(ui, repo, fname, dest=None, **opts):
291 291 """create a changegroup file
292 292
293 293 Generate a compressed changegroup file collecting changesets not
294 294 found in the other repository.
295 295
296 296 If no destination repository is specified the destination is assumed
297 297 to have all the nodes specified by one or more --base parameters.
298 298
299 299 The bundle file can then be transferred using conventional means and
300 300 applied to another repository with the unbundle or pull command.
301 301 This is useful when direct push and pull are not available or when
302 302 exporting an entire repository is undesirable.
303 303
304 304 Applying bundles preserves all changeset contents including
305 305 permissions, copy/rename information, and revision history.
306 306 """
307 307 revs = opts.get('rev') or None
308 308 if revs:
309 309 revs = [repo.lookup(rev) for rev in revs]
310 310 base = opts.get('base')
311 311 if base:
312 312 if dest:
313 313 raise util.Abort(_("--base is incompatible with specifying "
314 314 "a destination"))
315 315 base = [repo.lookup(rev) for rev in base]
316 316 # create the right base
317 317 # XXX: nodesbetween / changegroup* should be "fixed" instead
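# walk back from the requested heads, stopping at anything reachable from
# the --base nodes; the parentless nodes collected in o become the roots
# of the outgoing changegroup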
318 318 o = []
319 319 has = {nullid: None}
320 320 for n in base:
321 321 has.update(repo.changelog.reachable(n))
322 322 if revs:
323 323 visit = list(revs)
324 324 else:
325 325 visit = repo.changelog.heads()
326 326 seen = {}
327 327 while visit:
328 328 n = visit.pop(0)
329 329 parents = [p for p in repo.changelog.parents(n) if p not in has]
330 330 if len(parents) == 0:
331 331 o.insert(0, n)
332 332 else:
333 333 for p in parents:
334 334 if p not in seen:
335 335 seen[p] = 1
336 336 visit.append(p)
337 337 else:
338 338 cmdutil.setremoteconfig(ui, opts)
339 339 dest, revs = cmdutil.parseurl(
340 340 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
341 341 other = hg.repository(ui, dest)
342 342 o = repo.findoutgoing(other, force=opts['force'])
343 343
344 344 if revs:
345 345 cg = repo.changegroupsubset(o, revs, 'bundle')
346 346 else:
347 347 cg = repo.changegroup(o, 'bundle')
348 348 changegroup.writebundle(cg, fname, "HG10BZ")
349 349
350 350 def cat(ui, repo, file1, *pats, **opts):
351 351 """output the current or given revision of files
352 352
353 353 Print the specified files as they were at the given revision.
354 354 If no revision is given, the parent of the working directory is used,
355 355 or tip if no revision is checked out.
356 356
357 357 Output may be to a file, in which case the name of the file is
358 358 given using a format string. The formatting rules are the same as
359 359 for the export command, with the following additions:
360 360
361 361 %s basename of file being printed
362 362 %d dirname of file being printed, or '.' if in repo root
363 363 %p root-relative path name of file being printed
364 364 """
365 365 ctx = repo.changectx(opts['rev'])
366 366 err = 1
367 367 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
368 368 ctx.node()):
369 369 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
370 370 fp.write(ctx.filectx(abs).data())
371 371 err = 0
372 372 return err
373 373
374 374 def clone(ui, source, dest=None, **opts):
375 375 """make a copy of an existing repository
376 376
377 377 Create a copy of an existing repository in a new directory.
378 378
379 379 If no destination directory name is specified, it defaults to the
380 380 basename of the source.
381 381
382 382 The location of the source is added to the new repository's
383 383 .hg/hgrc file, as the default to be used for future pulls.
384 384
385 385 For efficiency, hardlinks are used for cloning whenever the source
386 386 and destination are on the same filesystem (note this applies only
387 387 to the repository data, not to the checked out files). Some
388 388 filesystems, such as AFS, implement hardlinking incorrectly, but
389 389 do not report errors. In these cases, use the --pull option to
390 390 avoid hardlinking.
391 391
392 392 You can safely clone repositories and checked out files using full
393 393 hardlinks with
394 394
395 395 $ cp -al REPO REPOCLONE
396 396
397 397 which is the fastest way to clone. However, the operation is not
398 398 atomic (making sure REPO is not modified during the operation is
399 399 up to you) and you have to make sure your editor breaks hardlinks
400 400 (Emacs and most Linux kernel tools do so).
401 401
402 402 If you use the -r option to clone up to a specific revision, no
403 403 subsequent revisions will be present in the cloned repository.
404 404 This option implies --pull, even on local repositories.
405 405
406 406 See pull for valid source format details.
407 407
408 408 It is possible to specify an ssh:// URL as the destination, but no
409 409 .hg/hgrc and working directory will be created on the remote side.
410 410 Look at the help text for the pull command for important details
411 411 about ssh:// URLs.
412 412 """
413 413 cmdutil.setremoteconfig(ui, opts)
414 414 hg.clone(ui, source, dest,
415 415 pull=opts['pull'],
416 416 stream=opts['uncompressed'],
417 417 rev=opts['rev'],
418 418 update=not opts['noupdate'])
419 419
420 420 def commit(ui, repo, *pats, **opts):
421 421 """commit the specified files or all outstanding changes
422 422
423 423 Commit changes to the given files into the repository.
424 424
425 425 If a list of files is omitted, all changes reported by "hg status"
426 426 will be committed.
427 427
428 428 If no commit message is specified, the editor configured in your hgrc
429 429 or in the EDITOR environment variable is started to enter a message.
430 430 """
431 431 def commitfunc(ui, repo, files, message, match, opts):
432 432 return repo.commit(files, message, opts['user'], opts['date'], match,
433 433 force_editor=opts.get('force_editor'))
434 434 cmdutil.commit(ui, repo, commitfunc, pats, opts)
435 435
436 436 def docopy(ui, repo, pats, opts):
437 437 # called with the repo lock held
438 438 #
439 439 # hgsep => pathname that uses "/" to separate directories
440 440 # ossep => pathname that uses os.sep to separate directories
441 441 cwd = repo.getcwd()
442 442 errors = 0
443 443 copied = []
444 444 targets = {}
445 445
446 446 # abs: hgsep
447 447 # rel: ossep
448 448 # return: hgsep
449 449 def okaytocopy(abs, rel, exact):
450 450 reasons = {'?': _('is not managed'),
451 451 'r': _('has been marked for remove')}
452 452 state = repo.dirstate[abs]
453 453 reason = reasons.get(state)
454 454 if reason:
455 455 if exact:
456 456 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
457 457 else:
458 458 if state == 'a':
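# a source that was itself added as a copy reports its own copy source,
# so the new copy is recorded against the original file rather than the
# uncommitted one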
459 459 origsrc = repo.dirstate.copied(abs)
460 460 if origsrc is not None:
461 461 return origsrc
462 462 return abs
463 463
464 464 # origsrc: hgsep
465 465 # abssrc: hgsep
466 466 # relsrc: ossep
467 467 # otarget: ossep
468 468 def copy(origsrc, abssrc, relsrc, otarget, exact):
469 469 abstarget = util.canonpath(repo.root, cwd, otarget)
470 470 reltarget = repo.pathto(abstarget, cwd)
471 471 prevsrc = targets.get(abstarget)
472 472 src = repo.wjoin(abssrc)
473 473 target = repo.wjoin(abstarget)
474 474 if prevsrc is not None:
475 475 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
476 476 (reltarget, repo.pathto(abssrc, cwd),
477 477 repo.pathto(prevsrc, cwd)))
478 478 return
479 479 if (not opts['after'] and os.path.exists(target) or
480 480 opts['after'] and repo.dirstate[abstarget] in 'mn'):
481 481 if not opts['force']:
482 482 ui.warn(_('%s: not overwriting - file exists\n') %
483 483 reltarget)
484 484 return
485 485 if not opts['after'] and not opts.get('dry_run'):
486 486 os.unlink(target)
487 487 if opts['after']:
488 488 if not os.path.exists(target):
489 489 return
490 490 else:
491 491 targetdir = os.path.dirname(target) or '.'
492 492 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
493 493 os.makedirs(targetdir)
494 494 try:
495 495 restore = repo.dirstate[abstarget] == 'r'
496 496 if restore and not opts.get('dry_run'):
497 497 repo.undelete([abstarget])
498 498 try:
499 499 if not opts.get('dry_run'):
500 500 util.copyfile(src, target)
501 501 restore = False
502 502 finally:
503 503 if restore:
504 504 repo.remove([abstarget])
505 505 except IOError, inst:
506 506 if inst.errno == errno.ENOENT:
507 507 ui.warn(_('%s: deleted in working copy\n') % relsrc)
508 508 else:
509 509 ui.warn(_('%s: cannot copy - %s\n') %
510 510 (relsrc, inst.strerror))
511 511 errors += 1
512 512 return
513 513 if ui.verbose or not exact:
514 514 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
515 515 targets[abstarget] = abssrc
516 516 if abstarget != origsrc:
517 517 if repo.dirstate[origsrc] == 'a':
518 518 if not ui.quiet:
519 519 ui.warn(_("%s has not been committed yet, so no copy "
520 520 "data will be stored for %s.\n")
521 521 % (repo.pathto(origsrc, cwd), reltarget))
522 522 if abstarget not in repo.dirstate and not opts.get('dry_run'):
523 523 repo.add([abstarget])
524 524 elif not opts.get('dry_run'):
525 525 repo.copy(origsrc, abstarget)
526 526 copied.append((abssrc, relsrc, exact))
527 527
528 528 # pat: ossep
529 529 # dest: ossep
530 530 # srcs: list of (hgsep, hgsep, ossep, bool)
531 531 # return: function that takes hgsep and returns ossep
532 532 def targetpathfn(pat, dest, srcs):
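# copying a directory: if the destination directory already exists the source
# directory name is kept underneath it, otherwise the directory's contents are
# placed directly at dest (mirroring cp semantics)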
533 533 if os.path.isdir(pat):
534 534 abspfx = util.canonpath(repo.root, cwd, pat)
535 535 abspfx = util.localpath(abspfx)
536 536 if destdirexists:
537 537 striplen = len(os.path.split(abspfx)[0])
538 538 else:
539 539 striplen = len(abspfx)
540 540 if striplen:
541 541 striplen += len(os.sep)
542 542 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
543 543 elif destdirexists:
544 544 res = lambda p: os.path.join(dest,
545 545 os.path.basename(util.localpath(p)))
546 546 else:
547 547 res = lambda p: dest
548 548 return res
549 549
550 550 # pat: ossep
551 551 # dest: ossep
552 552 # srcs: list of (hgsep, hgsep, ossep, bool)
553 553 # return: function that takes hgsep and returns ossep
554 554 def targetpathafterfn(pat, dest, srcs):
555 555 if util.patkind(pat, None)[0]:
556 556 # a mercurial pattern
557 557 res = lambda p: os.path.join(dest,
558 558 os.path.basename(util.localpath(p)))
559 559 else:
560 560 abspfx = util.canonpath(repo.root, cwd, pat)
561 561 if len(abspfx) < len(srcs[0][0]):
562 562 # A directory. Either the target path contains the last
563 563 # component of the source path or it does not.
564 564 def evalpath(striplen):
565 565 score = 0
566 566 for s in srcs:
567 567 t = os.path.join(dest, util.localpath(s[0])[striplen:])
568 568 if os.path.exists(t):
569 569 score += 1
570 570 return score
571 571
572 572 abspfx = util.localpath(abspfx)
573 573 striplen = len(abspfx)
574 574 if striplen:
575 575 striplen += len(os.sep)
576 576 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
577 577 score = evalpath(striplen)
578 578 striplen1 = len(os.path.split(abspfx)[0])
579 579 if striplen1:
580 580 striplen1 += len(os.sep)
581 581 if evalpath(striplen1) > score:
582 582 striplen = striplen1
583 583 res = lambda p: os.path.join(dest,
584 584 util.localpath(p)[striplen:])
585 585 else:
586 586 # a file
587 587 if destdirexists:
588 588 res = lambda p: os.path.join(dest,
589 589 os.path.basename(util.localpath(p)))
590 590 else:
591 591 res = lambda p: dest
592 592 return res
593 593
594 594
595 595 pats = util.expand_glob(pats)
596 596 if not pats:
597 597 raise util.Abort(_('no source or destination specified'))
598 598 if len(pats) == 1:
599 599 raise util.Abort(_('no destination specified'))
600 600 dest = pats.pop()
601 601 destdirexists = os.path.isdir(dest)
602 602 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
603 603 raise util.Abort(_('with multiple sources, destination must be an '
604 604 'existing directory'))
605 605 if opts['after']:
606 606 tfn = targetpathafterfn
607 607 else:
608 608 tfn = targetpathfn
609 609 copylist = []
610 610 for pat in pats:
611 611 srcs = []
612 612 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
613 613 globbed=True):
614 614 origsrc = okaytocopy(abssrc, relsrc, exact)
615 615 if origsrc:
616 616 srcs.append((origsrc, abssrc, relsrc, exact))
617 617 if not srcs:
618 618 continue
619 619 copylist.append((tfn(pat, dest, srcs), srcs))
620 620 if not copylist:
621 621 raise util.Abort(_('no files to copy'))
622 622
623 623 for targetpath, srcs in copylist:
624 624 for origsrc, abssrc, relsrc, exact in srcs:
625 625 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
626 626
627 627 if errors:
628 628 ui.warn(_('(consider using --after)\n'))
629 629 return errors, copied
630 630
631 631 def copy(ui, repo, *pats, **opts):
632 632 """mark files as copied for the next commit
633 633
634 634 Mark dest as having copies of source files. If dest is a
635 635 directory, copies are put in that directory. If dest is a file,
636 636 there can only be one source.
637 637
638 638 By default, this command copies the contents of files as they
639 639 stand in the working directory. If invoked with --after, the
640 640 operation is recorded, but no copying is performed.
641 641
642 642 This command takes effect in the next commit. To undo a copy
643 643 before that, see hg revert.
644 644 """
645 645 wlock = repo.wlock(False)
646 646 try:
647 647 errs, copied = docopy(ui, repo, pats, opts)
648 648 finally:
649 649 del wlock
650 650 return errs
651 651
652 652 def debugancestor(ui, index, rev1, rev2):
653 653 """find the ancestor revision of two revisions in a given index"""
654 654 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
655 655 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
656 656 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
657 657
658 658 def debugcomplete(ui, cmd='', **opts):
659 659 """returns the completion list associated with the given command"""
660 660
661 661 if opts['options']:
662 662 options = []
663 663 otables = [globalopts]
664 664 if cmd:
665 665 aliases, entry = cmdutil.findcmd(ui, cmd)
666 666 otables.append(entry[1])
667 667 for t in otables:
668 668 for o in t:
669 669 if o[0]:
670 670 options.append('-%s' % o[0])
671 671 options.append('--%s' % o[1])
672 672 ui.write("%s\n" % "\n".join(options))
673 673 return
674 674
675 675 clist = cmdutil.findpossible(ui, cmd).keys()
676 676 clist.sort()
677 677 ui.write("%s\n" % "\n".join(clist))
678 678
679 679 def debugrebuildstate(ui, repo, rev=""):
680 680 """rebuild the dirstate as it would look like for the given revision"""
681 681 if rev == "":
682 682 rev = repo.changelog.tip()
683 683 ctx = repo.changectx(rev)
684 684 files = ctx.manifest()
685 685 wlock = repo.wlock()
686 686 try:
687 687 repo.dirstate.rebuild(rev, files)
688 688 finally:
689 689 del wlock
690 690
691 691 def debugcheckstate(ui, repo):
692 692 """validate the correctness of the current dirstate"""
693 693 parent1, parent2 = repo.dirstate.parents()
694 694 m1 = repo.changectx(parent1).manifest()
695 695 m2 = repo.changectx(parent2).manifest()
696 696 errors = 0
697 697 for f in repo.dirstate:
698 698 state = repo.dirstate[f]
699 699 if state in "nr" and f not in m1:
700 700 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
701 701 errors += 1
702 702 if state in "a" and f in m1:
703 703 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
704 704 errors += 1
705 705 if state in "m" and f not in m1 and f not in m2:
706 706 ui.warn(_("%s in state %s, but not in either manifest\n") %
707 707 (f, state))
708 708 errors += 1
709 709 for f in m1:
710 710 state = repo.dirstate[f]
711 711 if state not in "nrm":
712 712 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
713 713 errors += 1
714 714 if errors:
715 715 error = _(".hg/dirstate inconsistent with current parent's manifest")
716 716 raise util.Abort(error)
717 717
718 718 def showconfig(ui, repo, *values, **opts):
719 719 """show combined config settings from all hgrc files
720 720
721 721 With no args, print names and values of all config items.
722 722
723 723 With one arg of the form section.name, print just the value of
724 724 that config item.
725 725
726 726 With multiple args, print names and values of all config items
727 727 with matching section names."""
728 728
729 729 untrusted = bool(opts.get('untrusted'))
730 730 if values:
731 731 if len([v for v in values if '.' in v]) > 1:
732 732 raise util.Abort(_('only one config item permitted'))
733 733 for section, name, value in ui.walkconfig(untrusted=untrusted):
734 734 sectname = section + '.' + name
735 735 if values:
736 736 for v in values:
737 737 if v == section:
738 738 ui.write('%s=%s\n' % (sectname, value))
739 739 elif v == sectname:
740 740 ui.write(value, '\n')
741 741 else:
742 742 ui.write('%s=%s\n' % (sectname, value))
743 743
744 744 def debugsetparents(ui, repo, rev1, rev2=None):
745 745 """manually set the parents of the current working directory
746 746
747 747 This is useful for writing repository conversion tools, but should
748 748 be used with care.
749 749 """
750 750
751 751 if not rev2:
752 752 rev2 = hex(nullid)
753 753
754 754 wlock = repo.wlock()
755 755 try:
756 756 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
757 757 finally:
758 758 del wlock
759 759
760 760 def debugstate(ui, repo):
761 761 """show the contents of the current dirstate"""
762 762 dc = repo.dirstate._map
763 763 k = dc.keys()
764 764 k.sort()
765 765 for file_ in k:
766 766 if dc[file_][3] == -1:
767 767 # Pad or slice to locale representation
768 768 locale_len = len(time.strftime("%x %X", time.localtime(0)))
769 769 timestr = 'unset'
770 770 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
771 771 else:
772 772 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
773 773 ui.write("%c %3o %10d %s %s\n"
774 774 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
775 775 timestr, file_))
776 776 for f in repo.dirstate.copies():
777 777 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
778 778
779 779 def debugdata(ui, file_, rev):
780 780 """dump the contents of a data file revision"""
781 781 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
782 782 try:
783 783 ui.write(r.revision(r.lookup(rev)))
784 784 except KeyError:
785 785 raise util.Abort(_('invalid revision identifier %s') % rev)
786 786
787 787 def debugdate(ui, date, range=None, **opts):
788 788 """parse and display a date"""
789 789 if opts["extended"]:
790 790 d = util.parsedate(date, util.extendeddateformats)
791 791 else:
792 792 d = util.parsedate(date)
793 793 ui.write("internal: %s %s\n" % d)
794 794 ui.write("standard: %s\n" % util.datestr(d))
795 795 if range:
796 796 m = util.matchdate(range)
797 797 ui.write("match: %s\n" % m(d[0]))
798 798
799 799 def debugindex(ui, file_):
800 800 """dump the contents of an index file"""
801 801 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
802 802 ui.write(" rev offset length base linkrev" +
803 803 " nodeid p1 p2\n")
804 804 for i in xrange(r.count()):
805 805 node = r.node(i)
806 806 pp = r.parents(node)
807 807 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
808 808 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
809 809 short(node), short(pp[0]), short(pp[1])))
810 810
811 811 def debugindexdot(ui, file_):
812 812 """dump an index DAG as a .dot file"""
813 813 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
814 814 ui.write("digraph G {\n")
815 815 for i in xrange(r.count()):
816 816 node = r.node(i)
817 817 pp = r.parents(node)
818 818 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
819 819 if pp[1] != nullid:
820 820 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
821 821 ui.write("}\n")
822 822
823 823 def debuginstall(ui):
824 824 '''test Mercurial installation'''
825 825
826 826 def writetemp(contents):
827 827 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
828 828 f = os.fdopen(fd, "wb")
829 829 f.write(contents)
830 830 f.close()
831 831 return name
832 832
833 833 problems = 0
834 834
835 835 # encoding
836 836 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
837 837 try:
838 838 util.fromlocal("test")
839 839 except util.Abort, inst:
840 840 ui.write(" %s\n" % inst)
841 841 ui.write(_(" (check that your locale is properly set)\n"))
842 842 problems += 1
843 843
844 844 # compiled modules
845 845 ui.status(_("Checking extensions...\n"))
846 846 try:
847 847 import bdiff, mpatch, base85
848 848 except Exception, inst:
849 849 ui.write(" %s\n" % inst)
850 850 ui.write(_(" One or more extensions could not be found"))
851 851 ui.write(_(" (check that you compiled the extensions)\n"))
852 852 problems += 1
853 853
854 854 # templates
855 855 ui.status(_("Checking templates...\n"))
856 856 try:
857 857 import templater
858 858 t = templater.templater(templater.templatepath("map-cmdline.default"))
859 859 except Exception, inst:
860 860 ui.write(" %s\n" % inst)
861 861 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
862 862 problems += 1
863 863
864 864 # patch
865 865 ui.status(_("Checking patch...\n"))
866 866 patcher = ui.config('ui', 'patch')
867 867 patcher = ((patcher and util.find_exe(patcher)) or
868 868 util.find_exe('gpatch') or
869 869 util.find_exe('patch'))
870 870 if not patcher:
871 871 ui.write(_(" Can't find patch or gpatch in PATH\n"))
872 872 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
873 873 problems += 1
874 874 else:
875 875 # actually attempt a patch here
876 876 a = "1\n2\n3\n4\n"
877 877 b = "1\n2\n3\ninsert\n4\n"
878 878 fa = writetemp(a)
879 879 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
880 880 fd = writetemp(d)
881 881
882 882 files = {}
883 883 try:
884 884 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
885 885 except util.Abort, e:
886 886 ui.write(_(" patch call failed:\n"))
887 887 ui.write(" " + str(e) + "\n")
888 888 problems += 1
889 889 else:
890 890 if list(files) != [os.path.basename(fa)]:
891 891 ui.write(_(" unexpected patch output!"))
892 892 ui.write(_(" (you may have an incompatible version of patch)\n"))
893 893 problems += 1
894 894 a = file(fa).read()
895 895 if a != b:
896 896 ui.write(_(" patch test failed!"))
897 897 ui.write(_(" (you may have an incompatible version of patch)\n"))
898 898 problems += 1
899 899
900 900 os.unlink(fa)
901 901 os.unlink(fd)
902 902
903 903 # merge helper
904 904 ui.status(_("Checking merge helper...\n"))
905 905 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
906 906 or "hgmerge")
907 907 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
908 908 if not cmdpath:
909 909 if cmd == 'hgmerge':
910 910 ui.write(_(" No merge helper set and can't find default"
911 911 " hgmerge script in PATH\n"))
912 912 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
913 913 else:
914 914 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
915 915 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
916 916 problems += 1
917 917 else:
918 918 # actually attempt a merge here
919 919 fa = writetemp("1\n2\n3\n4\n")
920 920 fl = writetemp("1\n2\n3\ninsert\n4\n")
921 921 fr = writetemp("begin\n1\n2\n3\n4\n")
922 922 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
923 923 if r:
924 924 ui.write(_(" Got unexpected merge error %d!\n") % r)
925 925 problems += 1
926 926 m = file(fl).read()
927 927 if m != "begin\n1\n2\n3\ninsert\n4\n":
928 928 ui.write(_(" Got unexpected merge results!\n"))
929 929 ui.write(_(" (your merge helper may have the"
930 930 " wrong argument order)\n"))
931 931 ui.write(_(" Result: %r\n") % m)
932 932 problems += 1
933 933 os.unlink(fa)
934 934 os.unlink(fl)
935 935 os.unlink(fr)
936 936
937 937 # editor
938 938 ui.status(_("Checking commit editor...\n"))
939 939 editor = (os.environ.get("HGEDITOR") or
940 940 ui.config("ui", "editor") or
941 941 os.environ.get("EDITOR", "vi"))
942 942 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
943 943 if not cmdpath:
944 944 if editor == 'vi':
945 945 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
946 946 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
947 947 else:
948 948 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
949 949 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
950 950 problems += 1
951 951
952 952 # check username
953 953 ui.status(_("Checking username...\n"))
954 954 user = os.environ.get("HGUSER")
955 955 if user is None:
956 956 user = ui.config("ui", "username")
957 957 if user is None:
958 958 user = os.environ.get("EMAIL")
959 959 if not user:
960 960 ui.warn(" ")
961 961 ui.username()
962 962 ui.write(_(" (specify a username in your .hgrc file)\n"))
963 963
964 964 if not problems:
965 965 ui.status(_("No problems detected\n"))
966 966 else:
967 967 ui.write(_("%s problems detected,"
968 968 " please check your install!\n") % problems)
969 969
970 970 return problems
971 971
972 972 def debugrename(ui, repo, file1, *pats, **opts):
973 973 """dump rename information"""
974 974
975 975 ctx = repo.changectx(opts.get('rev', 'tip'))
976 976 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
977 977 ctx.node()):
978 978 m = ctx.filectx(abs).renamed()
979 979 if m:
980 980 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
981 981 else:
982 982 ui.write(_("%s not renamed\n") % rel)
983 983
984 984 def debugwalk(ui, repo, *pats, **opts):
985 985 """show how files match on given patterns"""
986 986 items = list(cmdutil.walk(repo, pats, opts))
987 987 if not items:
988 988 return
989 989 fmt = '%%s %%-%ds %%-%ds %%s' % (
990 990 max([len(abs) for (src, abs, rel, exact) in items]),
991 991 max([len(rel) for (src, abs, rel, exact) in items]))
992 992 for src, abs, rel, exact in items:
993 993 line = fmt % (src, abs, rel, exact and 'exact' or '')
994 994 ui.write("%s\n" % line.rstrip())
995 995
996 996 def diff(ui, repo, *pats, **opts):
997 997 """diff repository (or selected files)
998 998
999 999 Show differences between revisions for the specified files.
1000 1000
1001 1001 Differences between files are shown using the unified diff format.
1002 1002
1003 1003 NOTE: diff may generate unexpected results for merges, as it will
1004 1004 default to comparing against the working directory's first parent
1005 1005 changeset if no revisions are specified.
1006 1006
1007 1007 When two revision arguments are given, then changes are shown
1008 1008 between those revisions. If only one revision is specified then
1009 1009 that revision is compared to the working directory, and, when no
1010 1010 revisions are specified, the working directory files are compared
1011 1011 to its parent.
1012 1012
1013 1013 Without the -a option, diff will avoid generating diffs of files
1014 1014 it detects as binary. With -a, diff will generate a diff anyway,
1015 1015 probably with undesirable results.
1016 1016 """
1017 1017 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1018 1018
1019 1019 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1020 1020
1021 1021 patch.diff(repo, node1, node2, fns, match=matchfn,
1022 1022 opts=patch.diffopts(ui, opts))
1023 1023
1024 1024 def export(ui, repo, *changesets, **opts):
1025 1025 """dump the header and diffs for one or more changesets
1026 1026
1027 1027 Print the changeset header and diffs for one or more revisions.
1028 1028
1029 1029 The information shown in the changeset header is: author,
1030 1030 changeset hash, parent(s) and commit comment.
1031 1031
1032 1032 NOTE: export may generate unexpected diff output for merge changesets,
1033 1033 as it will compare the merge changeset against its first parent only.
1034 1034
1035 1035 Output may be to a file, in which case the name of the file is
1036 1036 given using a format string. The formatting rules are as follows:
1037 1037
1038 1038 %% literal "%" character
1039 1039 %H changeset hash (40 bytes of hexadecimal)
1040 1040 %N number of patches being generated
1041 1041 %R changeset revision number
1042 1042 %b basename of the exporting repository
1043 1043 %h short-form changeset hash (12 bytes of hexadecimal)
1044 1044 %n zero-padded sequence number, starting at 1
1045 1045 %r zero-padded changeset revision number
1046 1046
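    For example, an invocation along the lines of
    "hg export -o %b-%n.patch 100 101" would write each patch to its own
    file, with a name built from the repository basename and a zero-padded
    sequence number.
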
1047 1047 Without the -a option, export will avoid generating diffs of files
1048 1048 it detects as binary. With -a, export will generate a diff anyway,
1049 1049 probably with undesirable results.
1050 1050
1051 1051 With the --switch-parent option, the diff will be against the second
1052 1052 parent. This can be useful for reviewing a merge.
1053 1053 """
1054 1054 if not changesets:
1055 1055 raise util.Abort(_("export requires at least one changeset"))
1056 1056 revs = cmdutil.revrange(repo, changesets)
1057 1057 if len(revs) > 1:
1058 1058 ui.note(_('exporting patches:\n'))
1059 1059 else:
1060 1060 ui.note(_('exporting patch:\n'))
1061 1061 patch.export(repo, revs, template=opts['output'],
1062 1062 switch_parent=opts['switch_parent'],
1063 1063 opts=patch.diffopts(ui, opts))
1064 1064
1065 1065 def grep(ui, repo, pattern, *pats, **opts):
1066 1066 """search for a pattern in specified files and revisions
1067 1067
1068 1068 Search revisions of files for a regular expression.
1069 1069
1070 1070 This command behaves differently than Unix grep. It only accepts
1071 1071 Python/Perl regexps. It searches repository history, not the
1072 1072 working directory. It always prints the revision number in which
1073 1073 a match appears.
1074 1074
1075 1075 By default, grep only prints output for the first revision of a
1076 1076 file in which it finds a match. To get it to print every revision
1077 1077 that contains a change in match status ("-" for a match that
1078 1078 becomes a non-match, or "+" for a non-match that becomes a match),
1079 1079 use the --all flag.
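
    With --all and --line-number, an output line looks roughly like
    "somefile.c:240:12:+:the newly matching text", i.e. the file name,
    revision number, line number, change marker and matched line, joined
    by the separator (":" by default).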
1080 1080 """
1081 1081 reflags = 0
1082 1082 if opts['ignore_case']:
1083 1083 reflags |= re.I
1084 1084 try:
1085 1085 regexp = re.compile(pattern, reflags)
1086 1086 except Exception, inst:
1087 1087 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1088 1088 return None
1089 1089 sep, eol = ':', '\n'
1090 1090 if opts['print0']:
1091 1091 sep = eol = '\0'
1092 1092
1093 1093 fcache = {}
1094 1094 def getfile(fn):
1095 1095 if fn not in fcache:
1096 1096 fcache[fn] = repo.file(fn)
1097 1097 return fcache[fn]
1098 1098
1099 1099 def matchlines(body):
1100 1100 begin = 0
1101 1101 linenum = 0
1102 1102 while True:
1103 1103 match = regexp.search(body, begin)
1104 1104 if not match:
1105 1105 break
1106 1106 mstart, mend = match.span()
1107 1107 linenum += body.count('\n', begin, mstart) + 1
1108 1108 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1109 1109 lend = body.find('\n', mend)
1110 1110 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1111 1111 begin = lend + 1
1112 1112
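    # For instance, with body == "foo\nbar\n" and a pattern of "bar",
    # matchlines yields (2, 0, 3, 'bar'): the match is on line 2, spans
    # columns 0 to 3 of that line, and the line's text is returned for
    # display.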
1113 1113 class linestate(object):
1114 1114 def __init__(self, line, linenum, colstart, colend):
1115 1115 self.line = line
1116 1116 self.linenum = linenum
1117 1117 self.colstart = colstart
1118 1118 self.colend = colend
1119 1119
1120 1120 def __eq__(self, other):
1121 1121 return self.line == other.line
1122 1122
1123 1123 matches = {}
1124 1124 copies = {}
1125 1125 def grepbody(fn, rev, body):
1126 1126 matches[rev].setdefault(fn, [])
1127 1127 m = matches[rev][fn]
1128 1128 for lnum, cstart, cend, line in matchlines(body):
1129 1129 s = linestate(line, lnum, cstart, cend)
1130 1130 m.append(s)
1131 1131
1132 1132 def difflinestates(a, b):
1133 1133 sm = difflib.SequenceMatcher(None, a, b)
1134 1134 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1135 1135 if tag == 'insert':
1136 1136 for i in xrange(blo, bhi):
1137 1137 yield ('+', b[i])
1138 1138 elif tag == 'delete':
1139 1139 for i in xrange(alo, ahi):
1140 1140 yield ('-', a[i])
1141 1141 elif tag == 'replace':
1142 1142 for i in xrange(alo, ahi):
1143 1143 yield ('-', a[i])
1144 1144 for i in xrange(blo, bhi):
1145 1145 yield ('+', b[i])
1146 1146
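    # difflinestates is a plain SequenceMatcher walk; for example, with
    # a == [s1] and b == [s1, s2] the opcodes are ('equal', ...) followed
    # by ('insert', ...), so the generator emits a single ('+', s2) for
    # the element present only in b.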
1147 1147 prev = {}
1148 1148 def display(fn, rev, states, prevstates):
1149 1149 found = False
1150 1150 filerevmatches = {}
1151 1151 r = prev.get(fn, -1)
1152 1152 if opts['all']:
1153 1153 iter = difflinestates(states, prevstates)
1154 1154 else:
1155 1155 iter = [('', l) for l in prevstates]
1156 1156 for change, l in iter:
1157 1157 cols = [fn, str(r)]
1158 1158 if opts['line_number']:
1159 1159 cols.append(str(l.linenum))
1160 1160 if opts['all']:
1161 1161 cols.append(change)
1162 1162 if opts['user']:
1163 1163 cols.append(ui.shortuser(get(r)[1]))
1164 1164 if opts['files_with_matches']:
1165 1165 c = (fn, r)
1166 1166 if c in filerevmatches:
1167 1167 continue
1168 1168 filerevmatches[c] = 1
1169 1169 else:
1170 1170 cols.append(l.line)
1171 1171 ui.write(sep.join(cols), eol)
1172 1172 found = True
1173 1173 return found
1174 1174
1175 1175 fstate = {}
1176 1176 skip = {}
1177 1177 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1178 1178 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1179 1179 found = False
1180 1180 follow = opts.get('follow')
1181 1181 for st, rev, fns in changeiter:
1182 1182 if st == 'window':
1183 1183 matches.clear()
1184 1184 elif st == 'add':
1185 1185 mf = repo.changectx(rev).manifest()
1186 1186 matches[rev] = {}
1187 1187 for fn in fns:
1188 1188 if fn in skip:
1189 1189 continue
1190 1190 fstate.setdefault(fn, {})
1191 1191 try:
1192 1192 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1193 1193 if follow:
1194 1194 copied = getfile(fn).renamed(mf[fn])
1195 1195 if copied:
1196 1196 copies.setdefault(rev, {})[fn] = copied[0]
1197 1197 except KeyError:
1198 1198 pass
1199 1199 elif st == 'iter':
1200 1200 states = matches[rev].items()
1201 1201 states.sort()
1202 1202 for fn, m in states:
1203 1203 copy = copies.get(rev, {}).get(fn)
1204 1204 if fn in skip:
1205 1205 if copy:
1206 1206 skip[copy] = True
1207 1207 continue
1208 1208 if fn in prev or fstate[fn]:
1209 1209 r = display(fn, rev, m, fstate[fn])
1210 1210 found = found or r
1211 1211 if r and not opts['all']:
1212 1212 skip[fn] = True
1213 1213 if copy:
1214 1214 skip[copy] = True
1215 1215 fstate[fn] = m
1216 1216 if copy:
1217 1217 fstate[copy] = m
1218 1218 prev[fn] = rev
1219 1219
1220 1220 fstate = fstate.items()
1221 1221 fstate.sort()
1222 1222 for fn, state in fstate:
1223 1223 if fn in skip:
1224 1224 continue
1225 1225 if fn not in copies.get(prev[fn], {}):
1226 1226 found = display(fn, rev, {}, state) or found
1227 1227 return (not found and 1) or 0
1228 1228
1229 1229 def heads(ui, repo, *branchrevs, **opts):
1230 1230 """show current repository heads or show branch heads
1231 1231
1232 1232 With no arguments, show all repository head changesets.
1233 1233
1234 1234 If branch or revision names are given, this will show the heads of
1235 1235 the specified branches or the branches those revisions are tagged
1236 1236 with.
1237 1237
1238 1238 Repository "heads" are changesets that don't have child
1239 1239 changesets. They are where development generally takes place and
1240 1240 are the usual targets for update and merge operations.
1241 1241
1242 1242 Branch heads are changesets that have a given branch tag, but have
1243 1243 no child changesets with that tag. They are usually where
1244 1244 development on the given branch takes place.
1245 1245 """
1246 1246 if opts['rev']:
1247 1247 start = repo.lookup(opts['rev'])
1248 1248 else:
1249 1249 start = None
1250 1250 if not branchrevs:
1251 1251 # Assume we're looking repo-wide heads if no revs were specified.
1252 1252 heads = repo.heads(start)
1253 1253 else:
1254 1254 heads = []
1255 1255 visitedset = util.set()
1256 1256 for branchrev in branchrevs:
1257 1257 branch = repo.changectx(branchrev).branch()
1258 1258 if branch in visitedset:
1259 1259 continue
1260 1260 visitedset.add(branch)
1261 1261 bheads = repo.branchheads(branch, start)
1262 1262 if not bheads:
1263 1263 if branch != branchrev:
1264 1264 ui.warn(_("no changes on branch %s containing %s are "
1265 1265 "reachable from %s\n")
1266 1266 % (branch, branchrev, opts['rev']))
1267 1267 else:
1268 1268 ui.warn(_("no changes on branch %s are reachable from %s\n")
1269 1269 % (branch, opts['rev']))
1270 1270 heads.extend(bheads)
1271 1271 if not heads:
1272 1272 return 1
1273 1273 displayer = cmdutil.show_changeset(ui, repo, opts)
1274 1274 for n in heads:
1275 1275 displayer.show(changenode=n)
1276 1276
1277 1277 def help_(ui, name=None, with_version=False):
1278 1278 """show help for a command, extension, or list of commands
1279 1279
1280 1280 With no arguments, print a list of commands and short help.
1281 1281
1282 1282 Given a command name, print help for that command.
1283 1283
1284 1284 Given an extension name, print help for that extension, and the
1285 1285 commands it provides."""
1286 1286 option_lists = []
1287 1287
1288 1288 def addglobalopts(aliases):
1289 1289 if ui.verbose:
1290 1290 option_lists.append((_("global options:"), globalopts))
1291 1291 if name == 'shortlist':
1292 1292 option_lists.append((_('use "hg help" for the full list '
1293 1293 'of commands'), ()))
1294 1294 else:
1295 1295 if name == 'shortlist':
1296 1296 msg = _('use "hg help" for the full list of commands '
1297 1297 'or "hg -v" for details')
1298 1298 elif aliases:
1299 1299 msg = _('use "hg -v help%s" to show aliases and '
1300 1300 'global options') % (name and " " + name or "")
1301 1301 else:
1302 1302 msg = _('use "hg -v help %s" to show global options') % name
1303 1303 option_lists.append((msg, ()))
1304 1304
1305 1305 def helpcmd(name):
1306 1306 if with_version:
1307 1307 version_(ui)
1308 1308 ui.write('\n')
1309 1309 aliases, i = cmdutil.findcmd(ui, name)
1310 1310 # synopsis
1311 1311 ui.write("%s\n\n" % i[2])
1312 1312
1313 1313 # description
1314 1314 doc = i[0].__doc__
1315 1315 if not doc:
1316 1316 doc = _("(No help text available)")
1317 1317 if ui.quiet:
1318 1318 doc = doc.splitlines(0)[0]
1319 1319 ui.write("%s\n" % doc.rstrip())
1320 1320
1321 1321 if not ui.quiet:
1322 1322 # aliases
1323 1323 if len(aliases) > 1:
1324 1324 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1325 1325
1326 1326 # options
1327 1327 if i[1]:
1328 1328 option_lists.append((_("options:\n"), i[1]))
1329 1329
1330 1330 addglobalopts(False)
1331 1331
1332 1332 def helplist(header, select=None):
1333 1333 h = {}
1334 1334 cmds = {}
1335 1335 for c, e in table.items():
1336 1336 f = c.split("|", 1)[0]
1337 1337 if select and not select(f):
1338 1338 continue
1339 1339 if name == "shortlist" and not f.startswith("^"):
1340 1340 continue
1341 1341 f = f.lstrip("^")
1342 1342 if not ui.debugflag and f.startswith("debug"):
1343 1343 continue
1344 1344 doc = e[0].__doc__
1345 1345 if not doc:
1346 1346 doc = _("(No help text available)")
1347 1347 h[f] = doc.splitlines(0)[0].rstrip()
1348 1348 cmds[f] = c.lstrip("^")
1349 1349
1350 1350 if not h:
1351 1351 ui.status(_('no commands defined\n'))
1352 1352 return
1353 1353
1354 1354 ui.status(header)
1355 1355 fns = h.keys()
1356 1356 fns.sort()
1357 1357 m = max(map(len, fns))
1358 1358 for f in fns:
1359 1359 if ui.verbose:
1360 1360 commands = cmds[f].replace("|",", ")
1361 1361 ui.write(" %s:\n %s\n"%(commands, h[f]))
1362 1362 else:
1363 1363 ui.write(' %-*s %s\n' % (m, f, h[f]))
1364 1364
1365 1365 if not ui.quiet:
1366 1366 addglobalopts(True)
1367 1367
1368 1368 def helptopic(name):
1369 1369 v = None
1370 1370 for i in help.helptable:
1371 1371 l = i.split('|')
1372 1372 if name in l:
1373 1373 v = i
1374 1374 header = l[-1]
1375 1375 if not v:
1376 1376 raise cmdutil.UnknownCommand(name)
1377 1377
1378 1378 # description
1379 1379 doc = help.helptable[v]
1380 1380 if not doc:
1381 1381 doc = _("(No help text available)")
1382 1382 if callable(doc):
1383 1383 doc = doc()
1384 1384
1385 1385 ui.write("%s\n" % header)
1386 1386 ui.write("%s\n" % doc.rstrip())
1387 1387
1388 1388 def helpext(name):
1389 1389 try:
1390 1390 mod = extensions.find(name)
1391 1391 except KeyError:
1392 1392 raise cmdutil.UnknownCommand(name)
1393 1393
1394 1394 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1395 1395 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1396 1396 for d in doc[1:]:
1397 1397 ui.write(d, '\n')
1398 1398
1399 1399 ui.status('\n')
1400 1400
1401 1401 try:
1402 1402 ct = mod.cmdtable
1403 1403 except AttributeError:
1404 1404 ct = {}
1405 1405
1406 1406 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1407 1407 helplist(_('list of commands:\n\n'), modcmds.has_key)
1408 1408
1409 1409 if name and name != 'shortlist':
1410 1410 i = None
1411 1411 for f in (helpcmd, helptopic, helpext):
1412 1412 try:
1413 1413 f(name)
1414 1414 i = None
1415 1415 break
1416 1416 except cmdutil.UnknownCommand, inst:
1417 1417 i = inst
1418 1418 if i:
1419 1419 raise i
1420 1420
1421 1421 else:
1422 1422 # program name
1423 1423 if ui.verbose or with_version:
1424 1424 version_(ui)
1425 1425 else:
1426 1426 ui.status(_("Mercurial Distributed SCM\n"))
1427 1427 ui.status('\n')
1428 1428
1429 1429 # list of commands
1430 1430 if name == "shortlist":
1431 1431 header = _('basic commands:\n\n')
1432 1432 else:
1433 1433 header = _('list of commands:\n\n')
1434 1434
1435 1435 helplist(header)
1436 1436
1437 1437 # list all option lists
1438 1438 opt_output = []
1439 1439 for title, options in option_lists:
1440 1440 opt_output.append(("\n%s" % title, None))
1441 1441 for shortopt, longopt, default, desc in options:
1442 1442 if "DEPRECATED" in desc and not ui.verbose: continue
1443 1443 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1444 1444 longopt and " --%s" % longopt),
1445 1445 "%s%s" % (desc,
1446 1446 default
1447 1447 and _(" (default: %s)") % default
1448 1448 or "")))
1449 1449
1450 1450 if opt_output:
1451 1451 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1452 1452 for first, second in opt_output:
1453 1453 if second:
1454 1454 ui.write(" %-*s %s\n" % (opts_len, first, second))
1455 1455 else:
1456 1456 ui.write("%s\n" % first)
1457 1457
1458 1458 def identify(ui, repo, source=None,
1459 1459 rev=None, num=None, id=None, branch=None, tags=None):
1460 1460 """identify the working copy or specified revision
1461 1461
1462 1462 With no revision, print a summary of the current state of the repo.
1463 1463
1464 1464 With a path, do a lookup in another repository.
1465 1465
1466 1466 This summary identifies the repository state using one or two parent
1467 1467 hash identifiers, followed by a "+" if there are uncommitted changes
1468 1468 in the working directory, a list of tags for this revision and a branch
1469 1469 name for non-default branches.
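
    For example, the summary for a clean checkout of a tagged revision
    might look like "c3a4f7e5d1b2 tip", while "c3a4f7e5d1b2+" would
    indicate uncommitted local changes.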
1470 1470 """
1471 1471
1472 1472 hexfunc = ui.debugflag and hex or short
1473 1473 default = not (num or id or branch or tags)
1474 1474 output = []
1475 1475
1476 1476 if source:
1477 1477 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1478 1478 srepo = hg.repository(ui, source)
1479 1479 if not rev and revs:
1480 1480 rev = revs[0]
1481 1481 if not rev:
1482 1482 rev = "tip"
1483 1483 if num or branch or tags:
1484 1484 raise util.Abort(
1485 1485 "can't query remote revision number, branch, or tags")
1486 1486 output = [hexfunc(srepo.lookup(rev))]
1487 1487 elif not rev:
1488 1488 ctx = repo.workingctx()
1489 1489 parents = ctx.parents()
1490 1490 changed = False
1491 1491 if default or id or num:
1492 1492 changed = ctx.files() + ctx.deleted()
1493 1493 if default or id:
1494 1494 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1495 1495 (changed) and "+" or "")]
1496 1496 if num:
1497 1497 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1498 1498 (changed) and "+" or ""))
1499 1499 else:
1500 1500 ctx = repo.changectx(rev)
1501 1501 if default or id:
1502 1502 output = [hexfunc(ctx.node())]
1503 1503 if num:
1504 1504 output.append(str(ctx.rev()))
1505 1505
1506 1506 if not source and default and not ui.quiet:
1507 1507 b = util.tolocal(ctx.branch())
1508 1508 if b != 'default':
1509 1509 output.append("(%s)" % b)
1510 1510
1511 1511 # multiple tags for a single parent separated by '/'
1512 1512 t = "/".join(ctx.tags())
1513 1513 if t:
1514 1514 output.append(t)
1515 1515
1516 1516 if branch:
1517 1517 output.append(util.tolocal(ctx.branch()))
1518 1518
1519 1519 if tags:
1520 1520 output.extend(ctx.tags())
1521 1521
1522 1522 ui.write("%s\n" % ' '.join(output))
1523 1523
1524 1524 def import_(ui, repo, patch1, *patches, **opts):
1525 1525 """import an ordered set of patches
1526 1526
1527 1527 Import a list of patches and commit them individually.
1528 1528
1529 1529 If there are outstanding changes in the working directory, import
1530 1530 will abort unless given the -f flag.
1531 1531
1532 1532 You can import a patch straight from a mail message. Even patches
1533 1533 as attachments work (the body part must be of type text/plain or
1534 1534 text/x-patch to be used). The From and Subject headers of the email
1535 1535 message are used as the default committer and commit message. All
1536 1536 text/plain body parts before the first diff are added to the commit
1537 1537 message.
1538 1538
1539 1539 If the imported patch was generated by hg export, the user and
1540 1540 description from the patch override values from the message headers and
1541 1541 body. Values given on the command line with -m and -u override these.
1542 1542
1543 1543 If --exact is specified, import will set the working directory
1544 1544 to the parent of each patch before applying it, and will abort
1545 1545 if the resulting changeset has a different ID than the one
1546 1546 recorded in the patch. This may happen due to character set
1547 1547 problems or other deficiencies in the text patch format.
1548 1548
1549 1549 To read a patch from standard input, use patch name "-".
1550 1550 """
1551 1551 patches = (patch1,) + patches
1552 1552
1553 1553 if opts.get('exact') or not opts['force']:
1554 1554 cmdutil.bail_if_changed(repo)
1555 1555
1556 1556 d = opts["base"]
1557 1557 strip = opts["strip"]
1558 1558 wlock = lock = None
1559 1559 try:
1560 1560 wlock = repo.wlock()
1561 1561 lock = repo.lock()
1562 1562 for p in patches:
1563 1563 pf = os.path.join(d, p)
1564 1564
1565 1565 if pf == '-':
1566 1566 ui.status(_("applying patch from stdin\n"))
1567 1567 data = patch.extract(ui, sys.stdin)
1568 1568 else:
1569 1569 ui.status(_("applying %s\n") % p)
1570 1570 data = patch.extract(ui, file(pf, 'rb'))
1571 1571
1572 1572 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1573 1573
1574 1574 if tmpname is None:
1575 1575 raise util.Abort(_('no diffs found'))
1576 1576
1577 1577 try:
1578 1578 cmdline_message = cmdutil.logmessage(opts)
1579 1579 if cmdline_message:
1580 1580 # pickup the cmdline msg
1581 1581 message = cmdline_message
1582 1582 elif message:
1583 1583 # pickup the patch msg
1584 1584 message = message.strip()
1585 1585 else:
1586 1586 # launch the editor
1587 1587 message = None
1588 1588 ui.debug(_('message:\n%s\n') % message)
1589 1589
1590 1590 wp = repo.workingctx().parents()
1591 1591 if opts.get('exact'):
1592 1592 if not nodeid or not p1:
1593 1593 raise util.Abort(_('not a mercurial patch'))
1594 1594 p1 = repo.lookup(p1)
1595 1595 p2 = repo.lookup(p2 or hex(nullid))
1596 1596
1597 1597 if p1 != wp[0].node():
1598 1598 hg.clean(repo, p1)
1599 1599 repo.dirstate.setparents(p1, p2)
1600 1600 elif p2:
1601 1601 try:
1602 1602 p1 = repo.lookup(p1)
1603 1603 p2 = repo.lookup(p2)
1604 1604 if p1 == wp[0].node():
1605 1605 repo.dirstate.setparents(p1, p2)
1606 1606 except hg.RepoError:
1607 1607 pass
1608 1608 if opts.get('exact') or opts.get('import_branch'):
1609 1609 repo.dirstate.setbranch(branch or 'default')
1610 1610
1611 1611 files = {}
1612 1612 try:
1613 1613 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1614 1614 files=files)
1615 1615 finally:
1616 1616 files = patch.updatedir(ui, repo, files)
1617 1617 n = repo.commit(files, message, user, date)
1618 1618 if opts.get('exact'):
1619 1619 if hex(n) != nodeid:
1620 1620 repo.rollback()
1621 1621 raise util.Abort(_('patch is damaged' +
1622 1622 ' or loses information'))
1623 1623 finally:
1624 1624 os.unlink(tmpname)
1625 1625 finally:
1626 del wlock, lock
1626 del lock, wlock
1627 1627
1628 1628 def incoming(ui, repo, source="default", **opts):
1629 1629 """show new changesets found in source
1630 1630
1631 1631 Show new changesets found in the specified path/URL or the default
1632 1632 pull location. These are the changesets that would be pulled if a pull
1633 1633 was requested.
1634 1634
1635 1635 For a remote repository, using --bundle avoids downloading the changesets
1636 1636 twice if the incoming is followed by a pull.
1637 1637
1638 1638 See pull for valid source format details.
1639 1639 """
1640 1640 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1641 1641 cmdutil.setremoteconfig(ui, opts)
1642 1642
1643 1643 other = hg.repository(ui, source)
1644 1644 ui.status(_('comparing with %s\n') % source)
1645 1645 if revs:
1646 1646 if 'lookup' in other.capabilities:
1647 1647 revs = [other.lookup(rev) for rev in revs]
1648 1648 else:
1649 1649 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1650 1650 raise util.Abort(error)
1651 1651 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1652 1652 if not incoming:
1653 1653 try:
1654 1654 os.unlink(opts["bundle"])
1655 1655 except:
1656 1656 pass
1657 1657 ui.status(_("no changes found\n"))
1658 1658 return 1
1659 1659
1660 1660 cleanup = None
1661 1661 try:
1662 1662 fname = opts["bundle"]
1663 1663 if fname or not other.local():
1664 1664 # create a bundle (uncompressed if other repo is not local)
1665 1665 if revs is None:
1666 1666 cg = other.changegroup(incoming, "incoming")
1667 1667 else:
1668 1668 if 'changegroupsubset' not in other.capabilities:
1669 1669 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1670 1670 cg = other.changegroupsubset(incoming, revs, 'incoming')
1671 1671 bundletype = other.local() and "HG10BZ" or "HG10UN"
1672 1672 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1673 1673 # keep written bundle?
1674 1674 if opts["bundle"]:
1675 1675 cleanup = None
1676 1676 if not other.local():
1677 1677 # use the created uncompressed bundlerepo
1678 1678 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1679 1679
1680 1680 o = other.changelog.nodesbetween(incoming, revs)[0]
1681 1681 if opts['newest_first']:
1682 1682 o.reverse()
1683 1683 displayer = cmdutil.show_changeset(ui, other, opts)
1684 1684 for n in o:
1685 1685 parents = [p for p in other.changelog.parents(n) if p != nullid]
1686 1686 if opts['no_merges'] and len(parents) == 2:
1687 1687 continue
1688 1688 displayer.show(changenode=n)
1689 1689 finally:
1690 1690 if hasattr(other, 'close'):
1691 1691 other.close()
1692 1692 if cleanup:
1693 1693 os.unlink(cleanup)
1694 1694
1695 1695 def init(ui, dest=".", **opts):
1696 1696 """create a new repository in the given directory
1697 1697
1698 1698 Initialize a new repository in the given directory. If the given
1699 1699 directory does not exist, it is created.
1700 1700
1701 1701 If no directory is given, the current directory is used.
1702 1702
1703 1703 It is possible to specify an ssh:// URL as the destination.
1704 1704 Look at the help text for the pull command for important details
1705 1705 about ssh:// URLs.
1706 1706 """
1707 1707 cmdutil.setremoteconfig(ui, opts)
1708 1708 hg.repository(ui, dest, create=1)
1709 1709
1710 1710 def locate(ui, repo, *pats, **opts):
1711 1711 """locate files matching specific patterns
1712 1712
1713 1713 Print all files under Mercurial control whose names match the
1714 1714 given patterns.
1715 1715
1716 1716 This command searches the entire repository by default. To search
1717 1717 just the current directory and its subdirectories, use
1718 1718 "--include .".
1719 1719
1720 1720 If no patterns are given to match, this command prints all file
1721 1721 names.
1722 1722
1723 1723 If you want to feed the output of this command into the "xargs"
1724 1724 command, use the "-0" option to both this command and "xargs".
1725 1725 This will avoid the problem of "xargs" treating single filenames
1726 1726 that contain white space as multiple filenames.
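
    For example, something like "hg locate -0 | xargs -0 touch" updates
    the timestamps of all tracked files without being confused by names
    that contain whitespace.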
1727 1727 """
1728 1728 end = opts['print0'] and '\0' or '\n'
1729 1729 rev = opts['rev']
1730 1730 if rev:
1731 1731 node = repo.lookup(rev)
1732 1732 else:
1733 1733 node = None
1734 1734
1735 1735 ret = 1
1736 1736 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1737 1737 badmatch=util.always,
1738 1738 default='relglob'):
1739 1739 if src == 'b':
1740 1740 continue
1741 1741 if not node and abs not in repo.dirstate:
1742 1742 continue
1743 1743 if opts['fullpath']:
1744 1744 ui.write(os.path.join(repo.root, abs), end)
1745 1745 else:
1746 1746 ui.write(((pats and rel) or abs), end)
1747 1747 ret = 0
1748 1748
1749 1749 return ret
1750 1750
1751 1751 def log(ui, repo, *pats, **opts):
1752 1752 """show revision history of entire repository or files
1753 1753
1754 1754 Print the revision history of the specified files or the entire
1755 1755 project.
1756 1756
1757 1757 File history is shown without following rename or copy history of
1758 1758 files. Use -f/--follow with a file name to follow history across
1759 1759 renames and copies. --follow without a file name will only show
1760 1760 ancestors or descendants of the starting revision. --follow-first
1761 1761 only follows the first parent of merge revisions.
1762 1762
1763 1763 If no revision range is specified, the default is tip:0 unless
1764 1764 --follow is set, in which case the working directory parent is
1765 1765 used as the starting revision.
1766 1766
1767 1767 By default this command outputs: changeset id and hash, tags,
1768 1768 non-trivial parents, user, date and time, and a summary for each
1769 1769 commit. When the -v/--verbose switch is used, the list of changed
1770 1770 files and full commit message is shown.
1771 1771
1772 1772 NOTE: log -p may generate unexpected diff output for merge
1773 1773 changesets, as it will compare the merge changeset against its
1774 1774 first parent only. Also, the files: list will only reflect files
1775 1775 that are different from BOTH parents.
1776 1776
1777 1777 """
1778 1778
1779 1779 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1780 1780 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1781 1781
1782 1782 if opts['limit']:
1783 1783 try:
1784 1784 limit = int(opts['limit'])
1785 1785 except ValueError:
1786 1786 raise util.Abort(_('limit must be a positive integer'))
1787 1787 if limit <= 0: raise util.Abort(_('limit must be positive'))
1788 1788 else:
1789 1789 limit = sys.maxint
1790 1790 count = 0
1791 1791
1792 1792 if opts['copies'] and opts['rev']:
1793 1793 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1794 1794 else:
1795 1795 endrev = repo.changelog.count()
1796 1796 rcache = {}
1797 1797 ncache = {}
1798 1798 dcache = []
1799 1799 def getrenamed(fn, rev, man):
1800 1800 '''looks up all renames for a file (up to endrev) the first
1801 1801 time the file is given. It indexes on the changerev and only
1802 1802 parses the manifest if linkrev != changerev.
1803 1803 Returns rename info for fn at changerev rev.'''
1804 1804 if fn not in rcache:
1805 1805 rcache[fn] = {}
1806 1806 ncache[fn] = {}
1807 1807 fl = repo.file(fn)
1808 1808 for i in xrange(fl.count()):
1809 1809 node = fl.node(i)
1810 1810 lr = fl.linkrev(node)
1811 1811 renamed = fl.renamed(node)
1812 1812 rcache[fn][lr] = renamed
1813 1813 if renamed:
1814 1814 ncache[fn][node] = renamed
1815 1815 if lr >= endrev:
1816 1816 break
1817 1817 if rev in rcache[fn]:
1818 1818 return rcache[fn][rev]
1819 1819 mr = repo.manifest.rev(man)
1820 1820 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1821 1821 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1822 1822 if not dcache or dcache[0] != man:
1823 1823 dcache[:] = [man, repo.manifest.readdelta(man)]
1824 1824 if fn in dcache[1]:
1825 1825 return ncache[fn].get(dcache[1][fn])
1826 1826 return None
1827 1827
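    # Sketch of the cache layout built above: after the first call for a
    # hypothetical file "b", rcache might look like
    #   {'b': {10: False, 12: ('a', filenode)}}
    # i.e. one entry per linkrev holding either the rename source or a
    # false value, so later queries for "b" never reread the filelog.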
1828 1828 df = False
1829 1829 if opts["date"]:
1830 1830 df = util.matchdate(opts["date"])
1831 1831
1832 1832 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1833 1833 for st, rev, fns in changeiter:
1834 1834 if st == 'add':
1835 1835 changenode = repo.changelog.node(rev)
1836 1836 parents = [p for p in repo.changelog.parentrevs(rev)
1837 1837 if p != nullrev]
1838 1838 if opts['no_merges'] and len(parents) == 2:
1839 1839 continue
1840 1840 if opts['only_merges'] and len(parents) != 2:
1841 1841 continue
1842 1842
1843 1843 if df:
1844 1844 changes = get(rev)
1845 1845 if not df(changes[2][0]):
1846 1846 continue
1847 1847
1848 1848 if opts['keyword']:
1849 1849 changes = get(rev)
1850 1850 miss = 0
1851 1851 for k in [kw.lower() for kw in opts['keyword']]:
1852 1852 if not (k in changes[1].lower() or
1853 1853 k in changes[4].lower() or
1854 1854 k in " ".join(changes[3]).lower()):
1855 1855 miss = 1
1856 1856 break
1857 1857 if miss:
1858 1858 continue
1859 1859
1860 1860 copies = []
1861 1861 if opts.get('copies') and rev:
1862 1862 mf = get(rev)[0]
1863 1863 for fn in get(rev)[3]:
1864 1864 rename = getrenamed(fn, rev, mf)
1865 1865 if rename:
1866 1866 copies.append((fn, rename[0]))
1867 1867 displayer.show(rev, changenode, copies=copies)
1868 1868 elif st == 'iter':
1869 1869 if count == limit: break
1870 1870 if displayer.flush(rev):
1871 1871 count += 1
1872 1872
1873 1873 def manifest(ui, repo, rev=None):
1874 1874 """output the current or given revision of the project manifest
1875 1875
1876 1876 Print a list of version controlled files for the given revision.
1877 1877 If no revision is given, the parent of the working directory is used,
1878 1878 or tip if no revision is checked out.
1879 1879
1880 1880 The manifest is simply the list of files being version
1881 1881 controlled in the chosen revision.
1882 1882
1883 1883 With -v flag, print file permissions. With --debug flag, print
1884 1884 file revision hashes.
1885 1885 """
1886 1886
1887 1887 m = repo.changectx(rev).manifest()
1888 1888 files = m.keys()
1889 1889 files.sort()
1890 1890
1891 1891 for f in files:
1892 1892 if ui.debugflag:
1893 1893 ui.write("%40s " % hex(m[f]))
1894 1894 if ui.verbose:
1895 1895 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1896 1896 ui.write("%s\n" % f)
1897 1897
1898 1898 def merge(ui, repo, node=None, force=None, rev=None):
1899 1899 """merge working directory with another revision
1900 1900
1901 1901 Merge the contents of the current working directory and the
1902 1902 requested revision. Files that changed relative to either parent are
1903 1903 marked as changed for the next commit and a commit must be
1904 1904 performed before any further updates are allowed.
1905 1905
1906 1906 If no revision is specified, the working directory's parent is a
1907 1907 head revision, and the repository contains exactly one other head,
1908 1908 that other head is merged by default. Otherwise, an explicit
1909 1909 revision to merge with must be provided.
1910 1910 """
1911 1911
1912 1912 if rev and node:
1913 1913 raise util.Abort(_("please specify just one revision"))
1914 1914
1915 1915 if not node:
1916 1916 node = rev
1917 1917
1918 1918 if not node:
1919 1919 heads = repo.heads()
1920 1920 if len(heads) > 2:
1921 1921 raise util.Abort(_('repo has %d heads - '
1922 1922 'please merge with an explicit rev') %
1923 1923 len(heads))
1924 1924 if len(heads) == 1:
1925 1925 raise util.Abort(_('there is nothing to merge - '
1926 1926 'use "hg update" instead'))
1927 1927 parent = repo.dirstate.parents()[0]
1928 1928 if parent not in heads:
1929 1929 raise util.Abort(_('working dir not at a head rev - '
1930 1930 'use "hg update" or merge with an explicit rev'))
1931 1931 node = parent == heads[0] and heads[-1] or heads[0]
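    # With exactly two heads this picks "the other one": if the working
    # directory parent is heads[0], heads[-1] is used; otherwise heads[0].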
1932 1932 return hg.merge(repo, node, force=force)
1933 1933
1934 1934 def outgoing(ui, repo, dest=None, **opts):
1935 1935 """show changesets not found in destination
1936 1936
1937 1937 Show changesets not found in the specified destination repository or
1938 1938 the default push location. These are the changesets that would be pushed
1939 1939 if a push was requested.
1940 1940
1941 1941 See pull for valid destination format details.
1942 1942 """
1943 1943 dest, revs = cmdutil.parseurl(
1944 1944 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1945 1945 cmdutil.setremoteconfig(ui, opts)
1946 1946 if revs:
1947 1947 revs = [repo.lookup(rev) for rev in revs]
1948 1948
1949 1949 other = hg.repository(ui, dest)
1950 1950 ui.status(_('comparing with %s\n') % dest)
1951 1951 o = repo.findoutgoing(other, force=opts['force'])
1952 1952 if not o:
1953 1953 ui.status(_("no changes found\n"))
1954 1954 return 1
1955 1955 o = repo.changelog.nodesbetween(o, revs)[0]
1956 1956 if opts['newest_first']:
1957 1957 o.reverse()
1958 1958 displayer = cmdutil.show_changeset(ui, repo, opts)
1959 1959 for n in o:
1960 1960 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1961 1961 if opts['no_merges'] and len(parents) == 2:
1962 1962 continue
1963 1963 displayer.show(changenode=n)
1964 1964
1965 1965 def parents(ui, repo, file_=None, **opts):
1966 1966 """show the parents of the working dir or revision
1967 1967
1968 1968 Print the working directory's parent revisions. If a
1969 1969 revision is given via --rev, the parent of that revision
1970 1970 will be printed. If a file argument is given, revision in
1971 1971 which the file was last changed (before the working directory
1972 1972 revision or the argument to --rev if given) is printed.
1973 1973 """
1974 1974 rev = opts.get('rev')
1975 1975 if file_:
1976 1976 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1977 1977 if anypats or len(files) != 1:
1978 1978 raise util.Abort(_('can only specify an explicit file name'))
1979 1979 ctx = repo.filectx(files[0], changeid=rev)
1980 1980 elif rev:
1981 1981 ctx = repo.changectx(rev)
1982 1982 else:
1983 1983 ctx = repo.workingctx()
1984 1984 p = [cp.node() for cp in ctx.parents()]
1985 1985
1986 1986 displayer = cmdutil.show_changeset(ui, repo, opts)
1987 1987 for n in p:
1988 1988 if n != nullid:
1989 1989 displayer.show(changenode=n)
1990 1990
1991 1991 def paths(ui, repo, search=None):
1992 1992 """show definition of symbolic path names
1993 1993
1994 1994 Show definition of symbolic path name NAME. If no name is given, show
1995 1995 definition of available names.
1996 1996
1997 1997 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1998 1998 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1999 1999 """
2000 2000 if search:
2001 2001 for name, path in ui.configitems("paths"):
2002 2002 if name == search:
2003 2003 ui.write("%s\n" % path)
2004 2004 return
2005 2005 ui.warn(_("not found!\n"))
2006 2006 return 1
2007 2007 else:
2008 2008 for name, path in ui.configitems("paths"):
2009 2009 ui.write("%s = %s\n" % (name, path))
2010 2010
2011 2011 def postincoming(ui, repo, modheads, optupdate):
2012 2012 if modheads == 0:
2013 2013 return
2014 2014 if optupdate:
2015 2015 if modheads == 1:
2016 2016 return hg.update(repo, None)
2017 2017 else:
2018 2018 ui.status(_("not updating, since new heads added\n"))
2019 2019 if modheads > 1:
2020 2020 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2021 2021 else:
2022 2022 ui.status(_("(run 'hg update' to get a working copy)\n"))
2023 2023
2024 2024 def pull(ui, repo, source="default", **opts):
2025 2025 """pull changes from the specified source
2026 2026
2027 2027 Pull changes from a remote repository to a local one.
2028 2028
2029 2029 This finds all changes from the repository at the specified path
2030 2030 or URL and adds them to the local repository. By default, this
2031 2031 does not update the copy of the project in the working directory.
2032 2032
2033 2033 Valid URLs are of the form:
2034 2034
2035 2035 local/filesystem/path (or file://local/filesystem/path)
2036 2036 http://[user@]host[:port]/[path]
2037 2037 https://[user@]host[:port]/[path]
2038 2038 ssh://[user@]host[:port]/[path]
2039 2039 static-http://host[:port]/[path]
2040 2040
2041 2041 Paths in the local filesystem can either point to Mercurial
2042 2042 repositories or to bundle files (as created by 'hg bundle' or
2043 2043 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2044 2044 allows access to a Mercurial repository where you simply use a web
2045 2045 server to publish the .hg directory as static content.
2046 2046
2047 2047 An optional identifier after # indicates a particular branch, tag,
2048 2048 or changeset to pull.
2049 2049
2050 2050 Some notes about using SSH with Mercurial:
2051 2051 - SSH requires an accessible shell account on the destination machine
2052 2052 and a copy of hg in the remote path or specified with remotecmd.
2053 2053 - path is relative to the remote user's home directory by default.
2054 2054 Use an extra slash at the start of a path to specify an absolute path:
2055 2055 ssh://example.com//tmp/repository
2056 2056 - Mercurial doesn't use its own compression via SSH; the right thing
2057 2057 to do is to configure it in your ~/.ssh/config, e.g.:
2058 2058 Host *.mylocalnetwork.example.com
2059 2059 Compression no
2060 2060 Host *
2061 2061 Compression yes
2062 2062 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2063 2063 with the --ssh command line option.
2064 2064 """
2065 2065 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2066 2066 cmdutil.setremoteconfig(ui, opts)
2067 2067
2068 2068 other = hg.repository(ui, source)
2069 2069 ui.status(_('pulling from %s\n') % (source))
2070 2070 if revs:
2071 2071 if 'lookup' in other.capabilities:
2072 2072 revs = [other.lookup(rev) for rev in revs]
2073 2073 else:
2074 2074 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2075 2075 raise util.Abort(error)
2076 2076
2077 2077 modheads = repo.pull(other, heads=revs, force=opts['force'])
2078 2078 return postincoming(ui, repo, modheads, opts['update'])
2079 2079
2080 2080 def push(ui, repo, dest=None, **opts):
2081 2081 """push changes to the specified destination
2082 2082
2083 2083 Push changes from the local repository to the given destination.
2084 2084
2085 2085 This is the symmetrical operation for pull. It helps to move
2086 2086 changes from the current repository to a different one. If the
2087 2087 destination is local this is identical to a pull in that directory
2088 2088 from the current one.
2089 2089
2090 2090 By default, push will refuse to run if it detects the result would
2091 2091 increase the number of remote heads. This generally indicates that
2092 2092 the client has forgotten to sync and merge before pushing.
2093 2093
2094 2094 Valid URLs are of the form:
2095 2095
2096 2096 local/filesystem/path (or file://local/filesystem/path)
2097 2097 ssh://[user@]host[:port]/[path]
2098 2098 http://[user@]host[:port]/[path]
2099 2099 https://[user@]host[:port]/[path]
2100 2100
2101 2101 An optional identifier after # indicates a particular branch, tag,
2102 2102 or changeset to push.
2103 2103
2104 2104 Look at the help text for the pull command for important details
2105 2105 about ssh:// URLs.
2106 2106
2107 2107 Pushing to http:// and https:// URLs is only possible if this
2108 2108 feature is explicitly enabled on the remote Mercurial server.
2109 2109 """
2110 2110 dest, revs = cmdutil.parseurl(
2111 2111 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2112 2112 cmdutil.setremoteconfig(ui, opts)
2113 2113
2114 2114 other = hg.repository(ui, dest)
2115 2115 ui.status('pushing to %s\n' % (dest))
2116 2116 if revs:
2117 2117 revs = [repo.lookup(rev) for rev in revs]
2118 2118 r = repo.push(other, opts['force'], revs=revs)
2119 2119 return r == 0
2120 2120
2121 2121 def rawcommit(ui, repo, *pats, **opts):
2122 2122 """raw commit interface (DEPRECATED)
2123 2123
2124 2124 (DEPRECATED)
2125 2125 Lowlevel commit, for use in helper scripts.
2126 2126
2127 2127 This command is not intended to be used by normal users, as it is
2128 2128 primarily useful for importing from other SCMs.
2129 2129
2130 2130 This command is now deprecated and will be removed in a future
2131 2131 release, please use debugsetparents and commit instead.
2132 2132 """
2133 2133
2134 2134 ui.warn(_("(the rawcommit command is deprecated)\n"))
2135 2135
2136 2136 message = cmdutil.logmessage(opts)
2137 2137
2138 2138 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2139 2139 if opts['files']:
2140 2140 files += open(opts['files']).read().splitlines()
2141 2141
2142 2142 parents = [repo.lookup(p) for p in opts['parent']]
2143 2143
2144 2144 try:
2145 2145 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2146 2146 except ValueError, inst:
2147 2147 raise util.Abort(str(inst))
2148 2148
2149 2149 def recover(ui, repo):
2150 2150 """roll back an interrupted transaction
2151 2151
2152 2152 Recover from an interrupted commit or pull.
2153 2153
2154 2154 This command tries to fix the repository status after an interrupted
2155 2155 operation. It should only be necessary when Mercurial suggests it.
2156 2156 """
2157 2157 if repo.recover():
2158 2158 return hg.verify(repo)
2159 2159 return 1
2160 2160
2161 2161 def remove(ui, repo, *pats, **opts):
2162 2162 """remove the specified files on the next commit
2163 2163
2164 2164 Schedule the indicated files for removal from the repository.
2165 2165
2166 2166 This only removes files from the current branch, not from the
2167 2167 entire project history. If the files still exist in the working
2168 2168 directory, they will be deleted from it. If invoked with --after,
2169 2169 files are marked as removed, but not actually unlinked unless --force
2170 2170 is also given. Without exact file names, --after will only mark
2171 2171 files as removed if they are no longer in the working directory.
2172 2172
2173 2173 This command schedules the files to be removed at the next commit.
2174 2174 To undo a remove before that, see hg revert.
2175 2175
2176 2176 Modified files and added files are not removed by default. To
2177 2177 remove them, use the -f/--force option.
2178 2178 """
2179 2179 if not opts['after'] and not pats:
2180 2180 raise util.Abort(_('no files specified'))
2181 2181 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2182 2182 exact = dict.fromkeys(files)
2183 2183 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2184 2184 modified, added, removed, deleted, unknown = mardu
2185 2185 remove, forget = [], []
2186 2186 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2187 2187 reason = None
2188 2188 if abs in modified and not opts['force']:
2189 2189 reason = _('is modified (use -f to force removal)')
2190 2190 elif abs in added:
2191 2191 if opts['force']:
2192 2192 forget.append(abs)
2193 2193 continue
2194 2194 reason = _('has been marked for add (use -f to force removal)')
2195 2195 elif abs not in repo.dirstate:
2196 2196 reason = _('is not managed')
2197 2197 elif opts['after'] and not exact and abs not in deleted:
2198 2198 continue
2199 2199 elif abs in removed:
2200 2200 continue
2201 2201 if reason:
2202 2202 if exact:
2203 2203 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2204 2204 else:
2205 2205 if ui.verbose or not exact:
2206 2206 ui.status(_('removing %s\n') % rel)
2207 2207 remove.append(abs)
2208 2208 repo.forget(forget)
2209 2209 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2210 2210
2211 2211 def rename(ui, repo, *pats, **opts):
2212 2212 """rename files; equivalent of copy + remove
2213 2213
2214 2214 Mark dest as copies of sources; mark sources for deletion. If
2215 2215 dest is a directory, copies are put in that directory. If dest is
2216 2216 a file, there can only be one source.
2217 2217
2218 2218 By default, this command copies the contents of files as they
2219 2219 stand in the working directory. If invoked with --after, the
2220 2220 operation is recorded, but no copying is performed.
2221 2221
2222 2222 This command takes effect in the next commit. To undo a rename
2223 2223 before that, see hg revert.
2224 2224 """
2225 2225 wlock = repo.wlock(False)
2226 2226 try:
2227 2227 errs, copied = docopy(ui, repo, pats, opts)
2228 2228 names = []
2229 2229 for abs, rel, exact in copied:
2230 2230 if ui.verbose or not exact:
2231 2231 ui.status(_('removing %s\n') % rel)
2232 2232 names.append(abs)
2233 2233 if not opts.get('dry_run'):
2234 2234 repo.remove(names, True)
2235 2235 return errs
2236 2236 finally:
2237 2237 del wlock
2238 2238
2239 2239 def revert(ui, repo, *pats, **opts):
2240 2240 """revert files or dirs to their states as of some revision
2241 2241
2242 2242 With no revision specified, revert the named files or directories
2243 2243 to the contents they had in the parent of the working directory.
2244 2244 This restores the contents of the affected files to an unmodified
2245 2245 state and unschedules adds, removes, copies, and renames. If the
2246 2246 working directory has two parents, you must explicitly specify the
2247 2247 revision to revert to.
2248 2248
2249 2249 Modified files are saved with a .orig suffix before reverting.
2250 2250 To disable these backups, use --no-backup.
2251 2251
2252 2252 Using the -r option, revert the given files or directories to their
2253 2253 contents as of a specific revision. This can be helpful to "roll
2254 2254 back" some or all of a change that should not have been committed.
2255 2255
2256 2256 Revert modifies the working directory. It does not commit any
2257 2257 changes, or change the parent of the working directory. If you
2258 2258 revert to a revision other than the parent of the working
2259 2259 directory, the reverted files will thus appear modified
2260 2260 afterwards.
2261 2261
2262 2262 If a file has been deleted, it is restored. If the executable
2263 2263 mode of a file was changed, it is reset.
2264 2264
2265 2265 If names are given, all files matching the names are reverted.
2266 2266
2267 2267 If no arguments are given, no files are reverted.
2268 2268 """
2269 2269
2270 2270 if opts["date"]:
2271 2271 if opts["rev"]:
2272 2272 raise util.Abort(_("you can't specify a revision and a date"))
2273 2273 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2274 2274
2275 2275 if not pats and not opts['all']:
2276 2276 raise util.Abort(_('no files or directories specified; '
2277 2277 'use --all to revert the whole repo'))
2278 2278
2279 2279 parent, p2 = repo.dirstate.parents()
2280 2280 if not opts['rev'] and p2 != nullid:
2281 2281 raise util.Abort(_('uncommitted merge - please provide a '
2282 2282 'specific revision'))
2283 2283 ctx = repo.changectx(opts['rev'])
2284 2284 node = ctx.node()
2285 2285 mf = ctx.manifest()
2286 2286 if node == parent:
2287 2287 pmf = mf
2288 2288 else:
2289 2289 pmf = None
2290 2290
2291 2291 # need all matching names in dirstate and manifest of target rev,
2292 2292 # so have to walk both. do not print errors if files exist in one
2293 2293 # but not other.
2294 2294
2295 2295 names = {}
2296 2296 target_only = {}
2297 2297
2298 2298 wlock = repo.wlock()
2299 2299 try:
2300 2300 # walk dirstate.
2301 2301 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2302 2302 badmatch=mf.has_key):
2303 2303 names[abs] = (rel, exact)
2304 2304 if src == 'b':
2305 2305 target_only[abs] = True
2306 2306
2307 2307 # walk target manifest.
2308 2308
2309 2309 def badmatch(path):
2310 2310 if path in names:
2311 2311 return True
2312 2312 path_ = path + '/'
2313 2313 for f in names:
2314 2314 if f.startswith(path_):
2315 2315 return True
2316 2316 return False
2317 2317
2318 2318 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2319 2319 badmatch=badmatch):
2320 2320 if abs in names or src == 'b':
2321 2321 continue
2322 2322 names[abs] = (rel, exact)
2323 2323 target_only[abs] = True
2324 2324
2325 2325 changes = repo.status(match=names.has_key)[:5]
2326 2326 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2327 2327
2328 2328 revert = ([], _('reverting %s\n'))
2329 2329 add = ([], _('adding %s\n'))
2330 2330 remove = ([], _('removing %s\n'))
2331 2331 forget = ([], _('forgetting %s\n'))
2332 2332 undelete = ([], _('undeleting %s\n'))
2333 2333 update = {}
2334 2334
2335 2335 disptable = (
2336 2336 # dispatch table:
2337 2337 # file state
2338 2338 # action if in target manifest
2339 2339 # action if not in target manifest
2340 2340 # make backup if in target manifest
2341 2341 # make backup if not in target manifest
2342 2342 (modified, revert, remove, True, True),
2343 2343 (added, revert, forget, True, False),
2344 2344 (removed, undelete, None, False, False),
2345 2345 (deleted, revert, remove, False, False),
2346 2346 (unknown, add, None, True, False),
2347 2347 (target_only, add, None, False, False),
2348 2348 )
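        # Reading the first row above: a file with local modifications
        # that still exists in the target manifest is reverted (with a
        # backup), while one missing from the target manifest is removed
        # (also with a backup).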
2349 2349
2350 2350 entries = names.items()
2351 2351 entries.sort()
2352 2352
2353 2353 for abs, (rel, exact) in entries:
2354 2354 mfentry = mf.get(abs)
2355 2355 target = repo.wjoin(abs)
2356 2356 def handle(xlist, dobackup):
2357 2357 xlist[0].append(abs)
2358 2358 update[abs] = 1
2359 2359 if dobackup and not opts['no_backup'] and util.lexists(target):
2360 2360 bakname = "%s.orig" % rel
2361 2361 ui.note(_('saving current version of %s as %s\n') %
2362 2362 (rel, bakname))
2363 2363 if not opts.get('dry_run'):
2364 2364 util.copyfile(target, bakname)
2365 2365 if ui.verbose or not exact:
2366 2366 ui.status(xlist[1] % rel)
2367 2367 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2368 2368 if abs not in table: continue
2369 2369 # file has changed in dirstate
2370 2370 if mfentry:
2371 2371 handle(hitlist, backuphit)
2372 2372 elif misslist is not None:
2373 2373 handle(misslist, backupmiss)
2374 2374 else:
2375 2375 if exact: ui.warn(_('file not managed: %s\n') % rel)
2376 2376 break
2377 2377 else:
2378 2378 # file has not changed in dirstate
2379 2379 if node == parent:
2380 2380 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2381 2381 continue
2382 2382 if pmf is None:
2383 2383 # only need parent manifest in this unlikely case,
2384 2384 # so do not read by default
2385 2385 pmf = repo.changectx(parent).manifest()
2386 2386 if abs in pmf:
2387 2387 if mfentry:
2388 2388 # if version of file is same in parent and target
2389 2389 # manifests, do nothing
2390 2390 if pmf[abs] != mfentry:
2391 2391 handle(revert, False)
2392 2392 else:
2393 2393 handle(remove, False)
2394 2394
2395 2395 if not opts.get('dry_run'):
2396 2396 for f in forget[0]:
2397 2397 repo.dirstate.forget(f)
2398 2398 r = hg.revert(repo, node, update.has_key)
2399 2399 for f in add[0]:
2400 2400 repo.dirstate.add(f)
2401 2401 for f in undelete[0]:
2402 2402 repo.dirstate.normal(f)
2403 2403 for f in remove[0]:
2404 2404 repo.dirstate.remove(f)
2405 2405 return r
2406 2406 finally:
2407 2407 del wlock
2408 2408
2409 2409 def rollback(ui, repo):
2410 2410 """roll back the last transaction in this repository
2411 2411
2412 2412 Roll back the last transaction in this repository, restoring the
2413 2413 project to its state prior to the transaction.
2414 2414
2415 2415 Transactions are used to encapsulate the effects of all commands
2416 2416 that create new changesets or propagate existing changesets into a
2417 2417 repository. For example, the following commands are transactional,
2418 2418 and their effects can be rolled back:
2419 2419
2420 2420 commit
2421 2421 import
2422 2422 pull
2423 2423 push (with this repository as destination)
2424 2424 unbundle
2425 2425
2426 2426 This command should be used with care. There is only one level of
2427 2427 rollback, and there is no way to undo a rollback. It will also
2428 2428 restore the dirstate at the time of the last transaction, which
2429 2429 may lose subsequent dirstate changes.
2430 2430
2431 2431 This command is not intended for use on public repositories. Once
2432 2432 changes are visible for pull by other users, rolling a transaction
2433 2433 back locally is ineffective (someone else may already have pulled
2434 2434 the changes). Furthermore, a race is possible with readers of the
2435 2435 repository; for example an in-progress pull from the repository
2436 2436 may fail if a rollback is performed.
2437 2437 """
2438 2438 repo.rollback()
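# Editorial sketch (not part of the original source): driving the command
# above from Python instead of the command line, assuming a repository in
# the current directory.
#
#     from mercurial import ui as uimod, hg
#     u = uimod.ui()
#     repo = hg.repository(u, '.')
#     rollback(u, repo)   # replays the undo journal, restores undo.dirstate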
2439 2439
2440 2440 def root(ui, repo):
2441 2441 """print the root (top) of the current working dir
2442 2442
2443 2443 Print the root directory of the current repository.
2444 2444 """
2445 2445 ui.write(repo.root + "\n")
2446 2446
2447 2447 def serve(ui, repo, **opts):
2448 2448 """export the repository via HTTP
2449 2449
2450 2450 Start a local HTTP repository browser and pull server.
2451 2451
2452 2452 By default, the server logs accesses to stdout and errors to
2453 2453 stderr. Use the "-A" and "-E" options to log to files.
2454 2454 """
2455 2455
2456 2456 if opts["stdio"]:
2457 2457 if repo is None:
2458 2458 raise hg.RepoError(_("There is no Mercurial repository here"
2459 2459 " (.hg not found)"))
2460 2460 s = sshserver.sshserver(ui, repo)
2461 2461 s.serve_forever()
2462 2462
2463 2463 parentui = ui.parentui or ui
2464 2464 optlist = ("name templates style address port ipv6"
2465 2465 " accesslog errorlog webdir_conf certificate")
2466 2466 for o in optlist.split():
2467 2467 if opts[o]:
2468 2468 parentui.setconfig("web", o, str(opts[o]))
2469 2469 if repo.ui != parentui:
2470 2470 repo.ui.setconfig("web", o, str(opts[o]))
2471 2471
2472 2472 if repo is None and not ui.config("web", "webdir_conf"):
2473 2473 raise hg.RepoError(_("There is no Mercurial repository here"
2474 2474 " (.hg not found)"))
2475 2475
2476 2476 class service:
2477 2477 def init(self):
2478 2478 util.set_signal_handler()
2479 2479 try:
2480 2480 self.httpd = hgweb.server.create_server(parentui, repo)
2481 2481 except socket.error, inst:
2482 2482 raise util.Abort(_('cannot start server: ') + inst.args[1])
2483 2483
2484 2484 if not ui.verbose: return
2485 2485
2486 2486 if self.httpd.port != 80:
2487 2487 ui.status(_('listening at http://%s:%d/\n') %
2488 2488 (self.httpd.addr, self.httpd.port))
2489 2489 else:
2490 2490 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2491 2491
2492 2492 def run(self):
2493 2493 self.httpd.serve_forever()
2494 2494
2495 2495 service = service()
2496 2496
2497 2497 cmdutil.service(opts, initfn=service.init, runfn=service.run)
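# Editorial sketch: typical invocations of the command defined above; the
# option names come from the "serve" entry in the command table below
# (paths.conf is a placeholder file name).
#
#     hg serve -p 8080 -a 127.0.0.1 -A access.log -E error.log
#     hg serve --webdir-conf paths.conf     # serve more than one repository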
2498 2498
2499 2499 def status(ui, repo, *pats, **opts):
2500 2500 """show changed files in the working directory
2501 2501
2502 2502 Show status of files in the repository. If names are given, only
2503 2503 files that match are shown. Files that are clean or ignored are
2504 2504 not listed unless -c (clean), -i (ignored) or -A is given.
2505 2505
2506 2506 NOTE: status may appear to disagree with diff if permissions have
2507 2507 changed or a merge has occurred. The standard diff format does not
2508 2508 report permission changes and diff only reports changes relative
2509 2509 to one merge parent.
2510 2510
2511 2511 If one revision is given, it is used as the base revision.
2512 2512 If two revisions are given, the difference between them is shown.
2513 2513
2514 2514 The codes used to show the status of files are:
2515 2515 M = modified
2516 2516 A = added
2517 2517 R = removed
2518 2518 C = clean
2519 2519 ! = deleted, but still tracked
2520 2520 ? = not tracked
2521 2521 I = ignored (not shown by default)
2522 2522 = the previously added file was copied from here
2523 2523 """
2524 2524
2525 2525 all = opts['all']
2526 2526 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2527 2527
2528 2528 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2529 2529 cwd = (pats and repo.getcwd()) or ''
2530 2530 modified, added, removed, deleted, unknown, ignored, clean = [
2531 2531 n for n in repo.status(node1=node1, node2=node2, files=files,
2532 2532 match=matchfn,
2533 2533 list_ignored=all or opts['ignored'],
2534 2534 list_clean=all or opts['clean'])]
2535 2535
2536 2536 changetypes = (('modified', 'M', modified),
2537 2537 ('added', 'A', added),
2538 2538 ('removed', 'R', removed),
2539 2539 ('deleted', '!', deleted),
2540 2540 ('unknown', '?', unknown),
2541 2541 ('ignored', 'I', ignored))
2542 2542
2543 2543 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2544 2544
2545 2545 end = opts['print0'] and '\0' or '\n'
2546 2546
2547 2547 for opt, char, changes in ([ct for ct in explicit_changetypes
2548 2548 if all or opts[ct[0]]]
2549 2549 or changetypes):
2550 2550 if opts['no_status']:
2551 2551 format = "%%s%s" % end
2552 2552 else:
2553 2553 format = "%s %%s%s" % (char, end)
2554 2554
2555 2555 for f in changes:
2556 2556 ui.write(format % repo.pathto(f, cwd))
2557 2557 if ((all or opts.get('copies')) and not opts.get('no_status')):
2558 2558 copied = repo.dirstate.copied(f)
2559 2559 if copied:
2560 2560 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
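# Editorial sketch: the -n/--no-status and -0/--print0 options above combine
# well with xargs, since each name is terminated by NUL:
#
#     hg status -un0 | xargs -0 rm     # remove every untracked file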
2561 2561
2562 2562 def tag(ui, repo, name, rev_=None, **opts):
2563 2563 """add a tag for the current or given revision
2564 2564
2565 2565 Name a particular revision using <name>.
2566 2566
2567 2567 Tags are used to name particular revisions of the repository and are
2568 2568 very useful to compare different revisions, to go back to significant
2569 2569 earlier versions or to mark branch points as releases, etc.
2570 2570
2571 2571 If no revision is given, the parent of the working directory is used,
2572 2572 or tip if no revision is checked out.
2573 2573
2574 2574 To facilitate version control, distribution, and merging of tags,
2575 2575 they are stored as a file named ".hgtags" which is managed
2576 2576 similarly to other project files and can be hand-edited if
2577 2577 necessary. The file '.hg/localtags' is used for local tags (not
2578 2578 shared among repositories).
2579 2579 """
2580 2580 if name in ['tip', '.', 'null']:
2581 2581 raise util.Abort(_("the name '%s' is reserved") % name)
2582 2582 if rev_ is not None:
2583 2583 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2584 2584 "please use 'hg tag [-r REV] NAME' instead\n"))
2585 2585 if opts['rev']:
2586 2586 raise util.Abort(_("use only one form to specify the revision"))
2587 2587 if opts['rev'] and opts['remove']:
2588 2588 raise util.Abort(_("--rev and --remove are incompatible"))
2589 2589 if opts['rev']:
2590 2590 rev_ = opts['rev']
2591 2591 message = opts['message']
2592 2592 if opts['remove']:
2593 2593 if not name in repo.tags():
2594 2594 raise util.Abort(_('tag %s does not exist') % name)
2595 2595 rev_ = nullid
2596 2596 if not message:
2597 2597 message = _('Removed tag %s') % name
2598 2598 elif name in repo.tags() and not opts['force']:
2599 2599 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2600 2600 % name)
2601 2601 if not rev_ and repo.dirstate.parents()[1] != nullid:
2602 2602 raise util.Abort(_('uncommitted merge - please provide a '
2603 2603 'specific revision'))
2604 2604 r = repo.changectx(rev_).node()
2605 2605
2606 2606 if not message:
2607 2607 message = _('Added tag %s for changeset %s') % (name, short(r))
2608 2608
2609 2609 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
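# Editorial sketch: the forms handled by the command above (3a2f7c1 is a
# placeholder revision).
#
#     hg tag -r 3a2f7c1 1.0      # tag the given revision
#     hg tag --remove 1.0        # record removal; rev_ becomes nullid above
#     hg tag -l wip              # local tag, kept in .hg/localtags only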
2610 2610
2611 2611 def tags(ui, repo):
2612 2612 """list repository tags
2613 2613
2614 2614 List the repository tags.
2615 2615
2616 2616 This lists both regular and local tags.
2617 2617 """
2618 2618
2619 2619 l = repo.tagslist()
2620 2620 l.reverse()
2621 2621 hexfunc = ui.debugflag and hex or short
2622 2622 for t, n in l:
2623 2623 try:
2624 2624 hn = hexfunc(n)
2625 2625 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2626 2626 except revlog.LookupError:
2627 2627 r = " ?:%s" % hn
2628 2628 if ui.quiet:
2629 2629 ui.write("%s\n" % t)
2630 2630 else:
2631 2631 spaces = " " * (30 - util.locallen(t))
2632 2632 ui.write("%s%s %s\n" % (t, spaces, r))
2633 2633
2634 2634 def tip(ui, repo, **opts):
2635 2635 """show the tip revision
2636 2636
2637 2637 Show the tip revision.
2638 2638 """
2639 2639 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2640 2640
2641 2641 def unbundle(ui, repo, fname1, *fnames, **opts):
2642 2642 """apply one or more changegroup files
2643 2643
2644 2644 Apply one or more compressed changegroup files generated by the
2645 2645 bundle command.
2646 2646 """
2647 2647 fnames = (fname1,) + fnames
2648 2648 for fname in fnames:
2649 2649 if os.path.exists(fname):
2650 2650 f = open(fname, "rb")
2651 2651 else:
2652 2652 f = urllib.urlopen(fname)
2653 2653 gen = changegroup.readbundle(f, fname)
2654 2654 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2655 2655
2656 2656 return postincoming(ui, repo, modheads, opts['update'])
2657 2657
2658 2658 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2659 2659 """update working directory
2660 2660
2661 2661 Update the working directory to the specified revision, or the
2662 2662 tip of the current branch if none is specified.
2663 2663
2664 2664 If there are no outstanding changes in the working directory and
2665 2665 there is a linear relationship between the current version and the
2666 2666 requested version, the result is the requested version.
2667 2667
2668 2668 To merge the working directory with another revision, use the
2669 2669 merge command.
2670 2670
2671 2671 By default, update will refuse to run if doing so would require
2672 2672 discarding local changes.
2673 2673 """
2674 2674 if rev and node:
2675 2675 raise util.Abort(_("please specify just one revision"))
2676 2676
2677 2677 if not rev:
2678 2678 rev = node
2679 2679
2680 2680 if date:
2681 2681 if rev:
2682 2682 raise util.Abort(_("you can't specify a revision and a date"))
2683 2683 rev = cmdutil.finddate(ui, repo, date)
2684 2684
2685 2685 if clean:
2686 2686 return hg.clean(repo, rev)
2687 2687 else:
2688 2688 return hg.update(repo, rev)
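# Editorial sketch: the three paths through update() above.
#
#     hg update                  # to branch tip; refuses to discard local changes
#     hg update -C 1.0           # clean update via hg.clean(), discarding changes
#     hg update -d 2007-05-01    # tipmost revision matching the date (cmdutil.finddate)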
2689 2689
2690 2690 def verify(ui, repo):
2691 2691 """verify the integrity of the repository
2692 2692
2693 2693 Verify the integrity of the current repository.
2694 2694
2695 2695 This will perform an extensive check of the repository's
2696 2696 integrity, validating the hashes and checksums of each entry in
2697 2697 the changelog, manifest, and tracked files, as well as the
2698 2698 integrity of their crosslinks and indices.
2699 2699 """
2700 2700 return hg.verify(repo)
2701 2701
2702 2702 def version_(ui):
2703 2703 """output version and copyright information"""
2704 2704 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2705 2705 % version.get_version())
2706 2706 ui.status(_(
2707 2707 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2708 2708 "This is free software; see the source for copying conditions. "
2709 2709 "There is NO\nwarranty; "
2710 2710 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2711 2711 ))
2712 2712
2713 2713 # Command options and aliases are listed here, alphabetically
2714 2714
2715 2715 globalopts = [
2716 2716 ('R', 'repository', '',
2717 2717 _('repository root directory or symbolic path name')),
2718 2718 ('', 'cwd', '', _('change working directory')),
2719 2719 ('y', 'noninteractive', None,
2720 2720 _('do not prompt, assume \'yes\' for any required answers')),
2721 2721 ('q', 'quiet', None, _('suppress output')),
2722 2722 ('v', 'verbose', None, _('enable additional output')),
2723 2723 ('', 'config', [], _('set/override config option')),
2724 2724 ('', 'debug', None, _('enable debugging output')),
2725 2725 ('', 'debugger', None, _('start debugger')),
2726 2726 ('', 'encoding', util._encoding, _('set the charset encoding')),
2727 2727 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2728 2728 ('', 'lsprof', None, _('print improved command execution profile')),
2729 2729 ('', 'traceback', None, _('print traceback on exception')),
2730 2730 ('', 'time', None, _('time how long the command takes')),
2731 2731 ('', 'profile', None, _('print command execution profile')),
2732 2732 ('', 'version', None, _('output version information and exit')),
2733 2733 ('h', 'help', None, _('display help and exit')),
2734 2734 ]
2735 2735
2736 2736 dryrunopts = [('n', 'dry-run', None,
2737 2737 _('do not perform actions, just print output'))]
2738 2738
2739 2739 remoteopts = [
2740 2740 ('e', 'ssh', '', _('specify ssh command to use')),
2741 2741 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2742 2742 ]
2743 2743
2744 2744 walkopts = [
2745 2745 ('I', 'include', [], _('include names matching the given patterns')),
2746 2746 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2747 2747 ]
2748 2748
2749 2749 commitopts = [
2750 2750 ('m', 'message', '', _('use <text> as commit message')),
2751 2751 ('l', 'logfile', '', _('read commit message from <file>')),
2752 2752 ]
2753 2753
2754 2754 table = {
2755 2755 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2756 2756 "addremove":
2757 2757 (addremove,
2758 2758 [('s', 'similarity', '',
2759 2759 _('guess renamed files by similarity (0<=s<=100)')),
2760 2760 ] + walkopts + dryrunopts,
2761 2761 _('hg addremove [OPTION]... [FILE]...')),
2762 2762 "^annotate":
2763 2763 (annotate,
2764 2764 [('r', 'rev', '', _('annotate the specified revision')),
2765 2765 ('f', 'follow', None, _('follow file copies and renames')),
2766 2766 ('a', 'text', None, _('treat all files as text')),
2767 2767 ('u', 'user', None, _('list the author')),
2768 2768 ('d', 'date', None, _('list the date')),
2769 2769 ('n', 'number', None, _('list the revision number (default)')),
2770 2770 ('c', 'changeset', None, _('list the changeset')),
2771 2771 ('l', 'line-number', None,
2772 2772 _('show line number at the first appearance'))
2773 2773 ] + walkopts,
2774 2774 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2775 2775 "archive":
2776 2776 (archive,
2777 2777 [('', 'no-decode', None, _('do not pass files through decoders')),
2778 2778 ('p', 'prefix', '', _('directory prefix for files in archive')),
2779 2779 ('r', 'rev', '', _('revision to distribute')),
2780 2780 ('t', 'type', '', _('type of distribution to create')),
2781 2781 ] + walkopts,
2782 2782 _('hg archive [OPTION]... DEST')),
2783 2783 "backout":
2784 2784 (backout,
2785 2785 [('', 'merge', None,
2786 2786 _('merge with old dirstate parent after backout')),
2787 2787 ('d', 'date', '', _('record datecode as commit date')),
2788 2788 ('', 'parent', '', _('parent to choose when backing out merge')),
2789 2789 ('u', 'user', '', _('record user as committer')),
2790 2790 ('r', 'rev', '', _('revision to backout')),
2791 2791 ] + walkopts + commitopts,
2792 2792 _('hg backout [OPTION]... [-r] REV')),
2793 2793 "branch":
2794 2794 (branch,
2795 2795 [('f', 'force', None,
2796 2796 _('set branch name even if it shadows an existing branch'))],
2797 2797 _('hg branch [NAME]')),
2798 2798 "branches":
2799 2799 (branches,
2800 2800 [('a', 'active', False,
2801 2801 _('show only branches that have unmerged heads'))],
2802 2802 _('hg branches [-a]')),
2803 2803 "bundle":
2804 2804 (bundle,
2805 2805 [('f', 'force', None,
2806 2806 _('run even when remote repository is unrelated')),
2807 2807 ('r', 'rev', [],
2808 2808 _('a changeset you would like to bundle')),
2809 2809 ('', 'base', [],
2810 2810 _('a base changeset to specify instead of a destination')),
2811 2811 ] + remoteopts,
2812 2812 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2813 2813 "cat":
2814 2814 (cat,
2815 2815 [('o', 'output', '', _('print output to file with formatted name')),
2816 2816 ('r', 'rev', '', _('print the given revision')),
2817 2817 ] + walkopts,
2818 2818 _('hg cat [OPTION]... FILE...')),
2819 2819 "^clone":
2820 2820 (clone,
2821 2821 [('U', 'noupdate', None, _('do not update the new working directory')),
2822 2822 ('r', 'rev', [],
2823 2823 _('a changeset you would like to have after cloning')),
2824 2824 ('', 'pull', None, _('use pull protocol to copy metadata')),
2825 2825 ('', 'uncompressed', None,
2826 2826 _('use uncompressed transfer (fast over LAN)')),
2827 2827 ] + remoteopts,
2828 2828 _('hg clone [OPTION]... SOURCE [DEST]')),
2829 2829 "^commit|ci":
2830 2830 (commit,
2831 2831 [('A', 'addremove', None,
2832 2832 _('mark new/missing files as added/removed before committing')),
2833 2833 ('d', 'date', '', _('record datecode as commit date')),
2834 2834 ('u', 'user', '', _('record user as committer')),
2835 2835 ] + walkopts + commitopts,
2836 2836 _('hg commit [OPTION]... [FILE]...')),
2837 2837 "copy|cp":
2838 2838 (copy,
2839 2839 [('A', 'after', None, _('record a copy that has already occurred')),
2840 2840 ('f', 'force', None,
2841 2841 _('forcibly copy over an existing managed file')),
2842 2842 ] + walkopts + dryrunopts,
2843 2843 _('hg copy [OPTION]... [SOURCE]... DEST')),
2844 2844 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2845 2845 "debugcomplete":
2846 2846 (debugcomplete,
2847 2847 [('o', 'options', None, _('show the command options'))],
2848 2848 _('debugcomplete [-o] CMD')),
2849 2849 "debuginstall": (debuginstall, [], _('debuginstall')),
2850 2850 "debugrebuildstate":
2851 2851 (debugrebuildstate,
2852 2852 [('r', 'rev', '', _('revision to rebuild to'))],
2853 2853 _('debugrebuildstate [-r REV] [REV]')),
2854 2854 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2855 2855 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2856 2856 "debugstate": (debugstate, [], _('debugstate')),
2857 2857 "debugdate":
2858 2858 (debugdate,
2859 2859 [('e', 'extended', None, _('try extended date formats'))],
2860 2860 _('debugdate [-e] DATE [RANGE]')),
2861 2861 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2862 2862 "debugindex": (debugindex, [], _('debugindex FILE')),
2863 2863 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2864 2864 "debugrename":
2865 2865 (debugrename,
2866 2866 [('r', 'rev', '', _('revision to debug'))],
2867 2867 _('debugrename [-r REV] FILE')),
2868 2868 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2869 2869 "^diff":
2870 2870 (diff,
2871 2871 [('r', 'rev', [], _('revision')),
2872 2872 ('a', 'text', None, _('treat all files as text')),
2873 2873 ('p', 'show-function', None,
2874 2874 _('show which function each change is in')),
2875 2875 ('g', 'git', None, _('use git extended diff format')),
2876 2876 ('', 'nodates', None, _("don't include dates in diff headers")),
2877 2877 ('w', 'ignore-all-space', None,
2878 2878 _('ignore white space when comparing lines')),
2879 2879 ('b', 'ignore-space-change', None,
2880 2880 _('ignore changes in the amount of white space')),
2881 2881 ('B', 'ignore-blank-lines', None,
2882 2882 _('ignore changes whose lines are all blank')),
2883 2883 ] + walkopts,
2884 2884 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2885 2885 "^export":
2886 2886 (export,
2887 2887 [('o', 'output', '', _('print output to file with formatted name')),
2888 2888 ('a', 'text', None, _('treat all files as text')),
2889 2889 ('g', 'git', None, _('use git extended diff format')),
2890 2890 ('', 'nodates', None, _("don't include dates in diff headers")),
2891 2891 ('', 'switch-parent', None, _('diff against the second parent'))],
2892 2892 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2893 2893 "grep":
2894 2894 (grep,
2895 2895 [('0', 'print0', None, _('end fields with NUL')),
2896 2896 ('', 'all', None, _('print all revisions that match')),
2897 2897 ('f', 'follow', None,
2898 2898 _('follow changeset history, or file history across copies and renames')),
2899 2899 ('i', 'ignore-case', None, _('ignore case when matching')),
2900 2900 ('l', 'files-with-matches', None,
2901 2901 _('print only filenames and revs that match')),
2902 2902 ('n', 'line-number', None, _('print matching line numbers')),
2903 2903 ('r', 'rev', [], _('search in given revision range')),
2904 2904 ('u', 'user', None, _('print user who committed change')),
2905 2905 ] + walkopts,
2906 2906 _('hg grep [OPTION]... PATTERN [FILE]...')),
2907 2907 "heads":
2908 2908 (heads,
2909 2909 [('', 'style', '', _('display using template map file')),
2910 2910 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2911 2911 ('', 'template', '', _('display with template'))],
2912 2912 _('hg heads [-r REV] [REV]...')),
2913 2913 "help": (help_, [], _('hg help [COMMAND]')),
2914 2914 "identify|id":
2915 2915 (identify,
2916 2916 [('r', 'rev', '', _('identify the specified rev')),
2917 2917 ('n', 'num', None, _('show local revision number')),
2918 2918 ('i', 'id', None, _('show global revision id')),
2919 2919 ('b', 'branch', None, _('show branch')),
2920 2920 ('t', 'tags', None, _('show tags'))],
2921 2921 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2922 2922 "import|patch":
2923 2923 (import_,
2924 2924 [('p', 'strip', 1,
2925 2925 _('directory strip option for patch. This has the same\n'
2926 2926 'meaning as the corresponding patch option')),
2927 2927 ('b', 'base', '', _('base path')),
2928 2928 ('f', 'force', None,
2929 2929 _('skip check for outstanding uncommitted changes')),
2930 2930 ('', 'exact', None,
2931 2931 _('apply patch to the nodes from which it was generated')),
2932 2932 ('', 'import-branch', None,
2933 2933 _('use any branch information in patch (implied by --exact)'))] + commitopts,
2934 2934 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2935 2935 "incoming|in": (incoming,
2936 2936 [('M', 'no-merges', None, _('do not show merges')),
2937 2937 ('f', 'force', None,
2938 2938 _('run even when remote repository is unrelated')),
2939 2939 ('', 'style', '', _('display using template map file')),
2940 2940 ('n', 'newest-first', None, _('show newest record first')),
2941 2941 ('', 'bundle', '', _('file to store the bundles into')),
2942 2942 ('p', 'patch', None, _('show patch')),
2943 2943 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2944 2944 ('', 'template', '', _('display with template')),
2945 2945 ] + remoteopts,
2946 2946 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2947 2947 ' [--bundle FILENAME] [SOURCE]')),
2948 2948 "^init":
2949 2949 (init,
2950 2950 remoteopts,
2951 2951 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2952 2952 "locate":
2953 2953 (locate,
2954 2954 [('r', 'rev', '', _('search the repository as it stood at rev')),
2955 2955 ('0', 'print0', None,
2956 2956 _('end filenames with NUL, for use with xargs')),
2957 2957 ('f', 'fullpath', None,
2958 2958 _('print complete paths from the filesystem root')),
2959 2959 ] + walkopts,
2960 2960 _('hg locate [OPTION]... [PATTERN]...')),
2961 2961 "^log|history":
2962 2962 (log,
2963 2963 [('f', 'follow', None,
2964 2964 _('follow changeset history, or file history across copies and renames')),
2965 2965 ('', 'follow-first', None,
2966 2966 _('only follow the first parent of merge changesets')),
2967 2967 ('d', 'date', '', _('show revs matching date spec')),
2968 2968 ('C', 'copies', None, _('show copied files')),
2969 2969 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2970 2970 ('l', 'limit', '', _('limit number of changes displayed')),
2971 2971 ('r', 'rev', [], _('show the specified revision or range')),
2972 2972 ('', 'removed', None, _('include revs where files were removed')),
2973 2973 ('M', 'no-merges', None, _('do not show merges')),
2974 2974 ('', 'style', '', _('display using template map file')),
2975 2975 ('m', 'only-merges', None, _('show only merges')),
2976 2976 ('p', 'patch', None, _('show patch')),
2977 2977 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2978 2978 ('', 'template', '', _('display with template')),
2979 2979 ] + walkopts,
2980 2980 _('hg log [OPTION]... [FILE]')),
2981 2981 "manifest": (manifest, [], _('hg manifest [REV]')),
2982 2982 "^merge":
2983 2983 (merge,
2984 2984 [('f', 'force', None, _('force a merge with outstanding changes')),
2985 2985 ('r', 'rev', '', _('revision to merge')),
2986 2986 ],
2987 2987 _('hg merge [-f] [[-r] REV]')),
2988 2988 "outgoing|out": (outgoing,
2989 2989 [('M', 'no-merges', None, _('do not show merges')),
2990 2990 ('f', 'force', None,
2991 2991 _('run even when remote repository is unrelated')),
2992 2992 ('p', 'patch', None, _('show patch')),
2993 2993 ('', 'style', '', _('display using template map file')),
2994 2994 ('r', 'rev', [], _('a specific revision you would like to push')),
2995 2995 ('n', 'newest-first', None, _('show newest record first')),
2996 2996 ('', 'template', '', _('display with template')),
2997 2997 ] + remoteopts,
2998 2998 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2999 2999 "^parents":
3000 3000 (parents,
3001 3001 [('r', 'rev', '', _('show parents from the specified rev')),
3002 3002 ('', 'style', '', _('display using template map file')),
3003 3003 ('', 'template', '', _('display with template'))],
3004 3004 _('hg parents [-r REV] [FILE]')),
3005 3005 "paths": (paths, [], _('hg paths [NAME]')),
3006 3006 "^pull":
3007 3007 (pull,
3008 3008 [('u', 'update', None,
3009 3009 _('update to new tip if changesets were pulled')),
3010 3010 ('f', 'force', None,
3011 3011 _('run even when remote repository is unrelated')),
3012 3012 ('r', 'rev', [],
3013 3013 _('a specific revision up to which you would like to pull')),
3014 3014 ] + remoteopts,
3015 3015 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3016 3016 "^push":
3017 3017 (push,
3018 3018 [('f', 'force', None, _('force push')),
3019 3019 ('r', 'rev', [], _('a specific revision you would like to push')),
3020 3020 ] + remoteopts,
3021 3021 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3022 3022 "debugrawcommit|rawcommit":
3023 3023 (rawcommit,
3024 3024 [('p', 'parent', [], _('parent')),
3025 3025 ('d', 'date', '', _('date code')),
3026 3026 ('u', 'user', '', _('user')),
3027 3027 ('F', 'files', '', _('file list'))
3028 3028 ] + commitopts,
3029 3029 _('hg debugrawcommit [OPTION]... [FILE]...')),
3030 3030 "recover": (recover, [], _('hg recover')),
3031 3031 "^remove|rm":
3032 3032 (remove,
3033 3033 [('A', 'after', None, _('record remove that has already occurred')),
3034 3034 ('f', 'force', None, _('remove file even if modified')),
3035 3035 ] + walkopts,
3036 3036 _('hg remove [OPTION]... FILE...')),
3037 3037 "rename|mv":
3038 3038 (rename,
3039 3039 [('A', 'after', None, _('record a rename that has already occurred')),
3040 3040 ('f', 'force', None,
3041 3041 _('forcibly copy over an existing managed file')),
3042 3042 ] + walkopts + dryrunopts,
3043 3043 _('hg rename [OPTION]... SOURCE... DEST')),
3044 3044 "^revert":
3045 3045 (revert,
3046 3046 [('a', 'all', None, _('revert all changes when no arguments given')),
3047 3047 ('d', 'date', '', _('tipmost revision matching date')),
3048 3048 ('r', 'rev', '', _('revision to revert to')),
3049 3049 ('', 'no-backup', None, _('do not save backup copies of files')),
3050 3050 ] + walkopts + dryrunopts,
3051 3051 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3052 3052 "rollback": (rollback, [], _('hg rollback')),
3053 3053 "root": (root, [], _('hg root')),
3054 3054 "showconfig|debugconfig":
3055 3055 (showconfig,
3056 3056 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3057 3057 _('showconfig [-u] [NAME]...')),
3058 3058 "^serve":
3059 3059 (serve,
3060 3060 [('A', 'accesslog', '', _('name of access log file to write to')),
3061 3061 ('d', 'daemon', None, _('run server in background')),
3062 3062 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3063 3063 ('E', 'errorlog', '', _('name of error log file to write to')),
3064 3064 ('p', 'port', 0, _('port to use (default: 8000)')),
3065 3065 ('a', 'address', '', _('address to use')),
3066 3066 ('n', 'name', '',
3067 3067 _('name to show in web pages (default: working dir)')),
3068 3068 ('', 'webdir-conf', '', _('name of the webdir config file'
3069 3069 ' (serve more than one repo)')),
3070 3070 ('', 'pid-file', '', _('name of file to write process ID to')),
3071 3071 ('', 'stdio', None, _('for remote clients')),
3072 3072 ('t', 'templates', '', _('web templates to use')),
3073 3073 ('', 'style', '', _('template style to use')),
3074 3074 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3075 3075 ('', 'certificate', '', _('SSL certificate file'))],
3076 3076 _('hg serve [OPTION]...')),
3077 3077 "^status|st":
3078 3078 (status,
3079 3079 [('A', 'all', None, _('show status of all files')),
3080 3080 ('m', 'modified', None, _('show only modified files')),
3081 3081 ('a', 'added', None, _('show only added files')),
3082 3082 ('r', 'removed', None, _('show only removed files')),
3083 3083 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3084 3084 ('c', 'clean', None, _('show only files without changes')),
3085 3085 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3086 3086 ('i', 'ignored', None, _('show only ignored files')),
3087 3087 ('n', 'no-status', None, _('hide status prefix')),
3088 3088 ('C', 'copies', None, _('show source of copied files')),
3089 3089 ('0', 'print0', None,
3090 3090 _('end filenames with NUL, for use with xargs')),
3091 3091 ('', 'rev', [], _('show difference from revision')),
3092 3092 ] + walkopts,
3093 3093 _('hg status [OPTION]... [FILE]...')),
3094 3094 "tag":
3095 3095 (tag,
3096 3096 [('f', 'force', None, _('replace existing tag')),
3097 3097 ('l', 'local', None, _('make the tag local')),
3098 3098 ('m', 'message', '', _('message for tag commit log entry')),
3099 3099 ('d', 'date', '', _('record datecode as commit date')),
3100 3100 ('u', 'user', '', _('record user as committer')),
3101 3101 ('r', 'rev', '', _('revision to tag')),
3102 3102 ('', 'remove', None, _('remove a tag'))],
3103 3103 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3104 3104 "tags": (tags, [], _('hg tags')),
3105 3105 "tip":
3106 3106 (tip,
3107 3107 [('', 'style', '', _('display using template map file')),
3108 3108 ('p', 'patch', None, _('show patch')),
3109 3109 ('', 'template', '', _('display with template'))],
3110 3110 _('hg tip [-p]')),
3111 3111 "unbundle":
3112 3112 (unbundle,
3113 3113 [('u', 'update', None,
3114 3114 _('update to new tip if changesets were unbundled'))],
3115 3115 _('hg unbundle [-u] FILE...')),
3116 3116 "^update|up|checkout|co":
3117 3117 (update,
3118 3118 [('C', 'clean', None, _('overwrite locally modified files')),
3119 3119 ('d', 'date', '', _('tipmost revision matching date')),
3120 3120 ('r', 'rev', '', _('revision'))],
3121 3121 _('hg update [-C] [-d DATE] [[-r] REV]')),
3122 3122 "verify": (verify, [], _('hg verify')),
3123 3123 "version": (version_, [], _('hg version')),
3124 3124 }
3125 3125
3126 3126 extensions.commandtable = table
3127 3127
3128 3128 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3129 3129 " debugindex debugindexdot debugdate debuginstall")
3130 3130 optionalrepo = ("paths serve showconfig")
3131 3131
3132 3132 def dispatch(args, argv0=None):
3133 3133 try:
3134 3134 u = ui.ui(traceback='--traceback' in args)
3135 3135 except util.Abort, inst:
3136 3136 sys.stderr.write(_("abort: %s\n") % inst)
3137 3137 return -1
3138 3138 return cmdutil.runcatch(u, args, argv0=argv0)
3139 3139
3140 3140 def run():
3141 3141 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
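# Editorial sketch: dispatch() can also be driven programmatically from a
# wrapper script; the argument list is the command line without argv[0].
#
#     import sys
#     sys.exit(dispatch(['status', '--all']))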
@@ -1,1986 +1,1986 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util, extensions, hook
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __init__(self, parentui, path=None, create=0):
20 20 repo.repository.__init__(self)
21 21 self.path = path
22 22 self.root = os.path.realpath(path)
23 23 self.path = os.path.join(self.root, ".hg")
24 24 self.origroot = path
25 25 self.opener = util.opener(self.path)
26 26 self.wopener = util.opener(self.root)
27 27
28 28 if not os.path.isdir(self.path):
29 29 if create:
30 30 if not os.path.exists(path):
31 31 os.mkdir(path)
32 32 os.mkdir(self.path)
33 33 requirements = ["revlogv1"]
34 34 if parentui.configbool('format', 'usestore', True):
35 35 os.mkdir(os.path.join(self.path, "store"))
36 36 requirements.append("store")
37 37 # create an invalid changelog
38 38 self.opener("00changelog.i", "a").write(
39 39 '\0\0\0\2' # represents revlogv2
40 40 ' dummy changelog to prevent using the old repo layout'
41 41 )
42 42 reqfile = self.opener("requires", "w")
43 43 for r in requirements:
44 44 reqfile.write("%s\n" % r)
45 45 reqfile.close()
46 46 else:
47 47 raise repo.RepoError(_("repository %s not found") % path)
48 48 elif create:
49 49 raise repo.RepoError(_("repository %s already exists") % path)
50 50 else:
51 51 # find requirements
52 52 try:
53 53 requirements = self.opener("requires").read().splitlines()
54 54 except IOError, inst:
55 55 if inst.errno != errno.ENOENT:
56 56 raise
57 57 requirements = []
58 58 # check them
59 59 for r in requirements:
60 60 if r not in self.supported:
61 61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62 62
63 63 # setup store
64 64 if "store" in requirements:
65 65 self.encodefn = util.encodefilename
66 66 self.decodefn = util.decodefilename
67 67 self.spath = os.path.join(self.path, "store")
68 68 else:
69 69 self.encodefn = lambda x: x
70 70 self.decodefn = lambda x: x
71 71 self.spath = self.path
72 72 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
73 73
74 74 self.ui = ui.ui(parentui=parentui)
75 75 try:
76 76 self.ui.readconfig(self.join("hgrc"), self.root)
77 77 extensions.loadall(self.ui)
78 78 except IOError:
79 79 pass
80 80
81 81 self.tagscache = None
82 82 self.branchcache = None
83 83 self.nodetagscache = None
84 84 self.filterpats = {}
85 85 self._transref = self._lockref = self._wlockref = None
86 86
87 87 def __getattr__(self, name):
88 88 if name == 'changelog':
89 89 self.changelog = changelog.changelog(self.sopener)
90 90 self.sopener.defversion = self.changelog.version
91 91 return self.changelog
92 92 if name == 'manifest':
93 93 self.changelog
94 94 self.manifest = manifest.manifest(self.sopener)
95 95 return self.manifest
96 96 if name == 'dirstate':
97 97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 98 return self.dirstate
99 99 else:
100 100 raise AttributeError, name
101 101
102 102 def url(self):
103 103 return 'file:' + self.root
104 104
105 105 def hook(self, name, throw=False, **args):
106 106 return hook.hook(self.ui, self, name, throw, **args)
107 107
108 108 tag_disallowed = ':\r\n'
109 109
110 110 def _tag(self, name, node, message, local, user, date, parent=None,
111 111 extra={}):
112 112 use_dirstate = parent is None
113 113
114 114 for c in self.tag_disallowed:
115 115 if c in name:
116 116 raise util.Abort(_('%r cannot be used in a tag name') % c)
117 117
118 118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
119 119
120 120 def writetag(fp, name, munge, prevtags):
121 121 if prevtags and prevtags[-1] != '\n':
122 122 fp.write('\n')
123 123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
124 124 fp.close()
125 125 self.hook('tag', node=hex(node), tag=name, local=local)
126 126
127 127 prevtags = ''
128 128 if local:
129 129 try:
130 130 fp = self.opener('localtags', 'r+')
131 131 except IOError, err:
132 132 fp = self.opener('localtags', 'a')
133 133 else:
134 134 prevtags = fp.read()
135 135
136 136 # local tags are stored in the current charset
137 137 writetag(fp, name, None, prevtags)
138 138 return
139 139
140 140 if use_dirstate:
141 141 try:
142 142 fp = self.wfile('.hgtags', 'rb+')
143 143 except IOError, err:
144 144 fp = self.wfile('.hgtags', 'ab')
145 145 else:
146 146 prevtags = fp.read()
147 147 else:
148 148 try:
149 149 prevtags = self.filectx('.hgtags', parent).data()
150 150 except revlog.LookupError:
151 151 pass
152 152 fp = self.wfile('.hgtags', 'wb')
153 153
154 154 # committed tags are stored in UTF-8
155 155 writetag(fp, name, util.fromlocal, prevtags)
156 156
157 157 if use_dirstate and '.hgtags' not in self.dirstate:
158 158 self.add(['.hgtags'])
159 159
160 160 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
161 161 extra=extra)
162 162
163 163 self.hook('tag', node=hex(node), tag=name, local=local)
164 164
165 165 return tagnode
166 166
167 167 def tag(self, name, node, message, local, user, date):
168 168 '''tag a revision with a symbolic name.
169 169
170 170 if local is True, the tag is stored in a per-repository file.
171 171 otherwise, it is stored in the .hgtags file, and a new
172 172 changeset is committed with the change.
173 173
174 174 keyword arguments:
175 175
176 176 local: whether to store tag in non-version-controlled file
177 177 (default False)
178 178
179 179 message: commit message to use if committing
180 180
181 181 user: name of user to use if committing
182 182
183 183 date: date tuple to use if committing'''
184 184
185 185 for x in self.status()[:5]:
186 186 if '.hgtags' in x:
187 187 raise util.Abort(_('working copy of .hgtags is changed '
188 188 '(please commit .hgtags manually)'))
189 189
190 190
191 191 self._tag(name, node, message, local, user, date)
192 192
193 193 def tags(self):
194 194 '''return a mapping of tag to node'''
195 195 if self.tagscache:
196 196 return self.tagscache
197 197
198 198 globaltags = {}
199 199
200 200 def readtags(lines, fn):
201 201 filetags = {}
202 202 count = 0
203 203
204 204 def warn(msg):
205 205 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
206 206
207 207 for l in lines:
208 208 count += 1
209 209 if not l:
210 210 continue
211 211 s = l.split(" ", 1)
212 212 if len(s) != 2:
213 213 warn(_("cannot parse entry"))
214 214 continue
215 215 node, key = s
216 216 key = util.tolocal(key.strip()) # stored in UTF-8
217 217 try:
218 218 bin_n = bin(node)
219 219 except TypeError:
220 220 warn(_("node '%s' is not well formed") % node)
221 221 continue
222 222 if bin_n not in self.changelog.nodemap:
223 223 warn(_("tag '%s' refers to unknown node") % key)
224 224 continue
225 225
226 226 h = []
227 227 if key in filetags:
228 228 n, h = filetags[key]
229 229 h.append(n)
230 230 filetags[key] = (bin_n, h)
231 231
232 232 for k, nh in filetags.items():
233 233 if k not in globaltags:
234 234 globaltags[k] = nh
235 235 continue
236 236 # we prefer the global tag if:
237 237 # it supersedes us OR
238 238 # mutual supersedes and it has a higher rank
239 239 # otherwise we win because we're tip-most
240 240 an, ah = nh
241 241 bn, bh = globaltags[k]
242 242 if (bn != an and an in bh and
243 243 (bn not in ah or len(bh) > len(ah))):
244 244 an = bn
245 245 ah.extend([n for n in bh if n not in ah])
246 246 globaltags[k] = an, ah
247 247
248 248 # read the tags file from each head, ending with the tip
249 249 f = None
250 250 for rev, node, fnode in self._hgtagsnodes():
251 251 f = (f and f.filectx(fnode) or
252 252 self.filectx('.hgtags', fileid=fnode))
253 253 readtags(f.data().splitlines(), f)
254 254
255 255 try:
256 256 data = util.fromlocal(self.opener("localtags").read())
257 257 # localtags are stored in the local character set
258 258 # while the internal tag table is stored in UTF-8
259 259 readtags(data.splitlines(), "localtags")
260 260 except IOError:
261 261 pass
262 262
263 263 self.tagscache = {}
264 264 for k,nh in globaltags.items():
265 265 n = nh[0]
266 266 if n != nullid:
267 267 self.tagscache[k] = n
268 268 self.tagscache['tip'] = self.changelog.tip()
269 269
270 270 return self.tagscache
271 271
272 272 def _hgtagsnodes(self):
273 273 heads = self.heads()
274 274 heads.reverse()
275 275 last = {}
276 276 ret = []
277 277 for node in heads:
278 278 c = self.changectx(node)
279 279 rev = c.rev()
280 280 try:
281 281 fnode = c.filenode('.hgtags')
282 282 except revlog.LookupError:
283 283 continue
284 284 ret.append((rev, node, fnode))
285 285 if fnode in last:
286 286 ret[last[fnode]] = None
287 287 last[fnode] = len(ret) - 1
288 288 return [item for item in ret if item]
289 289
290 290 def tagslist(self):
291 291 '''return a list of tags ordered by revision'''
292 292 l = []
293 293 for t, n in self.tags().items():
294 294 try:
295 295 r = self.changelog.rev(n)
296 296 except:
297 297 r = -2 # sort to the beginning of the list if unknown
298 298 l.append((r, t, n))
299 299 l.sort()
300 300 return [(t, n) for r, t, n in l]
301 301
302 302 def nodetags(self, node):
303 303 '''return the tags associated with a node'''
304 304 if not self.nodetagscache:
305 305 self.nodetagscache = {}
306 306 for t, n in self.tags().items():
307 307 self.nodetagscache.setdefault(n, []).append(t)
308 308 return self.nodetagscache.get(node, [])
309 309
310 310 def _branchtags(self):
311 311 partial, last, lrev = self._readbranchcache()
312 312
313 313 tiprev = self.changelog.count() - 1
314 314 if lrev != tiprev:
315 315 self._updatebranchcache(partial, lrev+1, tiprev+1)
316 316 self._writebranchcache(partial, self.changelog.tip(), tiprev)
317 317
318 318 return partial
319 319
320 320 def branchtags(self):
321 321 if self.branchcache is not None:
322 322 return self.branchcache
323 323
324 324 self.branchcache = {} # avoid recursion in changectx
325 325 partial = self._branchtags()
326 326
327 327 # the branch cache is stored on disk as UTF-8, but in the local
328 328 # charset internally
329 329 for k, v in partial.items():
330 330 self.branchcache[util.tolocal(k)] = v
331 331 return self.branchcache
332 332
333 333 def _readbranchcache(self):
334 334 partial = {}
335 335 try:
336 336 f = self.opener("branch.cache")
337 337 lines = f.read().split('\n')
338 338 f.close()
339 339 except (IOError, OSError):
340 340 return {}, nullid, nullrev
341 341
342 342 try:
343 343 last, lrev = lines.pop(0).split(" ", 1)
344 344 last, lrev = bin(last), int(lrev)
345 345 if not (lrev < self.changelog.count() and
346 346 self.changelog.node(lrev) == last): # sanity check
347 347 # invalidate the cache
348 348 raise ValueError('Invalid branch cache: unknown tip')
349 349 for l in lines:
350 350 if not l: continue
351 351 node, label = l.split(" ", 1)
352 352 partial[label.strip()] = bin(node)
353 353 except (KeyboardInterrupt, util.SignalInterrupt):
354 354 raise
355 355 except Exception, inst:
356 356 if self.ui.debugflag:
357 357 self.ui.warn(str(inst), '\n')
358 358 partial, last, lrev = {}, nullid, nullrev
359 359 return partial, last, lrev
360 360
361 361 def _writebranchcache(self, branches, tip, tiprev):
362 362 try:
363 363 f = self.opener("branch.cache", "w", atomictemp=True)
364 364 f.write("%s %s\n" % (hex(tip), tiprev))
365 365 for label, node in branches.iteritems():
366 366 f.write("%s %s\n" % (hex(node), label))
367 367 f.rename()
368 368 except (IOError, OSError):
369 369 pass
370 370
371 371 def _updatebranchcache(self, partial, start, end):
372 372 for r in xrange(start, end):
373 373 c = self.changectx(r)
374 374 b = c.branch()
375 375 partial[b] = c.node()
376 376
377 377 def lookup(self, key):
378 378 if key == '.':
379 379 key, second = self.dirstate.parents()
380 380 if key == nullid:
381 381 raise repo.RepoError(_("no revision checked out"))
382 382 if second != nullid:
383 383 self.ui.warn(_("warning: working directory has two parents, "
384 384 "tag '.' uses the first\n"))
385 385 elif key == 'null':
386 386 return nullid
387 387 n = self.changelog._match(key)
388 388 if n:
389 389 return n
390 390 if key in self.tags():
391 391 return self.tags()[key]
392 392 if key in self.branchtags():
393 393 return self.branchtags()[key]
394 394 n = self.changelog._partialmatch(key)
395 395 if n:
396 396 return n
397 397 try:
398 398 if len(key) == 20:
399 399 key = hex(key)
400 400 except:
401 401 pass
402 402 raise repo.RepoError(_("unknown revision '%s'") % key)
403 403
404 404 def dev(self):
405 405 return os.lstat(self.path).st_dev
406 406
407 407 def local(self):
408 408 return True
409 409
410 410 def join(self, f):
411 411 return os.path.join(self.path, f)
412 412
413 413 def sjoin(self, f):
414 414 f = self.encodefn(f)
415 415 return os.path.join(self.spath, f)
416 416
417 417 def wjoin(self, f):
418 418 return os.path.join(self.root, f)
419 419
420 420 def file(self, f):
421 421 if f[0] == '/':
422 422 f = f[1:]
423 423 return filelog.filelog(self.sopener, f)
424 424
425 425 def changectx(self, changeid=None):
426 426 return context.changectx(self, changeid)
427 427
428 428 def workingctx(self):
429 429 return context.workingctx(self)
430 430
431 431 def parents(self, changeid=None):
432 432 '''
433 433 get list of changectxs for parents of changeid or working directory
434 434 '''
435 435 if changeid is None:
436 436 pl = self.dirstate.parents()
437 437 else:
438 438 n = self.changelog.lookup(changeid)
439 439 pl = self.changelog.parents(n)
440 440 if pl[1] == nullid:
441 441 return [self.changectx(pl[0])]
442 442 return [self.changectx(pl[0]), self.changectx(pl[1])]
443 443
444 444 def filectx(self, path, changeid=None, fileid=None):
445 445 """changeid can be a changeset revision, node, or tag.
446 446 fileid can be a file revision or node."""
447 447 return context.filectx(self, path, changeid, fileid)
448 448
449 449 def getcwd(self):
450 450 return self.dirstate.getcwd()
451 451
452 452 def pathto(self, f, cwd=None):
453 453 return self.dirstate.pathto(f, cwd)
454 454
455 455 def wfile(self, f, mode='r'):
456 456 return self.wopener(f, mode)
457 457
458 458 def _link(self, f):
459 459 return os.path.islink(self.wjoin(f))
460 460
461 461 def _filter(self, filter, filename, data):
462 462 if filter not in self.filterpats:
463 463 l = []
464 464 for pat, cmd in self.ui.configitems(filter):
465 465 mf = util.matcher(self.root, "", [pat], [], [])[1]
466 466 l.append((mf, cmd))
467 467 self.filterpats[filter] = l
468 468
469 469 for mf, cmd in self.filterpats[filter]:
470 470 if mf(filename):
471 471 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
472 472 data = util.filter(data, cmd)
473 473 break
474 474
475 475 return data
476 476
477 477 def wread(self, filename):
478 478 if self._link(filename):
479 479 data = os.readlink(self.wjoin(filename))
480 480 else:
481 481 data = self.wopener(filename, 'r').read()
482 482 return self._filter("encode", filename, data)
483 483
484 484 def wwrite(self, filename, data, flags):
485 485 data = self._filter("decode", filename, data)
486 486 if "l" in flags:
487 487 self.wopener.symlink(data, filename)
488 488 else:
489 489 try:
490 490 if self._link(filename):
491 491 os.unlink(self.wjoin(filename))
492 492 except OSError:
493 493 pass
494 494 self.wopener(filename, 'w').write(data)
495 495 util.set_exec(self.wjoin(filename), "x" in flags)
496 496
497 497 def wwritedata(self, filename, data):
498 498 return self._filter("decode", filename, data)
499 499
500 500 def transaction(self):
501 501 if self._transref and self._transref():
502 502 return self._transref().nest()
503 503
504 504 # save dirstate for rollback
505 505 try:
506 506 ds = self.opener("dirstate").read()
507 507 except IOError:
508 508 ds = ""
509 509 self.opener("journal.dirstate", "w").write(ds)
510 510
511 511 renames = [(self.sjoin("journal"), self.sjoin("undo")),
512 512 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
513 513 tr = transaction.transaction(self.ui.warn, self.sopener,
514 514 self.sjoin("journal"),
515 515 aftertrans(renames))
516 516 self._transref = weakref.ref(tr)
517 517 return tr
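# Editorial note: a sketch of the calling convention used elsewhere in this
# file (see commit() below) -- take wlock then lock, open the transaction,
# and delete the transaction before the locks in the finally clause, which
# is what this changeset arranges:
#
#     wlock = lock = tr = None
#     try:
#         wlock = self.wlock()
#         lock = self.lock()
#         tr = self.transaction()
#         # ... write revlog data through tr ...
#         tr.close()
#     finally:
#         del tr, lock, wlock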
518 518
519 519 def recover(self):
520 520 l = self.lock()
521 521 try:
522 522 if os.path.exists(self.sjoin("journal")):
523 523 self.ui.status(_("rolling back interrupted transaction\n"))
524 524 transaction.rollback(self.sopener, self.sjoin("journal"))
525 525 self.invalidate()
526 526 return True
527 527 else:
528 528 self.ui.warn(_("no interrupted transaction available\n"))
529 529 return False
530 530 finally:
531 531 del l
532 532
533 533 def rollback(self):
534 534 wlock = lock = None
535 535 try:
536 536 wlock = self.wlock()
537 537 lock = self.lock()
538 538 if os.path.exists(self.sjoin("undo")):
539 539 self.ui.status(_("rolling back last transaction\n"))
540 540 transaction.rollback(self.sopener, self.sjoin("undo"))
541 541 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
542 542 self.invalidate()
543 543 self.dirstate.invalidate()
544 544 else:
545 545 self.ui.warn(_("no rollback information available\n"))
546 546 finally:
547 del wlock, lock
547 del lock, wlock
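# Editorial note: the reordering above releases the store lock before the
# working-directory lock (whose releasefn writes the dirstate); commit()
# below gets the matching change, with the transaction deleted first.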
548 548
549 549 def invalidate(self):
550 550 for a in "changelog manifest".split():
551 551 if hasattr(self, a):
552 552 self.__delattr__(a)
553 553 self.tagscache = None
554 554 self.nodetagscache = None
555 555
556 556 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
557 557 try:
558 558 l = lock.lock(lockname, 0, releasefn, desc=desc)
559 559 except lock.LockHeld, inst:
560 560 if not wait:
561 561 raise
562 562 self.ui.warn(_("waiting for lock on %s held by %r\n") %
563 563 (desc, inst.locker))
564 564 # default to 600 seconds timeout
565 565 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
566 566 releasefn, desc=desc)
567 567 if acquirefn:
568 568 acquirefn()
569 569 return l
570 570
571 571 def lock(self, wait=True):
572 572 if self._lockref and self._lockref():
573 573 return self._lockref()
574 574
575 575 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
576 576 _('repository %s') % self.origroot)
577 577 self._lockref = weakref.ref(l)
578 578 return l
579 579
580 580 def wlock(self, wait=True):
581 581 if self._wlockref and self._wlockref():
582 582 return self._wlockref()
583 583
584 584 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
585 585 self.dirstate.invalidate, _('working directory of %s') %
586 586 self.origroot)
587 587 self._wlockref = weakref.ref(l)
588 588 return l
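# Editorial note: callers in this file acquire the locks in a fixed order --
# wlock() first, then lock() (see rollback() and commit()) -- and the del
# statements in their finally clauses release them in the reverse order.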
589 589
590 590 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
591 591 """
592 592 commit an individual file as part of a larger transaction
593 593 """
594 594
595 595 t = self.wread(fn)
596 596 fl = self.file(fn)
597 597 fp1 = manifest1.get(fn, nullid)
598 598 fp2 = manifest2.get(fn, nullid)
599 599
600 600 meta = {}
601 601 cp = self.dirstate.copied(fn)
602 602 if cp:
603 603 # Mark the new revision of this file as a copy of another
604 604 # file. This copy data will effectively act as a parent
605 605 # of this new revision. If this is a merge, the first
606 606 # parent will be the nullid (meaning "look up the copy data")
607 607 # and the second one will be the other parent. For example:
608 608 #
609 609 # 0 --- 1 --- 3 rev1 changes file foo
610 610 # \ / rev2 renames foo to bar and changes it
611 611 # \- 2 -/ rev3 should have bar with all changes and
612 612 # should record that bar descends from
613 613 # bar in rev2 and foo in rev1
614 614 #
615 615 # this allows this merge to succeed:
616 616 #
617 617 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
618 618 # \ / merging rev3 and rev4 should use bar@rev2
619 619 # \- 2 --- 4 as the merge base
620 620 #
621 621 meta["copy"] = cp
622 622 if not manifest2: # not a branch merge
623 623 meta["copyrev"] = hex(manifest1.get(cp, nullid))
624 624 fp2 = nullid
625 625 elif fp2 != nullid: # copied on remote side
626 626 meta["copyrev"] = hex(manifest1.get(cp, nullid))
627 627 elif fp1 != nullid: # copied on local side, reversed
628 628 meta["copyrev"] = hex(manifest2.get(cp))
629 629 fp2 = fp1
630 630 else: # directory rename
631 631 meta["copyrev"] = hex(manifest1.get(cp, nullid))
632 632 self.ui.debug(_(" %s: copy %s:%s\n") %
633 633 (fn, cp, meta["copyrev"]))
634 634 fp1 = nullid
635 635 elif fp2 != nullid:
636 636 # is one parent an ancestor of the other?
637 637 fpa = fl.ancestor(fp1, fp2)
638 638 if fpa == fp1:
639 639 fp1, fp2 = fp2, nullid
640 640 elif fpa == fp2:
641 641 fp2 = nullid
642 642
643 643 # is the file unmodified from the parent? report existing entry
644 644 if fp2 == nullid and not fl.cmp(fp1, t):
645 645 return fp1
646 646
647 647 changelist.append(fn)
648 648 return fl.add(t, meta, tr, linkrev, fp1, fp2)
649 649
650 650 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
651 651 if p1 is None:
652 652 p1, p2 = self.dirstate.parents()
653 653 return self.commit(files=files, text=text, user=user, date=date,
654 654 p1=p1, p2=p2, extra=extra, empty_ok=True)
655 655
656 656 def commit(self, files=None, text="", user=None, date=None,
657 657 match=util.always, force=False, force_editor=False,
658 658 p1=None, p2=None, extra={}, empty_ok=False):
659 659 wlock = lock = tr = None
660 660 try:
661 661 commit = []
662 662 remove = []
663 663 changed = []
664 664 use_dirstate = (p1 is None) # not rawcommit
665 665 extra = extra.copy()
666 666
667 667 if use_dirstate:
668 668 if files:
669 669 for f in files:
670 670 s = self.dirstate[f]
671 671 if s in 'nma':
672 672 commit.append(f)
673 673 elif s == 'r':
674 674 remove.append(f)
675 675 else:
676 676 self.ui.warn(_("%s not tracked!\n") % f)
677 677 else:
678 678 changes = self.status(match=match)[:5]
679 679 modified, added, removed, deleted, unknown = changes
680 680 commit = modified + added
681 681 remove = removed
682 682 else:
683 683 commit = files
684 684
685 685 if use_dirstate:
686 686 p1, p2 = self.dirstate.parents()
687 687 update_dirstate = True
688 688 else:
689 689 p1, p2 = p1, p2 or nullid
690 690 update_dirstate = (self.dirstate.parents()[0] == p1)
691 691
692 692 c1 = self.changelog.read(p1)
693 693 c2 = self.changelog.read(p2)
694 694 m1 = self.manifest.read(c1[0]).copy()
695 695 m2 = self.manifest.read(c2[0])
696 696
697 697 if use_dirstate:
698 698 branchname = self.workingctx().branch()
699 699 try:
700 700 branchname = branchname.decode('UTF-8').encode('UTF-8')
701 701 except UnicodeDecodeError:
702 702 raise util.Abort(_('branch name not in UTF-8!'))
703 703 else:
704 704 branchname = ""
705 705
706 706 if use_dirstate:
707 707 oldname = c1[5].get("branch") # stored in UTF-8
708 708 if (not commit and not remove and not force and p2 == nullid
709 709 and branchname == oldname):
710 710 self.ui.status(_("nothing changed\n"))
711 711 return None
712 712
713 713 xp1 = hex(p1)
714 714 if p2 == nullid: xp2 = ''
715 715 else: xp2 = hex(p2)
716 716
717 717 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
718 718
719 719 wlock = self.wlock()
720 720 lock = self.lock()
721 721 tr = self.transaction()
722 722 trp = weakref.proxy(tr)
723 723
724 724 # check in files
725 725 new = {}
726 726 linkrev = self.changelog.count()
727 727 commit.sort()
728 728 is_exec = util.execfunc(self.root, m1.execf)
729 729 is_link = util.linkfunc(self.root, m1.linkf)
730 730 for f in commit:
731 731 self.ui.note(f + "\n")
732 732 try:
733 733 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
734 734 new_exec = is_exec(f)
735 735 new_link = is_link(f)
736 736 if not changed or changed[-1] != f:
737 737 # mention the file in the changelog if some
738 738 # flag changed, even if there was no content
739 739 # change.
740 740 old_exec = m1.execf(f)
741 741 old_link = m1.linkf(f)
742 742 if old_exec != new_exec or old_link != new_link:
743 743 changed.append(f)
744 744 m1.set(f, new_exec, new_link)
745 745 except (OSError, IOError):
746 746 if use_dirstate:
747 747 self.ui.warn(_("trouble committing %s!\n") % f)
748 748 raise
749 749 else:
750 750 remove.append(f)
751 751
752 752 # update manifest
753 753 m1.update(new)
754 754 remove.sort()
755 755 removed = []
756 756
757 757 for f in remove:
758 758 if f in m1:
759 759 del m1[f]
760 760 removed.append(f)
761 761 elif f in m2:
762 762 removed.append(f)
763 763 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
764 764 (new, removed))
765 765
766 766 # add changeset
767 767 new = new.keys()
768 768 new.sort()
769 769
770 770 user = user or self.ui.username()
771 771 if (not empty_ok and not text) or force_editor:
772 772 edittext = []
773 773 if text:
774 774 edittext.append(text)
775 775 edittext.append("")
776 776 edittext.append("HG: user: %s" % user)
777 777 if p2 != nullid:
778 778 edittext.append("HG: branch merge")
779 779 if branchname:
780 780 edittext.append("HG: branch %s" % util.tolocal(branchname))
781 781 edittext.extend(["HG: changed %s" % f for f in changed])
782 782 edittext.extend(["HG: removed %s" % f for f in removed])
783 783 if not changed and not remove:
784 784 edittext.append("HG: no files changed")
785 785 edittext.append("")
786 786 # run editor in the repository root
787 787 olddir = os.getcwd()
788 788 os.chdir(self.root)
789 789 text = self.ui.edit("\n".join(edittext), user)
790 790 os.chdir(olddir)
791 791
792 792 if branchname:
793 793 extra["branch"] = branchname
794 794
795 795 if use_dirstate:
796 796 lines = [line.rstrip() for line in text.rstrip().splitlines()]
797 797 while lines and not lines[0]:
798 798 del lines[0]
799 799 if not lines:
800 800 return None
801 801 text = '\n'.join(lines)
802 802
803 803 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
804 804 user, date, extra)
805 805 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
806 806 parent2=xp2)
807 807 tr.close()
808 808
809 809 if self.branchcache and "branch" in extra:
810 810 self.branchcache[util.tolocal(extra["branch"])] = n
811 811
812 812 if use_dirstate or update_dirstate:
813 813 self.dirstate.setparents(n)
814 814 if use_dirstate:
815 815 for f in new:
816 816 self.dirstate.normal(f)
817 817 for f in removed:
818 818 self.dirstate.forget(f)
819 819
820 820 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
821 821 return n
822 822 finally:
823 del lock, wlock, tr
823 del tr, lock, wlock
824 824
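
The changed line above is the point of this revision: cleanup in commit() is driven by destructors, so the names must be dropped in the order transaction, then store lock, then working-directory lock, so that an unfinished transaction rolls back while both locks are still held. Below is a minimal standalone sketch of why the order of a single del statement matters; the Lock and Transaction classes are illustrative stand-ins, not Mercurial's own, and it assumes CPython's immediate refcount-driven finalization.

class Lock(object):
    # illustrative stand-in: the resource is released in __del__
    def __init__(self, name):
        self.name = name
    def __del__(self):
        print("released %s" % self.name)

class Transaction(object):
    # illustrative stand-in: an unclosed transaction rolls itself
    # back when it is destroyed
    def __del__(self):
        print("transaction rolled back")

wlock = Lock("wlock")    # working-directory lock, taken first
lock = Lock("lock")      # store lock
tr = Transaction()

# del unbinds the names left to right, so the transaction is torn
# down before either lock is released, and the store lock before
# the working-directory lock:
del tr, lock, wlock
# transaction rolled back
# released lock
# released wlock
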
825 825 def walk(self, node=None, files=[], match=util.always, badmatch=None):
826 826 '''
827 827 walk recursively through the directory tree or a given
828 828 changeset, finding all files matched by the match
829 829 function
830 830
831 831 results are yielded in a tuple (src, filename), where src
832 832 is one of:
833 833 'f' the file was found in the directory tree
834 834 'm' the file was only in the dirstate and not in the tree
835 835 'b' file was not found and matched badmatch
836 836 '''
837 837
838 838 if node:
839 839 fdict = dict.fromkeys(files)
840 840 # for dirstate.walk, files=['.'] means "walk the whole tree".
841 841 # follow that here, too
842 842 fdict.pop('.', None)
843 843 mdict = self.manifest.read(self.changelog.read(node)[0])
844 844 mfiles = mdict.keys()
845 845 mfiles.sort()
846 846 for fn in mfiles:
847 847 for ffn in fdict:
848 848 # match if the file is the exact name or a directory
849 849 if ffn == fn or fn.startswith("%s/" % ffn):
850 850 del fdict[ffn]
851 851 break
852 852 if match(fn):
853 853 yield 'm', fn
854 854 ffiles = fdict.keys()
855 855 ffiles.sort()
856 856 for fn in ffiles:
857 857 if badmatch and badmatch(fn):
858 858 if match(fn):
859 859 yield 'b', fn
860 860 else:
861 861 self.ui.warn(_('%s: No such file in rev %s\n')
862 862 % (self.pathto(fn), short(node)))
863 863 else:
864 864 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
865 865 yield src, fn
866 866
867 867 def status(self, node1=None, node2=None, files=[], match=util.always,
868 868 list_ignored=False, list_clean=False):
869 869 """return status of files between two nodes or node and working directory
870 870
871 871 If node1 is None, use the first dirstate parent instead.
872 872 If node2 is None, compare node1 with working directory.
873 873 """
874 874
875 875 def fcmp(fn, getnode):
876 876 t1 = self.wread(fn)
877 877 return self.file(fn).cmp(getnode(fn), t1)
878 878
879 879 def mfmatches(node):
880 880 change = self.changelog.read(node)
881 881 mf = self.manifest.read(change[0]).copy()
882 882 for fn in mf.keys():
883 883 if not match(fn):
884 884 del mf[fn]
885 885 return mf
886 886
887 887 modified, added, removed, deleted, unknown = [], [], [], [], []
888 888 ignored, clean = [], []
889 889
890 890 compareworking = False
891 891 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
892 892 compareworking = True
893 893
894 894 if not compareworking:
895 895 # read the manifest from node1 before the manifest from node2,
896 896 # so that we'll hit the manifest cache if we're going through
897 897 # all the revisions in parent->child order.
898 898 mf1 = mfmatches(node1)
899 899
900 900 # are we comparing the working directory?
901 901 if not node2:
902 902 (lookup, modified, added, removed, deleted, unknown,
903 903 ignored, clean) = self.dirstate.status(files, match,
904 904 list_ignored, list_clean)
905 905
906 906 # are we comparing working dir against its parent?
907 907 if compareworking:
908 908 if lookup:
909 909 fixup = []
910 910 # do a full compare of any files that might have changed
911 911 ctx = self.changectx()
912 912 for f in lookup:
913 913 if f not in ctx or ctx[f].cmp(self.wread(f)):
914 914 modified.append(f)
915 915 else:
916 916 fixup.append(f)
917 917 if list_clean:
918 918 clean.append(f)
919 919
920 920 # update dirstate for files that are actually clean
921 921 if fixup:
922 922 wlock = None
923 923 try:
924 924 try:
925 925 wlock = self.wlock(False)
926 926 except lock.LockException:
927 927 pass
928 928 if wlock:
929 929 for f in fixup:
930 930 self.dirstate.normal(f)
931 931 finally:
932 932 del wlock
933 933 else:
934 934 # we are comparing working dir against non-parent
935 935 # generate a pseudo-manifest for the working dir
936 936 # XXX: create it in dirstate.py ?
937 937 mf2 = mfmatches(self.dirstate.parents()[0])
938 938 is_exec = util.execfunc(self.root, mf2.execf)
939 939 is_link = util.linkfunc(self.root, mf2.linkf)
940 940 for f in lookup + modified + added:
941 941 mf2[f] = ""
942 942 mf2.set(f, is_exec(f), is_link(f))
943 943 for f in removed:
944 944 if f in mf2:
945 945 del mf2[f]
946 946
947 947 else:
948 948 # we are comparing two revisions
949 949 mf2 = mfmatches(node2)
950 950
951 951 if not compareworking:
952 952 # flush lists from dirstate before comparing manifests
953 953 modified, added, clean = [], [], []
954 954
955 955 # make sure to sort the files so we talk to the disk in a
956 956 # reasonable order
957 957 mf2keys = mf2.keys()
958 958 mf2keys.sort()
959 959 getnode = lambda fn: mf1.get(fn, nullid)
960 960 for fn in mf2keys:
961 961 if mf1.has_key(fn):
962 962 if (mf1.flags(fn) != mf2.flags(fn) or
963 963 (mf1[fn] != mf2[fn] and
964 964 (mf2[fn] != "" or fcmp(fn, getnode)))):
965 965 modified.append(fn)
966 966 elif list_clean:
967 967 clean.append(fn)
968 968 del mf1[fn]
969 969 else:
970 970 added.append(fn)
971 971
972 972 removed = mf1.keys()
973 973
974 974 # sort and return results:
975 975 for l in modified, added, removed, deleted, unknown, ignored, clean:
976 976 l.sort()
977 977 return (modified, added, removed, deleted, unknown, ignored, clean)
978 978
979 979 def add(self, list):
980 980 wlock = self.wlock()
981 981 try:
982 982 for f in list:
983 983 p = self.wjoin(f)
984 984 try:
985 985 st = os.lstat(p)
986 986 except:
987 987 self.ui.warn(_("%s does not exist!\n") % f)
988 988 continue
989 989 if st.st_size > 10000000:
990 990 self.ui.warn(_("%s: files over 10MB may cause memory and"
991 991 " performance problems\n"
992 992 "(use 'hg revert %s' to unadd the file)\n")
993 993 % (f, f))
994 994 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
995 995 self.ui.warn(_("%s not added: only files and symlinks "
996 996 "supported currently\n") % f)
997 997 elif self.dirstate[f] in 'an':
998 998 self.ui.warn(_("%s already tracked!\n") % f)
999 999 else:
1000 1000 self.dirstate.add(f)
1001 1001 finally:
1002 1002 del wlock
1003 1003
1004 1004 def forget(self, list):
1005 1005 wlock = self.wlock()
1006 1006 try:
1007 1007 for f in list:
1008 1008 if self.dirstate[f] != 'a':
1009 1009 self.ui.warn(_("%s not added!\n") % f)
1010 1010 else:
1011 1011 self.dirstate.forget(f)
1012 1012 finally:
1013 1013 del wlock
1014 1014
1015 1015 def remove(self, list, unlink=False):
1016 1016 wlock = None
1017 1017 try:
1018 1018 if unlink:
1019 1019 for f in list:
1020 1020 try:
1021 1021 util.unlink(self.wjoin(f))
1022 1022 except OSError, inst:
1023 1023 if inst.errno != errno.ENOENT:
1024 1024 raise
1025 1025 wlock = self.wlock()
1026 1026 for f in list:
1027 1027 if unlink and os.path.exists(self.wjoin(f)):
1028 1028 self.ui.warn(_("%s still exists!\n") % f)
1029 1029 elif self.dirstate[f] == 'a':
1030 1030 self.dirstate.forget(f)
1031 1031 elif f not in self.dirstate:
1032 1032 self.ui.warn(_("%s not tracked!\n") % f)
1033 1033 else:
1034 1034 self.dirstate.remove(f)
1035 1035 finally:
1036 1036 del wlock
1037 1037
1038 1038 def undelete(self, list):
1039 1039 wlock = None
1040 1040 try:
1041 1041 p = self.dirstate.parents()[0]
1042 1042 mn = self.changelog.read(p)[0]
1043 1043 m = self.manifest.read(mn)
1044 1044 wlock = self.wlock()
1045 1045 for f in list:
1046 1046 if self.dirstate[f] != 'r':
1047 1047 self.ui.warn("%s not removed!\n" % f)
1048 1048 else:
1049 1049 t = self.file(f).read(m[f])
1050 1050 self.wwrite(f, t, m.flags(f))
1051 1051 self.dirstate.normal(f)
1052 1052 finally:
1053 1053 del wlock
1054 1054
1055 1055 def copy(self, source, dest):
1056 1056 wlock = None
1057 1057 try:
1058 1058 p = self.wjoin(dest)
1059 1059 if not (os.path.exists(p) or os.path.islink(p)):
1060 1060 self.ui.warn(_("%s does not exist!\n") % dest)
1061 1061 elif not (os.path.isfile(p) or os.path.islink(p)):
1062 1062 self.ui.warn(_("copy failed: %s is not a file or a "
1063 1063 "symbolic link\n") % dest)
1064 1064 else:
1065 1065 wlock = self.wlock()
1066 1066 if dest not in self.dirstate:
1067 1067 self.dirstate.add(dest)
1068 1068 self.dirstate.copy(source, dest)
1069 1069 finally:
1070 1070 del wlock
1071 1071
1072 1072 def heads(self, start=None):
1073 1073 heads = self.changelog.heads(start)
1074 1074 # sort the output in rev descending order
1075 1075 heads = [(-self.changelog.rev(h), h) for h in heads]
1076 1076 heads.sort()
1077 1077 return [n for (r, n) in heads]
1078 1078
1079 1079 def branchheads(self, branch, start=None):
1080 1080 branches = self.branchtags()
1081 1081 if branch not in branches:
1082 1082 return []
1083 1083 # The basic algorithm is this:
1084 1084 #
1085 1085 # Start from the branch tip since there are no later revisions that can
1086 1086 # possibly be in this branch, and the tip is a guaranteed head.
1087 1087 #
1088 1088 # Remember the tip's parents as the first ancestors, since these by
1089 1089 # definition are not heads.
1090 1090 #
1091 1091 # Step backwards from the branch tip through all the revisions. We are
1092 1092 # guaranteed by the rules of Mercurial that we will now be visiting the
1093 1093 # nodes in reverse topological order (children before parents).
1094 1094 #
1095 1095 # If a revision is one of the ancestors of a head then we can toss it
1096 1096 # out of the ancestors set (we've already found it and won't be
1097 1097 # visiting it again) and put its parents in the ancestors set.
1098 1098 #
1099 1099 # Otherwise, if a revision is in the branch it's another head, since it
1100 1100 # wasn't in the ancestor list of an existing head. So add it to the
1101 1101 # head list, and add its parents to the ancestor list.
1102 1102 #
1103 1103 # If it is not in the branch ignore it.
1104 1104 #
1105 1105 # Once we have a list of heads, use nodesbetween to filter out all the
1106 1106 # heads that cannot be reached from startrev. There may be a more
1107 1107 # efficient way to do this as part of the previous algorithm.
1108 1108
1109 1109 set = util.set
1110 1110 heads = [self.changelog.rev(branches[branch])]
1111 1111 # Don't care if ancestors contains nullrev or not.
1112 1112 ancestors = set(self.changelog.parentrevs(heads[0]))
1113 1113 for rev in xrange(heads[0] - 1, nullrev, -1):
1114 1114 if rev in ancestors:
1115 1115 ancestors.update(self.changelog.parentrevs(rev))
1116 1116 ancestors.remove(rev)
1117 1117 elif self.changectx(rev).branch() == branch:
1118 1118 heads.append(rev)
1119 1119 ancestors.update(self.changelog.parentrevs(rev))
1120 1120 heads = [self.changelog.node(rev) for rev in heads]
1121 1121 if start is not None:
1122 1122 heads = self.changelog.nodesbetween([start], heads)[2]
1123 1123 return heads
1124 1124
1125 1125 def branches(self, nodes):
1126 1126 if not nodes:
1127 1127 nodes = [self.changelog.tip()]
1128 1128 b = []
1129 1129 for n in nodes:
1130 1130 t = n
1131 1131 while 1:
1132 1132 p = self.changelog.parents(n)
1133 1133 if p[1] != nullid or p[0] == nullid:
1134 1134 b.append((t, n, p[0], p[1]))
1135 1135 break
1136 1136 n = p[0]
1137 1137 return b
1138 1138
1139 1139 def between(self, pairs):
1140 1140 r = []
1141 1141
1142 1142 for top, bottom in pairs:
1143 1143 n, l, i = top, [], 0
1144 1144 f = 1
1145 1145
1146 1146 while n != bottom:
1147 1147 p = self.changelog.parents(n)[0]
1148 1148 if i == f:
1149 1149 l.append(n)
1150 1150 f = f * 2
1151 1151 n = p
1152 1152 i += 1
1153 1153
1154 1154 r.append(l)
1155 1155
1156 1156 return r
1157 1157
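
between() samples each requested segment at exponentially growing distances from the top (1, 2, 4, 8, ...); those samples are what the discovery code in findincoming later narrows down. Here is a standalone sketch of the same sampling on a toy linear history where the first parent of revision n is simply n - 1; the function name and the integer history are illustrative only.

def sample_between(top, bottom):
    # keep the nodes at distances 1, 2, 4, 8, ... from the top,
    # walking first parents until the known bottom is reached
    n, sample, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:            # n is exactly f steps below the top
            sample.append(n)
            f *= 2
        n -= 1                # step to the (first) parent
        i += 1
    return sample

print(sample_between(100, 80))   # [99, 98, 96, 92, 84]
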
1158 1158 def findincoming(self, remote, base=None, heads=None, force=False):
1159 1159 """Return list of roots of the subsets of missing nodes from remote
1160 1160
1161 1161 If base dict is specified, assume that these nodes and their parents
1162 1162 exist on the remote side and that no child of a node of base exists
1163 1163 in both remote and self.
1164 1164 Furthermore base will be updated to include the nodes that exist
1165 1165 in both self and remote but whose children do not exist in both.
1166 1166 If a list of heads is specified, return only nodes which are heads
1167 1167 or ancestors of these heads.
1168 1168
1169 1169 All the ancestors of base are in self and in remote.
1170 1170 All the descendants of the list returned are missing in self.
1171 1171 (and so we know that the rest of the nodes are missing in remote, see
1172 1172 outgoing)
1173 1173 """
1174 1174 m = self.changelog.nodemap
1175 1175 search = []
1176 1176 fetch = {}
1177 1177 seen = {}
1178 1178 seenbranch = {}
1179 1179 if base == None:
1180 1180 base = {}
1181 1181
1182 1182 if not heads:
1183 1183 heads = remote.heads()
1184 1184
1185 1185 if self.changelog.tip() == nullid:
1186 1186 base[nullid] = 1
1187 1187 if heads != [nullid]:
1188 1188 return [nullid]
1189 1189 return []
1190 1190
1191 1191 # assume we're closer to the tip than the root
1192 1192 # and start by examining the heads
1193 1193 self.ui.status(_("searching for changes\n"))
1194 1194
1195 1195 unknown = []
1196 1196 for h in heads:
1197 1197 if h not in m:
1198 1198 unknown.append(h)
1199 1199 else:
1200 1200 base[h] = 1
1201 1201
1202 1202 if not unknown:
1203 1203 return []
1204 1204
1205 1205 req = dict.fromkeys(unknown)
1206 1206 reqcnt = 0
1207 1207
1208 1208 # search through remote branches
1209 1209 # a 'branch' here is a linear segment of history, with four parts:
1210 1210 # head, root, first parent, second parent
1211 1211 # (a branch always has two parents (or none) by definition)
1212 1212 unknown = remote.branches(unknown)
1213 1213 while unknown:
1214 1214 r = []
1215 1215 while unknown:
1216 1216 n = unknown.pop(0)
1217 1217 if n[0] in seen:
1218 1218 continue
1219 1219
1220 1220 self.ui.debug(_("examining %s:%s\n")
1221 1221 % (short(n[0]), short(n[1])))
1222 1222 if n[0] == nullid: # found the end of the branch
1223 1223 pass
1224 1224 elif n in seenbranch:
1225 1225 self.ui.debug(_("branch already found\n"))
1226 1226 continue
1227 1227 elif n[1] and n[1] in m: # do we know the base?
1228 1228 self.ui.debug(_("found incomplete branch %s:%s\n")
1229 1229 % (short(n[0]), short(n[1])))
1230 1230 search.append(n) # schedule branch range for scanning
1231 1231 seenbranch[n] = 1
1232 1232 else:
1233 1233 if n[1] not in seen and n[1] not in fetch:
1234 1234 if n[2] in m and n[3] in m:
1235 1235 self.ui.debug(_("found new changeset %s\n") %
1236 1236 short(n[1]))
1237 1237 fetch[n[1]] = 1 # earliest unknown
1238 1238 for p in n[2:4]:
1239 1239 if p in m:
1240 1240 base[p] = 1 # latest known
1241 1241
1242 1242 for p in n[2:4]:
1243 1243 if p not in req and p not in m:
1244 1244 r.append(p)
1245 1245 req[p] = 1
1246 1246 seen[n[0]] = 1
1247 1247
1248 1248 if r:
1249 1249 reqcnt += 1
1250 1250 self.ui.debug(_("request %d: %s\n") %
1251 1251 (reqcnt, " ".join(map(short, r))))
1252 1252 for p in xrange(0, len(r), 10):
1253 1253 for b in remote.branches(r[p:p+10]):
1254 1254 self.ui.debug(_("received %s:%s\n") %
1255 1255 (short(b[0]), short(b[1])))
1256 1256 unknown.append(b)
1257 1257
1258 1258 # do binary search on the branches we found
1259 1259 while search:
1260 1260 n = search.pop(0)
1261 1261 reqcnt += 1
1262 1262 l = remote.between([(n[0], n[1])])[0]
1263 1263 l.append(n[1])
1264 1264 p = n[0]
1265 1265 f = 1
1266 1266 for i in l:
1267 1267 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1268 1268 if i in m:
1269 1269 if f <= 2:
1270 1270 self.ui.debug(_("found new branch changeset %s\n") %
1271 1271 short(p))
1272 1272 fetch[p] = 1
1273 1273 base[i] = 1
1274 1274 else:
1275 1275 self.ui.debug(_("narrowed branch search to %s:%s\n")
1276 1276 % (short(p), short(i)))
1277 1277 search.append((p, i))
1278 1278 break
1279 1279 p, f = i, f * 2
1280 1280
1281 1281 # sanity check our fetch list
1282 1282 for f in fetch.keys():
1283 1283 if f in m:
1284 1284 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1285 1285
1286 1286 if base.keys() == [nullid]:
1287 1287 if force:
1288 1288 self.ui.warn(_("warning: repository is unrelated\n"))
1289 1289 else:
1290 1290 raise util.Abort(_("repository is unrelated"))
1291 1291
1292 1292 self.ui.debug(_("found new changesets starting at ") +
1293 1293 " ".join([short(f) for f in fetch]) + "\n")
1294 1294
1295 1295 self.ui.debug(_("%d total queries\n") % reqcnt)
1296 1296
1297 1297 return fetch.keys()
1298 1298
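
The binary search above consumes those between() samples: for a segment whose newest node is missing locally and whose oldest node is known, it keeps halving the range until the earliest missing node is found, which becomes a root to fetch. A standalone sketch of that narrowing on a linear history follows; the known predicate stands in for "the node is already in our changelog", and the names and numbers are illustrative (the real code asks the remote for sample lists rather than bisecting an in-memory list).

def earliest_missing(segment, known):
    # segment is ordered newest -> oldest; segment[0] is missing
    # locally and segment[-1] is known
    lo, hi = 0, len(segment) - 1      # lo: missing, hi: known
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if known(segment[mid]):
            hi = mid                  # boundary is above mid
        else:
            lo = mid                  # boundary is below mid
    return segment[lo]                # earliest node we must fetch

history = list(range(100, 79, -1))                    # 100 (newest) .. 80 (oldest)
print(earliest_missing(history, lambda n: n <= 90))   # 91
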
1299 1299 def findoutgoing(self, remote, base=None, heads=None, force=False):
1300 1300 """Return list of nodes that are roots of subsets not in remote
1301 1301
1302 1302 If base dict is specified, assume that these nodes and their parents
1303 1303 exist on the remote side.
1304 1304 If a list of heads is specified, return only nodes which are heads
1305 1305 or ancestors of these heads, and return a second element which
1306 1306 contains all remote heads which get new children.
1307 1307 """
1308 1308 if base == None:
1309 1309 base = {}
1310 1310 self.findincoming(remote, base, heads, force=force)
1311 1311
1312 1312 self.ui.debug(_("common changesets up to ")
1313 1313 + " ".join(map(short, base.keys())) + "\n")
1314 1314
1315 1315 remain = dict.fromkeys(self.changelog.nodemap)
1316 1316
1317 1317 # prune everything remote has from the tree
1318 1318 del remain[nullid]
1319 1319 remove = base.keys()
1320 1320 while remove:
1321 1321 n = remove.pop(0)
1322 1322 if n in remain:
1323 1323 del remain[n]
1324 1324 for p in self.changelog.parents(n):
1325 1325 remove.append(p)
1326 1326
1327 1327 # find every node whose parents have been pruned
1328 1328 subset = []
1329 1329 # find every remote head that will get new children
1330 1330 updated_heads = {}
1331 1331 for n in remain:
1332 1332 p1, p2 = self.changelog.parents(n)
1333 1333 if p1 not in remain and p2 not in remain:
1334 1334 subset.append(n)
1335 1335 if heads:
1336 1336 if p1 in heads:
1337 1337 updated_heads[p1] = True
1338 1338 if p2 in heads:
1339 1339 updated_heads[p2] = True
1340 1340
1341 1341 # this is the set of all roots we have to push
1342 1342 if heads:
1343 1343 return subset, updated_heads.keys()
1344 1344 else:
1345 1345 return subset
1346 1346
1347 1347 def pull(self, remote, heads=None, force=False):
1348 1348 lock = self.lock()
1349 1349 try:
1350 1350 fetch = self.findincoming(remote, force=force)
1351 1351 if fetch == [nullid]:
1352 1352 self.ui.status(_("requesting all changes\n"))
1353 1353
1354 1354 if not fetch:
1355 1355 self.ui.status(_("no changes found\n"))
1356 1356 return 0
1357 1357
1358 1358 if heads is None:
1359 1359 cg = remote.changegroup(fetch, 'pull')
1360 1360 else:
1361 1361 if 'changegroupsubset' not in remote.capabilities:
1362 1362 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1363 1363 cg = remote.changegroupsubset(fetch, heads, 'pull')
1364 1364 return self.addchangegroup(cg, 'pull', remote.url())
1365 1365 finally:
1366 1366 del lock
1367 1367
1368 1368 def push(self, remote, force=False, revs=None):
1369 1369 # there are two ways to push to remote repo:
1370 1370 #
1371 1371 # addchangegroup assumes local user can lock remote
1372 1372 # repo (local filesystem, old ssh servers).
1373 1373 #
1374 1374 # unbundle assumes local user cannot lock remote repo (new ssh
1375 1375 # servers, http servers).
1376 1376
1377 1377 if remote.capable('unbundle'):
1378 1378 return self.push_unbundle(remote, force, revs)
1379 1379 return self.push_addchangegroup(remote, force, revs)
1380 1380
1381 1381 def prepush(self, remote, force, revs):
1382 1382 base = {}
1383 1383 remote_heads = remote.heads()
1384 1384 inc = self.findincoming(remote, base, remote_heads, force=force)
1385 1385
1386 1386 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1387 1387 if revs is not None:
1388 1388 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1389 1389 else:
1390 1390 bases, heads = update, self.changelog.heads()
1391 1391
1392 1392 if not bases:
1393 1393 self.ui.status(_("no changes found\n"))
1394 1394 return None, 1
1395 1395 elif not force:
1396 1396 # check if we're creating new remote heads
1397 1397 # to be a remote head after push, node must be either
1398 1398 # - unknown locally
1399 1399 # - a local outgoing head descended from update
1400 1400 # - a remote head that's known locally and not
1401 1401 # ancestral to an outgoing head
1402 1402
1403 1403 warn = 0
1404 1404
1405 1405 if remote_heads == [nullid]:
1406 1406 warn = 0
1407 1407 elif not revs and len(heads) > len(remote_heads):
1408 1408 warn = 1
1409 1409 else:
1410 1410 newheads = list(heads)
1411 1411 for r in remote_heads:
1412 1412 if r in self.changelog.nodemap:
1413 1413 desc = self.changelog.heads(r, heads)
1414 1414 l = [h for h in heads if h in desc]
1415 1415 if not l:
1416 1416 newheads.append(r)
1417 1417 else:
1418 1418 newheads.append(r)
1419 1419 if len(newheads) > len(remote_heads):
1420 1420 warn = 1
1421 1421
1422 1422 if warn:
1423 1423 self.ui.warn(_("abort: push creates new remote branches!\n"))
1424 1424 self.ui.status(_("(did you forget to merge?"
1425 1425 " use push -f to force)\n"))
1426 1426 return None, 1
1427 1427 elif inc:
1428 1428 self.ui.warn(_("note: unsynced remote changes!\n"))
1429 1429
1430 1430
1431 1431 if revs is None:
1432 1432 cg = self.changegroup(update, 'push')
1433 1433 else:
1434 1434 cg = self.changegroupsubset(update, revs, 'push')
1435 1435 return cg, remote_heads
1436 1436
1437 1437 def push_addchangegroup(self, remote, force, revs):
1438 1438 lock = remote.lock()
1439 1439 try:
1440 1440 ret = self.prepush(remote, force, revs)
1441 1441 if ret[0] is not None:
1442 1442 cg, remote_heads = ret
1443 1443 return remote.addchangegroup(cg, 'push', self.url())
1444 1444 return ret[1]
1445 1445 finally:
1446 1446 del lock
1447 1447
1448 1448 def push_unbundle(self, remote, force, revs):
1449 1449 # local repo finds heads on server, finds out what revs it
1450 1450 # must push. once revs transferred, if server finds it has
1451 1451 # different heads (someone else won commit/push race), server
1452 1452 # aborts.
1453 1453
1454 1454 ret = self.prepush(remote, force, revs)
1455 1455 if ret[0] is not None:
1456 1456 cg, remote_heads = ret
1457 1457 if force: remote_heads = ['force']
1458 1458 return remote.unbundle(cg, remote_heads, 'push')
1459 1459 return ret[1]
1460 1460
1461 1461 def changegroupinfo(self, nodes):
1462 1462 self.ui.note(_("%d changesets found\n") % len(nodes))
1463 1463 if self.ui.debugflag:
1464 1464 self.ui.debug(_("List of changesets:\n"))
1465 1465 for node in nodes:
1466 1466 self.ui.debug("%s\n" % hex(node))
1467 1467
1468 1468 def changegroupsubset(self, bases, heads, source):
1469 1469 """This function generates a changegroup consisting of all the nodes
1470 1470 that are descendants of any of the bases, and ancestors of any of
1471 1471 the heads.
1472 1472
1473 1473 It is fairly complex as determining which filenodes and which
1474 1474 manifest nodes need to be included for the changeset to be complete
1475 1475 is non-trivial.
1476 1476
1477 1477 Another wrinkle is doing the reverse, figuring out which changeset in
1478 1478 the changegroup a particular filenode or manifestnode belongs to."""
1479 1479
1480 1480 self.hook('preoutgoing', throw=True, source=source)
1481 1481
1482 1482 # Set up some initial variables
1483 1483 # Make it easy to refer to self.changelog
1484 1484 cl = self.changelog
1485 1485 # msng is short for missing - compute the list of changesets in this
1486 1486 # changegroup.
1487 1487 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1488 1488 self.changegroupinfo(msng_cl_lst)
1489 1489 # Some bases may turn out to be superfluous, and some heads may be
1490 1490 # too. nodesbetween will return the minimal set of bases and heads
1491 1491 # necessary to re-create the changegroup.
1492 1492
1493 1493 # Known heads are the list of heads that it is assumed the recipient
1494 1494 # of this changegroup will know about.
1495 1495 knownheads = {}
1496 1496 # We assume that all parents of bases are known heads.
1497 1497 for n in bases:
1498 1498 for p in cl.parents(n):
1499 1499 if p != nullid:
1500 1500 knownheads[p] = 1
1501 1501 knownheads = knownheads.keys()
1502 1502 if knownheads:
1503 1503 # Now that we know what heads are known, we can compute which
1504 1504 # changesets are known. The recipient must know about all
1505 1505 # changesets required to reach the known heads from the null
1506 1506 # changeset.
1507 1507 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1508 1508 junk = None
1509 1509 # Transform the list into an ersatz set.
1510 1510 has_cl_set = dict.fromkeys(has_cl_set)
1511 1511 else:
1512 1512 # If there were no known heads, the recipient cannot be assumed to
1513 1513 # know about any changesets.
1514 1514 has_cl_set = {}
1515 1515
1516 1516 # Make it easy to refer to self.manifest
1517 1517 mnfst = self.manifest
1518 1518 # We don't know which manifests are missing yet
1519 1519 msng_mnfst_set = {}
1520 1520 # Nor do we know which filenodes are missing.
1521 1521 msng_filenode_set = {}
1522 1522
1523 1523 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1524 1524 junk = None
1525 1525
1526 1526 # A changeset always belongs to itself, so the changenode lookup
1527 1527 # function for a changenode is identity.
1528 1528 def identity(x):
1529 1529 return x
1530 1530
1531 1531 # A function generating function. Sets up an environment for the
1532 1532 # inner function.
1533 1533 def cmp_by_rev_func(revlog):
1534 1534 # Compare two nodes by their revision number in the environment's
1535 1535 # revision history. Since the revision number both represents the
1536 1536 # most efficient order to read the nodes in, and represents a
1537 1537 # topological sorting of the nodes, this function is often useful.
1538 1538 def cmp_by_rev(a, b):
1539 1539 return cmp(revlog.rev(a), revlog.rev(b))
1540 1540 return cmp_by_rev
1541 1541
1542 1542 # If we determine that a particular file or manifest node must be a
1543 1543 # node that the recipient of the changegroup will already have, we can
1544 1544 # also assume the recipient will have all the parents. This function
1545 1545 # prunes them from the set of missing nodes.
1546 1546 def prune_parents(revlog, hasset, msngset):
1547 1547 haslst = hasset.keys()
1548 1548 haslst.sort(cmp_by_rev_func(revlog))
1549 1549 for node in haslst:
1550 1550 parentlst = [p for p in revlog.parents(node) if p != nullid]
1551 1551 while parentlst:
1552 1552 n = parentlst.pop()
1553 1553 if n not in hasset:
1554 1554 hasset[n] = 1
1555 1555 p = [p for p in revlog.parents(n) if p != nullid]
1556 1556 parentlst.extend(p)
1557 1557 for n in hasset:
1558 1558 msngset.pop(n, None)
1559 1559
1560 1560 # This is a function generating function used to set up an environment
1561 1561 # for the inner function to execute in.
1562 1562 def manifest_and_file_collector(changedfileset):
1563 1563 # This is an information gathering function that gathers
1564 1564 # information from each changeset node that goes out as part of
1565 1565 # the changegroup. The information gathered is a list of which
1566 1566 # manifest nodes are potentially required (the recipient may
1567 1567 # already have them) and total list of all files which were
1568 1568 # changed in any changeset in the changegroup.
1569 1569 #
1570 1570 # We also remember the first changenode we saw any manifest
1571 1571 # referenced by so we can later determine which changenode 'owns'
1572 1572 # the manifest.
1573 1573 def collect_manifests_and_files(clnode):
1574 1574 c = cl.read(clnode)
1575 1575 for f in c[3]:
1576 1576 # This is to make sure we only have one instance of each
1577 1577 # filename string for each filename.
1578 1578 changedfileset.setdefault(f, f)
1579 1579 msng_mnfst_set.setdefault(c[0], clnode)
1580 1580 return collect_manifests_and_files
1581 1581
1582 1582 # Figure out which manifest nodes (of the ones we think might be part
1583 1583 # of the changegroup) the recipient must know about and remove them
1584 1584 # from the changegroup.
1585 1585 def prune_manifests():
1586 1586 has_mnfst_set = {}
1587 1587 for n in msng_mnfst_set:
1588 1588 # If a 'missing' manifest thinks it belongs to a changenode
1589 1589 # the recipient is assumed to have, obviously the recipient
1590 1590 # must have that manifest.
1591 1591 linknode = cl.node(mnfst.linkrev(n))
1592 1592 if linknode in has_cl_set:
1593 1593 has_mnfst_set[n] = 1
1594 1594 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1595 1595
1596 1596 # Use the information collected in collect_manifests_and_files to say
1597 1597 # which changenode any manifestnode belongs to.
1598 1598 def lookup_manifest_link(mnfstnode):
1599 1599 return msng_mnfst_set[mnfstnode]
1600 1600
1601 1601 # A function generating function that sets up the initial environment
1602 1602 # the inner function.
1603 1603 def filenode_collector(changedfiles):
1604 1604 next_rev = [0]
1605 1605 # This gathers information from each manifestnode included in the
1606 1606 # changegroup about which filenodes the manifest node references
1607 1607 # so we can include those in the changegroup too.
1608 1608 #
1609 1609 # It also remembers which changenode each filenode belongs to. It
1610 1610 # does this by assuming that a filenode belongs to the changenode
1611 1611 # the first manifest that references it belongs to.
1612 1612 def collect_msng_filenodes(mnfstnode):
1613 1613 r = mnfst.rev(mnfstnode)
1614 1614 if r == next_rev[0]:
1615 1615 # If the last rev we looked at was the one just previous,
1616 1616 # we only need to see a diff.
1617 1617 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1618 1618 # For each line in the delta
1619 1619 for dline in delta.splitlines():
1620 1620 # get the filename and filenode for that line
1621 1621 f, fnode = dline.split('\0')
1622 1622 fnode = bin(fnode[:40])
1623 1623 f = changedfiles.get(f, None)
1624 1624 # And if the file is in the list of files we care
1625 1625 # about.
1626 1626 if f is not None:
1627 1627 # Get the changenode this manifest belongs to
1628 1628 clnode = msng_mnfst_set[mnfstnode]
1629 1629 # Create the set of filenodes for the file if
1630 1630 # there isn't one already.
1631 1631 ndset = msng_filenode_set.setdefault(f, {})
1632 1632 # And set the filenode's changelog node to the
1633 1633 # manifest's if it hasn't been set already.
1634 1634 ndset.setdefault(fnode, clnode)
1635 1635 else:
1636 1636 # Otherwise we need a full manifest.
1637 1637 m = mnfst.read(mnfstnode)
1638 1638 # For every file we care about.
1639 1639 for f in changedfiles:
1640 1640 fnode = m.get(f, None)
1641 1641 # If it's in the manifest
1642 1642 if fnode is not None:
1643 1643 # See comments above.
1644 1644 clnode = msng_mnfst_set[mnfstnode]
1645 1645 ndset = msng_filenode_set.setdefault(f, {})
1646 1646 ndset.setdefault(fnode, clnode)
1647 1647 # Remember the revision we hope to see next.
1648 1648 next_rev[0] = r + 1
1649 1649 return collect_msng_filenodes
1650 1650
1651 1651 # We have a list of filenodes we think we need for a file, so let's remove
1652 1652 # all those we know the recipient must have.
1653 1653 def prune_filenodes(f, filerevlog):
1654 1654 msngset = msng_filenode_set[f]
1655 1655 hasset = {}
1656 1656 # If a 'missing' filenode thinks it belongs to a changenode we
1657 1657 # assume the recipient must have, then the recipient must have
1658 1658 # that filenode.
1659 1659 for n in msngset:
1660 1660 clnode = cl.node(filerevlog.linkrev(n))
1661 1661 if clnode in has_cl_set:
1662 1662 hasset[n] = 1
1663 1663 prune_parents(filerevlog, hasset, msngset)
1664 1664
1665 1665 # A function generating function that sets up a context for the
1666 1666 # inner function.
1667 1667 def lookup_filenode_link_func(fname):
1668 1668 msngset = msng_filenode_set[fname]
1669 1669 # Lookup the changenode the filenode belongs to.
1670 1670 def lookup_filenode_link(fnode):
1671 1671 return msngset[fnode]
1672 1672 return lookup_filenode_link
1673 1673
1674 1674 # Now that we have all these utility functions to help out and
1675 1675 # logically divide up the task, generate the group.
1676 1676 def gengroup():
1677 1677 # The set of changed files starts empty.
1678 1678 changedfiles = {}
1679 1679 # Create a changenode group generator that will call our functions
1680 1680 # back to lookup the owning changenode and collect information.
1681 1681 group = cl.group(msng_cl_lst, identity,
1682 1682 manifest_and_file_collector(changedfiles))
1683 1683 for chnk in group:
1684 1684 yield chnk
1685 1685
1686 1686 # The list of manifests has been collected by the generator
1687 1687 # calling our functions back.
1688 1688 prune_manifests()
1689 1689 msng_mnfst_lst = msng_mnfst_set.keys()
1690 1690 # Sort the manifestnodes by revision number.
1691 1691 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1692 1692 # Create a generator for the manifestnodes that calls our lookup
1693 1693 # and data collection functions back.
1694 1694 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1695 1695 filenode_collector(changedfiles))
1696 1696 for chnk in group:
1697 1697 yield chnk
1698 1698
1699 1699 # These are no longer needed, dereference and toss the memory for
1700 1700 # them.
1701 1701 msng_mnfst_lst = None
1702 1702 msng_mnfst_set.clear()
1703 1703
1704 1704 changedfiles = changedfiles.keys()
1705 1705 changedfiles.sort()
1706 1706 # Go through all our files in order sorted by name.
1707 1707 for fname in changedfiles:
1708 1708 filerevlog = self.file(fname)
1709 1709 # Toss out the filenodes that the recipient isn't really
1710 1710 # missing.
1711 1711 if msng_filenode_set.has_key(fname):
1712 1712 prune_filenodes(fname, filerevlog)
1713 1713 msng_filenode_lst = msng_filenode_set[fname].keys()
1714 1714 else:
1715 1715 msng_filenode_lst = []
1716 1716 # If any filenodes are left, generate the group for them,
1717 1717 # otherwise don't bother.
1718 1718 if len(msng_filenode_lst) > 0:
1719 1719 yield changegroup.genchunk(fname)
1720 1720 # Sort the filenodes by their revision #
1721 1721 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1722 1722 # Create a group generator and only pass in a changenode
1723 1723 # lookup function as we need to collect no information
1724 1724 # from filenodes.
1725 1725 group = filerevlog.group(msng_filenode_lst,
1726 1726 lookup_filenode_link_func(fname))
1727 1727 for chnk in group:
1728 1728 yield chnk
1729 1729 if msng_filenode_set.has_key(fname):
1730 1730 # Don't need this anymore, toss it to free memory.
1731 1731 del msng_filenode_set[fname]
1732 1732 # Signal that no more groups are left.
1733 1733 yield changegroup.closechunk()
1734 1734
1735 1735 if msng_cl_lst:
1736 1736 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1737 1737
1738 1738 return util.chunkbuffer(gengroup())
1739 1739
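
prune_parents() above relies on a simple invariant: once the recipient is known to have a node, it necessarily has every ancestor of that node, so all of them can be dropped from the missing set. A standalone sketch of the same pruning on a toy DAG; the dict-based parent map and node names are illustrative, the real code walks revlog parents.

def prune(has, missing, parents):
    # grow `has` to cover all ancestors of its members, then drop
    # everything in it from the missing set
    stack = list(has)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p not in has:
                has.add(p)
                stack.append(p)
    for n in has:
        missing.discard(n)

parents = {"b": ["a"], "c": ["b"], "d": ["c"], "e": ["b"]}
missing = set("abcde")
prune({"c"}, missing, parents)   # recipient is known to have c
print(sorted(missing))           # ['d', 'e']  (a and b pruned as well)
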
1740 1740 def changegroup(self, basenodes, source):
1741 1741 """Generate a changegroup of all nodes that we have that a recipient
1742 1742 doesn't.
1743 1743
1744 1744 This is much easier than the previous function as we can assume that
1745 1745 the recipient has any changenode we aren't sending them."""
1746 1746
1747 1747 self.hook('preoutgoing', throw=True, source=source)
1748 1748
1749 1749 cl = self.changelog
1750 1750 nodes = cl.nodesbetween(basenodes, None)[0]
1751 1751 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1752 1752 self.changegroupinfo(nodes)
1753 1753
1754 1754 def identity(x):
1755 1755 return x
1756 1756
1757 1757 def gennodelst(revlog):
1758 1758 for r in xrange(0, revlog.count()):
1759 1759 n = revlog.node(r)
1760 1760 if revlog.linkrev(n) in revset:
1761 1761 yield n
1762 1762
1763 1763 def changed_file_collector(changedfileset):
1764 1764 def collect_changed_files(clnode):
1765 1765 c = cl.read(clnode)
1766 1766 for fname in c[3]:
1767 1767 changedfileset[fname] = 1
1768 1768 return collect_changed_files
1769 1769
1770 1770 def lookuprevlink_func(revlog):
1771 1771 def lookuprevlink(n):
1772 1772 return cl.node(revlog.linkrev(n))
1773 1773 return lookuprevlink
1774 1774
1775 1775 def gengroup():
1776 1776 # construct a list of all changed files
1777 1777 changedfiles = {}
1778 1778
1779 1779 for chnk in cl.group(nodes, identity,
1780 1780 changed_file_collector(changedfiles)):
1781 1781 yield chnk
1782 1782 changedfiles = changedfiles.keys()
1783 1783 changedfiles.sort()
1784 1784
1785 1785 mnfst = self.manifest
1786 1786 nodeiter = gennodelst(mnfst)
1787 1787 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1788 1788 yield chnk
1789 1789
1790 1790 for fname in changedfiles:
1791 1791 filerevlog = self.file(fname)
1792 1792 nodeiter = gennodelst(filerevlog)
1793 1793 nodeiter = list(nodeiter)
1794 1794 if nodeiter:
1795 1795 yield changegroup.genchunk(fname)
1796 1796 lookup = lookuprevlink_func(filerevlog)
1797 1797 for chnk in filerevlog.group(nodeiter, lookup):
1798 1798 yield chnk
1799 1799
1800 1800 yield changegroup.closechunk()
1801 1801
1802 1802 if nodes:
1803 1803 self.hook('outgoing', node=hex(nodes[0]), source=source)
1804 1804
1805 1805 return util.chunkbuffer(gengroup())
1806 1806
1807 1807 def addchangegroup(self, source, srctype, url):
1808 1808 """add changegroup to repo.
1809 1809
1810 1810 return values:
1811 1811 - nothing changed or no source: 0
1812 1812 - more heads than before: 1+added heads (2..n)
1813 1813 - fewer heads than before: -1-removed heads (-2..-n)
1814 1814 - number of heads stays the same: 1
1815 1815 """
1816 1816 def csmap(x):
1817 1817 self.ui.debug(_("add changeset %s\n") % short(x))
1818 1818 return cl.count()
1819 1819
1820 1820 def revmap(x):
1821 1821 return cl.rev(x)
1822 1822
1823 1823 if not source:
1824 1824 return 0
1825 1825
1826 1826 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1827 1827
1828 1828 changesets = files = revisions = 0
1829 1829
1830 1830 # write changelog data to temp files so concurrent readers will not see
1831 1831 # an inconsistent view
1832 1832 cl = self.changelog
1833 1833 cl.delayupdate()
1834 1834 oldheads = len(cl.heads())
1835 1835
1836 1836 tr = self.transaction()
1837 1837 try:
1838 1838 trp = weakref.proxy(tr)
1839 1839 # pull off the changeset group
1840 1840 self.ui.status(_("adding changesets\n"))
1841 1841 cor = cl.count() - 1
1842 1842 chunkiter = changegroup.chunkiter(source)
1843 1843 if cl.addgroup(chunkiter, csmap, trp, 1) is None:
1844 1844 raise util.Abort(_("received changelog group is empty"))
1845 1845 cnr = cl.count() - 1
1846 1846 changesets = cnr - cor
1847 1847
1848 1848 # pull off the manifest group
1849 1849 self.ui.status(_("adding manifests\n"))
1850 1850 chunkiter = changegroup.chunkiter(source)
1851 1851 # no need to check for empty manifest group here:
1852 1852 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1853 1853 # no new manifest will be created and the manifest group will
1854 1854 # be empty during the pull
1855 1855 self.manifest.addgroup(chunkiter, revmap, trp)
1856 1856
1857 1857 # process the files
1858 1858 self.ui.status(_("adding file changes\n"))
1859 1859 while 1:
1860 1860 f = changegroup.getchunk(source)
1861 1861 if not f:
1862 1862 break
1863 1863 self.ui.debug(_("adding %s revisions\n") % f)
1864 1864 fl = self.file(f)
1865 1865 o = fl.count()
1866 1866 chunkiter = changegroup.chunkiter(source)
1867 1867 if fl.addgroup(chunkiter, revmap, trp) is None:
1868 1868 raise util.Abort(_("received file revlog group is empty"))
1869 1869 revisions += fl.count() - o
1870 1870 files += 1
1871 1871
1872 1872 # make changelog see real files again
1873 1873 cl.finalize(trp)
1874 1874
1875 1875 newheads = len(self.changelog.heads())
1876 1876 heads = ""
1877 1877 if oldheads and newheads != oldheads:
1878 1878 heads = _(" (%+d heads)") % (newheads - oldheads)
1879 1879
1880 1880 self.ui.status(_("added %d changesets"
1881 1881 " with %d changes to %d files%s\n")
1882 1882 % (changesets, revisions, files, heads))
1883 1883
1884 1884 if changesets > 0:
1885 1885 self.hook('pretxnchangegroup', throw=True,
1886 1886 node=hex(self.changelog.node(cor+1)), source=srctype,
1887 1887 url=url)
1888 1888
1889 1889 tr.close()
1890 1890 finally:
1891 1891 del tr
1892 1892
1893 1893 if changesets > 0:
1894 1894 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1895 1895 source=srctype, url=url)
1896 1896
1897 1897 for i in xrange(cor + 1, cnr + 1):
1898 1898 self.hook("incoming", node=hex(self.changelog.node(i)),
1899 1899 source=srctype, url=url)
1900 1900
1901 1901 # never return 0 here:
1902 1902 if newheads < oldheads:
1903 1903 return newheads - oldheads - 1
1904 1904 else:
1905 1905 return newheads - oldheads + 1
1906 1906
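
The return value folds two pieces of information into one integer, as the docstring above describes: the sign says whether heads were added or removed, the magnitude encodes the size of the change, and 0 stays reserved for "nothing happened". A small standalone restatement of that arithmetic (the function name is illustrative):

def changegroup_result(oldheads, newheads):
    if newheads < oldheads:
        return newheads - oldheads - 1   # -2 .. -n: heads went away
    return newheads - oldheads + 1       # 1: unchanged, 2 .. n: new heads

print(changegroup_result(1, 1))   # 1   same number of heads
print(changegroup_result(1, 3))   # 3   two new heads
print(changegroup_result(3, 1))   # -3  two heads merged away
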
1907 1907
1908 1908 def stream_in(self, remote):
1909 1909 fp = remote.stream_out()
1910 1910 l = fp.readline()
1911 1911 try:
1912 1912 resp = int(l)
1913 1913 except ValueError:
1914 1914 raise util.UnexpectedOutput(
1915 1915 _('Unexpected response from remote server:'), l)
1916 1916 if resp == 1:
1917 1917 raise util.Abort(_('operation forbidden by server'))
1918 1918 elif resp == 2:
1919 1919 raise util.Abort(_('locking the remote repository failed'))
1920 1920 elif resp != 0:
1921 1921 raise util.Abort(_('the server sent an unknown error code'))
1922 1922 self.ui.status(_('streaming all changes\n'))
1923 1923 l = fp.readline()
1924 1924 try:
1925 1925 total_files, total_bytes = map(int, l.split(' ', 1))
1926 1926 except (ValueError, TypeError):
1927 1927 raise util.UnexpectedOutput(
1928 1928 _('Unexpected response from remote server:'), l)
1929 1929 self.ui.status(_('%d files to transfer, %s of data\n') %
1930 1930 (total_files, util.bytecount(total_bytes)))
1931 1931 start = time.time()
1932 1932 for i in xrange(total_files):
1933 1933 # XXX doesn't support '\n' or '\r' in filenames
1934 1934 l = fp.readline()
1935 1935 try:
1936 1936 name, size = l.split('\0', 1)
1937 1937 size = int(size)
1938 1938 except (ValueError, TypeError):
1939 1939 raise util.UnexpectedOutput(
1940 1940 _('Unexpected response from remote server:'), l)
1941 1941 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1942 1942 ofp = self.sopener(name, 'w')
1943 1943 for chunk in util.filechunkiter(fp, limit=size):
1944 1944 ofp.write(chunk)
1945 1945 ofp.close()
1946 1946 elapsed = time.time() - start
1947 1947 if elapsed <= 0:
1948 1948 elapsed = 0.001
1949 1949 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1950 1950 (util.bytecount(total_bytes), elapsed,
1951 1951 util.bytecount(total_bytes / elapsed)))
1952 1952 self.invalidate()
1953 1953 return len(self.heads()) + 1
1954 1954
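
stream_in() consumes a simple framing: a status line, a "<files> <bytes>" summary line, then for each file a "<name>\0<size>" line followed by exactly <size> bytes of data. Below is a standalone sketch that builds and re-parses such a stream in memory; the file names and sizes are made up, and the real code reads the stream from the remote and writes each file into the store instead.

import io

def build_stream(files):
    # files: list of (name, data) pairs, both bytes
    total = sum(len(data) for _, data in files)
    out = io.BytesIO()
    out.write(b"0\n")                           # 0 = request accepted
    out.write(b"%d %d\n" % (len(files), total))
    for name, data in files:
        out.write(b"%s\0%d\n" % (name, len(data)))
        out.write(data)
    return io.BytesIO(out.getvalue())

def parse_stream(fp):
    assert int(fp.readline()) == 0
    nfiles, _total = map(int, fp.readline().split(b" ", 1))
    for _ in range(nfiles):
        name, size = fp.readline().rstrip(b"\n").split(b"\0", 1)
        yield name, fp.read(int(size))

fp = build_stream([(b"00changelog.i", b"x" * 5), (b"00manifest.i", b"y" * 3)])
for name, data in parse_stream(fp):
    print(name, len(data))
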
1955 1955 def clone(self, remote, heads=[], stream=False):
1956 1956 '''clone remote repository.
1957 1957
1958 1958 keyword arguments:
1959 1959 heads: list of revs to clone (forces use of pull)
1960 1960 stream: use streaming clone if possible'''
1961 1961
1962 1962 # now, all clients that can request uncompressed clones can
1963 1963 # read repo formats supported by all servers that can serve
1964 1964 # them.
1965 1965
1966 1966 # if revlog format changes, client will have to check version
1967 1967 # and format flags on "stream" capability, and use
1968 1968 # uncompressed only if compatible.
1969 1969
1970 1970 if stream and not heads and remote.capable('stream'):
1971 1971 return self.stream_in(remote)
1972 1972 return self.pull(remote, heads)
1973 1973
1974 1974 # used to avoid circular references so destructors work
1975 1975 def aftertrans(files):
1976 1976 renamefiles = [tuple(t) for t in files]
1977 1977 def a():
1978 1978 for src, dest in renamefiles:
1979 1979 util.rename(src, dest)
1980 1980 return a
1981 1981
1982 1982 def instance(ui, path, create):
1983 1983 return localrepository(ui, util.drop_scheme('file', path), create)
1984 1984
1985 1985 def islocal(path):
1986 1986 return True