##// END OF EJS Templates
Merge with crew.
Bryan O'Sullivan -
r4494:649dd249 merge default
parent child Browse files
Show More
@@ -1,2300 +1,2300 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util, changegroup
34 34 import os, sys, re, errno
35 35
# qclone and qversion do not need a repository to run
commands.norepo += " qclone qversion"

# Patch names look like unix file names.
# They must be joinable with queue directory and result in the patch path.
normname = util.normpath
41 41
class statusentry:
    """One entry of the status file: an applied patch.

    Records the changeset hash the patch produced (rev) and the patch
    name.  Can be built either from a single 'rev:name' line or from
    the two parts; a line without a ':' yields rev = name = None.
    """
    def __init__(self, rev, name=None):
        if name:
            self.rev, self.name = rev, name
        else:
            parts = rev.split(':', 1)
            if len(parts) != 2:
                self.rev, self.name = None, None
            else:
                self.rev, self.name = parts

    def __str__(self):
        # inverse of the parsing constructor: the status-file line
        return ':'.join([self.rev, self.name])
55 55
56 56 class queue:
    def __init__(self, ui, path, patchdir=None):
        """Load the patch queue living under path.

        ui: Mercurial ui object for output/configuration.
        path: base directory (normally the repository's .hg dir); the
        queue itself lives in path/patches unless patchdir overrides it.
        """
        self.basepath = path
        self.path = patchdir or os.path.join(path, "patches")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.applied = []          # statusentry list of applied patches
        self.full_series = []      # raw series file lines, comments included
        self.applied_dirty = 0     # status file needs rewriting
        self.series_dirty = 0      # series file needs rewriting
        self.series_path = "series"
        self.status_path = "status"
        self.guards_path = "guards"
        self.active_guards = None  # loaded lazily by active()
        self.guards_dirty = False
        self._diffopts = None      # created lazily by diffopts()

        # parse the series file (patch names + guard comments) if present
        if os.path.exists(self.join(self.series_path)):
            self.full_series = self.opener(self.series_path).read().splitlines()
            self.parse_series()

        # read the status file (one 'rev:name' line per applied patch)
        if os.path.exists(self.join(self.status_path)):
            lines = self.opener(self.status_path).read().splitlines()
            self.applied = [statusentry(l) for l in lines]
80 80
    def diffopts(self):
        """Return this queue's diff options, creating them on first use."""
        if self._diffopts is None:
            self._diffopts = patch.diffopts(self.ui)
        return self._diffopts
85 85
    def join(self, *p):
        """Join path components *p below the queue directory."""
        return os.path.join(self.path, *p)
88 88
89 89 def find_series(self, patch):
90 90 pre = re.compile("(\s*)([^#]+)")
91 91 index = 0
92 92 for l in self.full_series:
93 93 m = pre.match(l)
94 94 if m:
95 95 s = m.group(2)
96 96 s = s.rstrip()
97 97 if s == patch:
98 98 return index
99 99 index += 1
100 100 return None
101 101
102 102 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 103
    def parse_series(self):
        """Rebuild self.series and self.series_guards from full_series.

        self.series receives the patch names with comments stripped;
        self.series_guards receives, for each patch, the list of guards
        found in its trailing comment.  Aborts on a duplicate patch name.
        """
        self.series = []
        self.series_guards = []
        for l in self.full_series:
            h = l.find('#')
            if h == -1:
                # no comment on this line
                patch = l
                comment = ''
            elif h == 0:
                # comment-only line: keeps its index but defines no patch
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.series_path)))
                self.series.append(patch)
                self.series_guards.append(self.guard_re.findall(comment))
124 124
125 125 def check_guard(self, guard):
126 126 bad_chars = '# \t\r\n\f'
127 127 first = guard[0]
128 128 for c in '-+':
129 129 if first == c:
130 130 return (_('guard %r starts with invalid character: %r') %
131 131 (guard, c))
132 132 for c in bad_chars:
133 133 if c in guard:
134 134 return _('invalid character in guard %r: %r') % (guard, c)
135 135
    def set_active(self, guards):
        """Replace the active guard set with guards.

        Each name is validated (util.Abort on failure), duplicates are
        dropped, and the result is sorted; the guards file is marked
        dirty so save_dirty() will write it out.
        """
        for guard in guards:
            bad = self.check_guard(guard)
            if bad:
                raise util.Abort(bad)
        # deduplicate (py2: dict.keys() returns a list), then sort
        guards = dict.fromkeys(guards).keys()
        guards.sort()
        self.ui.debug('active guards: %s\n' % ' '.join(guards))
        self.active_guards = guards
        self.guards_dirty = True
146 146
    def active(self):
        """Return the list of active guards, loading it from the guards
        file on first use.

        Invalid entries in the file are warned about and skipped rather
        than aborting.
        """
        if self.active_guards is None:
            self.active_guards = []
            try:
                guards = self.opener(self.guards_path).read().split()
            except IOError, err:
                # a missing guards file simply means no active guards
                if err.errno != errno.ENOENT: raise
                guards = []
            for i, guard in enumerate(guards):
                bad = self.check_guard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guards_path), i + 1, bad))
                else:
                    self.active_guards.append(guard)
        return self.active_guards
163 163
    def set_guards(self, idx, guards):
        """Attach guards (each of the form '+name' or '-name') to series
        entry idx, replacing any guards already on that line.

        Aborts if any guard is malformed; marks the series dirty.
        """
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.check_guard(g[1:])
            if bad:
                raise util.Abort(bad)
        # strip the old guard comments, then append the new ones
        drop = self.guard_re.sub('', self.full_series[idx])
        self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
        self.parse_series()
        self.series_dirty = True
177 177
178 178 def pushable(self, idx):
179 179 if isinstance(idx, str):
180 180 idx = self.series.index(idx)
181 181 patchguards = self.series_guards[idx]
182 182 if not patchguards:
183 183 return True, None
184 184 default = False
185 185 guards = self.active()
186 186 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 187 if exactneg:
188 188 return False, exactneg[0]
189 189 pos = [g for g in patchguards if g[0] == '+']
190 190 exactpos = [g for g in pos if g[1:] in guards]
191 191 if pos:
192 192 if exactpos:
193 193 return True, exactpos[0]
194 194 return False, pos
195 195 return True, ''
196 196
    def explain_pushable(self, idx, all_patches=False):
        """Report why series entry idx (index or name) is pushable or
        being skipped, based on pushable()'s reason value.

        With all_patches, pushable patches are reported too and output
        goes to ui.write; otherwise only skips are reported, to
        ui.warn, and only in verbose mode.
        """
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %r\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %r\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])
221 221
    def save_dirty(self):
        """Write out whichever of the status, series and guards files
        were modified in memory."""
        def write_list(items, path):
            # one item per line
            fp = self.opener(path, 'w')
            for i in items:
                print >> fp, i
            fp.close()
        if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
        if self.series_dirty: write_list(self.full_series, self.series_path)
        if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 231
    def readheaders(self, patch):
        """Parse the header section of patch file 'patch'.

        Returns (message, comments, user, date, haspatch):
        message  - commit message lines (subject prepended for
                   mail-style patches),
        comments - every pre-diff line, trailing diff leaders and
                   blanks stripped,
        user/date - from '# User'/'# Date' (hg export) or
                   'From:'/'Subject:' (mail) headers,
        haspatch - True only when an actual diff body was found
                   (diffstart > 1).
        """
        def eatdiff(lines):
            # drop trailing diff leader lines (diff/Index/=== separators)
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            # drop trailing blank lines
            while lines:
                l = lines[-1]
                if re.match('\s*$', l):
                    del lines[-1]
                else:
                    break

        pf = self.join(patch)
        message = []
        comments = []
        user = None
        date = None
        format = None      # None / "hgpatch" / "tag" / "tagdone"
        subject = None
        diffstart = 0      # 0: none seen, 1: '---' seen, 2: diff confirmed

        for line in file(pf):
            line = line.rstrip()
            if line.startswith('diff --git'):
                diffstart = 2
                break
            if diffstart:
                # after a '--- ' line, only a '+++ ' confirms a real diff
                if line.startswith('+++ '):
                    diffstart = 2
                break
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)
        return (message, comments, user, date, diffstart > 1)
308 308
    def removeundo(self, repo):
        """Remove the repository's undo file, if present.

        Called after operations that rewrite history; presumably this
        keeps a later 'hg rollback' from undoing past queue state.
        Failure to unlink is only warned about, not fatal.
        """
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError, inst:
            self.ui.warn('error removing undo: %s\n' % str(inst))
317 317
    def printdiff(self, repo, node1, node2=None, files=None,
                  fp=None, changes=None, opts={}):
        """Write the diff between node1 and node2, restricted to files,
        to fp (default stdout), using this queue's diff options."""
        fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)

        patch.diff(repo, node1, node2, fns, match=matchfn,
                   fp=fp, changes=changes, opts=self.diffopts())
324 324
    def mergeone(self, repo, mergeq, head, patch, rev, wlock):
        """Bring one patch over from mergeq, on top of head.

        First tries a plain apply; if that fails, the failed attempt is
        stripped and the patch's changeset rev from mergeq is merged in
        instead, after which the patch file is rewritten with the
        merged diff.  Returns (err, node) of the resulting changeset.
        """
        # first try just applying the patch
        (err, n) = self.apply(repo, [ patch ], update_status=False,
                              strict=True, merge=rev, wlock=wlock)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn("patch didn't work out, merging %s\n" % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head, wlock=wlock)
        self.strip(repo, n, update=False, backup='strip', wlock=wlock)

        ctx = repo.changectx(rev)
        ret = hg.merge(repo, rev, wlock=wlock)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        # commit the merge under the original patch's description/user
        n = repo.commit(None, ctx.description(), ctx.user(),
                        force=1, wlock=wlock)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        try:
            message, comments, user, date, patchfound = mergeq.readheaders(patch)
        except:
            raise util.Abort(_("unable to read %s") % patch)

        # rewrite our copy of the patch: original comments + merged diff
        patchf = self.opener(patch, "w")
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)
        self.printdiff(repo, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
363 363
    def qparents(self, repo, rev=None):
        """Return the node the patch queue sits on.

        Without rev: the first dirstate parent, or (at a merge) the
        last applied patch's node, or None if nothing is applied.
        With rev: whichever parent of rev is itself an applied patch,
        falling back to the first parent.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == revlog.nullid:
                return p1
            if len(self.applied) == 0:
                return None
            return revlog.bin(self.applied[-1].rev)
        pp = repo.changelog.parents(rev)
        if pp[1] != revlog.nullid:
            # rev is a merge: prefer the parent that is an applied patch
            arevs = [ x.rev for x in self.applied ]
            p0 = revlog.hex(pp[0])
            p1 = revlog.hex(pp[1])
            if p0 in arevs:
                return pp[0]
            if p1 in arevs:
                return pp[1]
        return pp[0]
382 382
    def mergepatch(self, repo, mergeq, series, wlock):
        """Pull each patch of series from mergeq into this queue,
        falling back to a merge when a clean apply fails.

        Returns (err, head) for the last patch processed; queue state
        is saved on success.
        """
        if len(self.applied) == 0:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
                            wlock=wlock)
            self.removeundo(repo)
            self.applied.append(statusentry(revlog.hex(n), pname))
            self.applied_dirty = 1

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn("patch %s does not exist\n" % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explain_pushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn("patch %s is not applied\n" % patch)
                return (1, None)
            rev = revlog.bin(info[1])
            (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
            if head:
                self.applied.append(statusentry(revlog.hex(head), patch))
                self.applied_dirty = 1
            if err:
                return (err, head)
        self.save_dirty()
        return (0, head)
422 422
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: file name of patch

        Returns (success, files, fuzz): files maps the paths touched,
        fuzz reports whether the patch applied with fuzz.  Failures are
        reported (tersely unless -v) rather than raised.'''
        files = {}
        try:
            fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
                               files=files)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn("patch failed, unable to continue (try -v)\n")
            return (False, files, False)

        return (True, files, fuzz)
437 437
438 438 def apply(self, repo, series, list=False, update_status=True,
439 439 strict=False, patchdir=None, merge=None, wlock=None,
440 440 all_files={}):
441 441 tr = repo.transaction()
442 442 try:
443 443 ret = self._apply(tr, repo, series, list, update_status,
444 444 strict, patchdir, merge, wlock,
445 445 all_files=all_files)
446 446 tr.close()
447 447 self.save_dirty()
448 448 return ret
449 449 except:
450 450 try:
451 451 tr.abort()
452 452 finally:
453 453 repo.reload()
454 454 repo.wreload()
455 455 raise
456 456
    def _apply(self, tr, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, wlock=None,
               all_files={}):
        """Worker for apply(): apply and commit each patch in series.

        Runs inside transaction tr.  all_files accumulates every file
        touched (note: mutable default, but in practice apply() always
        passes its own dict).  Guarded-off patches are skipped; an
        unreadable or failing patch stops the loop with err set.
        Returns (err, node) where node is the last commit created.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        if not wlock:
            wlock = repo.wlock()
        lock = repo.lock()
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.warn("applying %s\n" % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                message, comments, user, date, patchfound = self.readheaders(patchname)
            except:
                self.ui.warn("Unable to read %s\n" % patchname)
                err = 1
                break

            if not message:
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            (patcherr, files, fuzz) = self.patch(repo, pf)
            all_files.update(files)
            patcherr = not patcherr

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.exists(repo.dirstate.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                repo.dirstate.update(repo.dirstate.filterfiles(removed), 'r')
                repo.dirstate.update(repo.dirstate.filterfiles(merged), 'm')
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)
            files = patch.updatedir(self.ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, force=1, lock=lock,
                            wlock=wlock)

            if n == None:
                raise util.Abort(_("repo commit failed"))

            if update_status:
                self.applied.append(statusentry(revlog.hex(n), patchname))

            if patcherr:
                # an empty patch still commits cleanly; real rejects stop us
                if not patchfound:
                    self.ui.warn("patch %s is empty\n" % patchname)
                    err = 0
                else:
                    self.ui.warn("patch failed, rejects left in working dir\n")
                    err = 1
                break

            if fuzz and strict:
                self.ui.warn("fuzz found when applying patch, stopping\n")
                err = 1
                break
        self.removeundo(repo)
        return (err, n)
532 532
    def delete(self, repo, patches, opts):
        """Remove patches from the series (qdelete).

        Named patches must be known and unapplied.  With opts['rev'],
        the applied patches corresponding to those revisions (counted
        from the bottom of the stack) are dropped from queue management
        instead — their changesets stay in history.  Patch files are
        deleted unless opts['keep'].
        """
        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        appliedbase = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts['rev'])
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            # revisions must match the bottom of the applied stack, in order
            for rev in revs:
                if appliedbase >= len(self.applied):
                    raise util.Abort(_("revision %d is not managed") % rev)

                base = revlog.bin(self.applied[appliedbase].rev)
                node = repo.changelog.node(rev)
                if node != base:
                    raise util.Abort(_("cannot delete revision %d above "
                                       "applied patches") % rev)
                realpatches.append(self.applied[appliedbase].name)
                appliedbase += 1

        if not opts.get('keep'):
            r = self.qrepo()
            if r:
                r.remove(realpatches, True)
            else:
                for p in realpatches:
                    os.unlink(self.join(p))

        if appliedbase:
            del self.applied[:appliedbase]
            self.applied_dirty = 1
        # delete series entries from the bottom up so indices stay valid
        indices = [self.find_series(p) for p in realpatches]
        indices.sort()
        for i in indices[-1::-1]:
            del self.full_series[i]
        self.parse_series()
        self.series_dirty = 1
580 580
    def check_toppatch(self, repo):
        """Abort unless a dirstate parent is the top applied patch.

        Returns the top patch's node, or None when no patch is applied.
        """
        if len(self.applied) > 0:
            top = revlog.bin(self.applied[-1].rev)
            pp = repo.dirstate.parents()
            if top not in pp:
                raise util.Abort(_("queue top not at same revision as working directory"))
            return top
        return None
    def check_localchanges(self, repo, force=False, refresh=True):
        """Abort if the working directory has uncommitted changes,
        unless force is set.

        refresh only selects the error wording (suggesting qrefresh).
        Returns the (modified, added, removed, deleted) file lists.
        """
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            if not force:
                if refresh:
                    raise util.Abort(_("local changes found, refresh first"))
                else:
                    raise util.Abort(_("local changes found"))
        return m, a, r, d
    def new(self, repo, patch, msg=None, force=None):
        """Create a new patch named 'patch' on top of the stack (qnew).

        With force, pending local changes are folded into the new
        patch.  msg (if any) becomes the commit/patch message.  The
        patch file is created, inserted into the series, and pushed.
        """
        if os.path.exists(self.join(patch)):
            raise util.Abort(_('patch "%s" already exists') % patch)
        m, a, r, d = self.check_localchanges(repo, force)
        commitfiles = m + a + r
        self.check_toppatch(repo)
        wlock = repo.wlock()
        insert = self.full_series_end()
        if msg:
            n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
                            wlock=wlock)
        else:
            n = repo.commit(commitfiles,
                            "New patch: %s" % patch, force=True, wlock=wlock)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        self.full_series[insert:insert] = [patch]
        self.applied.append(statusentry(revlog.hex(n), patch))
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        p = self.opener(patch, "w")
        if msg:
            msg = msg + "\n"
            p.write(msg)
        p.close()
        # drop our wlock reference before working on the queue repo
        # (presumably releases the lock via refcounting — verify)
        wlock = None
        r = self.qrepo()
        if r: r.add([patch])
        if commitfiles:
            self.refresh(repo, short=True)
        self.removeundo(repo)
630 630
    def strip(self, repo, rev, update=True, backup="all", wlock=None):
        """Remove rev and its descendants from the repository.

        update: move the working directory off the stripped revisions
        first.  backup: 'all' also bundles everything stripped;
        unrelated branches above rev are always bundled and re-added
        afterwards, and that bundle is kept on disk only when backup
        is 'strip'.
        """
        def limitheads(chlog, stop):
            """return the list of all nodes that have no children"""
            p = {}
            h = []
            stoprev = 0
            if stop in chlog.nodemap:
                stoprev = chlog.rev(stop)

            # walk newest-to-oldest; a node never seen as a parent is a head
            for r in xrange(chlog.count() - 1, -1, -1):
                n = chlog.node(r)
                if n not in p:
                    h.append(n)
                if n == stop:
                    break
                if r < stoprev:
                    break
                for pn in chlog.parents(n):
                    p[pn] = 1
            return h

        def bundle(cg):
            # write changegroup cg to a backup bundle; returns its file name
            backupdir = repo.join("strip-backup")
            if not os.path.isdir(backupdir):
                os.mkdir(backupdir)
            name = os.path.join(backupdir, "%s" % revlog.short(rev))
            name = savename(name)
            self.ui.warn("saving bundle to %s\n" % name)
            return changegroup.writebundle(cg, name, "HG10BZ")

        def stripall(revnum):
            # truncate every filelog touched at or after revnum back to
            # its state in rev's manifest
            mm = repo.changectx(rev).manifest()
            seen = {}

            for x in xrange(revnum, repo.changelog.count()):
                for f in repo.changectx(x).files():
                    if f in seen:
                        continue
                    seen[f] = 1
                    if f in mm:
                        filerev = mm[f]
                    else:
                        filerev = 0
                    seen[f] = filerev
            # we go in two steps here so the strip loop happens in a
            # sensible order. When stripping many files, this helps keep
            # our disk access patterns under control.
            seen_list = seen.keys()
            seen_list.sort()
            for f in seen_list:
                ff = repo.file(f)
                filerev = seen[f]
                if filerev != 0:
                    if filerev in ff.nodemap:
                        filerev = ff.rev(filerev)
                    else:
                        filerev = 0
                ff.strip(filerev, revnum)

        if not wlock:
            wlock = repo.wlock()
        lock = repo.lock()
        chlog = repo.changelog
        # TODO delete the undo files, and handle undo of merge sets
        pp = chlog.parents(rev)
        revnum = chlog.rev(rev)

        if update:
            self.check_localchanges(repo, refresh=False)
            urev = self.qparents(repo, rev)
            hg.clean(repo, urev, wlock=wlock)
            repo.dirstate.write()

        # save is a list of all the branches we are truncating away
        # that we actually want to keep. changegroup will be used
        # to preserve them and add them back after the truncate
        saveheads = []
        savebases = {}

        heads = limitheads(chlog, rev)
        seen = {}

        # search through all the heads, finding those where the revision
        # we want to strip away is an ancestor. Also look for merges
        # that might be turned into new heads by the strip.
        while heads:
            h = heads.pop()
            n = h
            while True:
                seen[n] = 1
                pp = chlog.parents(n)
                if pp[1] != revlog.nullid:
                    for p in pp:
                        if chlog.rev(p) > revnum and p not in seen:
                            heads.append(p)
                if pp[0] == revlog.nullid:
                    break
                if chlog.rev(pp[0]) < revnum:
                    break
                n = pp[0]
                if n == rev:
                    break
            r = chlog.reachable(h, rev)
            if rev not in r:
                # h does not descend from rev: keep this branch
                saveheads.append(h)
                for x in r:
                    if chlog.rev(x) > revnum:
                        savebases[x] = 1

        # create a changegroup for all the branches we need to keep
        if backup == "all":
            backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
            bundle(backupch)
        if saveheads:
            backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
            chgrpfile = bundle(backupch)

        stripall(revnum)

        change = chlog.read(rev)
        chlog.strip(revnum, revnum)
        repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
        self.removeundo(repo)
        if saveheads:
            # put the preserved branches back
            self.ui.status("adding branch\n")
            commands.unbundle(self.ui, repo, "file:%s" % chgrpfile,
                              update=False)
            if backup != "strip":
                os.unlink(chgrpfile)
760 760
761 761 def isapplied(self, patch):
762 762 """returns (index, rev, patch)"""
763 763 for i in xrange(len(self.applied)):
764 764 a = self.applied[i]
765 765 if a.name == patch:
766 766 return (i, a.rev, a.name)
767 767 return None
768 768
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve patch (exact name, unique substring, numeric index,
        'qtip'/'qbase', or name[-+]offset) to an exact series name.

        Returns None for a None argument; aborts when nothing matches.
        """
        patch = patch and str(patch)

        def partial_name(s):
            # exact name, unique substring, or the qtip/qbase aliases;
            # ambiguous substrings are reported and yield None
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if len(self.series) > 0 and len(self.applied) > 0:
                if s == 'qtip':
                    return self.series[self.series_end(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None
        if patch == None:
            return None

        # we don't want to return a partial match until we make
        # sure the file name passed in does not exist (checked below)
        res = partial_name(patch)
        if res and res == patch:
            return res

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except(ValueError, OverflowError):
                pass
            else:
                if sno < len(self.series):
                    return self.series[sno]
            if not strict:
                # return any partial match made above
                if res:
                    return res
                minus = patch.rfind('-')
                if minus >= 0:
                    res = partial_name(patch[:minus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[minus+1:] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                plus = patch.rfind('+')
                if plus >= 0:
                    res = partial_name(patch[:plus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[plus+1:] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
841 841
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None, wlock=None):
        """Apply patches up to and including patch (qpush); with no
        patch, apply just the next one.

        force skips the local-changes check; mergeq enables the merge
        strategy.  On failure the working directory is cleaned up
        before re-raising.  Returns the apply error status.
        """
        if not wlock:
            wlock = repo.wlock()
        patch = self.lookup(patch)
        # Suppose our series file is: A B C and the current 'top' patch is B.
        # qpush C should be performed (moving forward)
        # qpush B is a NOP (no change)
        # qpush A is an error (can't go backwards with qpush)
        if patch:
            info = self.isapplied(patch)
            if info:
                if info[0] < len(self.applied) - 1:
                    raise util.Abort(_("cannot push to a previous patch: %s") %
                                     patch)
                if info[0] < len(self.series) - 1:
                    self.ui.warn(_('qpush: %s is already at the top\n') % patch)
                else:
                    self.ui.warn(_('all patches are currently applied\n'))
                return

        # Following the above example, starting at 'top' of B:
        # qpush should be performed (pushes C), but a subsequent qpush without
        # an argument is an error (nothing to apply). This allows a loop
        # of "...while hg qpush..." to work as it detects an error when done
        if self.series_end() == len(self.series):
            self.ui.warn(_('patch series already fully applied\n'))
            return 1
        if not force:
            self.check_localchanges(repo)

        self.applied_dirty = 1;
        start = self.series_end()
        if start > 0:
            self.check_toppatch(repo)
        if not patch:
            patch = self.series[start]
            end = start + 1
        else:
            end = self.series.index(patch, start) + 1
        s = self.series[start:end]
        all_files = {}
        try:
            if mergeq:
                ret = self.mergepatch(repo, mergeq, s, wlock)
            else:
                ret = self.apply(repo, s, list, wlock=wlock,
                                 all_files=all_files)
        except:
            self.ui.warn(_('cleaning up working directory...'))
            node = repo.dirstate.parents()[0]
            hg.revert(repo, node, None, wlock)
            unknown = repo.status(wlock=wlock)[4]
            # only remove unknown files that we know we touched or
            # created while patching
            for f in unknown:
                if f in all_files:
                    util.unlink(repo.wjoin(f))
            self.ui.warn(_('done\n'))
            raise
        top = self.applied[-1].name
        if ret[0]:
            self.ui.write("Errors during apply, please fix and refresh %s\n" %
                          top)
        else:
            self.ui.write("Now at: %s\n" % top)
        return ret[0]
909 909
    def pop(self, repo, patch=None, force=False, update=True, all=False,
            wlock=None):
        """Pop patches off the applied stack, down to and including
        patch (qpop); with no patch, pop just the top one, or with
        all, pop everything.

        update (the default) also restores the working directory to
        the new queue parent; force skips the local-changes check.
        """
        def getfile(f, rev):
            # overwrite the working copy of f with its contents at rev
            t = repo.file(f).read(rev)
            repo.wfile(f, "w").write(t)

        if not wlock:
            wlock = repo.wlock()
        if patch:
            # index, rev, patch
            info = self.isapplied(patch)
            if not info:
                patch = self.lookup(patch)
            info = self.isapplied(patch)
            if not info:
                raise util.Abort(_("patch %s is not applied") % patch)

        if len(self.applied) == 0:
            # Allow qpop -a to work repeatedly,
            # but not qpop without an argument
            self.ui.warn(_("no patches applied\n"))
            return not all

        if not update:
            # if a dirstate parent is an applied patch, we must update
            # anyway or the strip below would orphan the working dir
            parents = repo.dirstate.parents()
            rr = [ revlog.bin(x.rev) for x in self.applied ]
            for p in parents:
                if p in rr:
                    self.ui.warn("qpop: forcing dirstate update\n")
                    update = True

        if not force and update:
            self.check_localchanges(repo)

        self.applied_dirty = 1;
        end = len(self.applied)
        if not patch:
            if all:
                popi = 0
            else:
                popi = len(self.applied) - 1
        else:
            popi = info[0] + 1
            if popi >= end:
                self.ui.warn("qpop: %s is already at the top\n" % patch)
                return
        info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]

        start = info[0]
        rev = revlog.bin(info[1])

        # we know there are no local changes, so we can make a simplified
        # form of hg.update.
        if update:
            top = self.check_toppatch(repo)
            qp = self.qparents(repo, rev)
            changes = repo.changelog.read(qp)
            mmap = repo.manifest.read(changes[0])
            m, a, r, d, u = repo.status(qp, top)[:5]
            if d:
                raise util.Abort("deletions found between repo revs")
            for f in m:
                getfile(f, mmap[f])
            for f in r:
                getfile(f, mmap[f])
                util.set_exec(repo.wjoin(f), mmap.execf(f))
            repo.dirstate.update(m + r, 'n')
            for f in a:
                # files added by the popped patches disappear
                try:
                    os.unlink(repo.wjoin(f))
                except OSError, e:
                    if e.errno != errno.ENOENT:
                        raise
                try: os.removedirs(os.path.dirname(repo.wjoin(f)))
                except: pass
            if a:
                repo.dirstate.forget(a)
            repo.dirstate.setparents(qp, revlog.nullid)
        self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
        del self.applied[start:end]
        if len(self.applied):
            self.ui.write("Now at: %s\n" % self.applied[-1].name)
        else:
            self.ui.write("Patch queue now empty\n")
994 994
995 995 def diff(self, repo, pats, opts):
996 996 top = self.check_toppatch(repo)
997 997 if not top:
998 998 self.ui.write("No patches applied\n")
999 999 return
1000 1000 qp = self.qparents(repo, top)
1001 1001 if opts.get('git'):
1002 1002 self.diffopts().git = True
1003 1003 self.printdiff(repo, qp, files=pats, opts=opts)
1004 1004
    def refresh(self, repo, pats=None, **opts):
        """Regenerate the topmost applied patch from the working directory.

        Rewrites the patch file (header comments plus diff) and replaces
        the corresponding changeset.  When the patch is the repository
        tip, the refresh is done in place by updating the dirstate and
        recommitting; otherwise the queue is popped and re-pushed.
        Returns 1 when no patch is applied.
        """
        if len(self.applied) == 0:
            self.ui.write("No patches applied\n")
            return 1
        wlock = repo.wlock()
        self.check_toppatch(repo)
        (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
        top = revlog.bin(top)
        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)
        message, comments, user, date, patchfound = self.readheaders(patchfn)

        patchf = self.opener(patchfn, "w")
        msg = opts.get('msg', '').rstrip()
        if msg:
            if comments:
                # Remove existing message.
                ci = 0
                subj = None
                for mi in xrange(len(message)):
                    if comments[ci].lower().startswith('subject: '):
                        subj = comments[ci][9:]
                    while message[mi] != comments[ci] and message[mi] != subj:
                        ci += 1
                    del comments[ci]
            comments.append(msg)
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)

        if opts.get('git'):
            self.diffopts().git = True
        fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
        tip = repo.changelog.tip()
        if top == tip:
            # if the top of our patch queue is also the tip, there is an
            # optimization here.  We update the dirstate in place and strip
            # off the tip commit.  Then just commit the current directory
            # tree.  We can also send repo.commit the list of files
            # changed to speed up the diff
            #
            # in short mode, we only diff the files included in the
            # patch already
            #
            # this should really read:
            #   mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
            # but we do it backwards to take advantage of manifest/chlog
            # caching against the next repo.status call
            #
            mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
            changes = repo.changelog.read(tip)
            man = repo.manifest.read(changes[0])
            aaa = aa[:]
            if opts.get('short'):
                filelist = mm + aa + dd
            else:
                filelist = None
            m, a, r, d, u = repo.status(files=filelist)[:5]

            # we might end up with files that were added between tip and
            # the dirstate parent, but then changed in the local dirstate.
            # in this case, we want them to only show up in the added section
            for x in m:
                if x not in aa:
                    mm.append(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch.  In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    del dd[dd.index(x)]
                    mm.append(x)
                else:
                    aa.append(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    del aa[aa.index(x)]
                    forget.append(x)
                    continue
                elif x in mm:
                    del mm[mm.index(x)]
                dd.append(x)

            m = util.unique(mm)
            r = util.unique(dd)
            a = util.unique(aa)
            # c mirrors the repo.status tuple shape expected by patch.diff
            c = [filter(matchfn, l) for l in (m, a, r, [], u)]
            filelist = util.unique(c[0] + c[1] + c[2])
            patch.diff(repo, patchparent, files=filelist, match=matchfn,
                       fp=patchf, changes=c, opts=self.diffopts())
            patchf.close()

            repo.dirstate.setparents(*cparents)
            copies = {}
            for dst in a:
                src = repo.dirstate.copied(dst)
                if src is None:
                    continue
                copies.setdefault(src, []).append(dst)
            repo.dirstate.update(a, 'a')
            # remember the copies between patchparent and tip
            # this may be slow, so don't do it if we're not tracking copies
            if self.diffopts().git:
                for dst in aaa:
                    f = repo.file(dst)
                    src = f.renamed(man[dst])
                    if src:
                        copies[src[0]] = copies.get(dst, [])
                        if dst in a:
                            copies[src[0]].append(dst)
                    # we can't copy a file created by the patch itself
                    if dst in copies:
                        del copies[dst]
            for src, dsts in copies.iteritems():
                for dst in dsts:
                    repo.dirstate.copy(src, dst)
            repo.dirstate.update(r, 'r')
            # if the patch excludes a modified file, mark that file with mtime=0
            # so status can see it.
            mm = []
            for i in xrange(len(m)-1, -1, -1):
                if not matchfn(m[i]):
                    mm.append(m[i])
                    del m[i]
            repo.dirstate.update(m, 'n')
            repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
            repo.dirstate.forget(forget)

            if not msg:
                if not message:
                    message = "patch queue: %s\n" % patchfn
                else:
                    message = "\n".join(message)
            else:
                message = msg

            self.strip(repo, top, update=False, backup='strip', wlock=wlock)
            n = repo.commit(filelist, message, changes[1], match=matchfn,
                            force=1, wlock=wlock)
            self.applied[-1] = statusentry(revlog.hex(n), patchfn)
            self.applied_dirty = 1
            self.removeundo(repo)
        else:
            # slow path: regenerate the diff, then pop and re-push so the
            # changeset graph is rebuilt with the new patch contents
            self.printdiff(repo, patchparent, fp=patchf)
            patchf.close()
            added = repo.status()[1]
            for a in added:
                f = repo.wjoin(a)
                try:
                    os.unlink(f)
                except OSError, e:
                    if e.errno != errno.ENOENT:
                        raise
                try: os.removedirs(os.path.dirname(f))
                except: pass
            # forget the file copies in the dirstate
            # push should readd the files later on
            repo.dirstate.forget(added)
            self.pop(repo, force=True, wlock=wlock)
            self.push(repo, force=True, wlock=wlock)
1168 1168
1169 1169 def init(self, repo, create=False):
1170 1170 if not create and os.path.isdir(self.path):
1171 1171 raise util.Abort(_("patch queue directory already exists"))
1172 1172 try:
1173 1173 os.mkdir(self.path)
1174 1174 except OSError, inst:
1175 1175 if inst.errno != errno.EEXIST or not create:
1176 1176 raise
1177 1177 if create:
1178 1178 return self.qrepo(create=True)
1179 1179
1180 1180 def unapplied(self, repo, patch=None):
1181 1181 if patch and patch not in self.series:
1182 1182 raise util.Abort(_("patch %s is not in series file") % patch)
1183 1183 if not patch:
1184 1184 start = self.series_end()
1185 1185 else:
1186 1186 start = self.series.index(patch) + 1
1187 1187 unapplied = []
1188 1188 for i in xrange(start, len(self.series)):
1189 1189 pushable, reason = self.pushable(i)
1190 1190 if pushable:
1191 1191 unapplied.append((i, self.series[i]))
1192 1192 self.explain_pushable(i)
1193 1193 return unapplied
1194 1194
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print (a slice of) the patch series.

        start/length select the slice.  status restricts output to one
        state: 'A' applied, 'U' unapplied-but-pushable, 'G' guarded.
        With missing=True, instead list files in the patch directory
        that are not part of the series.  summary appends the first
        line of each patch's description.
        """
        def displayname(patchname):
            # append ': <first description line>' in summary mode
            if summary:
                msg = self.readheaders(patchname)[0]
                msg = msg and ': ' + msg[0] or ': '
            else:
                msg = ''
            return '%s%s' % (patchname, msg)

        applied = dict.fromkeys([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            for i in xrange(start, start+length):
                patch = self.series[i]
                if patch in applied:
                    stat = 'A'
                elif self.pushable(i)[0]:
                    stat = 'U'
                else:
                    stat = 'G'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%d %s ' % (i, stat)
                elif status and status != stat:
                    # non-verbose: filter by the requested status
                    continue
                self.ui.write('%s%s\n' % (pfx, displayname(patch)))
        else:
            # walk the patch directory for files unknown to the series,
            # skipping mq's own control files and dotfiles
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.status_path, self.series_path,
                                   self.guards_path)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            msng_list.sort()
            for x in msng_list:
                pfx = self.ui.verbose and ('D ') or ''
                self.ui.write("%s%s\n" % (pfx, displayname(x)))
1238 1238
1239 1239 def issaveline(self, l):
1240 1240 if l.name == '.hg.patches.save.line':
1241 1241 return True
1242 1242
1243 1243 def qrepo(self, create=False):
1244 1244 if create or os.path.isdir(self.join(".hg")):
1245 1245 return hg.repository(self.ui, path=self.path, create=create)
1246 1246
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Restore queue state saved by qsave from changeset rev.

        The save changeset's description encodes the series entries
        (':name' lines), the applied entries ('rev:name' lines) after a
        'Patch Data:' marker, and optionally the queue repository's
        dirstate parents on a 'Dirstate:' line.  delete=True strips the
        save changeset afterwards (only when it is a head); qupdate=True
        updates the queue repository to the saved parent.
        """
        c = repo.changelog.read(rev)
        desc = c[4].strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i in xrange(0, len(lines)):
            if lines[i] == 'Patch Data:':
                datastart = i + 1
            elif lines[i].startswith('Dirstate:'):
                l = lines[i].rstrip()
                l = l[10:].split(' ')
                qpp = [ hg.bin(x) for x in l ]
            elif datastart != None:
                # past the marker: each line is a statusentry; entries
                # carrying a revision were applied, bare names are series
                l = lines[i].rstrip()
                se = statusentry(l)
                file_ = se.name
                if se.rev:
                    applied.append(se)
                else:
                    series.append(file_)
        if datastart == None:
            self.ui.warn("No saved patch data found\n")
            return 1
        self.ui.warn("restoring status: %s\n" % lines[0])
        self.full_series = series
        self.applied = applied
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn("save entry has children, leaving it alone\n")
            else:
                self.ui.warn("removing save entry %s\n" % hg.short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, rev, update=update, backup='strip')
        if qpp:
            self.ui.warn("saved queue repository parents: %s %s\n" %
                         (hg.short(qpp[0]), hg.short(qpp[1])))
            if qupdate:
                print "queue directory updating"
                r = self.qrepo()
                if not r:
                    self.ui.warn("Unable to load queue repository\n")
                    return 1
                hg.clean(r, qpp[0])
1302 1302
    def save(self, repo, msg=None):
        """Commit the current queue state as a qsave changeset.

        Encodes the applied entries, the remaining series and (when the
        queue is versioned) its dirstate parents into the commit
        message, then appends a '.hg.patches.save.line' marker entry so
        the state can be found again by restore().  Returns 1 on error.
        """
        if len(self.applied) == 0:
            self.ui.warn("save: no patches applied, exiting\n")
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn("status is already saved\n")
            return 1

        # series entries are written with an empty revision (':name')
        ar = [ ':' + x for x in self.full_series ]
        if not msg:
            msg = "hg patches saved state"
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
                   "\n".join(ar) + '\n' or "")
        n = repo.commit(None, text, user=None, force=1)
        if not n:
            self.ui.warn("repo commit failed\n")
            return 1
        self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
        self.applied_dirty = 1
        self.removeundo(repo)
1330 1330
1331 1331 def full_series_end(self):
1332 1332 if len(self.applied) > 0:
1333 1333 p = self.applied[-1].name
1334 1334 end = self.find_series(p)
1335 1335 if end == None:
1336 1336 return len(self.full_series)
1337 1337 return end + 1
1338 1338 return 0
1339 1339
    def series_end(self, all_patches=False):
        """If all_patches is False, return the index of the next pushable patch
        in the series, or the series length. If all_patches is True, return the
        index of the first patch past the last applied one.
        """
        end = 0
        def next(start):
            # scan forward past guarded (unpushable) patches, unless the
            # caller wants the raw position
            if all_patches:
                return start
            i = start
            while i < len(self.series):
                p, reason = self.pushable(i)
                if p:
                    break
                self.explain_pushable(i)
                i += 1
            return i
        if len(self.applied) > 0:
            p = self.applied[-1].name
            try:
                end = self.series.index(p)
            except ValueError:
                # applied patch no longer in the series file
                return 0
            return next(end + 1)
        return next(end)
1365 1365
1366 1366 def appliedname(self, index):
1367 1367 pname = self.applied[index].name
1368 1368 if not self.ui.verbose:
1369 1369 p = pname
1370 1370 else:
1371 1371 p = str(self.series.index(pname)) + " " + pname
1372 1372 return p
1373 1373
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patches into the queue.

        files: patch files to import ('-' reads stdin); with
        existing=True they must already live in the patch directory.
        rev: existing changesets to convert into applied mq patches
        instead (mutually exclusive with files); git=True writes those
        in git diff format.  patchname overrides the generated name when
        importing a single patch.  force=True overwrites existing patch
        files.
        """
        def checkseries(patchname):
            if patchname in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patchname)
        def checkfile(patchname):
            if not force and os.path.exists(self.join(patchname)):
                raise util.Abort(_('patch "%s" already exists')
                                 % patchname)

        if rev:
            if files:
                raise util.Abort(_('option "-r" not valid when importing '
                                   'files'))
            # process revisions newest-first so each becomes the new qbase
            rev = cmdutil.revrange(repo, rev)
            rev.sort(lambda x, y: cmp(y, x))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'patches'))
        i = 0
        added = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
            if len(heads) > 1:
                raise util.Abort(_('revision %d is the root of more than one '
                                   'branch') % rev[-1])
            if self.applied:
                base = revlog.hex(repo.changelog.node(rev[0]))
                if base in [n.rev for n in self.applied]:
                    raise util.Abort(_('revision %d is already managed')
                                     % rev[0])
                if heads != [revlog.bin(self.applied[-1].rev)]:
                    raise util.Abort(_('revision %d is not the parent of '
                                       'the queue') % rev[0])
                base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev[0])]:
                    raise util.Abort(_('revision %d has unmanaged children')
                                     % rev[0])
                lastparent = None

            if git:
                self.diffopts().git = True

            for r in rev:
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != revlog.nullrev:
                    raise util.Abort(_('cannot import merge revision %d') % r)
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of %d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                checkseries(patchname)
                checkfile(patchname)
                # imported revisions become the bottom of the applied stack
                self.full_series.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                patch.export(repo, [n], fp=patchf, opts=self.diffopts())
                patchf.close()

                se = statusentry(revlog.hex(n), patchname)
                self.applied.insert(0, se)

                added.append(patchname)
                patchname = None
            self.parse_series()
            self.applied_dirty = 1

        for filename in files:
            if existing:
                if filename == '-':
                    raise util.Abort(_('-e is incompatible with import from -'))
                if not patchname:
                    patchname = normname(filename)
                if not os.path.isfile(self.join(patchname)):
                    raise util.Abort(_("patch %s does not exist") % patchname)
            else:
                try:
                    if filename == '-':
                        if not patchname:
                            raise util.Abort(_('need --name to import a patch from -'))
                        text = sys.stdin.read()
                    else:
                        text = file(filename).read()
                except IOError:
                    raise util.Abort(_("unable to read %s") % patchname)
                if not patchname:
                    patchname = normname(os.path.basename(filename))
                checkfile(patchname)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
            checkseries(patchname)
            # insert after the applied stack, preserving import order
            index = self.full_series_end() + i
            self.full_series[index:index] = [patchname]
            self.parse_series()
            self.ui.warn("adding %s to series file\n" % patchname)
            i += 1
            added.append(patchname)
            patchname = None
        self.series_dirty = 1
        qrepo = self.qrepo()
        if qrepo:
            qrepo.add(added)
1486 1486
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    With --rev, mq will stop managing the named revisions. The
    patches must be applied and at the base of the stack. This option
    is useful when the patches have been applied upstream.

    Otherwise, the patches must not be applied.

    With --keep, the patch files are preserved in the patch directory."""
    # delegate to the queue object; persist the changed series/status
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1501 1501
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    q = repo.mq
    if not patch:
        end = q.series_end(True)
    else:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        # show everything up to and including the named patch
        end = q.series.index(patch) + 1
    return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1512 1512
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if not patch:
        start = q.series_end(True)
    else:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        # start just after the named patch
        start = q.series.index(patch) + 1
    q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1523 1523
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    # thin wrapper over queue.qimport; persist state afterwards
    q = repo.mq
    q.qimport(repo, filename, patchname=opts['name'], rev=opts['rev'],
              existing=opts['existing'], force=opts['force'],
              git=opts['git'])
    q.save_dirty()
    return 0
1546 1546
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches. Use qcommit to commit changes to this queue
    repository."""
    q = repo.mq
    r = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if not r:
        # unversioned queue: nothing more to set up
        return 0
    # seed the versioned queue repo with an ignore file and an empty
    # series file, and schedule both for commit
    if not os.path.exists(r.wjoin('.hgignore')):
        fp = r.wopener('.hgignore', 'w')
        fp.write('syntax: glob\n')
        fp.write('status\n')
        fp.write('guards\n')
        fp.close()
    if not os.path.exists(r.wjoin('series')):
        r.wopener('series', 'w').close()
    r.add(['.hgignore', 'series'])
    commands.add(ui, r)
    return 0
1569 1569
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied.  If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination.  If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default.  Use -p <url> to change.
    '''
    commands.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                # restrict the clone to heads below qbase plus qbase's
                # parent, so applied-patch changesets are not transferred
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
                        dr.url() + '/.hg/patches',
                        pull=opts['pull'],
                        update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1615 1615
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r:
        # consistency fix: every other abort message in this file is
        # wrapped in _() for translation; this one was not
        raise util.Abort(_('no queue repository'))
    commands.commit(r.ui, r, *pats, **opts)
1622 1622
def series(ui, repo, **opts):
    """print the entire series file"""
    q = repo.mq
    q.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1627 1627
def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    if q.applied:
        t = q.series_end(True)
    else:
        t = 0
    if not t:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=t-1, length=1, status='A',
                     summary=opts.get('summary'))
1638 1638
def next(ui, repo, **opts):
    """print the name of the next patch"""
    # NOTE: shadows the builtin next(); the name is part of the command
    # table, so it cannot be renamed here
    q = repo.mq
    pos = q.series_end()
    if pos == len(q.series):
        ui.write("All patches applied\n")
        return 1
    return q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
1647 1647
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    napplied = len(q.applied)
    # the two guard conditions are disjoint, so their order is free
    if napplied == 0:
        ui.write("No patches applied\n")
        return 1
    if napplied == 1:
        ui.write("Only one patch applied\n")
        return 1
    return q.qseries(repo, start=napplied-2, length=1, status='A',
                     summary=opts.get('summary'))
1660 1660
def new(ui, repo, patch, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is 'New patch: PATCH'"""
    q = repo.mq
    msg = commands.logmessage(opts)
    if opts['edit']:
        # let the user edit the message interactively
        msg = ui.edit(msg, ui.username())
    q.new(repo, patch, msg=msg, force=opts['force'])
    q.save_dirty()
    return 0
1679 1679
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    msg = commands.logmessage(opts)
    if opts['edit']:
        if msg:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # seed the editor with the current patch's description
        topname = q.applied[-1].name
        msg, comment, user, date, hasdiff = q.readheaders(topname)
        msg = ui.edit('\n'.join(msg), user or ui.username())
    ret = q.refresh(repo, pats, msg=msg, **opts)
    q.save_dirty()
    return ret
1701 1701
def diff(ui, repo, *pats, **opts):
    """diff of the current patch"""
    q = repo.mq
    q.diff(repo, pats, opts)
    return 0
1706 1706
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = commands.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # bug fix: the warning said "Skipping" but the duplicate was
            # still appended and folded twice; really skip it, and add
            # the newline every other warning in this file carries
            ui.warn(_('Skipping already folded patch %s\n') % p)
            continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's description for the new header
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        # renamed from 'files' to avoid shadowing the *files argument
        (patchsuccess, pfiles, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, pfiles)

    if not message:
        # concatenate the parent header with each folded header,
        # separated by '* * *' lines
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1765 1765
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    name = q.lookup(patch)
    force = opts['force']
    if q.isapplied(name):
        # already applied: pop down to it
        ret = q.pop(repo, name, force=force)
    else:
        ret = q.push(repo, name, force=force)
    q.save_dirty()
    return ret
1776 1776
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
        hg qguard -- -foo

    To set guards on another patch:
        hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print 'name: guard guard ...' for series entry idx
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in '-+':
        # first argument is a guard (or absent): default to topmost patch
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # set (or with --none, clear) the guards on the chosen patch
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))
1824 1824
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq

    if not patch:
        # No argument: default to the topmost applied patch.
        if not q.applied:
            ui.write('No patches applied\n')
            return 1
        patch = q.lookup('qtip')
    else:
        patch = q.lookup(patch)
    message = repo.mq.readheaders(patch)[0]

    ui.write('\n'.join(message) + '\n')
1839 1839
def lastsavename(path):
    """Find the most recent save file for path.

    Save files are named "<path>.<N>" for an integer N.  Returns a
    tuple (filename, index) for the highest-numbered save file, or
    (None, None) if the directory contains none.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # Escape the base name so regex metacharacters in a queue name cannot
    # cause false matches, and anchor the pattern at the end so e.g.
    # "base.3.bak" is not mistaken for save file number 3.  The original
    # pattern ("%s.([0-9]+)" % base) did neither.
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
1856 1856
def savename(path):
    """Return the name for the next save file of path."""
    last, index = lastsavename(path)
    if last is None:
        # No existing save files: start numbering at 1.
        index = 0
    return "%s.%d" % (path, index + 1)
1863 1863
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack"""
    q = repo.mq
    mergeq = None

    if opts['all']:
        # -a/--all: push up to the last patch in the series.
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        patch = q.series[-1]
    if opts['merge']:
        # -m/--merge: apply patches by merging against a previously
        # saved queue (the one named by -n, or the most recent save).
        if opts['name']:
            newpath = opts['name']
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                 mergeq=mergeq)
    return ret
1887 1887
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack"""
    if opts['name']:
        # Operate on an alternate (saved) queue; in that case the
        # working directory is left alone.
        q = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret
1901 1901
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    if not name:
        # Single-argument form: the argument is the new name and the
        # patch being renamed is the topmost applied one.
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # Renaming into a directory keeps the original basename.
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    # Rewrite the series entry, preserving any guards attached to it.
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # If the patch is currently applied, update its status entry too.
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        # Record the rename in the versioned patch queue repository,
        # resurrecting the destination first if it was marked removed.
        wlock = r.wlock()
        if r.dirstate.state(name) == 'r':
            r.undelete([name], wlock)
        r.copy(patch, name, wlock)
        r.remove([patch], False, wlock)

    q.save_dirty()
1954 1954
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    node = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, node, delete=opts['delete'], qupdate=opts['update'])
    q.save_dirty()
    return 0
1963 1963
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = commands.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        # -c/--copy: additionally copy the whole patch directory, either
        # to the explicitly named destination or to the next save name.
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        # -e/--empty: clear the status file; best effort, but only
        # swallow filesystem errors (the original bare "except:" also
        # hid KeyboardInterrupt and SystemExit).
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            pass
    return 0
1993 1993
def strip(ui, repo, rev, **opts):
    """strip a revision and all later revs on the same branch"""
    node = repo.lookup(rev)
    # Decide how much to bundle as a backup before stripping.
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    else:
        backup = 'all'
    # Only update the working directory if it is not already at null.
    update = repo.dirstate.parents()[0] != revlog.nullid
    repo.mq.strip(repo, node, backup=backup, update=update)
    return 0
2005 2005
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # Change the active guard set, then report how the sets of
        # pushable/guarded patches changed as a result.
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # -s/--series: tally how often each guard occurs in the series
        # file and list them (counts only with -v).
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # Sort by guard name, ignoring the leading '+'/'-' sign.
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # No arguments: print the currently active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # Remember the current top so --reapply can push back to it.
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # Pop to just before the first applied patch that is now guarded.
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # Persist queue state even if the push fails partway.
            q.save_dirty()
2107 2107
def reposetup(ui, repo):
    """Extension hook: wrap the repository class with mq awareness."""
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # Abort when the working directory parent is an applied mq
            # patch (unless force is set).
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # Refuse to commit on top of an applied patch; the force
            # flag may arrive positionally (6th arg) or as a keyword.
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # Pushing applied patches to a remote would publish mutable
            # history; only allow it with --force or explicit revs.
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            # Expose each applied patch as a tag, plus the synthetic
            # qtip/qbase/qparent tags; real tags take precedence.
            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags()

            self.branchcache = {} # avoid recursion in changectx
            cl = self.changelog
            partial, last, lrev = self._readbranchcache()

            qbase = cl.rev(revlog.bin(q.applied[0].rev))
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, cl.count())

            return partial

    if repo.local():
        # Only local repositories get the mq wrapper and queue object.
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2182 2182
# Option shared by all series-listing commands.
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

# Command table: maps command name (with aliases after '|'; a leading
# '^' marks the command as shown in short help) to (function, options,
# synopsis).
cmdtable = {
    "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
    "qclone": (clone,
               [('', 'pull', None, _('use pull protocol to copy metadata')),
                ('U', 'noupdate', None, _('do not update the new working directories')),
                ('', 'uncompressed', None,
                 _('use uncompressed transfer (fast over LAN)')),
                ('e', 'ssh', '', _('specify ssh command to use')),
                ('p', 'patches', '', _('location of source patch repo')),
                ('', 'remotecmd', '',
                 _('specify hg command to run on the remote side'))],
               'hg qclone [OPTION]... SOURCE [DEST]'),
    "qcommit|qci":
        (commit,
         commands.table["^commit|ci"][1],
         'hg qcommit [OPTION]... [FILE]...'),
    "^qdiff": (diff,
               [('g', 'git', None, _('use git extended diff format')),
                ('I', 'include', [], _('include names matching the given patterns')),
                ('X', 'exclude', [], _('exclude names matching the given patterns'))],
               'hg qdiff [-I] [-X] [FILE]...'),
    "qdelete|qremove|qrm":
        (delete,
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [], _('stop managing a revision'))],
         'hg qdelete [-k] [-r REV]... PATCH...'),
    'qfold':
        (fold,
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files'))
          ] + commands.commitopts,
         'hg qfold [-e] [-m <text>] [-l <file] PATCH...'),
    'qgoto': (goto, [('f', 'force', None, _('overwrite any local changes'))],
              'hg qgoto [OPT]... PATCH'),
    'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
                       ('n', 'none', None, _('drop all guards'))],
               'hg qguard [PATCH] [+GUARD]... [-GUARD]...'),
    'qheader': (header, [],
                _('hg qheader [PATCH]')),
    "^qimport":
        (qimport,
         [('e', 'existing', None, 'import file in patch dir'),
          ('n', 'name', '', 'patch file name'),
          ('f', 'force', None, 'overwrite existing files'),
          ('r', 'rev', [], 'place existing revisions under mq control'),
          ('g', 'git', None, _('use git extended diff format'))],
         'hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...'),
    "^qinit":
        (init,
         [('c', 'create-repo', None, 'create queue repository')],
         'hg qinit [-c]'),
    "qnew":
        (new,
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes into patch'))
          ] + commands.commitopts,
         'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
    "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
    "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
    "^qpop":
        (pop,
         [('a', 'all', None, 'pop all patches'),
          ('n', 'name', '', 'queue name to pop'),
          ('f', 'force', None, 'forget any local changes')],
         'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
    "^qpush":
        (push,
         [('f', 'force', None, 'apply if the patch has rejects'),
          ('l', 'list', None, 'list patch name in commit text'),
          ('a', 'all', None, 'apply all patches'),
          ('m', 'merge', None, 'merge from another queue'),
          ('n', 'name', '', 'merge queue name')],
         'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
    "^qrefresh":
        (refresh,
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None, 'refresh only files already in the patch'),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))
          ] + commands.commitopts,
         'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
    'qrename|qmv':
        (rename, [], 'hg qrename PATCH1 [PATCH2]'),
    "qrestore":
        (restore,
         [('d', 'delete', None, 'delete save entry'),
          ('u', 'update', None, 'update queue working dir')],
         'hg qrestore [-d] [-u] REV'),
    "qsave":
        (save,
         [('c', 'copy', None, 'copy patch directory'),
          ('n', 'name', '', 'copy directory name'),
          ('e', 'empty', None, 'clear queue status file'),
          ('f', 'force', None, 'force copy')] + commands.commitopts,
         'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
    "qselect": (select,
                [('n', 'none', None, _('disable all guards')),
                 ('s', 'series', None, _('list all guards in series file')),
                 ('', 'pop', None,
                  _('pop to before first guarded applied patch')),
                 ('', 'reapply', None, _('pop, then reapply patches'))],
                'hg qselect [OPTION]... [GUARD]...'),
    "qseries":
        (series,
         [('m', 'missing', None, 'print patches not in series')] + seriesopts,
         'hg qseries [-ms]'),
    "^strip":
        (strip,
         [('f', 'force', None, 'force multi-head removal'),
          ('b', 'backup', None, 'bundle unrelated changesets'),
          ('n', 'nobackup', None, 'no backups')],
         'hg strip [-f] [-b] [-n] REV'),
    "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
    "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
}
@@ -1,159 +1,164 b''
1 1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 2 #
3 3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 4 # that removes files not known to mercurial
5 5 #
6 6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 7 # (http://www.red-bean.com/cvsutils/).
8 8 #
9 9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 10 # [extensions]
11 11 # hgext.purge =
12 12 #
13 13 # For help on the usage of "hg purge" use:
14 14 # hg help purge
15 15 #
16 16 # This program is free software; you can redistribute it and/or modify
17 17 # it under the terms of the GNU General Public License as published by
18 18 # the Free Software Foundation; either version 2 of the License, or
19 19 # (at your option) any later version.
20 20 #
21 21 # This program is distributed in the hope that it will be useful,
22 22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 24 # GNU General Public License for more details.
25 25 #
26 26 # You should have received a copy of the GNU General Public License
27 27 # along with this program; if not, write to the Free Software
28 28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29 29
30 30 from mercurial import hg, util
31 31 from mercurial.i18n import _
32 32 import os
33 33
def dopurge(ui, repo, dirs=None, act=True, abort_on_err=False, eol='\n',
            force=False, include=None, exclude=None):
    # Walk the working directory and delete (or, when act is false,
    # merely print) files unknown to the dirstate and empty directories.
    def error(msg):
        # Honour --abort-on-err; otherwise warn and keep going.
        if abort_on_err:
            raise util.Abort(msg)
        else:
            ui.warn(_('warning: %s\n') % msg)

    def remove(remove_func, name):
        if act:
            try:
                remove_func(os.path.join(repo.root, name))
            except OSError, e:
                error(_('%s cannot be removed') % name)
        else:
            # Dry run: emit the name, terminated by eol ('\0' for -0).
            ui.write('%s%s' % (name, eol))

    directories = []
    files = []
    missing = []
    roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs,
                                            include, exclude)
    for src, f, st in repo.dirstate.statwalk(files=roots, match=match,
                                             ignored=True, directories=True):
        if src == 'd':
            directories.append(f)
        elif src == 'm':
            missing.append(f)
        elif src == 'f' and f not in repo.dirstate:
            files.append(f)

    # Refuse to run if missing files hint at a name-mangling filesystem.
    _check_missing(ui, repo, missing, force)

    directories.sort()

    for f in files:
        if f not in repo.dirstate:
            ui.note(_('Removing file %s\n') % f)
            remove(os.remove, f)

    # Walk directories in reverse sorted order so children are removed
    # before their parents; only remove directories the matcher selects.
    for f in directories[::-1]:
        if match(f) and not os.listdir(repo.wjoin(f)):
            ui.note(_('Removing directory %s\n') % f)
            remove(os.rmdir, f)
77 78
def _check_missing(ui, repo, missing, force=False):
    """Abort if there is the chance of having problems with name-mangling fs

    In a name mangling filesystem (e.g. a case insensitive one)
    dirstate.walk() can yield filenames different from the ones
    stored in the dirstate. This already confuses the status and
    add commands, but with purge this may cause data loss.

    To prevent this, _check_missing will abort if there are missing
    files. The force option will let the user skip the check if he
    knows it is safe.

    Even with the force option this function will check if any of the
    missing files is still available in the working dir: if so there
    may be some problem with the underlying filesystem, so it
    aborts unconditionally."""

    # Files the walk reported missing but that still exist on disk:
    # strong evidence of name mangling.
    found = [f for f in missing if util.lexists(repo.wjoin(f))]

    if found:
        if not ui.quiet:
            ui.warn(_("The following tracked files weren't listed by the "
                      "filesystem, but could still be found:\n"))
            for f in found:
                ui.warn("%s\n" % f)
            if util.checkfolding(repo.path):
                ui.warn(_("This is probably due to a case-insensitive "
                          "filesystem\n"))
        # Unconditional abort: purging here could destroy data.
        raise util.Abort(_("purging on name mangling filesystems is not "
                           "yet fully supported"))

    if missing and not force:
        raise util.Abort(_("there are missing files in the working dir and "
                           "purge still has problems with them due to name "
                           "mangling filesystems. "
                           "Use --force if you know what you are doing"))
115 116
def purge(ui, repo, *dirs, **opts):
    '''removes files not tracked by mercurial

    Delete files not known to mercurial, this is useful to test local and
    uncommitted changes in the otherwise clean source tree.

    This means that purge will delete:
     - Unknown files: files marked with "?" by "hg status"
     - Ignored files: files usually ignored by Mercurial because they match
       a pattern in a ".hgignore" file
     - Empty directories: in fact Mercurial ignores directories unless they
       contain files under source control managment
    But it will leave untouched:
     - Unmodified tracked files
     - Modified tracked files
     - New files added to the repository (with "hg add")

    If directories are given on the command line, only files in these
    directories are considered.

    Be careful with purge, you could irreversibly delete some files you
    forgot to add to the repository. If you only want to print the list of
    files that this program would delete use the --print option.
    '''
    if opts['print0']:
        # --print0 implies --print: NUL-terminated listing, no deletion.
        eol = '\0'
        act = False
    else:
        eol = '\n'
        act = not opts['print']
    dopurge(ui, repo, dirs, act, bool(opts['abort_on_err']), eol,
            bool(opts['force']), opts['include'], opts['exclude'])
148 151
149 152
# Command table for the purge extension: one command, "hg purge".
cmdtable = {
    'purge':
        (purge,
         [('a', 'abort-on-err', None, _('abort if an error occurs')),
          ('f', 'force', None, _('purge even when missing files are detected')),
          ('p', 'print', None, _('print the file names instead of deleting them')),
          ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
                                  ' (implies -p)')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg purge [OPTION]... [DIR]...'))
}
@@ -1,809 +1,820 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, mdiff, bdiff, util, templater, patch
11 11
12 12 revrangesep = ':'
13 13
def parseurl(url, revs):
    '''parse url#branch, returning url, branch + revs'''

    if '#' in url:
        # Split off the fragment and append it to the requested revs.
        url, fragment = url.split('#', 1)
        return url, revs + [fragment]
    # No fragment: an empty revs list is normalized to None.
    return url, (revs or None)
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # Substitute the default for an empty spec (but keep "0").
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # No revisions given: first working directory parent + wdir.
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            # Single "START:END" range; empty ends default to 0 and tip.
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        # Two separate revisions; range syntax is not allowed here.
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
41 50
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # Substitute the default for an empty spec (but keep "0").
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    # seen deduplicates revisions while l preserves first-seen order.
    seen, l = {}, []
    for spec in revs:
        if revrangesep in spec:
            # "START:END" range, inclusive at both ends; a descending
            # range (start > end) is walked backwards.
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                l.append(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            l.append(rev)

    return l
70 79
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    '''Expand a filename pattern into a concrete file name.

    Supported %-specifiers: %% literal, %b repo basename, %H/%h/%R
    changeset hashes/rev (only when node is given), %r zero-padded rev,
    %N total patch count, %n zero-padded sequence number, and %s/%d/%p
    pieces of pathname.  Raises util.Abort on an unknown specifier.'''
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # Only enable the specifiers whose inputs were provided.
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        # Scan pat, replacing each %X with its expansion.
        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])
115 124
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    '''Return the file object described by the filename pattern pat.

    An empty pattern or '-' maps to stdout/stdin depending on mode, an
    already-open file-like object is passed through, and anything else
    is expanded via make_filename and opened.'''
    writing = 'w' in mode
    if not pat or pat == '-':
        if writing:
            return sys.stdout
        return sys.stdin
    if hasattr(pat, 'write') and writing:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)
127 136
def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    '''Build a (files, match, anypats) matcher from command-line patterns.'''
    return util.cmdmatcher(repo.root, repo.getcwd(), pats or [],
                           opts.get('include'), opts.get('exclude'),
                           globbed=globbed, default=default)
133 142
def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    '''Yield (src, abspath, relpath, exact) for files matching pats.

    exact is true when the file was named literally on the command
    line rather than matched by a pattern.'''
    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                        default=default)
    # Files listed explicitly (not via patterns) count as exact matches.
    exact = dict.fromkeys(files)
    for src, fn in repo.walk(node=node, files=files, match=matchfn,
                             badmatch=badmatch):
        yield src, fn, util.pathto(repo.root, repo.getcwd(), fn), fn in exact
142 151
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples

    Compares each added file's working-dir content against each removed
    file's last committed content; a pair is yielded when the fraction
    of matching bytes is at least threshold.'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        # aa's line split is independent of r: hoist it out of the
        # inner loop instead of recomputing it per removed file.
        alines = mdiff.splitnewlines(aa)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            lengths = len(aa) + len(rr)
            if not lengths:
                # Both files empty: no basis for a similarity score.
                # (Previously myscore was left unset here, giving a
                # NameError on the first candidate or silently reusing
                # a stale score from the previous iteration.)
                continue

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            myscore = equal*2.0 / lengths
            if myscore >= bestscore:
                bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
168 179
def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
              similarity=None):
    '''Schedule unknown files for addition and vanished files for
    removal, then optionally record likely renames among them.'''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    # abs path -> (rel path, named exactly on the command line?)
    mapping = {}
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            # Present in the filesystem but unknown to the dirstate.
            add.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        islink = os.path.islink(rel)
        if repo.dirstate.state(abs) != 'r' and not islink and not os.path.exists(rel):
            # Tracked but gone from the filesystem (symlinks excluded).
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.add(add, wlock=wlock)
        repo.remove(remove, wlock=wlock)
    if similarity > 0:
        # Pair added/removed files by content similarity and record the
        # qualifying pairs as renames (copy + remove).
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new, wlock=wlock)
202 213
203 214 def service(opts, parentfn=None, initfn=None, runfn=None):
204 215 '''Run a command as a service.'''
205 216
206 217 if opts['daemon'] and not opts['daemon_pipefds']:
207 218 rfd, wfd = os.pipe()
208 219 args = sys.argv[:]
209 220 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
210 221 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
211 222 args[0], args)
212 223 os.close(wfd)
213 224 os.read(rfd, 1)
214 225 if parentfn:
215 226 return parentfn(pid)
216 227 else:
217 228 os._exit(0)
218 229
219 230 if initfn:
220 231 initfn()
221 232
222 233 if opts['pid_file']:
223 234 fp = open(opts['pid_file'], 'w')
224 235 fp.write(str(os.getpid()) + '\n')
225 236 fp.close()
226 237
227 238 if opts['daemon_pipefds']:
228 239 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
229 240 os.close(rfd)
230 241 try:
231 242 os.setsid()
232 243 except AttributeError:
233 244 pass
234 245 os.write(wfd, 'y')
235 246 os.close(wfd)
236 247 sys.stdout.flush()
237 248 sys.stderr.flush()
238 249 fd = os.open(util.nulldev, os.O_RDWR)
239 250 if fd != 0: os.dup2(fd, 0)
240 251 if fd != 1: os.dup2(fd, 1)
241 252 if fd != 2: os.dup2(fd, 2)
242 253 if fd not in (0, 1, 2): os.close(fd)
243 254
244 255 if runfn:
245 256 return runfn()
246 257
247 258 class changeset_printer(object):
248 259 '''show changeset information when templating not requested.'''
249 260
250 261 def __init__(self, ui, repo, patch, buffered):
251 262 self.ui = ui
252 263 self.repo = repo
253 264 self.buffered = buffered
254 265 self.patch = patch
255 266 self.header = {}
256 267 self.hunk = {}
257 268 self.lastheader = None
258 269
259 270 def flush(self, rev):
260 271 if rev in self.header:
261 272 h = self.header[rev]
262 273 if h != self.lastheader:
263 274 self.lastheader = h
264 275 self.ui.write(h)
265 276 del self.header[rev]
266 277 if rev in self.hunk:
267 278 self.ui.write(self.hunk[rev])
268 279 del self.hunk[rev]
269 280 return 1
270 281 return 0
271 282
272 283 def show(self, rev=0, changenode=None, copies=(), **props):
273 284 if self.buffered:
274 285 self.ui.pushbuffer()
275 286 self._show(rev, changenode, copies, props)
276 287 self.hunk[rev] = self.ui.popbuffer()
277 288 else:
278 289 self._show(rev, changenode, copies, props)
279 290
280 291 def _show(self, rev, changenode, copies, props):
281 292 '''show a single changeset or file revision'''
282 293 log = self.repo.changelog
283 294 if changenode is None:
284 295 changenode = log.node(rev)
285 296 elif not rev:
286 297 rev = log.rev(changenode)
287 298
288 299 if self.ui.quiet:
289 300 self.ui.write("%d:%s\n" % (rev, short(changenode)))
290 301 return
291 302
292 303 changes = log.read(changenode)
293 304 date = util.datestr(changes[2])
294 305 extra = changes[5]
295 306 branch = extra.get("branch")
296 307
297 308 hexfunc = self.ui.debugflag and hex or short
298 309
299 310 parents = log.parentrevs(rev)
300 311 if not self.ui.debugflag:
301 312 if parents[1] == nullrev:
302 313 if parents[0] >= rev - 1:
303 314 parents = []
304 315 else:
305 316 parents = [parents[0]]
306 317 parents = [(p, hexfunc(log.node(p))) for p in parents]
307 318
308 319 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
309 320
310 321 # don't show the default branch name
311 322 if branch != 'default':
312 323 branch = util.tolocal(branch)
313 324 self.ui.write(_("branch: %s\n") % branch)
314 325 for tag in self.repo.nodetags(changenode):
315 326 self.ui.write(_("tag: %s\n") % tag)
316 327 for parent in parents:
317 328 self.ui.write(_("parent: %d:%s\n") % parent)
318 329
319 330 if self.ui.debugflag:
320 331 self.ui.write(_("manifest: %d:%s\n") %
321 332 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
322 333 self.ui.write(_("user: %s\n") % changes[1])
323 334 self.ui.write(_("date: %s\n") % date)
324 335
325 336 if self.ui.debugflag:
326 337 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
327 338 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
328 339 files):
329 340 if value:
330 341 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
331 342 elif changes[3] and self.ui.verbose:
332 343 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
333 344 if copies and self.ui.verbose:
334 345 copies = ['%s (%s)' % c for c in copies]
335 346 self.ui.write(_("copies: %s\n") % ' '.join(copies))
336 347
337 348 if extra and self.ui.debugflag:
338 349 extraitems = extra.items()
339 350 extraitems.sort()
340 351 for key, value in extraitems:
341 352 self.ui.write(_("extra: %s=%s\n")
342 353 % (key, value.encode('string_escape')))
343 354
344 355 description = changes[4].strip()
345 356 if description:
346 357 if self.ui.verbose:
347 358 self.ui.write(_("description:\n"))
348 359 self.ui.write(description)
349 360 self.ui.write("\n\n")
350 361 else:
351 362 self.ui.write(_("summary: %s\n") %
352 363 description.splitlines()[0])
353 364 self.ui.write("\n")
354 365
355 366 self.showpatch(changenode)
356 367
357 368 def showpatch(self, node):
358 369 if self.patch:
359 370 prev = self.repo.changelog.parents(node)[0]
360 371 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
361 372 self.ui.write("\n")
362 373
363 374 class changeset_templater(changeset_printer):
364 375 '''format changeset information.'''
365 376
366 377 def __init__(self, ui, repo, patch, mapfile, buffered):
367 378 changeset_printer.__init__(self, ui, repo, patch, buffered)
368 379 filters = templater.common_filters.copy()
369 380 filters['formatnode'] = (ui.debugflag and (lambda x: x)
370 381 or (lambda x: x[:12]))
371 382 self.t = templater.templater(mapfile, filters,
372 383 cache={
373 384 'parent': '{rev}:{node|formatnode} ',
374 385 'manifest': '{rev}:{node|formatnode}',
375 386 'filecopy': '{name} ({source})'})
376 387
377 388 def use_template(self, t):
378 389 '''set template string to use'''
379 390 self.t.cache['changeset'] = t
380 391
381 392 def _show(self, rev, changenode, copies, props):
382 393 '''show a single changeset or file revision'''
383 394 log = self.repo.changelog
384 395 if changenode is None:
385 396 changenode = log.node(rev)
386 397 elif not rev:
387 398 rev = log.rev(changenode)
388 399
389 400 changes = log.read(changenode)
390 401
391 402 def showlist(name, values, plural=None, **args):
392 403 '''expand set of values.
393 404 name is name of key in template map.
394 405 values is list of strings or dicts.
395 406 plural is plural of name, if not simply name + 's'.
396 407
397 408 expansion works like this, given name 'foo'.
398 409
399 410 if values is empty, expand 'no_foos'.
400 411
401 412 if 'foo' not in template map, return values as a string,
402 413 joined by space.
403 414
404 415 expand 'start_foos'.
405 416
406 417 for each value, expand 'foo'. if 'last_foo' in template
407 418 map, expand it instead of 'foo' for last key.
408 419
409 420 expand 'end_foos'.
410 421 '''
411 422 if plural: names = plural
412 423 else: names = name + 's'
413 424 if not values:
414 425 noname = 'no_' + names
415 426 if noname in self.t:
416 427 yield self.t(noname, **args)
417 428 return
418 429 if name not in self.t:
419 430 if isinstance(values[0], str):
420 431 yield ' '.join(values)
421 432 else:
422 433 for v in values:
423 434 yield dict(v, **args)
424 435 return
425 436 startname = 'start_' + names
426 437 if startname in self.t:
427 438 yield self.t(startname, **args)
428 439 vargs = args.copy()
429 440 def one(v, tag=name):
430 441 try:
431 442 vargs.update(v)
432 443 except (AttributeError, ValueError):
433 444 try:
434 445 for a, b in v:
435 446 vargs[a] = b
436 447 except ValueError:
437 448 vargs[name] = v
438 449 return self.t(tag, **vargs)
439 450 lastname = 'last_' + name
440 451 if lastname in self.t:
441 452 last = values.pop()
442 453 else:
443 454 last = None
444 455 for v in values:
445 456 yield one(v)
446 457 if last is not None:
447 458 yield one(last, tag=lastname)
448 459 endname = 'end_' + names
449 460 if endname in self.t:
450 461 yield self.t(endname, **args)
451 462
452 463 def showbranches(**args):
453 464 branch = changes[5].get("branch")
454 465 if branch != 'default':
455 466 branch = util.tolocal(branch)
456 467 return showlist('branch', [branch], plural='branches', **args)
457 468
458 469 def showparents(**args):
459 470 parents = [[('rev', log.rev(p)), ('node', hex(p))]
460 471 for p in log.parents(changenode)
461 472 if self.ui.debugflag or p != nullid]
462 473 if (not self.ui.debugflag and len(parents) == 1 and
463 474 parents[0][0][1] == rev - 1):
464 475 return
465 476 return showlist('parent', parents, **args)
466 477
467 478 def showtags(**args):
468 479 return showlist('tag', self.repo.nodetags(changenode), **args)
469 480
470 481 def showextras(**args):
471 482 extras = changes[5].items()
472 483 extras.sort()
473 484 for key, value in extras:
474 485 args = args.copy()
475 486 args.update(dict(key=key, value=value))
476 487 yield self.t('extra', **args)
477 488
478 489 def showcopies(**args):
479 490 c = [{'name': x[0], 'source': x[1]} for x in copies]
480 491 return showlist('file_copy', c, plural='file_copies', **args)
481 492
482 493 if self.ui.debugflag:
483 494 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
484 495 def showfiles(**args):
485 496 return showlist('file', files[0], **args)
486 497 def showadds(**args):
487 498 return showlist('file_add', files[1], **args)
488 499 def showdels(**args):
489 500 return showlist('file_del', files[2], **args)
490 501 def showmanifest(**args):
491 502 args = args.copy()
492 503 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
493 504 node=hex(changes[0])))
494 505 return self.t('manifest', **args)
495 506 else:
496 507 def showfiles(**args):
497 508 return showlist('file', changes[3], **args)
498 509 showadds = ''
499 510 showdels = ''
500 511 showmanifest = ''
501 512
502 513 defprops = {
503 514 'author': changes[1],
504 515 'branches': showbranches,
505 516 'date': changes[2],
506 517 'desc': changes[4],
507 518 'file_adds': showadds,
508 519 'file_dels': showdels,
509 520 'files': showfiles,
510 521 'file_copies': showcopies,
511 522 'manifest': showmanifest,
512 523 'node': hex(changenode),
513 524 'parents': showparents,
514 525 'rev': rev,
515 526 'tags': showtags,
516 527 'extras': showextras,
517 528 }
518 529 props = props.copy()
519 530 props.update(defprops)
520 531
521 532 try:
522 533 if self.ui.debugflag and 'header_debug' in self.t:
523 534 key = 'header_debug'
524 535 elif self.ui.quiet and 'header_quiet' in self.t:
525 536 key = 'header_quiet'
526 537 elif self.ui.verbose and 'header_verbose' in self.t:
527 538 key = 'header_verbose'
528 539 elif 'header' in self.t:
529 540 key = 'header'
530 541 else:
531 542 key = ''
532 543 if key:
533 544 h = templater.stringify(self.t(key, **props))
534 545 if self.buffered:
535 546 self.header[rev] = h
536 547 else:
537 548 self.ui.write(h)
538 549 if self.ui.debugflag and 'changeset_debug' in self.t:
539 550 key = 'changeset_debug'
540 551 elif self.ui.quiet and 'changeset_quiet' in self.t:
541 552 key = 'changeset_quiet'
542 553 elif self.ui.verbose and 'changeset_verbose' in self.t:
543 554 key = 'changeset_verbose'
544 555 else:
545 556 key = 'changeset'
546 557 self.ui.write(templater.stringify(self.t(key, **props)))
547 558 self.showpatch(changenode)
548 559 except KeyError, inst:
549 560 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
550 561 inst.args[0]))
551 562 except SyntaxError, inst:
552 563 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
553 564
554 565 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
555 566 """show one changeset using template or regular display.
556 567
557 568 Display format will be the first non-empty hit of:
558 569 1. option 'template'
559 570 2. option 'style'
560 571 3. [ui] setting 'logtemplate'
561 572 4. [ui] setting 'style'
562 573 If all of these values are either the unset or the empty string,
563 574 regular display via changeset_printer() is done.
564 575 """
565 576 # options
566 577 patch = False
567 578 if opts.get('patch'):
568 579 patch = matchfn or util.always
569 580
570 581 tmpl = opts.get('template')
571 582 mapfile = None
572 583 if tmpl:
573 584 tmpl = templater.parsestring(tmpl, quoted=False)
574 585 else:
575 586 mapfile = opts.get('style')
576 587 # ui settings
577 588 if not mapfile:
578 589 tmpl = ui.config('ui', 'logtemplate')
579 590 if tmpl:
580 591 tmpl = templater.parsestring(tmpl)
581 592 else:
582 593 mapfile = ui.config('ui', 'style')
583 594
584 595 if tmpl or mapfile:
585 596 if mapfile:
586 597 if not os.path.split(mapfile)[0]:
587 598 mapname = (templater.templatepath('map-cmdline.' + mapfile)
588 599 or templater.templatepath(mapfile))
589 600 if mapname: mapfile = mapname
590 601 try:
591 602 t = changeset_templater(ui, repo, patch, mapfile, buffered)
592 603 except SyntaxError, inst:
593 604 raise util.Abort(inst.args[0])
594 605 if tmpl: t.use_template(tmpl)
595 606 return t
596 607 return changeset_printer(ui, repo, patch, buffered)
597 608
598 609 def finddate(ui, repo, date):
599 610 """Find the tipmost changeset that matches the given date spec"""
600 611 df = util.matchdate(date + " to " + date)
601 612 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
602 613 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
603 614 results = {}
604 615 for st, rev, fns in changeiter:
605 616 if st == 'add':
606 617 d = get(rev)[2]
607 618 if df(d[0]):
608 619 results[rev] = d
609 620 elif st == 'iter':
610 621 if rev in results:
611 622 ui.status("Found revision %s from %s\n" %
612 623 (rev, util.datestr(results[rev])))
613 624 return str(rev)
614 625
615 626 raise util.Abort(_("revision matching date not found"))
616 627
617 628 def walkchangerevs(ui, repo, pats, change, opts):
618 629 '''Iterate over files and the revs they changed in.
619 630
620 631 Callers most commonly need to iterate backwards over the history
621 632 it is interested in. Doing so has awful (quadratic-looking)
622 633 performance, so we use iterators in a "windowed" way.
623 634
624 635 We walk a window of revisions in the desired order. Within the
625 636 window, we first walk forwards to gather data, then in the desired
626 637 order (usually backwards) to display it.
627 638
628 639 This function returns an (iterator, matchfn) tuple. The iterator
629 640 yields 3-tuples. They will be of one of the following forms:
630 641
631 642 "window", incrementing, lastrev: stepping through a window,
632 643 positive if walking forwards through revs, last rev in the
633 644 sequence iterated over - use to reset state for the current window
634 645
635 646 "add", rev, fns: out-of-order traversal of the given file names
636 647 fns, which changed during revision rev - use to gather data for
637 648 possible display
638 649
639 650 "iter", rev, None: in-order traversal of the revs earlier iterated
640 651 over with "add" - use to display data'''
641 652
642 653 def increasing_windows(start, end, windowsize=8, sizelimit=512):
643 654 if start < end:
644 655 while start < end:
645 656 yield start, min(windowsize, end-start)
646 657 start += windowsize
647 658 if windowsize < sizelimit:
648 659 windowsize *= 2
649 660 else:
650 661 while start > end:
651 662 yield start, min(windowsize, start-end-1)
652 663 start -= windowsize
653 664 if windowsize < sizelimit:
654 665 windowsize *= 2
655 666
656 667 files, matchfn, anypats = matchpats(repo, pats, opts)
657 668 follow = opts.get('follow') or opts.get('follow_first')
658 669
659 670 if repo.changelog.count() == 0:
660 671 return [], matchfn
661 672
662 673 if follow:
663 674 defrange = '%s:0' % repo.changectx().rev()
664 675 else:
665 676 defrange = 'tip:0'
666 677 revs = revrange(repo, opts['rev'] or [defrange])
667 678 wanted = {}
668 679 slowpath = anypats or opts.get('removed')
669 680 fncache = {}
670 681
671 682 if not slowpath and not files:
672 683 # No files, no patterns. Display all revs.
673 684 wanted = dict.fromkeys(revs)
674 685 copies = []
675 686 if not slowpath:
676 687 # Only files, no patterns. Check the history of each file.
677 688 def filerevgen(filelog, node):
678 689 cl_count = repo.changelog.count()
679 690 if node is None:
680 691 last = filelog.count() - 1
681 692 else:
682 693 last = filelog.rev(node)
683 694 for i, window in increasing_windows(last, nullrev):
684 695 revs = []
685 696 for j in xrange(i - window, i + 1):
686 697 n = filelog.node(j)
687 698 revs.append((filelog.linkrev(n),
688 699 follow and filelog.renamed(n)))
689 700 revs.reverse()
690 701 for rev in revs:
691 702 # only yield rev for which we have the changelog, it can
692 703 # happen while doing "hg log" during a pull or commit
693 704 if rev[0] < cl_count:
694 705 yield rev
695 706 def iterfiles():
696 707 for filename in files:
697 708 yield filename, None
698 709 for filename_node in copies:
699 710 yield filename_node
700 711 minrev, maxrev = min(revs), max(revs)
701 712 for file_, node in iterfiles():
702 713 filelog = repo.file(file_)
703 714 # A zero count may be a directory or deleted file, so
704 715 # try to find matching entries on the slow path.
705 716 if filelog.count() == 0:
706 717 slowpath = True
707 718 break
708 719 for rev, copied in filerevgen(filelog, node):
709 720 if rev <= maxrev:
710 721 if rev < minrev:
711 722 break
712 723 fncache.setdefault(rev, [])
713 724 fncache[rev].append(file_)
714 725 wanted[rev] = 1
715 726 if follow and copied:
716 727 copies.append(copied)
717 728 if slowpath:
718 729 if follow:
719 730 raise util.Abort(_('can only follow copies/renames for explicit '
720 731 'file names'))
721 732
722 733 # The slow path checks files modified in every changeset.
723 734 def changerevgen():
724 735 for i, window in increasing_windows(repo.changelog.count()-1,
725 736 nullrev):
726 737 for j in xrange(i - window, i + 1):
727 738 yield j, change(j)[3]
728 739
729 740 for rev, changefiles in changerevgen():
730 741 matches = filter(matchfn, changefiles)
731 742 if matches:
732 743 fncache[rev] = matches
733 744 wanted[rev] = 1
734 745
735 746 class followfilter:
736 747 def __init__(self, onlyfirst=False):
737 748 self.startrev = nullrev
738 749 self.roots = []
739 750 self.onlyfirst = onlyfirst
740 751
741 752 def match(self, rev):
742 753 def realparents(rev):
743 754 if self.onlyfirst:
744 755 return repo.changelog.parentrevs(rev)[0:1]
745 756 else:
746 757 return filter(lambda x: x != nullrev,
747 758 repo.changelog.parentrevs(rev))
748 759
749 760 if self.startrev == nullrev:
750 761 self.startrev = rev
751 762 return True
752 763
753 764 if rev > self.startrev:
754 765 # forward: all descendants
755 766 if not self.roots:
756 767 self.roots.append(self.startrev)
757 768 for parent in realparents(rev):
758 769 if parent in self.roots:
759 770 self.roots.append(rev)
760 771 return True
761 772 else:
762 773 # backwards: all parents
763 774 if not self.roots:
764 775 self.roots.extend(realparents(self.startrev))
765 776 if rev in self.roots:
766 777 self.roots.remove(rev)
767 778 self.roots.extend(realparents(rev))
768 779 return True
769 780
770 781 return False
771 782
772 783 # it might be worthwhile to do this in the iterator if the rev range
773 784 # is descending and the prune args are all within that range
774 785 for rev in opts.get('prune', ()):
775 786 rev = repo.changelog.rev(repo.lookup(rev))
776 787 ff = followfilter()
777 788 stop = min(revs[0], revs[-1])
778 789 for x in xrange(rev, stop-1, -1):
779 790 if ff.match(x) and x in wanted:
780 791 del wanted[x]
781 792
782 793 def iterate():
783 794 if follow and not files:
784 795 ff = followfilter(onlyfirst=opts.get('follow_first'))
785 796 def want(rev):
786 797 if ff.match(rev) and rev in wanted:
787 798 return True
788 799 return False
789 800 else:
790 801 def want(rev):
791 802 return rev in wanted
792 803
793 804 for i, window in increasing_windows(0, len(revs)):
794 805 yield 'window', revs[0] < revs[-1], revs[-1]
795 806 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
796 807 srevs = list(nrevs)
797 808 srevs.sort()
798 809 for rev in srevs:
799 810 fns = fncache.get(rev)
800 811 if not fns:
801 812 def fns_generator():
802 813 for f in change(rev)[3]:
803 814 if matchfn(f):
804 815 yield f
805 816 fns = fns_generator()
806 817 yield 'add', rev, fns
807 818 for rev in nrevs:
808 819 yield 'iter', rev, None
809 820 return iterate(), matchfn
@@ -1,3394 +1,3409 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import traceback, errno, version, atexit, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted = repo.status()[:4]
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def logmessage(opts):
28 28 """ get the log message according to -m and -l option """
29 29 message = opts['message']
30 30 logfile = opts['logfile']
31 31
32 32 if message and logfile:
33 33 raise util.Abort(_('options --message and --logfile are mutually '
34 34 'exclusive'))
35 35 if not message and logfile:
36 36 try:
37 37 if logfile == '-':
38 38 message = sys.stdin.read()
39 39 else:
40 40 message = open(logfile).read()
41 41 except IOError, inst:
42 42 raise util.Abort(_("can't read commit message '%s': %s") %
43 43 (logfile, inst.strerror))
44 44 return message
45 45
46 46 def setremoteconfig(ui, opts):
47 47 "copy remote options to ui tree"
48 48 if opts.get('ssh'):
49 49 ui.setconfig("ui", "ssh", opts['ssh'])
50 50 if opts.get('remotecmd'):
51 51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52 52
53 53 # Commands start here, listed alphabetically
54 54
55 55 def add(ui, repo, *pats, **opts):
56 56 """add the specified files on the next commit
57 57
58 58 Schedule files to be version controlled and added to the repository.
59 59
60 60 The files will be added to the repository at the next commit. To
61 61 undo an add before that, see hg revert.
62 62
63 63 If no names are given, add all files in the repository.
64 64 """
65 65
66 66 names = []
67 67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 68 if exact:
69 69 if ui.verbose:
70 70 ui.status(_('adding %s\n') % rel)
71 71 names.append(abs)
72 72 elif repo.dirstate.state(abs) == '?':
73 73 ui.status(_('adding %s\n') % rel)
74 74 names.append(abs)
75 75 if not opts.get('dry_run'):
76 76 repo.add(names)
77 77
78 78 def addremove(ui, repo, *pats, **opts):
79 79 """add all new files, delete all missing files
80 80
81 81 Add all new files and remove all missing files from the repository.
82 82
83 83 New files are ignored if they match any of the patterns in .hgignore. As
84 84 with add, these changes take effect at the next commit.
85 85
86 86 Use the -s option to detect renamed files. With a parameter > 0,
87 87 this compares every removed file with every added file and records
88 88 those similar enough as renames. This option takes a percentage
89 89 between 0 (disabled) and 100 (files must be identical) as its
90 90 parameter. Detecting renamed files this way can be expensive.
91 91 """
92 92 sim = float(opts.get('similarity') or 0)
93 93 if sim < 0 or sim > 100:
94 94 raise util.Abort(_('similarity must be between 0 and 100'))
95 95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96 96
97 97 def annotate(ui, repo, *pats, **opts):
98 98 """show changeset information per file line
99 99
100 100 List changes in files, showing the revision id responsible for each line
101 101
102 102 This command is useful to discover who did a change or when a change took
103 103 place.
104 104
105 105 Without the -a option, annotate will avoid processing files it
106 106 detects as binary. With -a, annotate will generate an annotation
107 107 anyway, probably with undesirable results.
108 108 """
109 109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110 110
111 111 if not pats:
112 112 raise util.Abort(_('at least one file name or pattern required'))
113 113
114 114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 115 ['number', lambda x: str(x.rev())],
116 116 ['changeset', lambda x: short(x.node())],
117 117 ['date', getdate], ['follow', lambda x: x.path()]]
118 118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 119 and not opts['follow']):
120 120 opts['number'] = 1
121 121
122 122 ctx = repo.changectx(opts['rev'])
123 123
124 124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 125 node=ctx.node()):
126 126 fctx = ctx.filectx(abs)
127 127 if not opts['text'] and util.binary(fctx.data()):
128 128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 129 continue
130 130
131 131 lines = fctx.annotate(follow=opts.get('follow'))
132 132 pieces = []
133 133
134 134 for o, f in opmap:
135 135 if opts[o]:
136 136 l = [f(n) for n, dummy in lines]
137 137 if l:
138 138 m = max(map(len, l))
139 139 pieces.append(["%*s" % (m, x) for x in l])
140 140
141 141 if pieces:
142 142 for p, l in zip(zip(*pieces), lines):
143 143 ui.write("%s: %s" % (" ".join(p), l[1]))
144 144
145 145 def archive(ui, repo, dest, **opts):
146 146 '''create unversioned archive of a repository revision
147 147
148 148 By default, the revision used is the parent of the working
149 149 directory; use "-r" to specify a different revision.
150 150
151 151 To specify the type of archive to create, use "-t". Valid
152 152 types are:
153 153
154 154 "files" (default): a directory full of files
155 155 "tar": tar archive, uncompressed
156 156 "tbz2": tar archive, compressed using bzip2
157 157 "tgz": tar archive, compressed using gzip
158 158 "uzip": zip archive, uncompressed
159 159 "zip": zip archive, compressed using deflate
160 160
161 161 The exact name of the destination archive or directory is given
162 162 using a format string; see "hg help export" for details.
163 163
164 164 Each member added to an archive file has a directory prefix
165 165 prepended. Use "-p" to specify a format string for the prefix.
166 166 The default is the basename of the archive, with suffixes removed.
167 167 '''
168 168
169 169 node = repo.changectx(opts['rev']).node()
170 170 dest = cmdutil.make_filename(repo, dest, node)
171 171 if os.path.realpath(dest) == repo.root:
172 172 raise util.Abort(_('repository root cannot be destination'))
173 173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 174 kind = opts.get('type') or 'files'
175 175 prefix = opts['prefix']
176 176 if dest == '-':
177 177 if kind == 'files':
178 178 raise util.Abort(_('cannot archive plain files to stdout'))
179 179 dest = sys.stdout
180 180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 181 prefix = cmdutil.make_filename(repo, prefix, node)
182 182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 183 matchfn, prefix)
184 184
185 185 def backout(ui, repo, node=None, rev=None, **opts):
186 186 '''reverse effect of earlier changeset
187 187
188 188 Commit the backed out changes as a new changeset. The new
189 189 changeset is a child of the backed out changeset.
190 190
191 191 If you back out a changeset other than the tip, a new head is
192 192 created. This head is the parent of the working directory. If
193 193 you back out an old changeset, your working directory will appear
194 194 old after the backout. You should merge the backout changeset
195 195 with another head.
196 196
197 197 The --merge option remembers the parent of the working directory
198 198 before starting the backout, then merges the new head with that
199 199 changeset afterwards. This saves you from doing the merge by
200 200 hand. The result of this merge is not committed, as for a normal
201 201 merge.'''
202 202 if rev and node:
203 203 raise util.Abort(_("please specify just one revision"))
204 204
205 205 if not rev:
206 206 rev = node
207 207
208 208 bail_if_changed(repo)
209 209 op1, op2 = repo.dirstate.parents()
210 210 if op2 != nullid:
211 211 raise util.Abort(_('outstanding uncommitted merge'))
212 212 node = repo.lookup(rev)
213 213 p1, p2 = repo.changelog.parents(node)
214 214 if p1 == nullid:
215 215 raise util.Abort(_('cannot back out a change with no parents'))
216 216 if p2 != nullid:
217 217 if not opts['parent']:
218 218 raise util.Abort(_('cannot back out a merge changeset without '
219 219 '--parent'))
220 220 p = repo.lookup(opts['parent'])
221 221 if p not in (p1, p2):
222 222 raise util.Abort(_('%s is not a parent of %s') %
223 223 (short(p), short(node)))
224 224 parent = p
225 225 else:
226 226 if opts['parent']:
227 227 raise util.Abort(_('cannot use --parent on non-merge changeset'))
228 228 parent = p1
229 229 hg.clean(repo, node, show_stats=False)
230 230 revert_opts = opts.copy()
231 231 revert_opts['date'] = None
232 232 revert_opts['all'] = True
233 233 revert_opts['rev'] = hex(parent)
234 234 revert(ui, repo, **revert_opts)
235 235 commit_opts = opts.copy()
236 236 commit_opts['addremove'] = False
237 237 if not commit_opts['message'] and not commit_opts['logfile']:
238 238 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
239 239 commit_opts['force_editor'] = True
240 240 commit(ui, repo, **commit_opts)
241 241 def nice(node):
242 242 return '%d:%s' % (repo.changelog.rev(node), short(node))
243 243 ui.status(_('changeset %s backs out changeset %s\n') %
244 244 (nice(repo.changelog.tip()), nice(node)))
245 245 if op1 != node:
246 246 if opts['merge']:
247 247 ui.status(_('merging with changeset %s\n') % nice(op1))
248 248 hg.merge(repo, hex(op1))
249 249 else:
250 250 ui.status(_('the backout changeset is a new head - '
251 251 'do not forget to merge\n'))
252 252 ui.status(_('(use "backout --merge" '
253 253 'if you want to auto-merge)\n'))
254 254
255 255 def branch(ui, repo, label=None, **opts):
256 256 """set or show the current branch name
257 257
258 258 With <name>, set the current branch name. Otherwise, show the
259 259 current branch name.
260 260
261 261 Unless --force is specified, branch will not let you set a
262 262 branch name that shadows an existing branch.
263 263 """
264 264
265 265 if label:
266 266 if not opts.get('force') and label in repo.branchtags():
267 267 if label not in [p.branch() for p in repo.workingctx().parents()]:
268 268 raise util.Abort(_('a branch of the same name already exists'
269 269 ' (use --force to override)'))
270 270 repo.dirstate.setbranch(util.fromlocal(label))
271 271 else:
272 272 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
273 273
def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    # Sort by descending revision number: negate the revision so a
    # plain ascending sort yields newest-first.
    entries = [(-repo.changelog.rev(node), node, tag)
               for tag, node in repo.branchtags().items()]
    entries.sort()
    hexfunc = ui.debugflag and hex or short
    for negrev, node, tag in entries:
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            pad = " " * (30 - util.locallen(tag))
            ui.write("%s%s %s:%s\n" % (tag, pad, -negrev, hexfunc(node)))
289 289
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    # Heads of the changeset set to bundle; None means "all heads".
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        # --base mode: compute the outgoing set locally, without
        # contacting any remote repository.
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        # `has` holds every node assumed present on the receiving side:
        # nullid plus everything reachable from any --base revision.
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # Breadth-first walk from the requested heads toward the roots.
        # A node with no parents outside `has` is a boundary node of the
        # outgoing set and is prepended to `o`; otherwise its missing
        # parents are queued (once each, tracked via `seen`).
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # Destination mode: ask the remote which changesets it lacks.
        setremoteconfig(ui, opts)
        # NOTE(review): parseurl appears to split revision components out
        # of the destination URL and merge them into `revs` — confirm
        # against cmdutil.parseurl.
        dest, revs = cmdutil.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    # With explicit heads, restrict the changegroup to their ancestors.
    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
348 349
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    # Resolve the requested revision once; reuse its node for every file.
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    walker = cmdutil.walk(repo, (file1,) + pats, opts, node)
    for src, abspath, relpath, exact in walker:
        out = cmdutil.make_file(repo, opts['output'], node, pathname=abspath)
        out.write(ctx.filectx(abspath).data())
369 370
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

      $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    # Collect the clone options up front, then hand the real work to hg.clone.
    cloneopts = dict(pull=opts['pull'],
                     stream=opts['uncompressed'],
                     rev=opts['rev'],
                     update=not opts['noupdate'])
    hg.clone(ui, source, dest, **cloneopts)
415 416
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        # Explicit patterns were given: validate that each named file is
        # actually changed/tracked before committing.
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        # Lazily-built sorted copy of `files`, used only for the
        # directory-prefix bisection below.
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                if f in unknown:
                    raise util.Abort(_("file %s not tracked!") % rf)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    # A directory matches if some changed file sorts just
                    # after "dir/" and carries that prefix.
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not stat.S_ISREG(mode):
                    # Sockets, fifos, devices etc. cannot be committed.
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
    else:
        # No patterns: an empty file list tells repo.commit to commit
        # everything status reports as changed.
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))
467 468
468 469 def docopy(ui, repo, pats, opts, wlock):
469 470 # called with the repo lock held
470 471 #
471 472 # hgsep => pathname that uses "/" to separate directories
472 473 # ossep => pathname that uses os.sep to separate directories
473 474 cwd = repo.getcwd()
474 475 errors = 0
475 476 copied = []
476 477 targets = {}
477 478
478 479 # abs: hgsep
479 480 # rel: ossep
480 481 # return: hgsep
481 482 def okaytocopy(abs, rel, exact):
482 483 reasons = {'?': _('is not managed'),
483 484 'a': _('has been marked for add'),
484 485 'r': _('has been marked for remove')}
485 486 state = repo.dirstate.state(abs)
486 487 reason = reasons.get(state)
487 488 if reason:
488 489 if state == 'a':
489 490 origsrc = repo.dirstate.copied(abs)
490 491 if origsrc is not None:
491 492 return origsrc
492 493 if exact:
493 494 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
494 495 else:
495 496 return abs
496 497
497 498 # origsrc: hgsep
498 499 # abssrc: hgsep
499 500 # relsrc: ossep
500 501 # target: ossep
501 502 def copy(origsrc, abssrc, relsrc, target, exact):
502 503 abstarget = util.canonpath(repo.root, cwd, target)
503 504 reltarget = util.pathto(repo.root, cwd, abstarget)
504 505 prevsrc = targets.get(abstarget)
505 506 if prevsrc is not None:
506 507 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
507 508 (reltarget, util.localpath(abssrc),
508 509 util.localpath(prevsrc)))
509 510 return
510 511 if (not opts['after'] and os.path.exists(reltarget) or
511 512 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
512 513 if not opts['force']:
513 514 ui.warn(_('%s: not overwriting - file exists\n') %
514 515 reltarget)
515 516 return
516 517 if not opts['after'] and not opts.get('dry_run'):
517 518 os.unlink(reltarget)
518 519 if opts['after']:
519 520 if not os.path.exists(reltarget):
520 521 return
521 522 else:
522 523 targetdir = os.path.dirname(reltarget) or '.'
523 524 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
524 525 os.makedirs(targetdir)
525 526 try:
526 527 restore = repo.dirstate.state(abstarget) == 'r'
527 528 if restore and not opts.get('dry_run'):
528 529 repo.undelete([abstarget], wlock)
529 530 try:
530 531 if not opts.get('dry_run'):
531 532 util.copyfile(relsrc, reltarget)
532 533 restore = False
533 534 finally:
534 535 if restore:
535 536 repo.remove([abstarget], wlock=wlock)
536 537 except IOError, inst:
537 538 if inst.errno == errno.ENOENT:
538 539 ui.warn(_('%s: deleted in working copy\n') % relsrc)
539 540 else:
540 541 ui.warn(_('%s: cannot copy - %s\n') %
541 542 (relsrc, inst.strerror))
542 543 errors += 1
543 544 return
544 545 if ui.verbose or not exact:
545 546 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
546 547 targets[abstarget] = abssrc
547 548 if abstarget != origsrc and not opts.get('dry_run'):
548 549 repo.copy(origsrc, abstarget, wlock)
549 550 copied.append((abssrc, relsrc, exact))
550 551
551 552 # pat: ossep
552 553 # dest ossep
553 554 # srcs: list of (hgsep, hgsep, ossep, bool)
554 555 # return: function that takes hgsep and returns ossep
555 556 def targetpathfn(pat, dest, srcs):
556 557 if os.path.isdir(pat):
557 558 abspfx = util.canonpath(repo.root, cwd, pat)
558 559 abspfx = util.localpath(abspfx)
559 560 if destdirexists:
560 561 striplen = len(os.path.split(abspfx)[0])
561 562 else:
562 563 striplen = len(abspfx)
563 564 if striplen:
564 565 striplen += len(os.sep)
565 566 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
566 567 elif destdirexists:
567 568 res = lambda p: os.path.join(dest,
568 569 os.path.basename(util.localpath(p)))
569 570 else:
570 571 res = lambda p: dest
571 572 return res
572 573
573 574 # pat: ossep
574 575 # dest ossep
575 576 # srcs: list of (hgsep, hgsep, ossep, bool)
576 577 # return: function that takes hgsep and returns ossep
577 578 def targetpathafterfn(pat, dest, srcs):
578 579 if util.patkind(pat, None)[0]:
579 580 # a mercurial pattern
580 581 res = lambda p: os.path.join(dest,
581 582 os.path.basename(util.localpath(p)))
582 583 else:
583 584 abspfx = util.canonpath(repo.root, cwd, pat)
584 585 if len(abspfx) < len(srcs[0][0]):
585 586 # A directory. Either the target path contains the last
586 587 # component of the source path or it does not.
587 588 def evalpath(striplen):
588 589 score = 0
589 590 for s in srcs:
590 591 t = os.path.join(dest, util.localpath(s[0])[striplen:])
591 592 if os.path.exists(t):
592 593 score += 1
593 594 return score
594 595
595 596 abspfx = util.localpath(abspfx)
596 597 striplen = len(abspfx)
597 598 if striplen:
598 599 striplen += len(os.sep)
599 600 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
600 601 score = evalpath(striplen)
601 602 striplen1 = len(os.path.split(abspfx)[0])
602 603 if striplen1:
603 604 striplen1 += len(os.sep)
604 605 if evalpath(striplen1) > score:
605 606 striplen = striplen1
606 607 res = lambda p: os.path.join(dest,
607 608 util.localpath(p)[striplen:])
608 609 else:
609 610 # a file
610 611 if destdirexists:
611 612 res = lambda p: os.path.join(dest,
612 613 os.path.basename(util.localpath(p)))
613 614 else:
614 615 res = lambda p: dest
615 616 return res
616 617
617 618
618 619 pats = util.expand_glob(pats)
619 620 if not pats:
620 621 raise util.Abort(_('no source or destination specified'))
621 622 if len(pats) == 1:
622 623 raise util.Abort(_('no destination specified'))
623 624 dest = pats.pop()
624 625 destdirexists = os.path.isdir(dest)
625 626 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
626 627 raise util.Abort(_('with multiple sources, destination must be an '
627 628 'existing directory'))
628 629 if opts['after']:
629 630 tfn = targetpathafterfn
630 631 else:
631 632 tfn = targetpathfn
632 633 copylist = []
633 634 for pat in pats:
634 635 srcs = []
635 636 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
636 637 globbed=True):
637 638 origsrc = okaytocopy(abssrc, relsrc, exact)
638 639 if origsrc:
639 640 srcs.append((origsrc, abssrc, relsrc, exact))
640 641 if not srcs:
641 642 continue
642 643 copylist.append((tfn(pat, dest, srcs), srcs))
643 644 if not copylist:
644 645 raise util.Abort(_('no files to copy'))
645 646
646 647 for targetpath, srcs in copylist:
647 648 for origsrc, abssrc, relsrc, exact in srcs:
648 649 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
649 650
650 651 if errors:
651 652 ui.warn(_('(consider using --after)\n'))
652 653 return errors, copied
653 654
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # All the real work happens in docopy, under the working-dir lock.
    wlock = repo.wlock(0)
    errcount, copied = docopy(ui, repo, pats, opts, wlock)
    return errcount
671 672
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    anc = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
677 678
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # --options: list option names (global plus, if a command was
        # given, that command's own table) instead of command names.
        options = []
        tables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            tables.append(entry[1])
        for table in tables:
            for opt in table:
                if opt[0]:
                    options.append('-%s' % opt[0])
                options.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(options))
        return

    names = findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
698 699
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # Default to tip when no revision is named.
    if rev == "":
        rev = repo.changelog.tip()
    files = repo.changectx(rev).manifest()
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)
707 708
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    working-directory parents and aborts if any inconsistency is found.
    """
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    # (removed dead code: a sorted `keys` list was built here but never
    # used — the loops below iterate `dc` and `m1` directly)
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        # normal/removed entries must exist in the first parent...
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # ...added entries must not...
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # ...and merged entries must exist in at least one parent.
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        # Everything in the first parent's manifest must be tracked.
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
738 739
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    # At most one fully-qualified "section.name" selector is allowed.
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # Bare section name: print every item in the section.
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # Exact item match: print only the value.
                ui.write(value, '\n')
764 765
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # A missing second parent means "no parent" (the null revision).
    rev2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        wlock.release()
780 781
781 782 def debugstate(ui, repo):
782 783 """show the contents of the current dirstate"""
783 784 repo.dirstate.read()
784 785 dc = repo.dirstate.map
785 786 keys = dc.keys()
786 787 keys.sort()
787 788 for file_ in keys:
788 789 if dc[file_][3] == -1:
789 790 # Pad or slice to locale representation
790 791 locale_len = len(time.strftime("%x %X", time.localtime(0)))
791 792 timestr = 'unset'
792 793 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
793 794 else:
794 795 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
795 796 ui.write("%c %3o %10d %s %s\n"
796 797 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
797 798 timestr, file_))
798 799 for f in repo.dirstate.copies():
799 800 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
800 801
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # Map the ".d" data file name to its ".i" index companion.
    indexfile = file_[:-2] + ".i"
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), indexfile)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
808 809
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended also accepts the extra, more permissive date formats.
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
820 821
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for rev in xrange(r.count()):
        node = r.node(rev)
        parent1, parent2 = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                rev, r.start(rev), r.length(rev), r.base(rev),
                r.linkrev(node),
                short(node), short(parent1), short(parent2)))
832 833
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in xrange(r.count()):
        parent1, parent2 = r.parents(r.node(rev))
        ui.write("\t%d -> %d\n" % (r.rev(parent1), rev))
        # Emit the second-parent edge only for merges.
        if parent2 != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parent2), rev))
    ui.write("}\n")
844 845
845 846 def debuginstall(ui):
846 847 '''test Mercurial installation'''
847 848
848 849 def writetemp(contents):
849 850 (fd, name) = tempfile.mkstemp()
850 851 f = os.fdopen(fd, "wb")
851 852 f.write(contents)
852 853 f.close()
853 854 return name
854 855
855 856 problems = 0
856 857
857 858 # encoding
858 859 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
859 860 try:
860 861 util.fromlocal("test")
861 862 except util.Abort, inst:
862 863 ui.write(" %s\n" % inst)
863 864 ui.write(_(" (check that your locale is properly set)\n"))
864 865 problems += 1
865 866
866 867 # compiled modules
867 868 ui.status(_("Checking extensions...\n"))
868 869 try:
869 870 import bdiff, mpatch, base85
870 871 except Exception, inst:
871 872 ui.write(" %s\n" % inst)
872 873 ui.write(_(" One or more extensions could not be found"))
873 874 ui.write(_(" (check that you compiled the extensions)\n"))
874 875 problems += 1
875 876
876 877 # templates
877 878 ui.status(_("Checking templates...\n"))
878 879 try:
879 880 import templater
880 881 t = templater.templater(templater.templatepath("map-cmdline.default"))
881 882 except Exception, inst:
882 883 ui.write(" %s\n" % inst)
883 884 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
884 885 problems += 1
885 886
886 887 # patch
887 888 ui.status(_("Checking patch...\n"))
888 889 patcher = ui.config('ui', 'patch')
889 890 patcher = ((patcher and util.find_exe(patcher)) or
890 891 util.find_exe('gpatch') or
891 892 util.find_exe('patch'))
892 893 if not patcher:
893 894 ui.write(_(" Can't find patch or gpatch in PATH\n"))
894 895 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
895 896 problems += 1
896 897 else:
897 898 # actually attempt a patch here
898 899 a = "1\n2\n3\n4\n"
899 900 b = "1\n2\n3\ninsert\n4\n"
900 901 fa = writetemp(a)
901 902 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
902 903 fd = writetemp(d)
903 904
904 905 files = {}
905 906 try:
906 907 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
907 908 except util.Abort, e:
908 909 ui.write(_(" patch call failed:\n"))
909 910 ui.write(" " + str(e) + "\n")
910 911 problems += 1
911 912 else:
912 913 if list(files) != [os.path.basename(fa)]:
913 914 ui.write(_(" unexpected patch output!"))
914 915 ui.write(_(" (you may have an incompatible version of patch)\n"))
915 916 problems += 1
916 917 a = file(fa).read()
917 918 if a != b:
918 919 ui.write(_(" patch test failed!"))
919 920 ui.write(_(" (you may have an incompatible version of patch)\n"))
920 921 problems += 1
921 922
922 923 os.unlink(fa)
923 924 os.unlink(fd)
924 925
925 926 # merge helper
926 927 ui.status(_("Checking merge helper...\n"))
927 928 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
928 929 or "hgmerge")
929 930 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
930 931 if not cmdpath:
931 932 if cmd == 'hgmerge':
932 933 ui.write(_(" No merge helper set and can't find default"
933 934 " hgmerge script in PATH\n"))
934 935 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
935 936 else:
936 937 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
937 938 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
938 939 problems += 1
939 940 else:
940 941 # actually attempt a patch here
941 942 fa = writetemp("1\n2\n3\n4\n")
942 943 fl = writetemp("1\n2\n3\ninsert\n4\n")
943 944 fr = writetemp("begin\n1\n2\n3\n4\n")
944 945 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
945 946 if r:
946 947 ui.write(_(" got unexpected merge error %d!") % r)
947 948 problems += 1
948 949 m = file(fl).read()
949 950 if m != "begin\n1\n2\n3\ninsert\n4\n":
950 951 ui.write(_(" got unexpected merge results!") % r)
951 952 ui.write(_(" (your merge helper may have the"
952 953 " wrong argument order)\n"))
953 954 ui.write(m)
954 955 os.unlink(fa)
955 956 os.unlink(fl)
956 957 os.unlink(fr)
957 958
958 959 # editor
959 960 ui.status(_("Checking commit editor...\n"))
960 961 editor = (os.environ.get("HGEDITOR") or
961 962 ui.config("ui", "editor") or
962 963 os.environ.get("EDITOR", "vi"))
963 964 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
964 965 if not cmdpath:
965 966 if editor == 'vi':
966 967 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
967 968 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
968 969 else:
969 970 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
970 971 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
971 972 problems += 1
972 973
973 974 # check username
974 975 ui.status(_("Checking username...\n"))
975 976 user = os.environ.get("HGUSER")
976 977 if user is None:
977 978 user = ui.config("ui", "username")
978 979 if user is None:
979 980 user = os.environ.get("EMAIL")
980 981 if not user:
981 982 ui.warn(" ")
982 983 ui.username()
983 984 ui.write(_(" (specify a username in your .hgrc file)\n"))
984 985
985 986 if not problems:
986 987 ui.status(_("No problems detected\n"))
987 988 else:
988 989 ui.write(_("%s problems detected,"
989 990 " please check your install!\n") % problems)
990 991
991 992 return problems
992 993
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abspath, relpath, exact in walker:
        renamed = ctx.filectx(abspath).renamed()
        if renamed:
            ui.write(_("%s renamed from %s:%s\n") %
                     (relpath, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
1004 1005
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # Column widths sized to the longest absolute and relative paths.
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s  %%-%ds  %%-%ds  %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1016 1017
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # Resolve the two comparison points, the file set, then delegate.
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
1044 1045
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # Pluralize the progress note when exporting several revisions.
    ui.note(len(revs) > 1 and _('exporting patches:\n')
            or _('exporting patch:\n'))
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1085 1086
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep.  It only accepts
    Python/Perl regexps.  It searches repository history, not the
    working directory.  It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match.  To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # filelog objects, cached per file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for each regexp match
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # one matched line; equality is on line text only, so that
        # difflib can detect match-status changes between revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            m.append(linestate(line, lnum, cstart, cend))

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) pairs describing how matches changed
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}
    def display(fn, rev, states, prevstates):
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            lines = difflinestates(states, prevstates)
        else:
            lines = [('', l) for l in prevstates]
        for change, l in lines:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # print each (file, rev) pair at most once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any state left over for files whose last match window ended
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1245 1246
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # restrict to heads reachable from --rev, if given
    if opts['rev']:
        nodes = repo.heads(repo.lookup(opts['rev']))
    else:
        nodes = repo.heads()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node)
1262 1263
1263 1264 def help_(ui, name=None, with_version=False):
1264 1265 """show help for a command, extension, or list of commands
1265 1266
1266 1267 With no arguments, print a list of commands and short help.
1267 1268
1268 1269 Given a command name, print help for that command.
1269 1270
1270 1271 Given an extension name, print help for that extension, and the
1271 1272 commands it provides."""
1272 1273 option_lists = []
1273 1274
1274 1275 def addglobalopts(aliases):
1275 1276 if ui.verbose:
1276 1277 option_lists.append((_("global options:"), globalopts))
1277 1278 if name == 'shortlist':
1278 1279 option_lists.append((_('use "hg help" for the full list '
1279 1280 'of commands'), ()))
1280 1281 else:
1281 1282 if name == 'shortlist':
1282 1283 msg = _('use "hg help" for the full list of commands '
1283 1284 'or "hg -v" for details')
1284 1285 elif aliases:
1285 1286 msg = _('use "hg -v help%s" to show aliases and '
1286 1287 'global options') % (name and " " + name or "")
1287 1288 else:
1288 1289 msg = _('use "hg -v help %s" to show global options') % name
1289 1290 option_lists.append((msg, ()))
1290 1291
1291 1292 def helpcmd(name):
1292 1293 if with_version:
1293 1294 version_(ui)
1294 1295 ui.write('\n')
1295 1296 aliases, i = findcmd(ui, name)
1296 1297 # synopsis
1297 1298 ui.write("%s\n\n" % i[2])
1298 1299
1299 1300 # description
1300 1301 doc = i[0].__doc__
1301 1302 if not doc:
1302 1303 doc = _("(No help text available)")
1303 1304 if ui.quiet:
1304 1305 doc = doc.splitlines(0)[0]
1305 1306 ui.write("%s\n" % doc.rstrip())
1306 1307
1307 1308 if not ui.quiet:
1308 1309 # aliases
1309 1310 if len(aliases) > 1:
1310 1311 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1311 1312
1312 1313 # options
1313 1314 if i[1]:
1314 1315 option_lists.append((_("options:\n"), i[1]))
1315 1316
1316 1317 addglobalopts(False)
1317 1318
1318 1319 def helplist(select=None):
1319 1320 h = {}
1320 1321 cmds = {}
1321 1322 for c, e in table.items():
1322 1323 f = c.split("|", 1)[0]
1323 1324 if select and not select(f):
1324 1325 continue
1325 1326 if name == "shortlist" and not f.startswith("^"):
1326 1327 continue
1327 1328 f = f.lstrip("^")
1328 1329 if not ui.debugflag and f.startswith("debug"):
1329 1330 continue
1330 1331 doc = e[0].__doc__
1331 1332 if not doc:
1332 1333 doc = _("(No help text available)")
1333 1334 h[f] = doc.splitlines(0)[0].rstrip()
1334 1335 cmds[f] = c.lstrip("^")
1335 1336
1336 1337 fns = h.keys()
1337 1338 fns.sort()
1338 1339 m = max(map(len, fns))
1339 1340 for f in fns:
1340 1341 if ui.verbose:
1341 1342 commands = cmds[f].replace("|",", ")
1342 1343 ui.write(" %s:\n %s\n"%(commands, h[f]))
1343 1344 else:
1344 1345 ui.write(' %-*s %s\n' % (m, f, h[f]))
1345 1346
1346 1347 if not ui.quiet:
1347 1348 addglobalopts(True)
1348 1349
1349 1350 def helptopic(name):
1350 1351 v = None
1351 1352 for i in help.helptable:
1352 1353 l = i.split('|')
1353 1354 if name in l:
1354 1355 v = i
1355 1356 header = l[-1]
1356 1357 if not v:
1357 1358 raise UnknownCommand(name)
1358 1359
1359 1360 # description
1360 1361 doc = help.helptable[v]
1361 1362 if not doc:
1362 1363 doc = _("(No help text available)")
1363 1364 if callable(doc):
1364 1365 doc = doc()
1365 1366
1366 1367 ui.write("%s\n" % header)
1367 1368 ui.write("%s\n" % doc.rstrip())
1368 1369
1369 1370 def helpext(name):
1370 1371 try:
1371 1372 mod = findext(name)
1372 1373 except KeyError:
1373 1374 raise UnknownCommand(name)
1374 1375
1375 1376 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1376 1377 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1377 1378 for d in doc[1:]:
1378 1379 ui.write(d, '\n')
1379 1380
1380 1381 ui.status('\n')
1381 1382
1382 1383 try:
1383 1384 ct = mod.cmdtable
1384 1385 except AttributeError:
1385 1386 ui.status(_('no commands defined\n'))
1386 1387 return
1387 1388
1388 1389 ui.status(_('list of commands:\n\n'))
1389 1390 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1390 1391 helplist(modcmds.has_key)
1391 1392
1392 1393 if name and name != 'shortlist':
1393 1394 i = None
1394 1395 for f in (helpcmd, helptopic, helpext):
1395 1396 try:
1396 1397 f(name)
1397 1398 i = None
1398 1399 break
1399 1400 except UnknownCommand, inst:
1400 1401 i = inst
1401 1402 if i:
1402 1403 raise i
1403 1404
1404 1405 else:
1405 1406 # program name
1406 1407 if ui.verbose or with_version:
1407 1408 version_(ui)
1408 1409 else:
1409 1410 ui.status(_("Mercurial Distributed SCM\n"))
1410 1411 ui.status('\n')
1411 1412
1412 1413 # list of commands
1413 1414 if name == "shortlist":
1414 1415 ui.status(_('basic commands:\n\n'))
1415 1416 else:
1416 1417 ui.status(_('list of commands:\n\n'))
1417 1418
1418 1419 helplist()
1419 1420
1420 1421 # list all option lists
1421 1422 opt_output = []
1422 1423 for title, options in option_lists:
1423 1424 opt_output.append(("\n%s" % title, None))
1424 1425 for shortopt, longopt, default, desc in options:
1425 1426 if "DEPRECATED" in desc and not ui.verbose: continue
1426 1427 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1427 1428 longopt and " --%s" % longopt),
1428 1429 "%s%s" % (desc,
1429 1430 default
1430 1431 and _(" (default: %s)") % default
1431 1432 or "")))
1432 1433
1433 1434 if opt_output:
1434 1435 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1435 1436 for first, second in opt_output:
1436 1437 if second:
1437 1438 ui.write(" %-*s %s\n" % (opts_len, first, second))
1438 1439 else:
1439 1440 ui.write("%s\n" % first)
1440 1441
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    hexfunc = ui.debugflag and hex or short
    modified, added, removed, deleted = repo.status()[:4]
    dirty = (modified or added or removed or deleted) and "+" or ""
    output = ["%s%s" %
              ('+'.join([hexfunc(parent) for parent in parents]), dirty)]

    if not ui.quiet:
        # show the branch name unless it is the default branch
        branch = util.tolocal(repo.workingctx().branch())
        if branch != 'default':
            output.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1475 1476
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message.  Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used).  From and Subject headers of email
    message are used as default committer and commit message.  All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body.  Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch.  This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if opts.get('exact') or not opts['force']:
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            cmdline_message = logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            wp = repo.workingctx().parents()
            if opts.get('exact'):
                # --exact: apply on top of the parents recorded in the patch
                if not nodeid or not p1:
                    raise util.Abort(_('not a mercurial patch'))
                p1 = repo.lookup(p1)
                p2 = repo.lookup(p2 or hex(nullid))

                if p1 != wp[0].node():
                    hg.clean(repo, p1, wlock=wlock)
                repo.dirstate.setparents(p1, p2)
                repo.dirstate.setbranch(branch or 'default')
            elif p2:
                # best effort: use recorded parents if they match the wdir
                try:
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2)
                    if p1 == wp[0].node():
                        repo.dirstate.setparents(p1, p2)
                except hg.RepoError:
                    pass

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                # record touched files in the dirstate even on failure
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
            if opts.get('exact'):
                if hex(n) != nodeid:
                    repo.rollback(wlock=wlock, lock=lock)
                    raise util.Abort(_('patch is damaged or loses information'))
        finally:
            os.unlink(tmpname)
1573 1574
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    # the URL may carry a '#rev' fragment; merge it with --rev
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # drop a stale bundle file, if any, before reporting no changes
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1632 1641
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory.  If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    # create=1 makes hg.repository initialize a fresh repo at dest
    hg.repository(ui, dest, create=1)
1647 1656
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default.  To search
    just the current directory and its subdirectories, use "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    node = rev and repo.lookup(rev) or None

    ret = 1  # exit status: 0 once at least one file is printed
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=util.always,
                                             default='relglob'):
        if src == 'b':
            continue
        # without a revision, skip files not tracked in the dirstate
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)
        ret = 0

    return ret
1687 1696
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files.  Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    # highest revision rename lookup needs to consider (for --copies)
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        # linkrev differs from rev: locate the file node via the manifest
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # all keywords must appear in user, description or files
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1809 1818
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            # file revision hash
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            # unix-style permission bits
            ui.write("%3s " % (m.execf(f) and "755" or "644"))
        ui.write("%s\n" % f)
1834 1843
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default.  Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # --rev is an alias for the positional argument
    if not node:
        node = rev

    if not node:
        # pick the only other head, if there is exactly one
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)
1870 1879
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # the URL may carry a '#rev' fragment; merge it with --rev
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
1901 1910
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    rev = opts.get('rev')
    if rev:
        # parents of the given revision (optionally of a file at that rev)
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        p = [cp.node() for cp in ctx.parents()]
    else:
        p = repo.dirstate.parents()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)
1921 1930
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        # look up a single name; exit status 1 when it is not defined
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
1941 1950
def postincoming(ui, repo, modheads, optupdate):
    # shared epilogue for pull/unbundle: update or hint, based on how
    # many new heads the incoming changesets added
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1954 1963
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.  By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    # the URL may carry a '#rev' fragment; merge it with --rev
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    if revs:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
2007 2019
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # parseurl splits an optional "#identifier" fragment off the URL and
    # folds it into the requested revision list alongside -r/--rev.
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # wrap the status message in _() for translation, matching pull()
    ui.status(_('pushing to %s\n') % (dest))
    if revs:
        # revisions are resolved locally; no remote lookup is needed
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # repo.push returns 0 on error; convert to a shell-style exit status
    return r == 0
2045 2060
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        # --files FILE: read extra file names to commit, one per line
        files += open(opts['files']).read().splitlines()

    # --parent may be given more than once, allowing arbitrary ancestry
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        # repo.rawcommit signals bad input via ValueError; surface it
        # as a normal user-facing abort
        raise util.Abort(str(inst))
2073 2088
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Nothing to recover: report failure with a non-zero exit status.
    if not repo.recover():
        return 1
    # Recovery succeeded; verify the repository to confirm integrity.
    return hg.verify(repo)
2085 2100
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    # keep the first five status lists (modified/added/removed/deleted/
    # unknown) as dicts for O(1) membership tests below
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    # NOTE(review): the walk rebinds 'exact' (the dict above) to a
    # per-file boolean; the dict is not referenced after this point
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            # forced removal of an added file just un-adds it
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without an exact name only marks already-missing files
            continue
        elif abs in removed:
            continue
        if reason:
            # warn only when the user named the file explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # unlink from the working dir unless this is a plain --after
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2136 2151
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # First record the copies, then schedule the sources for removal.
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    removals = []
    for abspath, relpath, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % relpath)
        removals.append(abspath)
    if not opts.get('dry_run'):
        repo.remove(removals, True, wlock=wlock)
    return errs
2161 2176
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        # translate --date into an equivalent --rev
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    # pmf is the parent manifest, loaded lazily further down and only
    # when the target differs from the working dir parent
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}          # abs path -> (rel path, named-exactly flag)
    target_only = {}    # files present only in the target revision

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    def badmatch(path):
        # suppress "not found" noise for names (or directories of names)
        # we already collected from the dirstate walk
        if path in names:
            return True
        path_ = path + '/'
        for f in names:
            if f.startswith(path_):
                return True
        return False

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=badmatch):
        if abs in names or src == 'b':
            continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    # each action is (list of affected files, status message template);
    # NOTE(review): the local name 'revert' shadows this function
    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        # handle() records one file for an action, saving a .orig backup
        # when requested; it closes over abs/rel/exact of this iteration
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if (dobackup and not opts['no_backup'] and
                (os.path.islink(rel) or os.path.exists(rel))):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.changectx(parent).manifest()
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r
2326 2341
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # the actual undo logic lives on the repository object
    repo.rollback()
2355 2370
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2362 2377
2363 2378 def serve(ui, repo, **opts):
2364 2379 """export the repository via HTTP
2365 2380
2366 2381 Start a local HTTP repository browser and pull server.
2367 2382
2368 2383 By default, the server logs accesses to stdout and errors to
2369 2384 stderr. Use the "-A" and "-E" options to log to files.
2370 2385 """
2371 2386
2372 2387 if opts["stdio"]:
2373 2388 if repo is None:
2374 2389 raise hg.RepoError(_("There is no Mercurial repository here"
2375 2390 " (.hg not found)"))
2376 2391 s = sshserver.sshserver(ui, repo)
2377 2392 s.serve_forever()
2378 2393
2379 2394 parentui = ui.parentui or ui
2380 2395 optlist = ("name templates style address port ipv6"
2381 2396 " accesslog errorlog webdir_conf")
2382 2397 for o in optlist.split():
2383 2398 if opts[o]:
2384 2399 parentui.setconfig("web", o, str(opts[o]))
2385 2400
2386 2401 if repo is None and not ui.config("web", "webdir_conf"):
2387 2402 raise hg.RepoError(_("There is no Mercurial repository here"
2388 2403 " (.hg not found)"))
2389 2404
2390 2405 class service:
2391 2406 def init(self):
2392 2407 try:
2393 2408 self.httpd = hgweb.server.create_server(parentui, repo)
2394 2409 except socket.error, inst:
2395 2410 raise util.Abort(_('cannot start server: ') + inst.args[1])
2396 2411
2397 2412 if not ui.verbose: return
2398 2413
2399 2414 if httpd.port != 80:
2400 2415 ui.status(_('listening at http://%s:%d/\n') %
2401 2416 (httpd.addr, httpd.port))
2402 2417 else:
2403 2418 ui.status(_('listening at http://%s/\n') % httpd.addr)
2404 2419
2405 2420 def run(self):
2406 2421 self.httpd.serve_forever()
2407 2422
2408 2423 service = service()
2409 2424
2410 2425 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2411 2426
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    # (option name, status character, file list) for the default display
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # clean files are only listed when explicitly requested (or with -A)
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # -0/--print0 terminates entries with NUL, for use with xargs -0
    end = opts['print0'] and '\0' or '\n'

    # show only the requested categories, or all default ones if none given
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % util.pathto(repo.root, cwd, f))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                # follow each entry with its copy source, if it has one
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
                                        end))
2475 2490
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # these names have fixed meanings and may never be used as tags
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        # positional REV form is deprecated in favour of -r REV
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # a tag is removed by re-pointing it at the null revision
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2522 2537
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    # newest tags first
    taglist = repo.tagslist()
    taglist.reverse()
    hexfunc = ui.debugflag and hex or short
    for tagname, node in taglist:
        hn = hexfunc(node)
        try:
            info = "%5d:%s" % (repo.changelog.rev(node), hn)
        except revlog.LookupError:
            # tag points at a changeset we do not have
            info = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % tagname)
        else:
            padding = " " * (30 - util.locallen(tagname))
            ui.write("%s%s %s\n" % (tagname, padding, info))
2545 2560
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(repo.changelog.count() + nullrev)
2552 2567
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    # local files are opened directly; anything else is fetched as a URL
    if os.path.exists(fname):
        f = open(fname, "rb")
    else:
        f = urllib.urlopen(fname)
    gen = changegroup.readbundle(f, fname)
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    # postincoming reports new heads and optionally updates (-u)
    return postincoming(ui, repo, modheads, opts['update'])
2566 2581
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # node and rev are two spellings of the same argument
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # translate --date into an equivalent revision
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2598 2613
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # the full check is implemented by hg.verify; its return value is
    # the command's exit status
    return hg.verify(repo)
2610 2625
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # the copyright notice goes through ui.status so -q suppresses it
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2621 2636
2622 2637 # Command options and aliases are listed here, alphabetically
2623 2638
# options accepted by every command, processed before command dispatch
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option groups, mixed into individual command tables below

# -n/--dry-run: report what would happen without doing it
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# options for commands that talk to a remote repository
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

# -I/-X file name pattern filters for walking commands
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

# commit message options shared by committing commands
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2662 2677
2663 2678 table = {
2664 2679 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2665 2680 "addremove":
2666 2681 (addremove,
2667 2682 [('s', 'similarity', '',
2668 2683 _('guess renamed files by similarity (0<=s<=100)')),
2669 2684 ] + walkopts + dryrunopts,
2670 2685 _('hg addremove [OPTION]... [FILE]...')),
2671 2686 "^annotate":
2672 2687 (annotate,
2673 2688 [('r', 'rev', '', _('annotate the specified revision')),
2674 2689 ('f', 'follow', None, _('follow file copies and renames')),
2675 2690 ('a', 'text', None, _('treat all files as text')),
2676 2691 ('u', 'user', None, _('list the author')),
2677 2692 ('d', 'date', None, _('list the date')),
2678 2693 ('n', 'number', None, _('list the revision number (default)')),
2679 2694 ('c', 'changeset', None, _('list the changeset')),
2680 2695 ] + walkopts,
2681 2696 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2682 2697 "archive":
2683 2698 (archive,
2684 2699 [('', 'no-decode', None, _('do not pass files through decoders')),
2685 2700 ('p', 'prefix', '', _('directory prefix for files in archive')),
2686 2701 ('r', 'rev', '', _('revision to distribute')),
2687 2702 ('t', 'type', '', _('type of distribution to create')),
2688 2703 ] + walkopts,
2689 2704 _('hg archive [OPTION]... DEST')),
2690 2705 "backout":
2691 2706 (backout,
2692 2707 [('', 'merge', None,
2693 2708 _('merge with old dirstate parent after backout')),
2694 2709 ('d', 'date', '', _('record datecode as commit date')),
2695 2710 ('', 'parent', '', _('parent to choose when backing out merge')),
2696 2711 ('u', 'user', '', _('record user as committer')),
2697 2712 ('r', 'rev', '', _('revision to backout')),
2698 2713 ] + walkopts + commitopts,
2699 2714 _('hg backout [OPTION]... [-r] REV')),
2700 2715 "branch": (branch,
2701 2716 [('f', 'force', None,
2702 2717 _('set branch name even if it shadows an existing branch'))],
2703 2718 _('hg branch [NAME]')),
2704 2719 "branches": (branches, [], _('hg branches')),
2705 2720 "bundle":
2706 2721 (bundle,
2707 2722 [('f', 'force', None,
2708 2723 _('run even when remote repository is unrelated')),
2709 2724 ('r', 'rev', [],
2710 2725 _('a changeset you would like to bundle')),
2711 2726 ('', 'base', [],
2712 2727 _('a base changeset to specify instead of a destination')),
2713 2728 ] + remoteopts,
2714 2729 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2715 2730 "cat":
2716 2731 (cat,
2717 2732 [('o', 'output', '', _('print output to file with formatted name')),
2718 2733 ('r', 'rev', '', _('print the given revision')),
2719 2734 ] + walkopts,
2720 2735 _('hg cat [OPTION]... FILE...')),
2721 2736 "^clone":
2722 2737 (clone,
2723 2738 [('U', 'noupdate', None, _('do not update the new working directory')),
2724 2739 ('r', 'rev', [],
2725 2740 _('a changeset you would like to have after cloning')),
2726 2741 ('', 'pull', None, _('use pull protocol to copy metadata')),
2727 2742 ('', 'uncompressed', None,
2728 2743 _('use uncompressed transfer (fast over LAN)')),
2729 2744 ] + remoteopts,
2730 2745 _('hg clone [OPTION]... SOURCE [DEST]')),
2731 2746 "^commit|ci":
2732 2747 (commit,
2733 2748 [('A', 'addremove', None,
2734 2749 _('mark new/missing files as added/removed before committing')),
2735 2750 ('d', 'date', '', _('record datecode as commit date')),
2736 2751 ('u', 'user', '', _('record user as commiter')),
2737 2752 ] + walkopts + commitopts,
2738 2753 _('hg commit [OPTION]... [FILE]...')),
2739 2754 "copy|cp":
2740 2755 (copy,
2741 2756 [('A', 'after', None, _('record a copy that has already occurred')),
2742 2757 ('f', 'force', None,
2743 2758 _('forcibly copy over an existing managed file')),
2744 2759 ] + walkopts + dryrunopts,
2745 2760 _('hg copy [OPTION]... [SOURCE]... DEST')),
2746 2761 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2747 2762 "debugcomplete":
2748 2763 (debugcomplete,
2749 2764 [('o', 'options', None, _('show the command options'))],
2750 2765 _('debugcomplete [-o] CMD')),
2751 2766 "debuginstall": (debuginstall, [], _('debuginstall')),
2752 2767 "debugrebuildstate":
2753 2768 (debugrebuildstate,
2754 2769 [('r', 'rev', '', _('revision to rebuild to'))],
2755 2770 _('debugrebuildstate [-r REV] [REV]')),
2756 2771 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2757 2772 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2758 2773 "debugstate": (debugstate, [], _('debugstate')),
2759 2774 "debugdate":
2760 2775 (debugdate,
2761 2776 [('e', 'extended', None, _('try extended date formats'))],
2762 2777 _('debugdate [-e] DATE [RANGE]')),
2763 2778 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2764 2779 "debugindex": (debugindex, [], _('debugindex FILE')),
2765 2780 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2766 2781 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2767 2782 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2768 2783 "^diff":
2769 2784 (diff,
2770 2785 [('r', 'rev', [], _('revision')),
2771 2786 ('a', 'text', None, _('treat all files as text')),
2772 2787 ('p', 'show-function', None,
2773 2788 _('show which function each change is in')),
2774 2789 ('g', 'git', None, _('use git extended diff format')),
2775 2790 ('', 'nodates', None, _("don't include dates in diff headers")),
2776 2791 ('w', 'ignore-all-space', None,
2777 2792 _('ignore white space when comparing lines')),
2778 2793 ('b', 'ignore-space-change', None,
2779 2794 _('ignore changes in the amount of white space')),
2780 2795 ('B', 'ignore-blank-lines', None,
2781 2796 _('ignore changes whose lines are all blank')),
2782 2797 ] + walkopts,
2783 2798 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2784 2799 "^export":
2785 2800 (export,
2786 2801 [('o', 'output', '', _('print output to file with formatted name')),
2787 2802 ('a', 'text', None, _('treat all files as text')),
2788 2803 ('g', 'git', None, _('use git extended diff format')),
2789 2804 ('', 'nodates', None, _("don't include dates in diff headers")),
2790 2805 ('', 'switch-parent', None, _('diff against the second parent'))],
2791 2806 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2792 2807 "grep":
2793 2808 (grep,
2794 2809 [('0', 'print0', None, _('end fields with NUL')),
2795 2810 ('', 'all', None, _('print all revisions that match')),
2796 2811 ('f', 'follow', None,
2797 2812 _('follow changeset history, or file history across copies and renames')),
2798 2813 ('i', 'ignore-case', None, _('ignore case when matching')),
2799 2814 ('l', 'files-with-matches', None,
2800 2815 _('print only filenames and revs that match')),
2801 2816 ('n', 'line-number', None, _('print matching line numbers')),
2802 2817 ('r', 'rev', [], _('search in given revision range')),
2803 2818 ('u', 'user', None, _('print user who committed change')),
2804 2819 ] + walkopts,
2805 2820 _('hg grep [OPTION]... PATTERN [FILE]...')),
2806 2821 "heads":
2807 2822 (heads,
2808 2823 [('', 'style', '', _('display using template map file')),
2809 2824 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2810 2825 ('', 'template', '', _('display with template'))],
2811 2826 _('hg heads [-r REV]')),
2812 2827 "help": (help_, [], _('hg help [COMMAND]')),
2813 2828 "identify|id": (identify, [], _('hg identify')),
2814 2829 "import|patch":
2815 2830 (import_,
2816 2831 [('p', 'strip', 1,
2817 2832 _('directory strip option for patch. This has the same\n'
2818 2833 'meaning as the corresponding patch option')),
2819 2834 ('b', 'base', '', _('base path')),
2820 2835 ('f', 'force', None,
2821 2836 _('skip check for outstanding uncommitted changes')),
2822 2837 ('', 'exact', None,
2823 2838 _('apply patch to the nodes from which it was generated'))] + commitopts,
2824 2839 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2825 2840 "incoming|in": (incoming,
2826 2841 [('M', 'no-merges', None, _('do not show merges')),
2827 2842 ('f', 'force', None,
2828 2843 _('run even when remote repository is unrelated')),
2829 2844 ('', 'style', '', _('display using template map file')),
2830 2845 ('n', 'newest-first', None, _('show newest record first')),
2831 2846 ('', 'bundle', '', _('file to store the bundles into')),
2832 2847 ('p', 'patch', None, _('show patch')),
2833 2848 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2834 2849 ('', 'template', '', _('display with template')),
2835 2850 ] + remoteopts,
2836 2851 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2837 2852 ' [--bundle FILENAME] [SOURCE]')),
2838 2853 "^init":
2839 2854 (init,
2840 2855 remoteopts,
2841 2856 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2842 2857 "locate":
2843 2858 (locate,
2844 2859 [('r', 'rev', '', _('search the repository as it stood at rev')),
2845 2860 ('0', 'print0', None,
2846 2861 _('end filenames with NUL, for use with xargs')),
2847 2862 ('f', 'fullpath', None,
2848 2863 _('print complete paths from the filesystem root')),
2849 2864 ] + walkopts,
2850 2865 _('hg locate [OPTION]... [PATTERN]...')),
2851 2866 "^log|history":
2852 2867 (log,
2853 2868 [('f', 'follow', None,
2854 2869 _('follow changeset history, or file history across copies and renames')),
2855 2870 ('', 'follow-first', None,
2856 2871 _('only follow the first parent of merge changesets')),
2857 2872 ('d', 'date', '', _('show revs matching date spec')),
2858 2873 ('C', 'copies', None, _('show copied files')),
2859 2874 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2860 2875 ('l', 'limit', '', _('limit number of changes displayed')),
2861 2876 ('r', 'rev', [], _('show the specified revision or range')),
2862 2877 ('', 'removed', None, _('include revs where files were removed')),
2863 2878 ('M', 'no-merges', None, _('do not show merges')),
2864 2879 ('', 'style', '', _('display using template map file')),
2865 2880 ('m', 'only-merges', None, _('show only merges')),
2866 2881 ('p', 'patch', None, _('show patch')),
2867 2882 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2868 2883 ('', 'template', '', _('display with template')),
2869 2884 ] + walkopts,
2870 2885 _('hg log [OPTION]... [FILE]')),
2871 2886 "manifest": (manifest, [], _('hg manifest [REV]')),
2872 2887 "^merge":
2873 2888 (merge,
2874 2889 [('f', 'force', None, _('force a merge with outstanding changes')),
2875 2890 ('r', 'rev', '', _('revision to merge')),
2876 2891 ],
2877 2892 _('hg merge [-f] [[-r] REV]')),
2878 2893 "outgoing|out": (outgoing,
2879 2894 [('M', 'no-merges', None, _('do not show merges')),
2880 2895 ('f', 'force', None,
2881 2896 _('run even when remote repository is unrelated')),
2882 2897 ('p', 'patch', None, _('show patch')),
2883 2898 ('', 'style', '', _('display using template map file')),
2884 2899 ('r', 'rev', [], _('a specific revision you would like to push')),
2885 2900 ('n', 'newest-first', None, _('show newest record first')),
2886 2901 ('', 'template', '', _('display with template')),
2887 2902 ] + remoteopts,
2888 2903 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2889 2904 "^parents":
2890 2905 (parents,
2891 2906 [('r', 'rev', '', _('show parents from the specified rev')),
2892 2907 ('', 'style', '', _('display using template map file')),
2893 2908 ('', 'template', '', _('display with template'))],
2894 2909 _('hg parents [-r REV] [FILE]')),
2895 2910 "paths": (paths, [], _('hg paths [NAME]')),
2896 2911 "^pull":
2897 2912 (pull,
2898 2913 [('u', 'update', None,
2899 2914 _('update to new tip if changesets were pulled')),
2900 2915 ('f', 'force', None,
2901 2916 _('run even when remote repository is unrelated')),
2902 2917 ('r', 'rev', [],
2903 2918 _('a specific revision up to which you would like to pull')),
2904 2919 ] + remoteopts,
2905 2920 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2906 2921 "^push":
2907 2922 (push,
2908 2923 [('f', 'force', None, _('force push')),
2909 2924 ('r', 'rev', [], _('a specific revision you would like to push')),
2910 2925 ] + remoteopts,
2911 2926 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2912 2927 "debugrawcommit|rawcommit":
2913 2928 (rawcommit,
2914 2929 [('p', 'parent', [], _('parent')),
2915 2930 ('d', 'date', '', _('date code')),
2916 2931 ('u', 'user', '', _('user')),
2917 2932 ('F', 'files', '', _('file list'))
2918 2933 ] + commitopts,
2919 2934 _('hg debugrawcommit [OPTION]... [FILE]...')),
2920 2935 "recover": (recover, [], _('hg recover')),
2921 2936 "^remove|rm":
2922 2937 (remove,
2923 2938 [('A', 'after', None, _('record remove that has already occurred')),
2924 2939 ('f', 'force', None, _('remove file even if modified')),
2925 2940 ] + walkopts,
2926 2941 _('hg remove [OPTION]... FILE...')),
2927 2942 "rename|mv":
2928 2943 (rename,
2929 2944 [('A', 'after', None, _('record a rename that has already occurred')),
2930 2945 ('f', 'force', None,
2931 2946 _('forcibly copy over an existing managed file')),
2932 2947 ] + walkopts + dryrunopts,
2933 2948 _('hg rename [OPTION]... SOURCE... DEST')),
2934 2949 "^revert":
2935 2950 (revert,
2936 2951 [('a', 'all', None, _('revert all changes when no arguments given')),
2937 2952 ('d', 'date', '', _('tipmost revision matching date')),
2938 2953 ('r', 'rev', '', _('revision to revert to')),
2939 2954 ('', 'no-backup', None, _('do not save backup copies of files')),
2940 2955 ] + walkopts + dryrunopts,
2941 2956 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2942 2957 "rollback": (rollback, [], _('hg rollback')),
2943 2958 "root": (root, [], _('hg root')),
2944 2959 "showconfig|debugconfig":
2945 2960 (showconfig,
2946 2961 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2947 2962 _('showconfig [-u] [NAME]...')),
2948 2963 "^serve":
2949 2964 (serve,
2950 2965 [('A', 'accesslog', '', _('name of access log file to write to')),
2951 2966 ('d', 'daemon', None, _('run server in background')),
2952 2967 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2953 2968 ('E', 'errorlog', '', _('name of error log file to write to')),
2954 2969 ('p', 'port', 0, _('port to use (default: 8000)')),
2955 2970 ('a', 'address', '', _('address to use')),
2956 2971 ('n', 'name', '',
2957 2972 _('name to show in web pages (default: working dir)')),
2958 2973 ('', 'webdir-conf', '', _('name of the webdir config file'
2959 2974 ' (serve more than one repo)')),
2960 2975 ('', 'pid-file', '', _('name of file to write process ID to')),
2961 2976 ('', 'stdio', None, _('for remote clients')),
2962 2977 ('t', 'templates', '', _('web templates to use')),
2963 2978 ('', 'style', '', _('template style to use')),
2964 2979 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2965 2980 _('hg serve [OPTION]...')),
2966 2981 "^status|st":
2967 2982 (status,
2968 2983 [('A', 'all', None, _('show status of all files')),
2969 2984 ('m', 'modified', None, _('show only modified files')),
2970 2985 ('a', 'added', None, _('show only added files')),
2971 2986 ('r', 'removed', None, _('show only removed files')),
2972 2987 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2973 2988 ('c', 'clean', None, _('show only files without changes')),
2974 2989 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2975 2990 ('i', 'ignored', None, _('show only ignored files')),
2976 2991 ('n', 'no-status', None, _('hide status prefix')),
2977 2992 ('C', 'copies', None, _('show source of copied files')),
2978 2993 ('0', 'print0', None,
2979 2994 _('end filenames with NUL, for use with xargs')),
2980 2995 ('', 'rev', [], _('show difference from revision')),
2981 2996 ] + walkopts,
2982 2997 _('hg status [OPTION]... [FILE]...')),
2983 2998 "tag":
2984 2999 (tag,
2985 3000 [('f', 'force', None, _('replace existing tag')),
2986 3001 ('l', 'local', None, _('make the tag local')),
2987 3002 ('m', 'message', '', _('message for tag commit log entry')),
2988 3003 ('d', 'date', '', _('record datecode as commit date')),
2989 3004 ('u', 'user', '', _('record user as commiter')),
2990 3005 ('r', 'rev', '', _('revision to tag')),
2991 3006 ('', 'remove', None, _('remove a tag'))],
2992 3007 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2993 3008 "tags": (tags, [], _('hg tags')),
2994 3009 "tip":
2995 3010 (tip,
2996 3011 [('', 'style', '', _('display using template map file')),
2997 3012 ('p', 'patch', None, _('show patch')),
2998 3013 ('', 'template', '', _('display with template'))],
2999 3014 _('hg tip [-p]')),
3000 3015 "unbundle":
3001 3016 (unbundle,
3002 3017 [('u', 'update', None,
3003 3018 _('update to new tip if changesets were unbundled'))],
3004 3019 _('hg unbundle [-u] FILE')),
3005 3020 "^update|up|checkout|co":
3006 3021 (update,
3007 3022 [('C', 'clean', None, _('overwrite locally modified files')),
3008 3023 ('d', 'date', '', _('tipmost revision matching date')),
3009 3024 ('r', 'rev', '', _('revision'))],
3010 3025 _('hg update [-C] [-d DATE] [[-r] REV]')),
3011 3026 "verify": (verify, [], _('hg verify')),
3012 3027 "version": (version_, [], _('hg version')),
3013 3028 }
3014 3029
3015 3030 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3016 3031 " debugindex debugindexdot debugdate debuginstall")
3017 3032 optionalrepo = ("paths serve showconfig")
3018 3033
3019 3034 def findpossible(ui, cmd):
3020 3035 """
3021 3036 Return cmd -> (aliases, command table entry)
3022 3037 for each matching command.
3023 3038 Return debug commands (or their aliases) only if no normal command matches.
3024 3039 """
3025 3040 choice = {}
3026 3041 debugchoice = {}
3027 3042 for e in table.keys():
3028 3043 aliases = e.lstrip("^").split("|")
3029 3044 found = None
3030 3045 if cmd in aliases:
3031 3046 found = cmd
3032 3047 elif not ui.config("ui", "strict"):
3033 3048 for a in aliases:
3034 3049 if a.startswith(cmd):
3035 3050 found = a
3036 3051 break
3037 3052 if found is not None:
3038 3053 if aliases[0].startswith("debug") or found.startswith("debug"):
3039 3054 debugchoice[found] = (aliases, table[e])
3040 3055 else:
3041 3056 choice[found] = (aliases, table[e])
3042 3057
3043 3058 if not choice and debugchoice:
3044 3059 choice = debugchoice
3045 3060
3046 3061 return choice
3047 3062
3048 3063 def findcmd(ui, cmd):
3049 3064 """Return (aliases, command table entry) for command string."""
3050 3065 choice = findpossible(ui, cmd)
3051 3066
3052 3067 if choice.has_key(cmd):
3053 3068 return choice[cmd]
3054 3069
3055 3070 if len(choice) > 1:
3056 3071 clist = choice.keys()
3057 3072 clist.sort()
3058 3073 raise AmbiguousCommand(cmd, clist)
3059 3074
3060 3075 if choice:
3061 3076 return choice.values()[0]
3062 3077
3063 3078 raise UnknownCommand(cmd)
3064 3079
3065 3080 def catchterm(*args):
3066 3081 raise util.SignalInterrupt
3067 3082
3068 3083 def run():
3069 3084 sys.exit(dispatch(sys.argv[1:]))
3070 3085
3071 3086 class ParseError(Exception):
3072 3087 """Exception raised on errors in parsing the command line."""
3073 3088
3074 3089 def parse(ui, args):
3075 3090 options = {}
3076 3091 cmdoptions = {}
3077 3092
3078 3093 try:
3079 3094 args = fancyopts.fancyopts(args, globalopts, options)
3080 3095 except fancyopts.getopt.GetoptError, inst:
3081 3096 raise ParseError(None, inst)
3082 3097
3083 3098 if args:
3084 3099 cmd, args = args[0], args[1:]
3085 3100 aliases, i = findcmd(ui, cmd)
3086 3101 cmd = aliases[0]
3087 3102 defaults = ui.config("defaults", cmd)
3088 3103 if defaults:
3089 3104 args = shlex.split(defaults) + args
3090 3105 c = list(i[1])
3091 3106 else:
3092 3107 cmd = None
3093 3108 c = []
3094 3109
3095 3110 # combine global options into local
3096 3111 for o in globalopts:
3097 3112 c.append((o[0], o[1], options[o[1]], o[3]))
3098 3113
3099 3114 try:
3100 3115 args = fancyopts.fancyopts(args, c, cmdoptions)
3101 3116 except fancyopts.getopt.GetoptError, inst:
3102 3117 raise ParseError(cmd, inst)
3103 3118
3104 3119 # separate global options back out
3105 3120 for o in globalopts:
3106 3121 n = o[1]
3107 3122 options[n] = cmdoptions[n]
3108 3123 del cmdoptions[n]
3109 3124
3110 3125 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3111 3126
3112 3127 external = {}
3113 3128
3114 3129 def findext(name):
3115 3130 '''return module with given extension name'''
3116 3131 try:
3117 3132 return sys.modules[external[name]]
3118 3133 except KeyError:
3119 3134 for k, v in external.iteritems():
3120 3135 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3121 3136 return sys.modules[v]
3122 3137 raise KeyError(name)
3123 3138
3124 3139 def load_extensions(ui):
3125 3140 added = []
3126 3141 for ext_name, load_from_name in ui.extensions():
3127 3142 if ext_name in external:
3128 3143 continue
3129 3144 try:
3130 3145 if load_from_name:
3131 3146 # the module will be loaded in sys.modules
3132 3147 # choose an unique name so that it doesn't
3133 3148 # conflicts with other modules
3134 3149 module_name = "hgext_%s" % ext_name.replace('.', '_')
3135 3150 mod = imp.load_source(module_name, load_from_name)
3136 3151 else:
3137 3152 def importh(name):
3138 3153 mod = __import__(name)
3139 3154 components = name.split('.')
3140 3155 for comp in components[1:]:
3141 3156 mod = getattr(mod, comp)
3142 3157 return mod
3143 3158 try:
3144 3159 mod = importh("hgext.%s" % ext_name)
3145 3160 except ImportError:
3146 3161 mod = importh(ext_name)
3147 3162 external[ext_name] = mod.__name__
3148 3163 added.append((mod, ext_name))
3149 3164 except (util.SignalInterrupt, KeyboardInterrupt):
3150 3165 raise
3151 3166 except Exception, inst:
3152 3167 ui.warn(_("*** failed to import extension %s: %s\n") %
3153 3168 (ext_name, inst))
3154 3169 if ui.print_exc():
3155 3170 return 1
3156 3171
3157 3172 for mod, name in added:
3158 3173 uisetup = getattr(mod, 'uisetup', None)
3159 3174 if uisetup:
3160 3175 uisetup(ui)
3161 3176 reposetup = getattr(mod, 'reposetup', None)
3162 3177 if reposetup:
3163 3178 hg.repo_setup_hooks.append(reposetup)
3164 3179 cmdtable = getattr(mod, 'cmdtable', {})
3165 3180 overrides = [cmd for cmd in cmdtable if cmd in table]
3166 3181 if overrides:
3167 3182 ui.warn(_("extension '%s' overrides commands: %s\n")
3168 3183 % (name, " ".join(overrides)))
3169 3184 table.update(cmdtable)
3170 3185
3171 3186 def parseconfig(config):
3172 3187 """parse the --config options from the command line"""
3173 3188 parsed = []
3174 3189 for cfg in config:
3175 3190 try:
3176 3191 name, value = cfg.split('=', 1)
3177 3192 section, name = name.split('.', 1)
3178 3193 if not section or not name:
3179 3194 raise IndexError
3180 3195 parsed.append((section, name, value))
3181 3196 except (IndexError, ValueError):
3182 3197 raise util.Abort(_('malformed --config option: %s') % cfg)
3183 3198 return parsed
3184 3199
3185 3200 def dispatch(args):
3186 3201 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3187 3202 num = getattr(signal, name, None)
3188 3203 if num: signal.signal(num, catchterm)
3189 3204
3190 3205 try:
3191 3206 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3192 3207 except util.Abort, inst:
3193 3208 sys.stderr.write(_("abort: %s\n") % inst)
3194 3209 return -1
3195 3210
3196 3211 load_extensions(u)
3197 3212 u.addreadhook(load_extensions)
3198 3213
3199 3214 try:
3200 3215 cmd, func, args, options, cmdoptions = parse(u, args)
3201 3216 if options["encoding"]:
3202 3217 util._encoding = options["encoding"]
3203 3218 if options["encodingmode"]:
3204 3219 util._encodingmode = options["encodingmode"]
3205 3220 if options["time"]:
3206 3221 def get_times():
3207 3222 t = os.times()
3208 3223 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3209 3224 t = (t[0], t[1], t[2], t[3], time.clock())
3210 3225 return t
3211 3226 s = get_times()
3212 3227 def print_time():
3213 3228 t = get_times()
3214 3229 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3215 3230 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3216 3231 atexit.register(print_time)
3217 3232
3218 3233 # enter the debugger before command execution
3219 3234 if options['debugger']:
3220 3235 pdb.set_trace()
3221 3236
3222 3237 try:
3223 3238 if options['cwd']:
3224 3239 os.chdir(options['cwd'])
3225 3240
3226 3241 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3227 3242 not options["noninteractive"], options["traceback"],
3228 3243 parseconfig(options["config"]))
3229 3244
3230 3245 path = u.expandpath(options["repository"]) or ""
3231 3246 repo = path and hg.repository(u, path=path) or None
3232 3247 if repo and not repo.local():
3233 3248 raise util.Abort(_("repository '%s' is not local") % path)
3234 3249
3235 3250 if options['help']:
3236 3251 return help_(u, cmd, options['version'])
3237 3252 elif options['version']:
3238 3253 return version_(u)
3239 3254 elif not cmd:
3240 3255 return help_(u, 'shortlist')
3241 3256
3242 3257 if cmd not in norepo.split():
3243 3258 try:
3244 3259 if not repo:
3245 3260 repo = hg.repository(u, path=path)
3246 3261 u = repo.ui
3247 3262 except hg.RepoError:
3248 3263 if cmd not in optionalrepo.split():
3249 3264 raise
3250 3265 d = lambda: func(u, repo, *args, **cmdoptions)
3251 3266 else:
3252 3267 d = lambda: func(u, *args, **cmdoptions)
3253 3268
3254 3269 try:
3255 3270 if options['profile']:
3256 3271 import hotshot, hotshot.stats
3257 3272 prof = hotshot.Profile("hg.prof")
3258 3273 try:
3259 3274 try:
3260 3275 return prof.runcall(d)
3261 3276 except:
3262 3277 try:
3263 3278 u.warn(_('exception raised - generating '
3264 3279 'profile anyway\n'))
3265 3280 except:
3266 3281 pass
3267 3282 raise
3268 3283 finally:
3269 3284 prof.close()
3270 3285 stats = hotshot.stats.load("hg.prof")
3271 3286 stats.strip_dirs()
3272 3287 stats.sort_stats('time', 'calls')
3273 3288 stats.print_stats(40)
3274 3289 elif options['lsprof']:
3275 3290 try:
3276 3291 from mercurial import lsprof
3277 3292 except ImportError:
3278 3293 raise util.Abort(_(
3279 3294 'lsprof not available - install from '
3280 3295 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3281 3296 p = lsprof.Profiler()
3282 3297 p.enable(subcalls=True)
3283 3298 try:
3284 3299 return d()
3285 3300 finally:
3286 3301 p.disable()
3287 3302 stats = lsprof.Stats(p.getstats())
3288 3303 stats.sort()
3289 3304 stats.pprint(top=10, file=sys.stderr, climit=5)
3290 3305 else:
3291 3306 return d()
3292 3307 finally:
3293 3308 u.flush()
3294 3309 except:
3295 3310 # enter the debugger when we hit an exception
3296 3311 if options['debugger']:
3297 3312 pdb.post_mortem(sys.exc_info()[2])
3298 3313 u.print_exc()
3299 3314 raise
3300 3315 except ParseError, inst:
3301 3316 if inst.args[0]:
3302 3317 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3303 3318 help_(u, inst.args[0])
3304 3319 else:
3305 3320 u.warn(_("hg: %s\n") % inst.args[1])
3306 3321 help_(u, 'shortlist')
3307 3322 except AmbiguousCommand, inst:
3308 3323 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3309 3324 (inst.args[0], " ".join(inst.args[1])))
3310 3325 except UnknownCommand, inst:
3311 3326 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3312 3327 help_(u, 'shortlist')
3313 3328 except hg.RepoError, inst:
3314 3329 u.warn(_("abort: %s!\n") % inst)
3315 3330 except lock.LockHeld, inst:
3316 3331 if inst.errno == errno.ETIMEDOUT:
3317 3332 reason = _('timed out waiting for lock held by %s') % inst.locker
3318 3333 else:
3319 3334 reason = _('lock held by %s') % inst.locker
3320 3335 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3321 3336 except lock.LockUnavailable, inst:
3322 3337 u.warn(_("abort: could not lock %s: %s\n") %
3323 3338 (inst.desc or inst.filename, inst.strerror))
3324 3339 except revlog.RevlogError, inst:
3325 3340 u.warn(_("abort: %s!\n") % inst)
3326 3341 except util.SignalInterrupt:
3327 3342 u.warn(_("killed!\n"))
3328 3343 except KeyboardInterrupt:
3329 3344 try:
3330 3345 u.warn(_("interrupted!\n"))
3331 3346 except IOError, inst:
3332 3347 if inst.errno == errno.EPIPE:
3333 3348 if u.debugflag:
3334 3349 u.warn(_("\nbroken pipe\n"))
3335 3350 else:
3336 3351 raise
3337 3352 except socket.error, inst:
3338 3353 u.warn(_("abort: %s\n") % inst[1])
3339 3354 except IOError, inst:
3340 3355 if hasattr(inst, "code"):
3341 3356 u.warn(_("abort: %s\n") % inst)
3342 3357 elif hasattr(inst, "reason"):
3343 3358 try: # usually it is in the form (errno, strerror)
3344 3359 reason = inst.reason.args[1]
3345 3360 except: # it might be anything, for example a string
3346 3361 reason = inst.reason
3347 3362 u.warn(_("abort: error: %s\n") % reason)
3348 3363 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3349 3364 if u.debugflag:
3350 3365 u.warn(_("broken pipe\n"))
3351 3366 elif getattr(inst, "strerror", None):
3352 3367 if getattr(inst, "filename", None):
3353 3368 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3354 3369 else:
3355 3370 u.warn(_("abort: %s\n") % inst.strerror)
3356 3371 else:
3357 3372 raise
3358 3373 except OSError, inst:
3359 3374 if getattr(inst, "filename", None):
3360 3375 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3361 3376 else:
3362 3377 u.warn(_("abort: %s\n") % inst.strerror)
3363 3378 except util.UnexpectedOutput, inst:
3364 3379 u.warn(_("abort: %s") % inst[0])
3365 3380 if not isinstance(inst[1], basestring):
3366 3381 u.warn(" %r\n" % (inst[1],))
3367 3382 elif not inst[1]:
3368 3383 u.warn(_(" empty string\n"))
3369 3384 else:
3370 3385 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3371 3386 except util.Abort, inst:
3372 3387 u.warn(_("abort: %s\n") % inst)
3373 3388 except TypeError, inst:
3374 3389 # was this an argument error?
3375 3390 tb = traceback.extract_tb(sys.exc_info()[2])
3376 3391 if len(tb) > 2: # no
3377 3392 raise
3378 3393 u.debug(inst, "\n")
3379 3394 u.warn(_("%s: invalid arguments\n") % cmd)
3380 3395 help_(u, cmd)
3381 3396 except SystemExit, inst:
3382 3397 # Commands shouldn't sys.exit directly, but give a return code.
3383 3398 # Just in case catch this and and pass exit code to caller.
3384 3399 return inst.code
3385 3400 except:
3386 3401 u.warn(_("** unknown exception encountered, details follow\n"))
3387 3402 u.warn(_("** report bug details to "
3388 3403 "http://www.selenic.com/mercurial/bts\n"))
3389 3404 u.warn(_("** or mercurial@selenic.com\n"))
3390 3405 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3391 3406 % version.get_version())
3392 3407 raise
3393 3408
3394 3409 return -1
@@ -1,281 +1,289 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from repo import *
11 11 from i18n import _
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import errno, lock, os, shutil, util
13 import errno, lock, os, shutil, util, cmdutil
14 14 import merge as _merge
15 15 import verify as _verify
16 16
17 17 def _local(path):
18 18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 19 bundlerepo or localrepo)
20 20
21 21 schemes = {
22 22 'bundle': bundlerepo,
23 23 'file': _local,
24 24 'hg': httprepo,
25 25 'http': httprepo,
26 26 'https': httprepo,
27 27 'old-http': statichttprepo,
28 28 'ssh': sshrepo,
29 29 'static-http': statichttprepo,
30 30 }
31 31
32 32 def _lookup(path):
33 33 scheme = 'file'
34 34 if path:
35 35 c = path.find(':')
36 36 if c > 0:
37 37 scheme = path[:c]
38 38 thing = schemes.get(scheme) or schemes['file']
39 39 try:
40 40 return thing(path)
41 41 except TypeError:
42 42 return thing
43 43
44 44 def islocal(repo):
45 45 '''return true if repo or path is local'''
46 46 if isinstance(repo, str):
47 47 try:
48 48 return _lookup(repo).islocal(repo)
49 49 except AttributeError:
50 50 return False
51 51 return repo.local()
52 52
53 53 repo_setup_hooks = []
54 54
55 55 def repository(ui, path='', create=False):
56 56 """return a repository object for the specified path"""
57 57 repo = _lookup(path).instance(ui, path, create)
58 58 ui = getattr(repo, "ui", ui)
59 59 for hook in repo_setup_hooks:
60 60 hook(ui, repo)
61 61 return repo
62 62
63 63 def defaultdest(source):
64 64 '''return default destination of clone if none is given'''
65 65 return os.path.basename(os.path.normpath(source))
66 66
67 67 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
68 68 stream=False):
69 69 """Make a copy of an existing repository.
70 70
71 71 Create a copy of an existing repository in a new directory. The
72 72 source and destination are URLs, as passed to the repository
73 73 function. Returns a pair of repository objects, the source and
74 74 newly created destination.
75 75
76 76 The location of the source is added to the new repository's
77 77 .hg/hgrc file, as the default to be used for future pulls and
78 78 pushes.
79 79
80 80 If an exception is raised, the partly cloned/updated destination
81 81 repository will be deleted.
82 82
83 83 Arguments:
84 84
85 85 source: repository object or URL
86 86
87 87 dest: URL of destination repository to create (defaults to base
88 88 name of source repository)
89 89
90 90 pull: always pull from source repository, even in local case
91 91
92 92 stream: stream raw data uncompressed from repository (fast over
93 93 LAN, slow over WAN)
94 94
95 95 rev: revision to clone up to (implies pull=True)
96 96
97 97 update: update working directory after clone completes, if
98 98 destination is local repository
99 99 """
100
101 origsource = source
102 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
103
100 104 if isinstance(source, str):
101 105 src_repo = repository(ui, source)
102 106 else:
103 107 src_repo = source
104 108 source = src_repo.url()
105 109
106 110 if dest is None:
107 111 dest = defaultdest(source)
108 112 ui.status(_("destination directory: %s\n") % dest)
109 113
110 114 def localpath(path):
111 115 if path.startswith('file://'):
112 116 return path[7:]
113 117 if path.startswith('file:'):
114 118 return path[5:]
115 119 return path
116 120
117 121 dest = localpath(dest)
118 122 source = localpath(source)
119 123
120 124 if os.path.exists(dest):
121 125 raise util.Abort(_("destination '%s' already exists") % dest)
122 126
123 127 class DirCleanup(object):
124 128 def __init__(self, dir_):
125 129 self.rmtree = shutil.rmtree
126 130 self.dir_ = dir_
127 131 def close(self):
128 132 self.dir_ = None
129 133 def __del__(self):
130 134 if self.dir_:
131 135 self.rmtree(self.dir_, True)
132 136
133 137 dir_cleanup = None
134 138 if islocal(dest):
135 139 dir_cleanup = DirCleanup(dest)
136 140
137 abspath = source
141 abspath = origsource
138 142 copy = False
139 143 if src_repo.local() and islocal(dest):
140 abspath = os.path.abspath(source)
144 abspath = os.path.abspath(origsource)
141 145 copy = not pull and not rev
142 146
143 147 src_lock, dest_lock = None, None
144 148 if copy:
145 149 try:
146 150 # we use a lock here because if we race with commit, we
147 151 # can end up with extra data in the cloned revlogs that's
148 152 # not pointed to by changesets, thus causing verify to
149 153 # fail
150 154 src_lock = src_repo.lock()
151 155 except lock.LockException:
152 156 copy = False
153 157
154 158 if copy:
155 159 def force_copy(src, dst):
156 160 try:
157 161 util.copyfiles(src, dst)
158 162 except OSError, inst:
159 163 if inst.errno != errno.ENOENT:
160 164 raise
161 165
162 166 src_store = os.path.realpath(src_repo.spath)
163 167 if not os.path.exists(dest):
164 168 os.mkdir(dest)
165 169 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
166 170 os.mkdir(dest_path)
167 171 if src_repo.spath != src_repo.path:
168 172 dest_store = os.path.join(dest_path, "store")
169 173 os.mkdir(dest_store)
170 174 else:
171 175 dest_store = dest_path
172 176 # copy the requires file
173 177 force_copy(src_repo.join("requires"),
174 178 os.path.join(dest_path, "requires"))
175 179 # we lock here to avoid premature writing to the target
176 180 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
177 181
178 182 files = ("data",
179 183 "00manifest.d", "00manifest.i",
180 184 "00changelog.d", "00changelog.i")
181 185 for f in files:
182 186 src = os.path.join(src_store, f)
183 187 dst = os.path.join(dest_store, f)
184 188 force_copy(src, dst)
185 189
186 190 # we need to re-init the repo after manually copying the data
187 191 # into it
188 192 dest_repo = repository(ui, dest)
189 193
190 194 else:
191 195 dest_repo = repository(ui, dest, create=True)
192 196
193 197 revs = None
194 198 if rev:
195 199 if 'lookup' not in src_repo.capabilities:
196 200 raise util.Abort(_("src repository does not support revision "
197 201 "lookup and so doesn't support clone by "
198 202 "revision"))
199 203 revs = [src_repo.lookup(r) for r in rev]
200 204
201 205 if dest_repo.local():
202 206 dest_repo.clone(src_repo, heads=revs, stream=stream)
203 207 elif src_repo.local():
204 208 src_repo.push(dest_repo, revs=revs)
205 209 else:
206 210 raise util.Abort(_("clone from remote to remote not supported"))
207 211
208 212 if src_lock:
209 213 src_lock.release()
210 214
211 215 if dest_repo.local():
212 216 fp = dest_repo.opener("hgrc", "w", text=True)
213 217 fp.write("[paths]\n")
214 218 fp.write("default = %s\n" % abspath)
215 219 fp.close()
216 220
217 221 if dest_lock:
218 222 dest_lock.release()
219 223
220 224 if update:
221 _update(dest_repo, dest_repo.changelog.tip())
225 try:
226 checkout = dest_repo.lookup("default")
227 except:
228 checkout = dest_repo.changelog.tip()
229 _update(dest_repo, checkout)
222 230 if dir_cleanup:
223 231 dir_cleanup.close()
224 232
225 233 return src_repo, dest_repo
226 234
227 235 def _showstats(repo, stats):
228 236 stats = ((stats[0], _("updated")),
229 237 (stats[1], _("merged")),
230 238 (stats[2], _("removed")),
231 239 (stats[3], _("unresolved")))
232 240 note = ", ".join([_("%d files %s") % s for s in stats])
233 241 repo.ui.status("%s\n" % note)
234 242
235 243 def _update(repo, node): return update(repo, node)
236 244
237 245 def update(repo, node):
238 246 """update the working directory to node, merging linear changes"""
239 247 pl = repo.parents()
240 248 stats = _merge.update(repo, node, False, False, None, None)
241 249 _showstats(repo, stats)
242 250 if stats[3]:
243 251 repo.ui.status(_("There are unresolved merges with"
244 252 " locally modified files.\n"))
245 253 if stats[1]:
246 254 repo.ui.status(_("You can finish the partial merge using:\n"))
247 255 else:
248 256 repo.ui.status(_("You can redo the full merge using:\n"))
249 257 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
250 258 repo.ui.status(_(" hg update %s\n hg update %s\n")
251 259 % (pl[0].rev(), repo.changectx(node).rev()))
252 260 return stats[3]
253 261
254 262 def clean(repo, node, wlock=None, show_stats=True):
255 263 """forcibly switch the working directory to node, clobbering changes"""
256 264 stats = _merge.update(repo, node, False, True, None, wlock)
257 265 if show_stats: _showstats(repo, stats)
258 266 return stats[3]
259 267
260 268 def merge(repo, node, force=None, remind=True, wlock=None):
261 269 """branch merge with node, resolving changes"""
262 270 stats = _merge.update(repo, node, True, force, False, wlock)
263 271 _showstats(repo, stats)
264 272 if stats[3]:
265 273 pl = repo.parents()
266 274 repo.ui.status(_("There are unresolved merges,"
267 275 " you can redo the full merge using:\n"
268 276 " hg update -C %s\n"
269 277 " hg merge %s\n")
270 278 % (pl[0].rev(), pl[1].rev()))
271 279 elif remind:
272 280 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
273 281 return stats[3]
274 282
275 283 def revert(repo, node, choose, wlock):
276 284 """revert changes to revision in node without updating dirstate"""
277 285 return _merge.update(repo, node, False, True, choose, wlock)[3]
278 286
279 287 def verify(repo):
280 288 """verify the consistency of a repository"""
281 289 return _verify.verify(repo)
@@ -1,62 +1,78 b''
1 1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes
10 10
11 11 def get_mtime(repo_path):
12 12 store_path = os.path.join(repo_path, ".hg")
13 13 if not os.path.isdir(os.path.join(store_path, "data")):
14 14 store_path = os.path.join(store_path, "store")
15 15 cl_path = os.path.join(store_path, "00changelog.i")
16 16 if os.path.exists(cl_path):
17 17 return os.stat(cl_path).st_mtime
18 18 else:
19 19 return os.stat(store_path).st_mtime
20 20
21 21 def staticfile(directory, fname, req):
22 22 """return a file inside directory with guessed content-type header
23 23
24 24 fname always uses '/' as directory separator and isn't allowed to
25 25 contain unusual path components.
26 26 Content-type is guessed using the mimetypes module.
27 27 Return an empty string if fname is illegal or file not found.
28 28
29 29 """
30 30 parts = fname.split('/')
31 31 path = directory
32 32 for part in parts:
33 33 if (part in ('', os.curdir, os.pardir) or
34 34 os.sep in part or os.altsep is not None and os.altsep in part):
35 35 return ""
36 36 path = os.path.join(path, part)
37 37 try:
38 38 os.stat(path)
39 39 ct = mimetypes.guess_type(path)[0] or "text/plain"
40 40 req.header([('Content-type', ct),
41 41 ('Content-length', str(os.path.getsize(path)))])
42 42 return file(path, 'rb').read()
43 43 except (TypeError, OSError):
44 44 # illegal fname or unreadable file
45 45 return ""
46 46
47 47 def style_map(templatepath, style):
48 48 """Return path to mapfile for a given style.
49 49
50 50 Searches mapfile in the following locations:
51 51 1. templatepath/style/map
52 52 2. templatepath/map-style
53 53 3. templatepath/map
54 54 """
55 55 locations = style and [os.path.join(style, "map"), "map-"+style] or []
56 56 locations.append("map")
57 57 for location in locations:
58 58 mapfile = os.path.join(templatepath, location)
59 59 if os.path.isfile(mapfile):
60 60 return mapfile
61 61 raise RuntimeError("No hgweb templates found in %r" % templatepath)
62 62
63 def paritygen(stripecount, offset=0):
64 """count parity of horizontal stripes for easier reading"""
65 if stripecount and offset:
66 # account for offset, e.g. due to building the list in reverse
67 count = (stripecount + offset) % stripecount
68 parity = (stripecount + offset) / stripecount & 1
69 else:
70 count = 0
71 parity = 0
72 while True:
73 yield parity
74 count += 1
75 if stripecount and count >= stripecount:
76 parity = 1 - parity
77 count = 0
78
@@ -1,1173 +1,1158 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 10 import tempfile, urllib, bz2
11 11 from mercurial.node import *
12 12 from mercurial.i18n import gettext as _
13 13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 14 from mercurial import revlog, templater
15 from common import get_mtime, staticfile, style_map
15 from common import get_mtime, staticfile, style_map, paritygen
16 16
17 17 def _up(p):
18 18 if p[0] != "/":
19 19 p = "/" + p
20 20 if p[-1] == "/":
21 21 p = p[:-1]
22 22 up = os.path.dirname(p)
23 23 if up == "/":
24 24 return "/"
25 25 return up + "/"
26 26
27 27 def revnavgen(pos, pagelen, limit, nodefunc):
28 28 def seq(factor, limit=None):
29 29 if limit:
30 30 yield limit
31 31 if limit >= 20 and limit <= 40:
32 32 yield 50
33 33 else:
34 34 yield 1 * factor
35 35 yield 3 * factor
36 36 for f in seq(factor * 10):
37 37 yield f
38 38
39 39 def nav(**map):
40 40 l = []
41 41 last = 0
42 42 for f in seq(1, pagelen):
43 43 if f < pagelen or f <= last:
44 44 continue
45 45 if f > limit:
46 46 break
47 47 last = f
48 48 if pos + f < limit:
49 49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 50 if pos - f >= 0:
51 51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52 52
53 53 try:
54 54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55 55
56 56 for label, node in l:
57 57 yield {"label": label, "node": node}
58 58
59 59 yield {"label": "tip", "node": "tip"}
60 60 except hg.RepoError:
61 61 pass
62 62
63 63 return nav
64 64
65 65 class hgweb(object):
66 66 def __init__(self, repo, name=None):
67 67 if type(repo) == type(""):
68 68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 69 else:
70 70 self.repo = repo
71 71
72 72 self.mtime = -1
73 73 self.reponame = name
74 74 self.archives = 'zip', 'gz', 'bz2'
75 75 self.stripecount = 1
76 76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 77 # readable by the user running the CGI script
78 78 self.templatepath = self.config("web", "templates",
79 79 templater.templatepath(),
80 80 untrusted=False)
81 81
82 82 # The CGI scripts are often run by a user different from the repo owner.
83 83 # Trust the settings from the .hg/hgrc files by default.
84 84 def config(self, section, name, default=None, untrusted=True):
85 85 return self.repo.ui.config(section, name, default,
86 86 untrusted=untrusted)
87 87
88 88 def configbool(self, section, name, default=False, untrusted=True):
89 89 return self.repo.ui.configbool(section, name, default,
90 90 untrusted=untrusted)
91 91
92 92 def configlist(self, section, name, default=None, untrusted=True):
93 93 return self.repo.ui.configlist(section, name, default,
94 94 untrusted=untrusted)
95 95
96 96 def refresh(self):
97 97 mtime = get_mtime(self.repo.root)
98 98 if mtime != self.mtime:
99 99 self.mtime = mtime
100 100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 102 self.stripecount = int(self.config("web", "stripes", 1))
103 103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 105 self.allowpull = self.configbool("web", "allowpull", True)
106 106
107 107 def archivelist(self, nodeid):
108 108 allowed = self.configlist("web", "allow_archive")
109 109 for i, spec in self.archive_specs.iteritems():
110 110 if i in allowed or self.configbool("web", "allow" + i):
111 111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
112 112
113 113 def listfilediffs(self, files, changeset):
114 114 for f in files[:self.maxfiles]:
115 115 yield self.t("filedifflink", node=hex(changeset), file=f)
116 116 if len(files) > self.maxfiles:
117 117 yield self.t("fileellipses")
118 118
119 119 def siblings(self, siblings=[], hiderev=None, **args):
120 120 siblings = [s for s in siblings if s.node() != nullid]
121 121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
122 122 return
123 123 for s in siblings:
124 124 d = {'node': hex(s.node()), 'rev': s.rev()}
125 125 if hasattr(s, 'path'):
126 126 d['file'] = s.path()
127 127 d.update(args)
128 128 yield d
129 129
130 130 def renamelink(self, fl, node):
131 131 r = fl.renamed(node)
132 132 if r:
133 133 return [dict(file=r[0], node=hex(r[1]))]
134 134 return []
135 135
136 136 def showtag(self, t1, node=nullid, **args):
137 137 for t in self.repo.nodetags(node):
138 138 yield self.t(t1, tag=t, **args)
139 139
140 140 def diff(self, node1, node2, files):
141 141 def filterfiles(filters, files):
142 142 l = [x for x in files if x in filters]
143 143
144 144 for t in filters:
145 145 if t and t[-1] != os.sep:
146 146 t += os.sep
147 147 l += [x for x in files if x.startswith(t)]
148 148 return l
149 149
150 parity = [0]
150 parity = paritygen(self.stripecount)
151 151 def diffblock(diff, f, fn):
152 152 yield self.t("diffblock",
153 153 lines=prettyprintlines(diff),
154 parity=parity[0],
154 parity=parity.next(),
155 155 file=f,
156 156 filenode=hex(fn or nullid))
157 parity[0] = 1 - parity[0]
158 157
159 158 def prettyprintlines(diff):
160 159 for l in diff.splitlines(1):
161 160 if l.startswith('+'):
162 161 yield self.t("difflineplus", line=l)
163 162 elif l.startswith('-'):
164 163 yield self.t("difflineminus", line=l)
165 164 elif l.startswith('@'):
166 165 yield self.t("difflineat", line=l)
167 166 else:
168 167 yield self.t("diffline", line=l)
169 168
170 169 r = self.repo
171 170 c1 = r.changectx(node1)
172 171 c2 = r.changectx(node2)
173 172 date1 = util.datestr(c1.date())
174 173 date2 = util.datestr(c2.date())
175 174
176 175 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
177 176 if files:
178 177 modified, added, removed = map(lambda x: filterfiles(files, x),
179 178 (modified, added, removed))
180 179
181 180 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
182 181 for f in modified:
183 182 to = c1.filectx(f).data()
184 183 tn = c2.filectx(f).data()
185 184 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
186 185 opts=diffopts), f, tn)
187 186 for f in added:
188 187 to = None
189 188 tn = c2.filectx(f).data()
190 189 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
191 190 opts=diffopts), f, tn)
192 191 for f in removed:
193 192 to = c1.filectx(f).data()
194 193 tn = None
195 194 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 195 opts=diffopts), f, tn)
197 196
198 197 def changelog(self, ctx, shortlog=False):
199 198 def changelist(**map):
200 parity = (start - end) & 1
201 199 cl = self.repo.changelog
202 200 l = [] # build a list in forward order for efficiency
203 201 for i in xrange(start, end):
204 202 ctx = self.repo.changectx(i)
205 203 n = ctx.node()
206 204
207 l.insert(0, {"parity": parity,
205 l.insert(0, {"parity": parity.next(),
208 206 "author": ctx.user(),
209 207 "parent": self.siblings(ctx.parents(), i - 1),
210 208 "child": self.siblings(ctx.children(), i + 1),
211 209 "changelogtag": self.showtag("changelogtag",n),
212 210 "desc": ctx.description(),
213 211 "date": ctx.date(),
214 212 "files": self.listfilediffs(ctx.files(), n),
215 213 "rev": i,
216 214 "node": hex(n)})
217 parity = 1 - parity
218 215
219 216 for e in l:
220 217 yield e
221 218
222 219 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
223 220 cl = self.repo.changelog
224 221 count = cl.count()
225 222 pos = ctx.rev()
226 223 start = max(0, pos - maxchanges + 1)
227 224 end = min(count, start + maxchanges)
228 225 pos = end - 1
226 parity = paritygen(self.stripecount, offset=start-end)
229 227
230 228 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
231 229
232 230 yield self.t(shortlog and 'shortlog' or 'changelog',
233 231 changenav=changenav,
234 232 node=hex(cl.tip()),
235 233 rev=pos, changesets=count, entries=changelist,
236 234 archives=self.archivelist("tip"))
237 235
238 236 def search(self, query):
239 237
240 238 def changelist(**map):
241 239 cl = self.repo.changelog
242 240 count = 0
243 241 qw = query.lower().split()
244 242
245 243 def revgen():
246 244 for i in xrange(cl.count() - 1, 0, -100):
247 245 l = []
248 246 for j in xrange(max(0, i - 100), i):
249 247 ctx = self.repo.changectx(j)
250 248 l.append(ctx)
251 249 l.reverse()
252 250 for e in l:
253 251 yield e
254 252
255 253 for ctx in revgen():
256 254 miss = 0
257 255 for q in qw:
258 256 if not (q in ctx.user().lower() or
259 257 q in ctx.description().lower() or
260 258 q in " ".join(ctx.files()).lower()):
261 259 miss = 1
262 260 break
263 261 if miss:
264 262 continue
265 263
266 264 count += 1
267 265 n = ctx.node()
268 266
269 267 yield self.t('searchentry',
270 parity=self.stripes(count),
268 parity=parity.next(),
271 269 author=ctx.user(),
272 270 parent=self.siblings(ctx.parents()),
273 271 child=self.siblings(ctx.children()),
274 272 changelogtag=self.showtag("changelogtag",n),
275 273 desc=ctx.description(),
276 274 date=ctx.date(),
277 275 files=self.listfilediffs(ctx.files(), n),
278 276 rev=ctx.rev(),
279 277 node=hex(n))
280 278
281 279 if count >= self.maxchanges:
282 280 break
283 281
284 282 cl = self.repo.changelog
283 parity = paritygen(self.stripecount)
285 284
286 285 yield self.t('search',
287 286 query=query,
288 287 node=hex(cl.tip()),
289 entries=changelist)
288 entries=changelist,
289 archives=self.archivelist("tip"))
290 290
291 291 def changeset(self, ctx):
292 292 n = ctx.node()
293 293 parents = ctx.parents()
294 294 p1 = parents[0].node()
295 295
296 296 files = []
297 parity = 0
297 parity = paritygen(self.stripecount)
298 298 for f in ctx.files():
299 299 files.append(self.t("filenodelink",
300 300 node=hex(n), file=f,
301 parity=parity))
302 parity = 1 - parity
301 parity=parity.next()))
303 302
304 303 def diff(**map):
305 304 yield self.diff(p1, n, None)
306 305
307 306 yield self.t('changeset',
308 307 diff=diff,
309 308 rev=ctx.rev(),
310 309 node=hex(n),
311 310 parent=self.siblings(parents),
312 311 child=self.siblings(ctx.children()),
313 312 changesettag=self.showtag("changesettag",n),
314 313 author=ctx.user(),
315 314 desc=ctx.description(),
316 315 date=ctx.date(),
317 316 files=files,
318 317 archives=self.archivelist(hex(n)))
319 318
320 319 def filelog(self, fctx):
321 320 f = fctx.path()
322 321 fl = fctx.filelog()
323 322 count = fl.count()
324 323 pagelen = self.maxshortchanges
325 324 pos = fctx.filerev()
326 325 start = max(0, pos - pagelen + 1)
327 326 end = min(count, start + pagelen)
328 327 pos = end - 1
328 parity = paritygen(self.stripecount, offset=start-end)
329 329
330 330 def entries(**map):
331 331 l = []
332 parity = (count - 1) & 1
333 332
334 333 for i in xrange(start, end):
335 334 ctx = fctx.filectx(i)
336 335 n = fl.node(i)
337 336
338 l.insert(0, {"parity": parity,
337 l.insert(0, {"parity": parity.next(),
339 338 "filerev": i,
340 339 "file": f,
341 340 "node": hex(ctx.node()),
342 341 "author": ctx.user(),
343 342 "date": ctx.date(),
344 343 "rename": self.renamelink(fl, n),
345 344 "parent": self.siblings(fctx.parents()),
346 345 "child": self.siblings(fctx.children()),
347 346 "desc": ctx.description()})
348 parity = 1 - parity
349 347
350 348 for e in l:
351 349 yield e
352 350
353 351 nodefunc = lambda x: fctx.filectx(fileid=x)
354 352 nav = revnavgen(pos, pagelen, count, nodefunc)
355 353 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
356 354 entries=entries)
357 355
358 356 def filerevision(self, fctx):
359 357 f = fctx.path()
360 358 text = fctx.data()
361 359 fl = fctx.filelog()
362 360 n = fctx.filenode()
361 parity = paritygen(self.stripecount)
363 362
364 363 mt = mimetypes.guess_type(f)[0]
365 364 rawtext = text
366 365 if util.binary(text):
367 366 mt = mt or 'application/octet-stream'
368 367 text = "(binary:%s)" % mt
369 368 mt = mt or 'text/plain'
370 369
371 370 def lines():
372 371 for l, t in enumerate(text.splitlines(1)):
373 372 yield {"line": t,
374 373 "linenumber": "% 6d" % (l + 1),
375 "parity": self.stripes(l)}
374 "parity": parity.next()}
376 375
377 376 yield self.t("filerevision",
378 377 file=f,
379 378 path=_up(f),
380 379 text=lines(),
381 380 raw=rawtext,
382 381 mimetype=mt,
383 382 rev=fctx.rev(),
384 383 node=hex(fctx.node()),
385 384 author=fctx.user(),
386 385 date=fctx.date(),
387 386 desc=fctx.description(),
388 387 parent=self.siblings(fctx.parents()),
389 388 child=self.siblings(fctx.children()),
390 389 rename=self.renamelink(fl, n),
391 390 permissions=fctx.manifest().execf(f))
392 391
393 392 def fileannotate(self, fctx):
394 393 f = fctx.path()
395 394 n = fctx.filenode()
396 395 fl = fctx.filelog()
396 parity = paritygen(self.stripecount)
397 397
398 398 def annotate(**map):
399 parity = 0
400 399 last = None
401 400 for f, l in fctx.annotate(follow=True):
402 401 fnode = f.filenode()
403 402 name = self.repo.ui.shortuser(f.user())
404 403
405 404 if last != fnode:
406 parity = 1 - parity
407 405 last = fnode
408 406
409 yield {"parity": parity,
407 yield {"parity": parity.next(),
410 408 "node": hex(f.node()),
411 409 "rev": f.rev(),
412 410 "author": name,
413 411 "file": f.path(),
414 412 "line": l}
415 413
416 414 yield self.t("fileannotate",
417 415 file=f,
418 416 annotate=annotate,
419 417 path=_up(f),
420 418 rev=fctx.rev(),
421 419 node=hex(fctx.node()),
422 420 author=fctx.user(),
423 421 date=fctx.date(),
424 422 desc=fctx.description(),
425 423 rename=self.renamelink(fl, n),
426 424 parent=self.siblings(fctx.parents()),
427 425 child=self.siblings(fctx.children()),
428 426 permissions=fctx.manifest().execf(f))
429 427
430 428 def manifest(self, ctx, path):
431 429 mf = ctx.manifest()
432 430 node = ctx.node()
433 431
434 432 files = {}
433 parity = paritygen(self.stripecount)
435 434
436 435 if path and path[-1] != "/":
437 436 path += "/"
438 437 l = len(path)
439 438 abspath = "/" + path
440 439
441 440 for f, n in mf.items():
442 441 if f[:l] != path:
443 442 continue
444 443 remain = f[l:]
445 444 if "/" in remain:
446 445 short = remain[:remain.index("/") + 1] # bleah
447 446 files[short] = (f, None)
448 447 else:
449 448 short = os.path.basename(remain)
450 449 files[short] = (f, n)
451 450
452 451 def filelist(**map):
453 parity = 0
454 452 fl = files.keys()
455 453 fl.sort()
456 454 for f in fl:
457 455 full, fnode = files[f]
458 456 if not fnode:
459 457 continue
460 458
461 459 yield {"file": full,
462 "parity": self.stripes(parity),
460 "parity": parity.next(),
463 461 "basename": f,
464 462 "size": ctx.filectx(full).size(),
465 463 "permissions": mf.execf(full)}
466 parity += 1
467 464
468 465 def dirlist(**map):
469 parity = 0
470 466 fl = files.keys()
471 467 fl.sort()
472 468 for f in fl:
473 469 full, fnode = files[f]
474 470 if fnode:
475 471 continue
476 472
477 yield {"parity": self.stripes(parity),
473 yield {"parity": parity.next(),
478 474 "path": os.path.join(abspath, f),
479 475 "basename": f[:-1]}
480 parity += 1
481 476
482 477 yield self.t("manifest",
483 478 rev=ctx.rev(),
484 479 node=hex(node),
485 480 path=abspath,
486 481 up=_up(abspath),
482 upparity=parity.next(),
487 483 fentries=filelist,
488 484 dentries=dirlist,
489 485 archives=self.archivelist(hex(node)))
490 486
491 487 def tags(self):
492 488 i = self.repo.tagslist()
493 489 i.reverse()
490 parity = paritygen(self.stripecount)
494 491
495 492 def entries(notip=False, **map):
496 parity = 0
497 493 for k, n in i:
498 494 if notip and k == "tip":
499 495 continue
500 yield {"parity": self.stripes(parity),
496 yield {"parity": parity.next(),
501 497 "tag": k,
502 498 "date": self.repo.changectx(n).date(),
503 499 "node": hex(n)}
504 parity += 1
505 500
506 501 yield self.t("tags",
507 502 node=hex(self.repo.changelog.tip()),
508 503 entries=lambda **x: entries(False, **x),
509 504 entriesnotip=lambda **x: entries(True, **x))
510 505
511 506 def summary(self):
512 507 i = self.repo.tagslist()
513 508 i.reverse()
514 509
515 510 def tagentries(**map):
516 parity = 0
511 parity = paritygen(self.stripecount)
517 512 count = 0
518 513 for k, n in i:
519 514 if k == "tip": # skip tip
520 515 continue;
521 516
522 517 count += 1
523 518 if count > 10: # limit to 10 tags
524 519 break;
525 520
526 521 yield self.t("tagentry",
527 parity=self.stripes(parity),
522 parity=parity.next(),
528 523 tag=k,
529 524 node=hex(n),
530 525 date=self.repo.changectx(n).date())
531 parity += 1
532 526
533 527
534 528 def branches(**map):
535 parity = 0
529 parity = paritygen(self.stripecount)
536 530
537 531 b = self.repo.branchtags()
538 532 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
539 533 l.sort()
540 534
541 535 for r,n,t in l:
542 536 ctx = self.repo.changectx(n)
543 537
544 yield {'parity': self.stripes(parity),
538 yield {'parity': parity.next(),
545 539 'branch': t,
546 540 'node': hex(n),
547 541 'date': ctx.date()}
548 parity += 1
549 542
550 543 def changelist(**map):
551 parity = 0
544 parity = paritygen(self.stripecount, offset=start-end)
552 545 l = [] # build a list in forward order for efficiency
553 546 for i in xrange(start, end):
554 547 ctx = self.repo.changectx(i)
555 548 hn = hex(ctx.node())
556 549
557 550 l.insert(0, self.t(
558 551 'shortlogentry',
559 parity=parity,
552 parity=parity.next(),
560 553 author=ctx.user(),
561 554 desc=ctx.description(),
562 555 date=ctx.date(),
563 556 rev=i,
564 557 node=hn))
565 parity = 1 - parity
566 558
567 559 yield l
568 560
569 561 cl = self.repo.changelog
570 562 count = cl.count()
571 563 start = max(0, count - self.maxchanges)
572 564 end = min(count, start + self.maxchanges)
573 565
574 566 yield self.t("summary",
575 567 desc=self.config("web", "description", "unknown"),
576 568 owner=(self.config("ui", "username") or # preferred
577 569 self.config("web", "contact") or # deprecated
578 570 self.config("web", "author", "unknown")), # also
579 571 lastchange=cl.read(cl.tip())[2],
580 572 tags=tagentries,
581 573 branches=branches,
582 574 shortlog=changelist,
583 575 node=hex(cl.tip()),
584 576 archives=self.archivelist("tip"))
585 577
586 578 def filediff(self, fctx):
587 579 n = fctx.node()
588 580 path = fctx.path()
589 581 parents = fctx.parents()
590 582 p1 = parents and parents[0].node() or nullid
591 583
592 584 def diff(**map):
593 585 yield self.diff(p1, n, [path])
594 586
595 587 yield self.t("filediff",
596 588 file=path,
597 589 node=hex(n),
598 590 rev=fctx.rev(),
599 591 parent=self.siblings(parents),
600 592 child=self.siblings(fctx.children()),
601 593 diff=diff)
602 594
603 595 archive_specs = {
604 596 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
605 597 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
606 598 'zip': ('application/zip', 'zip', '.zip', None),
607 599 }
608 600
609 601 def archive(self, req, id, type_):
610 602 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
611 603 cnode = self.repo.lookup(id)
612 604 arch_version = id
613 605 if cnode == id or id == 'tip':
614 606 arch_version = short(cnode)
615 607 name = "%s-%s" % (reponame, arch_version)
616 608 mimetype, artype, extension, encoding = self.archive_specs[type_]
617 609 headers = [('Content-type', mimetype),
618 610 ('Content-disposition', 'attachment; filename=%s%s' %
619 611 (name, extension))]
620 612 if encoding:
621 613 headers.append(('Content-encoding', encoding))
622 614 req.header(headers)
623 615 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
624 616
625 617 # add tags to things
626 618 # tags -> list of changesets corresponding to tags
627 619 # find tag, changeset, file
628 620
629 621 def cleanpath(self, path):
630 622 path = path.lstrip('/')
631 623 return util.canonpath(self.repo.root, '', path)
632 624
633 625 def run(self):
634 626 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
635 627 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
636 628 import mercurial.hgweb.wsgicgi as wsgicgi
637 629 from request import wsgiapplication
638 630 def make_web_app():
639 631 return self
640 632 wsgicgi.launch(wsgiapplication(make_web_app))
641 633
642 634 def run_wsgi(self, req):
643 635 def header(**map):
644 636 header_file = cStringIO.StringIO(
645 637 ''.join(self.t("header", encoding=util._encoding, **map)))
646 638 msg = mimetools.Message(header_file, 0)
647 639 req.header(msg.items())
648 640 yield header_file.read()
649 641
650 642 def rawfileheader(**map):
651 643 req.header([('Content-type', map['mimetype']),
652 644 ('Content-disposition', 'filename=%s' % map['file']),
653 645 ('Content-length', str(len(map['raw'])))])
654 646 yield ''
655 647
656 648 def footer(**map):
657 649 yield self.t("footer", **map)
658 650
659 651 def motd(**map):
660 652 yield self.config("web", "motd", "")
661 653
662 654 def expand_form(form):
663 655 shortcuts = {
664 656 'cl': [('cmd', ['changelog']), ('rev', None)],
665 657 'sl': [('cmd', ['shortlog']), ('rev', None)],
666 658 'cs': [('cmd', ['changeset']), ('node', None)],
667 659 'f': [('cmd', ['file']), ('filenode', None)],
668 660 'fl': [('cmd', ['filelog']), ('filenode', None)],
669 661 'fd': [('cmd', ['filediff']), ('node', None)],
670 662 'fa': [('cmd', ['annotate']), ('filenode', None)],
671 663 'mf': [('cmd', ['manifest']), ('manifest', None)],
672 664 'ca': [('cmd', ['archive']), ('node', None)],
673 665 'tags': [('cmd', ['tags'])],
674 666 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
675 667 'static': [('cmd', ['static']), ('file', None)]
676 668 }
677 669
678 670 for k in shortcuts.iterkeys():
679 671 if form.has_key(k):
680 672 for name, value in shortcuts[k]:
681 673 if value is None:
682 674 value = form[k]
683 675 form[name] = value
684 676 del form[k]
685 677
686 678 def rewrite_request(req):
687 679 '''translate new web interface to traditional format'''
688 680
689 681 def spliturl(req):
690 682 def firstitem(query):
691 683 return query.split('&', 1)[0].split(';', 1)[0]
692 684
693 685 def normurl(url):
694 686 inner = '/'.join([x for x in url.split('/') if x])
695 687 tl = len(url) > 1 and url.endswith('/') and '/' or ''
696 688
697 689 return '%s%s%s' % (url.startswith('/') and '/' or '',
698 690 inner, tl)
699 691
700 692 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
701 693 pi = normurl(req.env.get('PATH_INFO', ''))
702 694 if pi:
703 695 # strip leading /
704 696 pi = pi[1:]
705 697 if pi:
706 698 root = root[:root.rfind(pi)]
707 699 if req.env.has_key('REPO_NAME'):
708 700 rn = req.env['REPO_NAME'] + '/'
709 701 root += rn
710 702 query = pi[len(rn):]
711 703 else:
712 704 query = pi
713 705 else:
714 706 root += '?'
715 707 query = firstitem(req.env['QUERY_STRING'])
716 708
717 709 return (root, query)
718 710
719 711 req.url, query = spliturl(req)
720 712
721 713 if req.form.has_key('cmd'):
722 714 # old style
723 715 return
724 716
725 717 args = query.split('/', 2)
726 718 if not args or not args[0]:
727 719 return
728 720
729 721 cmd = args.pop(0)
730 722 style = cmd.rfind('-')
731 723 if style != -1:
732 724 req.form['style'] = [cmd[:style]]
733 725 cmd = cmd[style+1:]
734 726 # avoid accepting e.g. style parameter as command
735 727 if hasattr(self, 'do_' + cmd):
736 728 req.form['cmd'] = [cmd]
737 729
738 730 if args and args[0]:
739 731 node = args.pop(0)
740 732 req.form['node'] = [node]
741 733 if args:
742 734 req.form['file'] = args
743 735
744 736 if cmd == 'static':
745 737 req.form['file'] = req.form['node']
746 738 elif cmd == 'archive':
747 739 fn = req.form['node'][0]
748 740 for type_, spec in self.archive_specs.iteritems():
749 741 ext = spec[2]
750 742 if fn.endswith(ext):
751 743 req.form['node'] = [fn[:-len(ext)]]
752 744 req.form['type'] = [type_]
753 745
754 746 def sessionvars(**map):
755 747 fields = []
756 748 if req.form.has_key('style'):
757 749 style = req.form['style'][0]
758 750 if style != self.config('web', 'style', ''):
759 751 fields.append(('style', style))
760 752
761 753 separator = req.url[-1] == '?' and ';' or '?'
762 754 for name, value in fields:
763 755 yield dict(name=name, value=value, separator=separator)
764 756 separator = ';'
765 757
766 758 self.refresh()
767 759
768 760 expand_form(req.form)
769 761 rewrite_request(req)
770 762
771 763 style = self.config("web", "style", "")
772 764 if req.form.has_key('style'):
773 765 style = req.form['style'][0]
774 766 mapfile = style_map(self.templatepath, style)
775 767
776 768 port = req.env["SERVER_PORT"]
777 769 port = port != "80" and (":" + port) or ""
778 770 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
779 771 staticurl = self.config("web", "staticurl") or req.url + 'static/'
780 772 if not staticurl.endswith('/'):
781 773 staticurl += '/'
782 774
783 775 if not self.reponame:
784 776 self.reponame = (self.config("web", "name")
785 777 or req.env.get('REPO_NAME')
786 778 or req.url.strip('/') or self.repo.root)
787 779
788 780 self.t = templater.templater(mapfile, templater.common_filters,
789 781 defaults={"url": req.url,
790 782 "staticurl": staticurl,
791 783 "urlbase": urlbase,
792 784 "repo": self.reponame,
793 785 "header": header,
794 786 "footer": footer,
795 787 "motd": motd,
796 788 "rawfileheader": rawfileheader,
797 789 "sessionvars": sessionvars
798 790 })
799 791
800 792 try:
801 793 if not req.form.has_key('cmd'):
802 794 req.form['cmd'] = [self.t.cache['default']]
803 795
804 796 cmd = req.form['cmd'][0]
805 797
806 798 method = getattr(self, 'do_' + cmd, None)
807 799 if method:
808 800 try:
809 801 method(req)
810 802 except (hg.RepoError, revlog.RevlogError), inst:
811 803 req.write(self.t("error", error=str(inst)))
812 804 else:
813 805 req.write(self.t("error", error='No such method: ' + cmd))
814 806 finally:
815 807 self.t = None
816 808
817 809 def changectx(self, req):
818 810 if req.form.has_key('node'):
819 811 changeid = req.form['node'][0]
820 812 elif req.form.has_key('manifest'):
821 813 changeid = req.form['manifest'][0]
822 814 else:
823 815 changeid = self.repo.changelog.count() - 1
824 816
825 817 try:
826 818 ctx = self.repo.changectx(changeid)
827 819 except hg.RepoError:
828 820 man = self.repo.manifest
829 821 mn = man.lookup(changeid)
830 822 ctx = self.repo.changectx(man.linkrev(mn))
831 823
832 824 return ctx
833 825
834 826 def filectx(self, req):
835 827 path = self.cleanpath(req.form['file'][0])
836 828 if req.form.has_key('node'):
837 829 changeid = req.form['node'][0]
838 830 else:
839 831 changeid = req.form['filenode'][0]
840 832 try:
841 833 ctx = self.repo.changectx(changeid)
842 834 fctx = ctx.filectx(path)
843 835 except hg.RepoError:
844 836 fctx = self.repo.filectx(path, fileid=changeid)
845 837
846 838 return fctx
847 839
848 def stripes(self, parity):
849 "make horizontal stripes for easier reading"
850 if self.stripecount:
851 return (1 + parity / self.stripecount) & 1
852 else:
853 return 0
854
855 840 def do_log(self, req):
856 841 if req.form.has_key('file') and req.form['file'][0]:
857 842 self.do_filelog(req)
858 843 else:
859 844 self.do_changelog(req)
860 845
861 846 def do_rev(self, req):
862 847 self.do_changeset(req)
863 848
864 849 def do_file(self, req):
865 850 path = self.cleanpath(req.form.get('file', [''])[0])
866 851 if path:
867 852 try:
868 853 req.write(self.filerevision(self.filectx(req)))
869 854 return
870 855 except revlog.LookupError:
871 856 pass
872 857
873 858 req.write(self.manifest(self.changectx(req), path))
874 859
875 860 def do_diff(self, req):
876 861 self.do_filediff(req)
877 862
878 863 def do_changelog(self, req, shortlog = False):
879 864 if req.form.has_key('node'):
880 865 ctx = self.changectx(req)
881 866 else:
882 867 if req.form.has_key('rev'):
883 868 hi = req.form['rev'][0]
884 869 else:
885 870 hi = self.repo.changelog.count() - 1
886 871 try:
887 872 ctx = self.repo.changectx(hi)
888 873 except hg.RepoError:
889 874 req.write(self.search(hi)) # XXX redirect to 404 page?
890 875 return
891 876
892 877 req.write(self.changelog(ctx, shortlog = shortlog))
893 878
894 879 def do_shortlog(self, req):
895 880 self.do_changelog(req, shortlog = True)
896 881
897 882 def do_changeset(self, req):
898 883 req.write(self.changeset(self.changectx(req)))
899 884
900 885 def do_manifest(self, req):
901 886 req.write(self.manifest(self.changectx(req),
902 887 self.cleanpath(req.form['path'][0])))
903 888
904 889 def do_tags(self, req):
905 890 req.write(self.tags())
906 891
907 892 def do_summary(self, req):
908 893 req.write(self.summary())
909 894
910 895 def do_filediff(self, req):
911 896 req.write(self.filediff(self.filectx(req)))
912 897
913 898 def do_annotate(self, req):
914 899 req.write(self.fileannotate(self.filectx(req)))
915 900
916 901 def do_filelog(self, req):
917 902 req.write(self.filelog(self.filectx(req)))
918 903
919 904 def do_lookup(self, req):
920 905 try:
921 906 r = hex(self.repo.lookup(req.form['key'][0]))
922 907 success = 1
923 908 except Exception,inst:
924 909 r = str(inst)
925 910 success = 0
926 911 resp = "%s %s\n" % (success, r)
927 912 req.httphdr("application/mercurial-0.1", length=len(resp))
928 913 req.write(resp)
929 914
930 915 def do_heads(self, req):
931 916 resp = " ".join(map(hex, self.repo.heads())) + "\n"
932 917 req.httphdr("application/mercurial-0.1", length=len(resp))
933 918 req.write(resp)
934 919
935 920 def do_branches(self, req):
936 921 nodes = []
937 922 if req.form.has_key('nodes'):
938 923 nodes = map(bin, req.form['nodes'][0].split(" "))
939 924 resp = cStringIO.StringIO()
940 925 for b in self.repo.branches(nodes):
941 926 resp.write(" ".join(map(hex, b)) + "\n")
942 927 resp = resp.getvalue()
943 928 req.httphdr("application/mercurial-0.1", length=len(resp))
944 929 req.write(resp)
945 930
946 931 def do_between(self, req):
947 932 if req.form.has_key('pairs'):
948 933 pairs = [map(bin, p.split("-"))
949 934 for p in req.form['pairs'][0].split(" ")]
950 935 resp = cStringIO.StringIO()
951 936 for b in self.repo.between(pairs):
952 937 resp.write(" ".join(map(hex, b)) + "\n")
953 938 resp = resp.getvalue()
954 939 req.httphdr("application/mercurial-0.1", length=len(resp))
955 940 req.write(resp)
956 941
957 942 def do_changegroup(self, req):
958 943 req.httphdr("application/mercurial-0.1")
959 944 nodes = []
960 945 if not self.allowpull:
961 946 return
962 947
963 948 if req.form.has_key('roots'):
964 949 nodes = map(bin, req.form['roots'][0].split(" "))
965 950
966 951 z = zlib.compressobj()
967 952 f = self.repo.changegroup(nodes, 'serve')
968 953 while 1:
969 954 chunk = f.read(4096)
970 955 if not chunk:
971 956 break
972 957 req.write(z.compress(chunk))
973 958
974 959 req.write(z.flush())
975 960
976 961 def do_changegroupsubset(self, req):
977 962 req.httphdr("application/mercurial-0.1")
978 963 bases = []
979 964 heads = []
980 965 if not self.allowpull:
981 966 return
982 967
983 968 if req.form.has_key('bases'):
984 969 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
985 970 if req.form.has_key('heads'):
986 971 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
987 972
988 973 z = zlib.compressobj()
989 974 f = self.repo.changegroupsubset(bases, heads, 'serve')
990 975 while 1:
991 976 chunk = f.read(4096)
992 977 if not chunk:
993 978 break
994 979 req.write(z.compress(chunk))
995 980
996 981 req.write(z.flush())
997 982
998 983 def do_archive(self, req):
999 984 type_ = req.form['type'][0]
1000 985 allowed = self.configlist("web", "allow_archive")
1001 986 if (type_ in self.archives and (type_ in allowed or
1002 987 self.configbool("web", "allow" + type_, False))):
1003 988 self.archive(req, req.form['node'][0], type_)
1004 989 return
1005 990
1006 991 req.write(self.t("error"))
1007 992
1008 993 def do_static(self, req):
1009 994 fname = req.form['file'][0]
1010 995 # a repo owner may set web.static in .hg/hgrc to get any file
1011 996 # readable by the user running the CGI script
1012 997 static = self.config("web", "static",
1013 998 os.path.join(self.templatepath, "static"),
1014 999 untrusted=False)
1015 1000 req.write(staticfile(static, fname, req)
1016 1001 or self.t("error", error="%r not found" % fname))
1017 1002
1018 1003 def do_capabilities(self, req):
1019 1004 caps = ['lookup', 'changegroupsubset']
1020 1005 if self.configbool('server', 'uncompressed'):
1021 1006 caps.append('stream=%d' % self.repo.changelog.version)
1022 1007 # XXX: make configurable and/or share code with do_unbundle:
1023 1008 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1024 1009 if unbundleversions:
1025 1010 caps.append('unbundle=%s' % ','.join(unbundleversions))
1026 1011 resp = ' '.join(caps)
1027 1012 req.httphdr("application/mercurial-0.1", length=len(resp))
1028 1013 req.write(resp)
1029 1014
1030 1015 def check_perm(self, req, op, default):
1031 1016 '''check permission for operation based on user auth.
1032 1017 return true if op allowed, else false.
1033 1018 default is policy to use if no config given.'''
1034 1019
1035 1020 user = req.env.get('REMOTE_USER')
1036 1021
1037 1022 deny = self.configlist('web', 'deny_' + op)
1038 1023 if deny and (not user or deny == ['*'] or user in deny):
1039 1024 return False
1040 1025
1041 1026 allow = self.configlist('web', 'allow_' + op)
1042 1027 return (allow and (allow == ['*'] or user in allow)) or default
1043 1028
1044 1029 def do_unbundle(self, req):
1045 1030 def bail(response, headers={}):
1046 1031 length = int(req.env['CONTENT_LENGTH'])
1047 1032 for s in util.filechunkiter(req, limit=length):
1048 1033 # drain incoming bundle, else client will not see
1049 1034 # response when run outside cgi script
1050 1035 pass
1051 1036 req.httphdr("application/mercurial-0.1", headers=headers)
1052 1037 req.write('0\n')
1053 1038 req.write(response)
1054 1039
1055 1040 # require ssl by default, auth info cannot be sniffed and
1056 1041 # replayed
1057 1042 ssl_req = self.configbool('web', 'push_ssl', True)
1058 1043 if ssl_req:
1059 1044 if not req.env.get('HTTPS'):
1060 1045 bail(_('ssl required\n'))
1061 1046 return
1062 1047 proto = 'https'
1063 1048 else:
1064 1049 proto = 'http'
1065 1050
1066 1051 # do not allow push unless explicitly allowed
1067 1052 if not self.check_perm(req, 'push', False):
1068 1053 bail(_('push not authorized\n'),
1069 1054 headers={'status': '401 Unauthorized'})
1070 1055 return
1071 1056
1072 1057 their_heads = req.form['heads'][0].split(' ')
1073 1058
1074 1059 def check_heads():
1075 1060 heads = map(hex, self.repo.heads())
1076 1061 return their_heads == [hex('force')] or their_heads == heads
1077 1062
1078 1063 # fail early if possible
1079 1064 if not check_heads():
1080 1065 bail(_('unsynced changes\n'))
1081 1066 return
1082 1067
1083 1068 req.httphdr("application/mercurial-0.1")
1084 1069
1085 1070 # do not lock repo until all changegroup data is
1086 1071 # streamed. save to temporary file.
1087 1072
1088 1073 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1089 1074 fp = os.fdopen(fd, 'wb+')
1090 1075 try:
1091 1076 length = int(req.env['CONTENT_LENGTH'])
1092 1077 for s in util.filechunkiter(req, limit=length):
1093 1078 fp.write(s)
1094 1079
1095 1080 try:
1096 1081 lock = self.repo.lock()
1097 1082 try:
1098 1083 if not check_heads():
1099 1084 req.write('0\n')
1100 1085 req.write(_('unsynced changes\n'))
1101 1086 return
1102 1087
1103 1088 fp.seek(0)
1104 1089 header = fp.read(6)
1105 1090 if not header.startswith("HG"):
1106 1091 # old client with uncompressed bundle
1107 1092 def generator(f):
1108 1093 yield header
1109 1094 for chunk in f:
1110 1095 yield chunk
1111 1096 elif not header.startswith("HG10"):
1112 1097 req.write("0\n")
1113 1098 req.write(_("unknown bundle version\n"))
1114 1099 return
1115 1100 elif header == "HG10GZ":
1116 1101 def generator(f):
1117 1102 zd = zlib.decompressobj()
1118 1103 for chunk in f:
1119 1104 yield zd.decompress(chunk)
1120 1105 elif header == "HG10BZ":
1121 1106 def generator(f):
1122 1107 zd = bz2.BZ2Decompressor()
1123 1108 zd.decompress("BZ")
1124 1109 for chunk in f:
1125 1110 yield zd.decompress(chunk)
1126 1111 elif header == "HG10UN":
1127 1112 def generator(f):
1128 1113 for chunk in f:
1129 1114 yield chunk
1130 1115 else:
1131 1116 req.write("0\n")
1132 1117 req.write(_("unknown bundle compression type\n"))
1133 1118 return
1134 1119 gen = generator(util.filechunkiter(fp, 4096))
1135 1120
1136 1121 # send addchangegroup output to client
1137 1122
1138 1123 old_stdout = sys.stdout
1139 1124 sys.stdout = cStringIO.StringIO()
1140 1125
1141 1126 try:
1142 1127 url = 'remote:%s:%s' % (proto,
1143 1128 req.env.get('REMOTE_HOST', ''))
1144 1129 try:
1145 1130 ret = self.repo.addchangegroup(
1146 1131 util.chunkbuffer(gen), 'serve', url)
1147 1132 except util.Abort, inst:
1148 1133 sys.stdout.write("abort: %s\n" % inst)
1149 1134 ret = 0
1150 1135 finally:
1151 1136 val = sys.stdout.getvalue()
1152 1137 sys.stdout = old_stdout
1153 1138 req.write('%d\n' % ret)
1154 1139 req.write(val)
1155 1140 finally:
1156 1141 lock.release()
1157 1142 except (OSError, IOError), inst:
1158 1143 req.write('0\n')
1159 1144 filename = getattr(inst, 'filename', '')
1160 1145 # Don't send our filesystem layout to the client
1161 1146 if filename.startswith(self.repo.root):
1162 1147 filename = filename[len(self.repo.root)+1:]
1163 1148 else:
1164 1149 filename = ''
1165 1150 error = getattr(inst, 'strerror', 'Unknown error')
1166 1151 req.write('%s: %s\n' % (error, filename))
1167 1152 finally:
1168 1153 fp.close()
1169 1154 os.unlink(tempname)
1170 1155
1171 1156 def do_stream_out(self, req):
1172 1157 req.httphdr("application/mercurial-0.1")
1173 1158 streamclone.stream_out(self.repo, req)
@@ -1,234 +1,237 b''
1 1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from mercurial import demandimport; demandimport.enable()
10 10 import os, mimetools, cStringIO
11 11 from mercurial.i18n import gettext as _
12 12 from mercurial import ui, hg, util, templater
13 from common import get_mtime, staticfile, style_map
13 from common import get_mtime, staticfile, style_map, paritygen
14 14 from hgweb_mod import hgweb
15 15
16 16 # This is a stopgap
17 17 class hgwebdir(object):
18 18 def __init__(self, config, parentui=None):
19 19 def cleannames(items):
20 20 return [(name.strip(os.sep), path) for name, path in items]
21 21
22 22 self.parentui = parentui
23 23 self.motd = None
24 24 self.style = None
25 self.stripecount = None
25 26 self.repos_sorted = ('name', False)
26 27 if isinstance(config, (list, tuple)):
27 28 self.repos = cleannames(config)
28 29 self.repos_sorted = ('', False)
29 30 elif isinstance(config, dict):
30 31 self.repos = cleannames(config.items())
31 32 self.repos.sort()
32 33 else:
33 34 if isinstance(config, util.configparser):
34 35 cp = config
35 36 else:
36 37 cp = util.configparser()
37 38 cp.read(config)
38 39 self.repos = []
39 40 if cp.has_section('web'):
40 41 if cp.has_option('web', 'motd'):
41 42 self.motd = cp.get('web', 'motd')
42 43 if cp.has_option('web', 'style'):
43 44 self.style = cp.get('web', 'style')
45 if cp.has_option('web', 'stripes'):
46 self.stripecount = int(cp.get('web', 'stripes'))
44 47 if cp.has_section('paths'):
45 48 self.repos.extend(cleannames(cp.items('paths')))
46 49 if cp.has_section('collections'):
47 50 for prefix, root in cp.items('collections'):
48 51 for path in util.walkrepos(root):
49 52 repo = os.path.normpath(path)
50 53 name = repo
51 54 if name.startswith(prefix):
52 55 name = name[len(prefix):]
53 56 self.repos.append((name.lstrip(os.sep), repo))
54 57 self.repos.sort()
55 58
56 59 def run(self):
57 60 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
58 61 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
59 62 import mercurial.hgweb.wsgicgi as wsgicgi
60 63 from request import wsgiapplication
61 64 def make_web_app():
62 65 return self
63 66 wsgicgi.launch(wsgiapplication(make_web_app))
64 67
65 68 def run_wsgi(self, req):
66 69 def header(**map):
67 70 header_file = cStringIO.StringIO(
68 71 ''.join(tmpl("header", encoding=util._encoding, **map)))
69 72 msg = mimetools.Message(header_file, 0)
70 73 req.header(msg.items())
71 74 yield header_file.read()
72 75
73 76 def footer(**map):
74 77 yield tmpl("footer", **map)
75 78
76 79 def motd(**map):
77 80 if self.motd is not None:
78 81 yield self.motd
79 82 else:
80 83 yield config('web', 'motd', '')
81 84
82 85 parentui = self.parentui or ui.ui(report_untrusted=False)
83 86
84 87 def config(section, name, default=None, untrusted=True):
85 88 return parentui.config(section, name, default, untrusted)
86 89
87 90 url = req.env['REQUEST_URI'].split('?')[0]
88 91 if not url.endswith('/'):
89 92 url += '/'
90 93
91 94 staticurl = config('web', 'staticurl') or url + 'static/'
92 95 if not staticurl.endswith('/'):
93 96 staticurl += '/'
94 97
95 98 style = self.style
96 99 if style is None:
97 100 style = config('web', 'style', '')
98 101 if req.form.has_key('style'):
99 102 style = req.form['style'][0]
103 if self.stripecount is None:
104 self.stripecount = int(config('web', 'stripes', 1))
100 105 mapfile = style_map(templater.templatepath(), style)
101 106 tmpl = templater.templater(mapfile, templater.common_filters,
102 107 defaults={"header": header,
103 108 "footer": footer,
104 109 "motd": motd,
105 110 "url": url,
106 111 "staticurl": staticurl})
107 112
108 113 def archivelist(ui, nodeid, url):
109 114 allowed = ui.configlist("web", "allow_archive", untrusted=True)
110 115 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
111 116 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
112 117 untrusted=True):
113 118 yield {"type" : i[0], "extension": i[1],
114 119 "node": nodeid, "url": url}
115 120
116 121 def entries(sortcolumn="", descending=False, **map):
117 122 def sessionvars(**map):
118 123 fields = []
119 124 if req.form.has_key('style'):
120 125 style = req.form['style'][0]
121 126 if style != get('web', 'style', ''):
122 127 fields.append(('style', style))
123 128
124 129 separator = url[-1] == '?' and ';' or '?'
125 130 for name, value in fields:
126 131 yield dict(name=name, value=value, separator=separator)
127 132 separator = ';'
128 133
129 134 rows = []
130 parity = 0
135 parity = paritygen(self.stripecount)
131 136 for name, path in self.repos:
132 137 u = ui.ui(parentui=parentui)
133 138 try:
134 139 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
135 140 except IOError:
136 141 pass
137 142 def get(section, name, default=None):
138 143 return u.config(section, name, default, untrusted=True)
139 144
140 145 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
141 146 .replace("//", "/")) + '/'
142 147
143 148 # update time with local timezone
144 149 try:
145 150 d = (get_mtime(path), util.makedate()[1])
146 151 except OSError:
147 152 continue
148 153
149 154 contact = (get("ui", "username") or # preferred
150 155 get("web", "contact") or # deprecated
151 156 get("web", "author", "")) # also
152 157 description = get("web", "description", "")
153 158 name = get("web", "name", name)
154 159 row = dict(contact=contact or "unknown",
155 160 contact_sort=contact.upper() or "unknown",
156 161 name=name,
157 162 name_sort=name,
158 163 url=url,
159 164 description=description or "unknown",
160 165 description_sort=description.upper() or "unknown",
161 166 lastchange=d,
162 167 lastchange_sort=d[1]-d[0],
163 168 sessionvars=sessionvars,
164 169 archives=archivelist(u, "tip", url))
165 170 if (not sortcolumn
166 171 or (sortcolumn, descending) == self.repos_sorted):
167 172 # fast path for unsorted output
168 row['parity'] = parity
169 parity = 1 - parity
173 row['parity'] = parity.next()
170 174 yield row
171 175 else:
172 176 rows.append((row["%s_sort" % sortcolumn], row))
173 177 if rows:
174 178 rows.sort()
175 179 if descending:
176 180 rows.reverse()
177 181 for key, row in rows:
178 row['parity'] = parity
179 parity = 1 - parity
182 row['parity'] = parity.next()
180 183 yield row
181 184
182 185 try:
183 186 virtual = req.env.get("PATH_INFO", "").strip('/')
184 187 if virtual.startswith('static/'):
185 188 static = os.path.join(templater.templatepath(), 'static')
186 189 fname = virtual[7:]
187 190 req.write(staticfile(static, fname, req) or
188 191 tmpl('error', error='%r not found' % fname))
189 192 elif virtual:
190 193 while virtual:
191 194 real = dict(self.repos).get(virtual)
192 195 if real:
193 196 break
194 197 up = virtual.rfind('/')
195 198 if up < 0:
196 199 break
197 200 virtual = virtual[:up]
198 201 if real:
199 202 req.env['REPO_NAME'] = virtual
200 203 try:
201 204 repo = hg.repository(parentui, real)
202 205 hgweb(repo).run_wsgi(req)
203 206 except IOError, inst:
204 207 req.write(tmpl("error", error=inst.strerror))
205 208 except hg.RepoError, inst:
206 209 req.write(tmpl("error", error=str(inst)))
207 210 else:
208 211 req.write(tmpl("notfound", repo=virtual))
209 212 else:
210 213 if req.form.has_key('static'):
211 214 static = os.path.join(templater.templatepath(), "static")
212 215 fname = req.form['static'][0]
213 216 req.write(staticfile(static, fname, req)
214 217 or tmpl("error", error="%r not found" % fname))
215 218 else:
216 219 sortable = ["name", "description", "contact", "lastchange"]
217 220 sortcolumn, descending = self.repos_sorted
218 221 if req.form.has_key('sort'):
219 222 sortcolumn = req.form['sort'][0]
220 223 descending = sortcolumn.startswith('-')
221 224 if descending:
222 225 sortcolumn = sortcolumn[1:]
223 226 if sortcolumn not in sortable:
224 227 sortcolumn = ""
225 228
226 229 sort = [("sort_%s" % column,
227 230 "%s%s" % ((not descending and column == sortcolumn)
228 231 and "-" or "", column))
229 232 for column in sortable]
230 233 req.write(tmpl("index", entries=entries,
231 234 sortcolumn=sortcolumn, descending=descending,
232 235 **dict(sort)))
233 236 finally:
234 237 tmpl = None
@@ -1,1944 +1,1950 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __del__(self):
20 20 self.transhandle = None
21 21 def __init__(self, parentui, path=None, create=0):
22 22 repo.repository.__init__(self)
23 23 if not path:
24 24 p = os.getcwd()
25 25 while not os.path.isdir(os.path.join(p, ".hg")):
26 26 oldp = p
27 27 p = os.path.dirname(p)
28 28 if p == oldp:
29 29 raise repo.RepoError(_("There is no Mercurial repository"
30 30 " here (.hg not found)"))
31 31 path = p
32 32
33 33 self.root = os.path.realpath(path)
34 34 self.path = os.path.join(self.root, ".hg")
35 35 self.origroot = path
36 36 self.opener = util.opener(self.path)
37 37 self.wopener = util.opener(self.root)
38 38
39 39 if not os.path.isdir(self.path):
40 40 if create:
41 41 if not os.path.exists(path):
42 42 os.mkdir(path)
43 43 os.mkdir(self.path)
44 44 requirements = ["revlogv1"]
45 45 if parentui.configbool('format', 'usestore', True):
46 46 os.mkdir(os.path.join(self.path, "store"))
47 47 requirements.append("store")
48 48 # create an invalid changelog
49 49 self.opener("00changelog.i", "a").write(
50 50 '\0\0\0\2' # represents revlogv2
51 51 ' dummy changelog to prevent using the old repo layout'
52 52 )
53 53 reqfile = self.opener("requires", "w")
54 54 for r in requirements:
55 55 reqfile.write("%s\n" % r)
56 56 reqfile.close()
57 57 else:
58 58 raise repo.RepoError(_("repository %s not found") % path)
59 59 elif create:
60 60 raise repo.RepoError(_("repository %s already exists") % path)
61 61 else:
62 62 # find requirements
63 63 try:
64 64 requirements = self.opener("requires").read().splitlines()
65 65 except IOError, inst:
66 66 if inst.errno != errno.ENOENT:
67 67 raise
68 68 requirements = []
69 69 # check them
70 70 for r in requirements:
71 71 if r not in self.supported:
72 72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73 73
74 74 # setup store
75 75 if "store" in requirements:
76 76 self.encodefn = util.encodefilename
77 77 self.decodefn = util.decodefilename
78 78 self.spath = os.path.join(self.path, "store")
79 79 else:
80 80 self.encodefn = lambda x: x
81 81 self.decodefn = lambda x: x
82 82 self.spath = self.path
83 83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84 84
85 85 self.ui = ui.ui(parentui=parentui)
86 86 try:
87 87 self.ui.readconfig(self.join("hgrc"), self.root)
88 88 except IOError:
89 89 pass
90 90
91 91 self.changelog = changelog.changelog(self.sopener)
92 92 self.sopener.defversion = self.changelog.version
93 93 self.manifest = manifest.manifest(self.sopener)
94 94
95 95 fallback = self.ui.config('ui', 'fallbackencoding')
96 96 if fallback:
97 97 util._fallbackencoding = fallback
98 98
99 99 self.tagscache = None
100 100 self.branchcache = None
101 101 self.nodetagscache = None
102 102 self.filterpats = {}
103 103 self.transhandle = None
104 104
105 105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
106 106
107 107 def url(self):
108 108 return 'file:' + self.root
109 109
110 110 def hook(self, name, throw=False, **args):
111 111 def callhook(hname, funcname):
112 112 '''call python hook. hook is callable object, looked up as
113 113 name in python module. if callable returns "true", hook
114 114 fails, else passes. if hook raises exception, treated as
115 115 hook failure. exception propagates if throw is "true".
116 116
117 117 reason for "true" meaning "hook failed" is so that
118 118 unmodified commands (e.g. mercurial.commands.update) can
119 119 be run as hooks without wrappers to convert return values.'''
120 120
121 121 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
122 122 obj = funcname
123 123 if not callable(obj):
124 124 d = funcname.rfind('.')
125 125 if d == -1:
126 126 raise util.Abort(_('%s hook is invalid ("%s" not in '
127 127 'a module)') % (hname, funcname))
128 128 modname = funcname[:d]
129 129 try:
130 130 obj = __import__(modname)
131 131 except ImportError:
132 132 try:
133 133 # extensions are loaded with hgext_ prefix
134 134 obj = __import__("hgext_%s" % modname)
135 135 except ImportError:
136 136 raise util.Abort(_('%s hook is invalid '
137 137 '(import of "%s" failed)') %
138 138 (hname, modname))
139 139 try:
140 140 for p in funcname.split('.')[1:]:
141 141 obj = getattr(obj, p)
142 142 except AttributeError, err:
143 143 raise util.Abort(_('%s hook is invalid '
144 144 '("%s" is not defined)') %
145 145 (hname, funcname))
146 146 if not callable(obj):
147 147 raise util.Abort(_('%s hook is invalid '
148 148 '("%s" is not callable)') %
149 149 (hname, funcname))
150 150 try:
151 151 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
152 152 except (KeyboardInterrupt, util.SignalInterrupt):
153 153 raise
154 154 except Exception, exc:
155 155 if isinstance(exc, util.Abort):
156 156 self.ui.warn(_('error: %s hook failed: %s\n') %
157 157 (hname, exc.args[0]))
158 158 else:
159 159 self.ui.warn(_('error: %s hook raised an exception: '
160 160 '%s\n') % (hname, exc))
161 161 if throw:
162 162 raise
163 163 self.ui.print_exc()
164 164 return True
165 165 if r:
166 166 if throw:
167 167 raise util.Abort(_('%s hook failed') % hname)
168 168 self.ui.warn(_('warning: %s hook failed\n') % hname)
169 169 return r
170 170
171 171 def runhook(name, cmd):
172 172 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
173 173 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
174 174 r = util.system(cmd, environ=env, cwd=self.root)
175 175 if r:
176 176 desc, r = util.explain_exit(r)
177 177 if throw:
178 178 raise util.Abort(_('%s hook %s') % (name, desc))
179 179 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
180 180 return r
181 181
182 182 r = False
183 183 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
184 184 if hname.split(".", 1)[0] == name and cmd]
185 185 hooks.sort()
186 186 for hname, cmd in hooks:
187 187 if callable(cmd):
188 188 r = callhook(hname, cmd) or r
189 189 elif cmd.startswith('python:'):
190 190 r = callhook(hname, cmd[7:].strip()) or r
191 191 else:
192 192 r = runhook(hname, cmd) or r
193 193 return r
194 194
195 195 tag_disallowed = ':\r\n'
196 196
197 197 def _tag(self, name, node, message, local, user, date, parent=None):
198 198 use_dirstate = parent is None
199 199
200 200 for c in self.tag_disallowed:
201 201 if c in name:
202 202 raise util.Abort(_('%r cannot be used in a tag name') % c)
203 203
204 204 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
205 205
206 206 if local:
207 207 # local tags are stored in the current charset
208 208 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
209 209 self.hook('tag', node=hex(node), tag=name, local=local)
210 210 return
211 211
212 212 # committed tags are stored in UTF-8
213 213 line = '%s %s\n' % (hex(node), util.fromlocal(name))
214 214 if use_dirstate:
215 215 self.wfile('.hgtags', 'ab').write(line)
216 216 else:
217 217 ntags = self.filectx('.hgtags', parent).data()
218 218 self.wfile('.hgtags', 'ab').write(ntags + line)
219 219 if use_dirstate and self.dirstate.state('.hgtags') == '?':
220 220 self.add(['.hgtags'])
221 221
222 222 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
223 223
224 224 self.hook('tag', node=hex(node), tag=name, local=local)
225 225
226 226 return tagnode
227 227
228 228 def tag(self, name, node, message, local, user, date):
229 229 '''tag a revision with a symbolic name.
230 230
231 231 if local is True, the tag is stored in a per-repository file.
232 232 otherwise, it is stored in the .hgtags file, and a new
233 233 changeset is committed with the change.
234 234
235 235 keyword arguments:
236 236
237 237 local: whether to store tag in non-version-controlled file
238 238 (default False)
239 239
240 240 message: commit message to use if committing
241 241
242 242 user: name of user to use if committing
243 243
244 244 date: date tuple to use if committing'''
245 245
246 246 for x in self.status()[:5]:
247 247 if '.hgtags' in x:
248 248 raise util.Abort(_('working copy of .hgtags is changed '
249 249 '(please commit .hgtags manually)'))
250 250
251 251
252 252 self._tag(name, node, message, local, user, date)
253 253
254 254 def tags(self):
255 255 '''return a mapping of tag to node'''
256 256 if self.tagscache:
257 257 return self.tagscache
258 258
259 259 globaltags = {}
260 260
261 261 def readtags(lines, fn):
262 262 filetags = {}
263 263 count = 0
264 264
265 265 def warn(msg):
266 266 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
267 267
268 268 for l in lines:
269 269 count += 1
270 270 if not l:
271 271 continue
272 272 s = l.split(" ", 1)
273 273 if len(s) != 2:
274 274 warn(_("cannot parse entry"))
275 275 continue
276 276 node, key = s
277 277 key = util.tolocal(key.strip()) # stored in UTF-8
278 278 try:
279 279 bin_n = bin(node)
280 280 except TypeError:
281 281 warn(_("node '%s' is not well formed") % node)
282 282 continue
283 283 if bin_n not in self.changelog.nodemap:
284 284 warn(_("tag '%s' refers to unknown node") % key)
285 285 continue
286 286
287 287 h = []
288 288 if key in filetags:
289 289 n, h = filetags[key]
290 290 h.append(n)
291 291 filetags[key] = (bin_n, h)
292 292
293 293 for k,nh in filetags.items():
294 294 if k not in globaltags:
295 295 globaltags[k] = nh
296 296 continue
297 297 # we prefer the global tag if:
298 298 # it supercedes us OR
299 299 # mutual supercedes and it has a higher rank
300 300 # otherwise we win because we're tip-most
301 301 an, ah = nh
302 302 bn, bh = globaltags[k]
303 303 if bn != an and an in bh and \
304 304 (bn not in ah or len(bh) > len(ah)):
305 305 an = bn
306 306 ah.append([n for n in bh if n not in ah])
307 307 globaltags[k] = an, ah
308 308
309 309 # read the tags file from each head, ending with the tip
310 310 f = None
311 311 for rev, node, fnode in self._hgtagsnodes():
312 312 f = (f and f.filectx(fnode) or
313 313 self.filectx('.hgtags', fileid=fnode))
314 314 readtags(f.data().splitlines(), f)
315 315
316 316 try:
317 317 data = util.fromlocal(self.opener("localtags").read())
318 318 # localtags are stored in the local character set
319 319 # while the internal tag table is stored in UTF-8
320 320 readtags(data.splitlines(), "localtags")
321 321 except IOError:
322 322 pass
323 323
324 324 self.tagscache = {}
325 325 for k,nh in globaltags.items():
326 326 n = nh[0]
327 327 if n != nullid:
328 328 self.tagscache[k] = n
329 329 self.tagscache['tip'] = self.changelog.tip()
330 330
331 331 return self.tagscache
332 332
333 333 def _hgtagsnodes(self):
334 334 heads = self.heads()
335 335 heads.reverse()
336 336 last = {}
337 337 ret = []
338 338 for node in heads:
339 339 c = self.changectx(node)
340 340 rev = c.rev()
341 341 try:
342 342 fnode = c.filenode('.hgtags')
343 343 except revlog.LookupError:
344 344 continue
345 345 ret.append((rev, node, fnode))
346 346 if fnode in last:
347 347 ret[last[fnode]] = None
348 348 last[fnode] = len(ret) - 1
349 349 return [item for item in ret if item]
350 350
351 351 def tagslist(self):
352 352 '''return a list of tags ordered by revision'''
353 353 l = []
354 354 for t, n in self.tags().items():
355 355 try:
356 356 r = self.changelog.rev(n)
357 357 except:
358 358 r = -2 # sort to the beginning of the list if unknown
359 359 l.append((r, t, n))
360 360 l.sort()
361 361 return [(t, n) for r, t, n in l]
362 362
363 363 def nodetags(self, node):
364 364 '''return the tags associated with a node'''
365 365 if not self.nodetagscache:
366 366 self.nodetagscache = {}
367 367 for t, n in self.tags().items():
368 368 self.nodetagscache.setdefault(n, []).append(t)
369 369 return self.nodetagscache.get(node, [])
370 370
371 371 def _branchtags(self):
372 372 partial, last, lrev = self._readbranchcache()
373 373
374 374 tiprev = self.changelog.count() - 1
375 375 if lrev != tiprev:
376 376 self._updatebranchcache(partial, lrev+1, tiprev+1)
377 377 self._writebranchcache(partial, self.changelog.tip(), tiprev)
378 378
379 379 return partial
380 380
381 381 def branchtags(self):
382 382 if self.branchcache is not None:
383 383 return self.branchcache
384 384
385 385 self.branchcache = {} # avoid recursion in changectx
386 386 partial = self._branchtags()
387 387
388 388 # the branch cache is stored on disk as UTF-8, but in the local
389 389 # charset internally
390 390 for k, v in partial.items():
391 391 self.branchcache[util.tolocal(k)] = v
392 392 return self.branchcache
393 393
394 394 def _readbranchcache(self):
395 395 partial = {}
396 396 try:
397 397 f = self.opener("branch.cache")
398 398 lines = f.read().split('\n')
399 399 f.close()
400 400 except (IOError, OSError):
401 401 return {}, nullid, nullrev
402 402
403 403 try:
404 404 last, lrev = lines.pop(0).split(" ", 1)
405 405 last, lrev = bin(last), int(lrev)
406 406 if not (lrev < self.changelog.count() and
407 407 self.changelog.node(lrev) == last): # sanity check
408 408 # invalidate the cache
409 409 raise ValueError('Invalid branch cache: unknown tip')
410 410 for l in lines:
411 411 if not l: continue
412 412 node, label = l.split(" ", 1)
413 413 partial[label.strip()] = bin(node)
414 414 except (KeyboardInterrupt, util.SignalInterrupt):
415 415 raise
416 416 except Exception, inst:
417 417 if self.ui.debugflag:
418 418 self.ui.warn(str(inst), '\n')
419 419 partial, last, lrev = {}, nullid, nullrev
420 420 return partial, last, lrev
421 421
422 422 def _writebranchcache(self, branches, tip, tiprev):
423 423 try:
424 424 f = self.opener("branch.cache", "w", atomictemp=True)
425 425 f.write("%s %s\n" % (hex(tip), tiprev))
426 426 for label, node in branches.iteritems():
427 427 f.write("%s %s\n" % (hex(node), label))
428 428 f.rename()
429 429 except (IOError, OSError):
430 430 pass
431 431
432 432 def _updatebranchcache(self, partial, start, end):
433 433 for r in xrange(start, end):
434 434 c = self.changectx(r)
435 435 b = c.branch()
436 436 partial[b] = c.node()
437 437
438 438 def lookup(self, key):
439 439 if key == '.':
440 440 key = self.dirstate.parents()[0]
441 441 if key == nullid:
442 442 raise repo.RepoError(_("no revision checked out"))
443 443 elif key == 'null':
444 444 return nullid
445 445 n = self.changelog._match(key)
446 446 if n:
447 447 return n
448 448 if key in self.tags():
449 449 return self.tags()[key]
450 450 if key in self.branchtags():
451 451 return self.branchtags()[key]
452 452 n = self.changelog._partialmatch(key)
453 453 if n:
454 454 return n
455 455 raise repo.RepoError(_("unknown revision '%s'") % key)
456 456
457 457 def dev(self):
458 458 return os.lstat(self.path).st_dev
459 459
460 460 def local(self):
461 461 return True
462 462
463 463 def join(self, f):
464 464 return os.path.join(self.path, f)
465 465
466 466 def sjoin(self, f):
467 467 f = self.encodefn(f)
468 468 return os.path.join(self.spath, f)
469 469
470 470 def wjoin(self, f):
471 471 return os.path.join(self.root, f)
472 472
473 473 def file(self, f):
474 474 if f[0] == '/':
475 475 f = f[1:]
476 476 return filelog.filelog(self.sopener, f)
477 477
478 478 def changectx(self, changeid=None):
479 479 return context.changectx(self, changeid)
480 480
481 481 def workingctx(self):
482 482 return context.workingctx(self)
483 483
484 484 def parents(self, changeid=None):
485 485 '''
486 486 get list of changectxs for parents of changeid or working directory
487 487 '''
488 488 if changeid is None:
489 489 pl = self.dirstate.parents()
490 490 else:
491 491 n = self.changelog.lookup(changeid)
492 492 pl = self.changelog.parents(n)
493 493 if pl[1] == nullid:
494 494 return [self.changectx(pl[0])]
495 495 return [self.changectx(pl[0]), self.changectx(pl[1])]
496 496
497 497 def filectx(self, path, changeid=None, fileid=None):
498 498 """changeid can be a changeset revision, node, or tag.
499 499 fileid can be a file revision or node."""
500 500 return context.filectx(self, path, changeid, fileid)
501 501
502 502 def getcwd(self):
503 503 return self.dirstate.getcwd()
504 504
505 505 def wfile(self, f, mode='r'):
506 506 return self.wopener(f, mode)
507 507
508 508 def _link(self, f):
509 509 return os.path.islink(self.wjoin(f))
510 510
511 511 def _filter(self, filter, filename, data):
512 512 if filter not in self.filterpats:
513 513 l = []
514 514 for pat, cmd in self.ui.configitems(filter):
515 515 mf = util.matcher(self.root, "", [pat], [], [])[1]
516 516 l.append((mf, cmd))
517 517 self.filterpats[filter] = l
518 518
519 519 for mf, cmd in self.filterpats[filter]:
520 520 if mf(filename):
521 521 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
522 522 data = util.filter(data, cmd)
523 523 break
524 524
525 525 return data
526 526
527 527 def wread(self, filename):
528 528 if self._link(filename):
529 529 data = os.readlink(self.wjoin(filename))
530 530 else:
531 531 data = self.wopener(filename, 'r').read()
532 532 return self._filter("encode", filename, data)
533 533
    def wwrite(self, filename, data, flags):
        """Write data to filename in the working directory.

        data is passed through the "decode" filters first.  With "l" in
        flags a symlink pointing at data is created (replacing whatever
        was there); otherwise a regular file is written and its exec
        bit is set from "x" in flags.
        """
        data = self._filter("decode", filename, data)
        if "l" in flags:
            f = self.wjoin(filename)
            try:
                # remove any existing file/link at the target path first
                os.unlink(f)
            except OSError:
                pass
            d = os.path.dirname(f)
            if not os.path.exists(d):
                os.makedirs(d)
            os.symlink(data, f)
        else:
            try:
                # open() cannot replace a symlink in place; unlink it first
                if self._link(filename):
                    os.unlink(self.wjoin(filename))
            except OSError:
                pass
            self.wopener(filename, 'w').write(data)
            util.set_exec(self.wjoin(filename), "x" in flags)
554 554
555 555 def wwritedata(self, filename, data):
556 556 return self._filter("decode", filename, data)
557 557
    def transaction(self):
        """Return a transaction handle, nesting into a running one.

        Before opening a fresh transaction the current dirstate is
        saved to journal.dirstate so rollback() can restore it; on
        close the journal files are renamed to undo files (see
        aftertrans).
        """
        tr = self.transhandle
        if tr != None and tr.running():
            # reuse the active transaction
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (fresh repository)
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self.transhandle = tr
        return tr
577 577
578 578 def recover(self):
579 579 l = self.lock()
580 580 if os.path.exists(self.sjoin("journal")):
581 581 self.ui.status(_("rolling back interrupted transaction\n"))
582 582 transaction.rollback(self.sopener, self.sjoin("journal"))
583 583 self.reload()
584 584 return True
585 585 else:
586 586 self.ui.warn(_("no interrupted transaction available\n"))
587 587 return False
588 588
    def rollback(self, wlock=None, lock=None):
        """Undo the last committed transaction, restoring the saved
        dirstate as well.  Warns and does nothing when no undo data
        exists.  Acquires any lock the caller did not pass in.
        """
        # lock order matters: wlock before store lock
        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            # drop in-memory state that the rollback invalidated
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))
602 602
603 603 def wreload(self):
604 604 self.dirstate.reload()
605 605
606 606 def reload(self):
607 607 self.changelog.load()
608 608 self.manifest.load()
609 609 self.tagscache = None
610 610 self.nodetagscache = None
611 611
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire the lock file lockname.

        With wait false, LockHeld propagates immediately when the lock
        is taken; otherwise a warning is printed and the acquire is
        retried with the configured ui.timeout (default 600 seconds).
        acquirefn, if given, runs once the lock is held; releasefn is
        passed through to the lock object.
        """
        try:
            # first attempt is non-blocking (timeout 0)
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
627 627
628 628 def lock(self, wait=1):
629 629 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
630 630 desc=_('repository %s') % self.origroot)
631 631
632 632 def wlock(self, wait=1):
633 633 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
634 634 self.wreload,
635 635 desc=_('working directory of %s') % self.origroot)
636 636
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the new filelog node for fn (or the existing parent
        node when the file is unmodified).  fn is appended to
        changelist when a new revision is actually created.  Copy
        information from the dirstate is recorded as filelog metadata.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
696 696
697 697 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
698 698 if p1 is None:
699 699 p1, p2 = self.dirstate.parents()
700 700 return self.commit(files=files, text=text, user=user, date=date,
701 701 p1=p1, p2=p2, wlock=wlock, extra=extra)
702 702
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):
        """Create a new changeset and return its node.

        Returns None when there is nothing to commit or the user left
        the commit message empty.  With p1 unset this is a normal
        dirstate-driven commit; with p1 set (rawcommit) exactly the
        given files are committed between the given parents, and the
        dirstate is only updated when p1 is the working directory's
        first parent.  An empty text (or force_editor) launches the
        configured editor.
        """

        commit = []
        remove = []
        changed = []
        use_dirstate = (p1 is None) # not rawcommit
        extra = extra.copy()

        if use_dirstate:
            if files:
                # explicit file list: filter it through the dirstate
                for f in files:
                    s = self.dirstate.state(f)
                    if s in 'nmai':
                        commit.append(f)
                    elif s == 'r':
                        remove.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
            else:
                changes = self.status(match=match)[:5]
                modified, added, removed, deleted, unknown = changes
                commit = modified + added
                remove = removed
        else:
            commit = files

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True
        else:
            p1, p2 = p1, p2 or nullid
            update_dirstate = (self.dirstate.parents()[0] == p1)

        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        if use_dirstate:
            branchname = self.workingctx().branch()
            try:
                # validate that the branch name is legal UTF-8
                branchname = branchname.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
        else:
            branchname = ""

        if use_dirstate:
            oldname = c1[5].get("branch") # stored in UTF-8
            if not commit and not remove and not force and p2 == nullid and \
                   branchname == oldname:
                self.ui.status(_("nothing changed\n"))
                return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        is_exec = util.execfunc(self.root, m1.execf)
        is_link = util.linkfunc(self.root, m1.linkf)
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, is_exec(f), is_link(f))
            except (OSError, IOError):
                if use_dirstate:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                else:
                    # rawcommit: treat an unreadable file as removed
                    remove.append(f)

        # update manifest
        m1.update(new)
        remove.sort()
        removed = []

        for f in remove:
            if f in m1:
                del m1[f]
                removed.append(f)
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            edittext.append("HG: user: %s" % user)
            if p2 != nullid:
                edittext.append("HG: branch merge")
            if branchname:
                edittext.append("HG: branch %s" % util.tolocal(branchname))
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in removed])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        # strip trailing whitespace and leading blank lines; an empty
        # message aborts the commit
        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        if self.branchcache and "branch" in extra:
            self.branchcache[util.tolocal(extra["branch"])] = n

        if use_dirstate or update_dirstate:
            self.dirstate.setparents(n)
            if use_dirstate:
                self.dirstate.update(new, "n")
                self.dirstate.forget(removed)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
851 851
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        # deleting during iteration is safe here because
                        # we break out immediately afterwards
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was not found in the manifest
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.root, self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
893 893
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of sorted file lists:
        (modified, added, removed, deleted, unknown, ignored, clean)
        """

        def fcmp(fn, getnode):
            # full content compare of a working file against its parent
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            # manifest of node restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        mywlock = False

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mnode = self.changelog.read(self.dirstate.parents()[0])[0]
                    getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
                                          nullid)
                    for f in lookup:
                        if fcmp(f, getnode):
                            modified.append(f)
                        else:
                            clean.append(f)
                            # update dirstate so the next status call can
                            # trust the stat info; only if we can lock
                            if not wlock and not mywlock:
                                mywlock = True
                                try:
                                    wlock = self.wlock(wait=0)
                                except lock.LockException:
                                    pass
                            if wlock:
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

            if mywlock and wlock:
                wlock.release()
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
                                                fcmp(fn, getnode))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever is left in mf1 is only in node1: removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
1002 1002
1003 1003 def add(self, list, wlock=None):
1004 1004 if not wlock:
1005 1005 wlock = self.wlock()
1006 1006 for f in list:
1007 1007 p = self.wjoin(f)
1008 1008 islink = os.path.islink(p)
1009 size = os.lstat(p).st_size
1010 if size > 10000000:
1011 self.ui.warn(_("%s: files over 10MB may cause memory and"
1012 " performance problems\n"
1013 "(use 'hg revert %s' to unadd the file)\n")
1014 % (f, f))
1009 1015 if not islink and not os.path.exists(p):
1010 1016 self.ui.warn(_("%s does not exist!\n") % f)
1011 1017 elif not islink and not os.path.isfile(p):
1012 1018 self.ui.warn(_("%s not added: only files and symlinks "
1013 1019 "supported currently\n") % f)
1014 1020 elif self.dirstate.state(f) in 'an':
1015 1021 self.ui.warn(_("%s already tracked!\n") % f)
1016 1022 else:
1017 1023 self.dirstate.update([f], "a")
1018 1024
1019 1025 def forget(self, list, wlock=None):
1020 1026 if not wlock:
1021 1027 wlock = self.wlock()
1022 1028 for f in list:
1023 1029 if self.dirstate.state(f) not in 'ai':
1024 1030 self.ui.warn(_("%s not added!\n") % f)
1025 1031 else:
1026 1032 self.dirstate.forget([f])
1027 1033
1028 1034 def remove(self, list, unlink=False, wlock=None):
1029 1035 if unlink:
1030 1036 for f in list:
1031 1037 try:
1032 1038 util.unlink(self.wjoin(f))
1033 1039 except OSError, inst:
1034 1040 if inst.errno != errno.ENOENT:
1035 1041 raise
1036 1042 if not wlock:
1037 1043 wlock = self.wlock()
1038 1044 for f in list:
1039 1045 if unlink and os.path.exists(self.wjoin(f)):
1040 1046 self.ui.warn(_("%s still exists!\n") % f)
1041 1047 elif self.dirstate.state(f) == 'a':
1042 1048 self.dirstate.forget([f])
1043 1049 elif f not in self.dirstate:
1044 1050 self.ui.warn(_("%s not tracked!\n") % f)
1045 1051 else:
1046 1052 self.dirstate.update([f], "r")
1047 1053
1048 1054 def undelete(self, list, wlock=None):
1049 1055 p = self.dirstate.parents()[0]
1050 1056 mn = self.changelog.read(p)[0]
1051 1057 m = self.manifest.read(mn)
1052 1058 if not wlock:
1053 1059 wlock = self.wlock()
1054 1060 for f in list:
1055 1061 if self.dirstate.state(f) not in "r":
1056 1062 self.ui.warn("%s not removed!\n" % f)
1057 1063 else:
1058 1064 t = self.file(f).read(m[f])
1059 1065 self.wwrite(f, t, m.flags(f))
1060 1066 self.dirstate.update([f], "n")
1061 1067
1062 1068 def copy(self, source, dest, wlock=None):
1063 1069 p = self.wjoin(dest)
1064 1070 if not (os.path.exists(p) or os.path.islink(p)):
1065 1071 self.ui.warn(_("%s does not exist!\n") % dest)
1066 1072 elif not (os.path.isfile(p) or os.path.islink(p)):
1067 1073 self.ui.warn(_("copy failed: %s is not a file or a "
1068 1074 "symbolic link\n") % dest)
1069 1075 else:
1070 1076 if not wlock:
1071 1077 wlock = self.wlock()
1072 1078 if self.dirstate.state(dest) == '?':
1073 1079 self.dirstate.update([dest], "a")
1074 1080 self.dirstate.copy(source, dest)
1075 1081
1076 1082 def heads(self, start=None):
1077 1083 heads = self.changelog.heads(start)
1078 1084 # sort the output in rev descending order
1079 1085 heads = [(-self.changelog.rev(h), h) for h in heads]
1080 1086 heads.sort()
1081 1087 return [n for (r, n) in heads]
1082 1088
1083 1089 def branches(self, nodes):
1084 1090 if not nodes:
1085 1091 nodes = [self.changelog.tip()]
1086 1092 b = []
1087 1093 for n in nodes:
1088 1094 t = n
1089 1095 while 1:
1090 1096 p = self.changelog.parents(n)
1091 1097 if p[1] != nullid or p[0] == nullid:
1092 1098 b.append((t, n, p[0], p[1]))
1093 1099 break
1094 1100 n = p[0]
1095 1101 return b
1096 1102
1097 1103 def between(self, pairs):
1098 1104 r = []
1099 1105
1100 1106 for top, bottom in pairs:
1101 1107 n, l, i = top, [], 0
1102 1108 f = 1
1103 1109
1104 1110 while n != bottom:
1105 1111 p = self.changelog.parents(n)[0]
1106 1112 if i == f:
1107 1113 l.append(n)
1108 1114 f = f * 2
1109 1115 n = p
1110 1116 i += 1
1111 1117
1112 1118 r.append(l)
1113 1119
1114 1120 return r
1115 1121
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # empty local repo: everything on remote is incoming
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            for p in n[2:4]:
                                if p in m:
                                    base[p] = 1 # latest known

                    # queue unknown parents for the next request batch
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                            (reqcnt, " ".join(map(short, r))))
                # ask about parents in batches of 10 to bound request size
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                          short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1256 1262
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            # no common-node information supplied: discover it ourselves
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
1304 1310
    def pull(self, remote, heads=None, force=False, lock=None):
        """Pull changesets from remote into this repository.

        With heads, only changesets that are ancestors of those heads
        are pulled (requires the remote to support changegroupsubset).
        Returns the result of addchangegroup, or 0 when there is
        nothing to fetch.
        """
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            # only release a lock we acquired ourselves
            if mylock:
                lock.release()
1330 1336
1331 1337 def push(self, remote, force=False, revs=None):
1332 1338 # there are two ways to push to remote repo:
1333 1339 #
1334 1340 # addchangegroup assumes local user can lock remote
1335 1341 # repo (local filesystem, old ssh servers).
1336 1342 #
1337 1343 # unbundle assumes local user cannot lock remote repo (new ssh
1338 1344 # servers, http servers).
1339 1345
1340 1346 if remote.capable('unbundle'):
1341 1347 return self.push_unbundle(remote, force, revs)
1342 1348 return self.push_addchangegroup(remote, force, revs)
1343 1349
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push and check the remote heads.

        Returns (changegroup, remote_heads) on success, or
        (None, status) when there is nothing to push or the push would
        create new remote heads without force.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1399 1405
1400 1406 def push_addchangegroup(self, remote, force, revs):
1401 1407 lock = remote.lock()
1402 1408
1403 1409 ret = self.prepush(remote, force, revs)
1404 1410 if ret[0] is not None:
1405 1411 cg, remote_heads = ret
1406 1412 return remote.addchangegroup(cg, 'push', self.url())
1407 1413 return ret[1]
1408 1414
1409 1415 def push_unbundle(self, remote, force, revs):
1410 1416 # local repo finds heads on server, finds out what revs it
1411 1417 # must push. once revs transferred, if server finds it has
1412 1418 # different heads (someone else won commit/push race), server
1413 1419 # aborts.
1414 1420
1415 1421 ret = self.prepush(remote, force, revs)
1416 1422 if ret[0] is not None:
1417 1423 cg, remote_heads = ret
1418 1424 if force: remote_heads = ['force']
1419 1425 return remote.unbundle(cg, remote_heads, 'push')
1420 1426 return ret[1]
1421 1427
1422 1428 def changegroupinfo(self, nodes):
1423 1429 self.ui.note(_("%d changesets found\n") % len(nodes))
1424 1430 if self.ui.debugflag:
1425 1431 self.ui.debug(_("List of changesets:\n"))
1426 1432 for node in nodes:
1427 1433 self.ui.debug("%s\n" % hex(node))
1428 1434
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        bases  - changelog nodes assumed present on the recipient side
        heads  - changelog nodes to generate the group up to
        source - tag passed to the preoutgoing/outgoing hooks

        Returns a util.chunkbuffer wrapping the chunk generator."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1700 1706
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns a util.chunkbuffer wrapping the chunk generator."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        # set of outgoing changelog revision numbers; used below to decide
        # which manifest/file revisions are linked to outgoing changesets
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        def identity(x):
            # a changenode's owning changenode is itself
            return x

        def gennodelst(revlog):
            # yield the nodes of revlog whose linked changeset is outgoing
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            # callback invoked per outgoing changeset: record every file it
            # touched so we know which filelogs to send below
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            # map a manifest/file node to the changelog node it links to
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1767 1773
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            # progress callback while adding changesets: report the node and
            # return the revision number the next changeset will get
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            # map a changelog node to its local revision number
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        cor = cl.count() - 1  # last revision before the group
        chunkiter = changegroup.chunkiter(source)
        if cl.addgroup(chunkiter, csmap, tr, 1) is None:
            raise util.Abort(_("received changelog group is empty"))
        cnr = cl.count() - 1  # last revision after the group
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        chunkiter = changegroup.chunkiter(source)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifest.addgroup(chunkiter, revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            f = changegroup.getchunk(source)
            if not f:
                # empty chunk marks the end of the file groups
                break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            chunkiter = changegroup.chunkiter(source)
            if fl.addgroup(chunkiter, revmap, tr) is None:
                raise util.Abort(_("received file revlog group is empty"))
            revisions += fl.count() - o
            files += 1

        # make changelog see real files again
        cl.finalize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            # pretxnchangegroup may still abort the transaction
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1864 1870
1865 1871
1866 1872 def stream_in(self, remote):
1867 1873 fp = remote.stream_out()
1868 1874 l = fp.readline()
1869 1875 try:
1870 1876 resp = int(l)
1871 1877 except ValueError:
1872 1878 raise util.UnexpectedOutput(
1873 1879 _('Unexpected response from remote server:'), l)
1874 1880 if resp == 1:
1875 1881 raise util.Abort(_('operation forbidden by server'))
1876 1882 elif resp == 2:
1877 1883 raise util.Abort(_('locking the remote repository failed'))
1878 1884 elif resp != 0:
1879 1885 raise util.Abort(_('the server sent an unknown error code'))
1880 1886 self.ui.status(_('streaming all changes\n'))
1881 1887 l = fp.readline()
1882 1888 try:
1883 1889 total_files, total_bytes = map(int, l.split(' ', 1))
1884 1890 except ValueError, TypeError:
1885 1891 raise util.UnexpectedOutput(
1886 1892 _('Unexpected response from remote server:'), l)
1887 1893 self.ui.status(_('%d files to transfer, %s of data\n') %
1888 1894 (total_files, util.bytecount(total_bytes)))
1889 1895 start = time.time()
1890 1896 for i in xrange(total_files):
1891 1897 # XXX doesn't support '\n' or '\r' in filenames
1892 1898 l = fp.readline()
1893 1899 try:
1894 1900 name, size = l.split('\0', 1)
1895 1901 size = int(size)
1896 1902 except ValueError, TypeError:
1897 1903 raise util.UnexpectedOutput(
1898 1904 _('Unexpected response from remote server:'), l)
1899 1905 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1900 1906 ofp = self.sopener(name, 'w')
1901 1907 for chunk in util.filechunkiter(fp, limit=size):
1902 1908 ofp.write(chunk)
1903 1909 ofp.close()
1904 1910 elapsed = time.time() - start
1905 1911 if elapsed <= 0:
1906 1912 elapsed = 0.001
1907 1913 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1908 1914 (util.bytecount(total_bytes), elapsed,
1909 1915 util.bytecount(total_bytes / elapsed)))
1910 1916 self.reload()
1911 1917 return len(self.heads()) + 1
1912 1918
1913 1919 def clone(self, remote, heads=[], stream=False):
1914 1920 '''clone remote repository.
1915 1921
1916 1922 keyword arguments:
1917 1923 heads: list of revs to clone (forces use of pull)
1918 1924 stream: use streaming clone if possible'''
1919 1925
1920 1926 # now, all clients that can request uncompressed clones can
1921 1927 # read repo formats supported by all servers that can serve
1922 1928 # them.
1923 1929
1924 1930 # if revlog format changes, client will have to check version
1925 1931 # and format flags on "stream" capability, and use
1926 1932 # uncompressed only if compatible.
1927 1933
1928 1934 if stream and not heads and remote.capable('stream'):
1929 1935 return self.stream_in(remote)
1930 1936 return self.pull(remote, heads)
1931 1937
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in files.

    The pairs are copied into plain tuples up front so the returned
    closure holds no reference back to the caller's objects.
    """
    pending = []
    for pair in files:
        pending.append(tuple(pair))
    def run_renames():
        for src, dest in pending:
            util.rename(src, dest)
    return run_renames
1939 1945
def instance(ui, path, create):
    """Open (or create) the local repository at path, stripping any
    'file:' scheme prefix first."""
    repo_path = util.drop_scheme('file', path)
    return localrepository(ui, repo_path, create)
1942 1948
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
@@ -1,32 +1,32 b''
1 1 #header#
2 2 <title>#repo|escape#: Changelog</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog
10 10 </div>
11 11
12 12 <form action="{url}log">
13 13 {sessionvars%hiddenformentry}
14 14 <div class="search">
15 15 <input type="text" name="rev" />
16 16 </div>
17 17 </form>
18 18 </div>
19 19
20 20 <div class="page_nav">
21 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#<br/>
21 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
22 22 <br/>
23 23 #changenav%naventry#<br/>
24 24 </div>
25 25
26 26 #entries%changelogentry#
27 27
28 28 <div class="page_nav">
29 29 #changenav%naventry#<br/>
30 30 </div>
31 31
32 32 #footer#
@@ -1,41 +1,40 b''
1 1 #header#
2 2 <title>{repo|escape}: changeset {rev}:{node|short}</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a> | changeset | <a href="{url}raw-rev/#node|short#">raw</a> #archives%archiveentry#<br/>
14 14 </div>
15 15
16 16 <div>
17 17 <a class="title" href="{url}raw-rev/#node|short#">#desc|strip|escape|firstline#</a>
18 18 </div>
19 19 <div class="title_text">
20 20 <table cellspacing="0">
21 21 <tr><td>author</td><td>#author|obfuscate#</td></tr>
22 22 <tr><td></td><td>#date|date# (#date|age# ago)</td></tr>
23 23 <tr><td>changeset {rev}</td><td style="font-family:monospace">{node|short}</td></tr>
24 <tr><td>manifest</td><td style="font-family:monospace"><a class="list" href="{url}file/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
25 24 #parent%changesetparent#
26 25 #child%changesetchild#
27 26 #changesettag#
28 27 </table></div>
29 28
30 29 <div class="page_body">
31 30 #desc|strip|escape|addbreaks#
32 31 </div>
33 32 <div class="list_head"></div>
34 33 <div class="title_text">
35 34 <table cellspacing="0">
36 35 #files#
37 36 </table></div>
38 37
39 38 <div class="page_body">#diff#</div>
40 39
41 40 #footer#
@@ -1,59 +1,56 b''
1 1 #header#
2 2 <title>{repo|escape}: {file|escape}@{node|short} (annotated)</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 17 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a> |
18 18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
19 19 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
20 20 <a href="{url}log/{node|short}/#file|urlescape#{sessionvars%urlparameter}">revisions</a> |
21 21 annotate |
22 22 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
23 23 <a href="{url}raw-annotate/{node|short}/#file|urlescape#">raw</a><br/>
24 24 </div>
25 25
26 26 <div class="title">#file|escape#</div>
27 27
28 28 <div class="title_text">
29 <table>
29 <table cellspacing="0">
30 <tr>
31 <td>author</td>
32 <td>#author|obfuscate#</td></tr>
30 33 <tr>
31 <td class="metatag">changeset #rev#:</td>
32 <td><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
34 <td></td>
35 <td>#date|date# (#date|age# ago)</td></tr>
36 <tr>
37 <td>changeset {rev}</td>
38 <td style="font-family:monospace"><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
33 39 #parent%fileannotateparent#
34 40 #child%fileannotatechild#
35 41 <tr>
36 <td class="metatag">manifest:</td>
37 <td><a href="{url}file/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
38 <tr>
39 <td class="metatag">author:</td>
40 <td>#author|obfuscate#</td></tr>
41 <tr>
42 <td class="metatag">date:</td>
43 <td>#date|date# (#date|age# ago)</td></tr>
44 <tr>
45 <td class="metatag">permissions:</td>
46 <td>#permissions|permissions#</td></tr>
42 <td>permissions</td>
43 <td style="font-family:monospace">#permissions|permissions#</td></tr>
47 44 </table>
48 45 </div>
49 46
50 47 <div class="page_path">
51 48 {desc|strip|escape|addbreaks}
52 49 </div>
53 50 <div class="page_body">
54 51 <table>
55 52 #annotate%annotateline#
56 53 </table>
57 54 </div>
58 55
59 56 #footer#
@@ -1,47 +1,42 b''
1 1 {header}
2 2 <title>{repo|escape}: diff {file|escape}</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for {repo|escape}">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / annotate
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 17 <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">manifest</a> |
18 18 <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
19 19 <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
20 20 <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
21 21 <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
22 22 diff |
23 23 <a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a><br/>
24 24 </div>
25 25
26 26 <div class="title">{file|escape}</div>
27 27
28 28 <table>
29 29 <tr>
30 <td class="metatag">changeset {rev}:</td>
31 <td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
32 </tr>
30 <td>changeset {rev}</td>
31 <td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>
33 32 {parent%filediffparent}
34 33 {child%filediffchild}
35 <tr>
36 <td class="metatag">manifest:</td>
37 <td><a href="{url}file/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
38 </tr>
39 34 </table>
40 35
36 <div class="list_head"></div>
37
41 38 <div class="page_body">
42 <table>
43 39 {diff}
44 </table>
45 40 </div>
46 41
47 42 {footer}
@@ -1,33 +1,36 b''
1 1 #header#
2 2 <title>#repo|escape#: File revisions</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 17 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
18 18 revisions |
19 19 <a href="{url}annotate/{node|short}/#file|urlescape#{sessionvars%urlparameter}">annotate</a> |
20 20 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
21 <a href="{url}rss-log/#node|short#/#file|urlescape#">rss</a><br/>
22
21 <a href="{url}rss-log/#node|short#/#file|urlescape#">rss</a>
23 22 <br/>
24 {nav%filenaventry}<br/>
23 {nav%filenaventry}
25 24 </div>
26 25
27 26 <div class="title" >#file|urlescape#</div>
28 27
29 28 <table>
30 29 #entries%filelogentry#
31 30 </table>
32 31
32 <div class="page_nav">
33 {nav%filenaventry}
34 </div>
35
33 36 #footer#
@@ -1,58 +1,55 b''
1 1 #header#
2 2 <title>{repo|escape}: {file|escape}@{node|short}</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 17 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a> |
18 18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
19 19 file |
20 20 <a href="{url}log/{node|short}/#file|urlescape#{sessionvars%urlparameter}">revisions</a> |
21 21 <a href="{url}annotate/{node|short}/#file|urlescape#{sessionvars%urlparameter}">annotate</a> |
22 22 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
23 23 <a href="{url}raw-file/{node|short}/#file|urlescape#">raw</a><br/>
24 24 </div>
25 25
26 26 <div class="title">#file|escape#</div>
27 27
28 28 <div class="title_text">
29 <table>
29 <table cellspacing="0">
30 <tr>
31 <td>author</td>
32 <td>#author|obfuscate#</td></tr>
30 33 <tr>
31 <td class="metatag">changeset #rev#:</td>
32 <td><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
34 <td></td>
35 <td>#date|date# (#date|age# ago)</td></tr>
36 <tr>
37 <td>changeset {rev}</td>
38 <td style="font-family:monospace"><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
33 39 #parent%filerevparent#
34 40 #child%filerevchild#
35 41 <tr>
36 <td class="metatag">manifest:</td>
37 <td><a href="{url}file/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
38 <tr>
39 <td class="metatag">author:</td>
40 <td>#author|obfuscate#</td></tr>
41 <tr>
42 <td class="metatag">date:</td>
43 <td>#date|date# (#date|age# ago)</td></tr>
44 <tr>
45 <td class="metatag">permissions:</td>
46 <td>#permissions|permissions#</td></tr>
42 <td>permissions</td>
43 <td style="font-family:monospace">#permissions|permissions#</td></tr>
47 44 </table>
48 45 </div>
49 46
50 47 <div class="page_path">
51 48 {desc|strip|escape|addbreaks}
52 49 </div>
53 50
54 51 <div class="page_body">
55 52 #text%fileline#
56 53 </div>
57 54
58 55 #footer#
@@ -1,33 +1,33 b''
1 1 #header#
2 2 <title>#repo|escape#: Manifest</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 17 manifest |
18 18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> #archives%archiveentry#<br/>
19 19 </div>
20 20
21 21 <div class="title" >#path|escape#</div>
22 <div class="page_body">
23 22 <table cellspacing="0">
24 <tr class="light">
23 <tr class="parity#upparity#">
25 24 <td style="font-family:monospace">drwxr-xr-x</td>
26 25 <td style="font-family:monospace"></td>
27 26 <td><a href="{url}file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a></td>
28 27 <td class="link">&nbsp;</td>
29 28 </tr>
30 29 #dentries%manifestdirentry#
31 30 #fentries%manifestfileentry#
32 31 </table>
32
33 33 #footer#
@@ -1,57 +1,57 b''
1 1 default = 'summary'
2 2 header = header.tmpl
3 3 footer = footer.tmpl
4 4 search = search.tmpl
5 5 changelog = changelog.tmpl
6 6 summary = summary.tmpl
7 7 error = error.tmpl
8 8 naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
9 9 navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
10 10 filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
11 11 filedifflink = '<a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#file|escape#</a> '
12 12 filenodelink = '<tr class="parity#parity#"><td><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">#file|escape#</a></td><td></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> | <a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a></td></tr>'
13 13 fileellipses = '...'
14 14 changelogentry = changelogentry.tmpl
15 15 searchentry = changelogentry.tmpl
16 16 changeset = changeset.tmpl
17 17 manifest = manifest.tmpl
18 manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#/</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
18 manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
19 19 manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
20 20 filerevision = filerevision.tmpl
21 21 fileannotate = fileannotate.tmpl
22 22 filediff = filediff.tmpl
23 23 filelog = filelog.tmpl
24 24 fileline = '<div style="font-family:monospace" class="parity#parity#"><pre><span class="linenr"> #linenumber#</span> #line|escape#</pre></div>'
25 25 annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#author|obfuscate#@#rev#</a></td><td><pre>#line|escape#</pre></td></tr>'
26 26 difflineplus = '<div style="color:#008800;">#line|escape#</div>'
27 27 difflineminus = '<div style="color:#cc0000;">#line|escape#</div>'
28 28 difflineat = '<div style="color:#990099;">#line|escape#</div>'
29 29 diffline = '<div>#line|escape#</div>'
30 30 changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
31 31 changesetparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
32 filerevparent = '<tr><td class="metatag">parent {rev}:</td><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
32 filerevparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
33 33 filerename = '{file|escape}@'
34 34 filelogrename = '| <a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">base</a>'
35 fileannotateparent = '<tr><td class="metatag">parent {rev}:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
35 fileannotateparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
36 36 changelogchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
37 37 changesetchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
38 filerevchild = '<tr><td class="metatag">child {rev}:</td><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
39 fileannotatechild = '<tr><td class="metatag">child {rev}:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
38 filerevchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
39 fileannotatechild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
40 40 tags = tags.tmpl
41 41 tagentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>#tag|escape#</b></a></td><td class="link"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/#node|short#{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
42 42 branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>'
43 43 diffblock = '<pre>#lines#</pre>'
44 44 changelogtag = '<tr><th class="tag">tag:</th><td class="tag">#tag|escape#</td></tr>'
45 45 changesettag = '<tr><td>tag</td><td>#tag|escape#</td></tr>'
46 filediffparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
46 filediffparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
47 47 filelogparent = '<tr><td align="right">parent #rev#:&nbsp;</td><td><a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
48 filediffchild = '<tr><th class="child">child {rev}:</th><td class="child"><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
48 filediffchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
49 49 filelogchild = '<tr><td align="right">child #rev#:&nbsp;</td><td><a href="{url}file{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
50 50 shortlog = shortlog.tmpl
51 51 shortlogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><i>#author#</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link" nowrap><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
52 52 filelogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url}diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url}annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> #rename%filelogrename#</td></tr>'
53 53 archiveentry = ' | <a href="{url}archive/{node|short}{extension}">#type|escape#</a> '
54 54 indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a class="rss_logo" href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
55 55 index = index.tmpl
56 56 urlparameter = '#separator##name#=#value|urlescape#'
57 57 hiddenformentry = '<input type="hidden" name="#name#" value="#value|escape#" />'
@@ -1,27 +1,32 b''
1 1 #header#
2 <title>#repo|escape#: Search</title>
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
6 <body>
7
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / search
10
11 <form action="{url}log">
12 {sessionvars%hiddenformentry}
13 <div class="search">
14 <input type="text" name="rev" value="#query|escape#" />
15 </div>
16 </form>
17 </div>
18
2 19 <div class="page_nav">
3 20 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
4 21 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
5 22 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
6 23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
7 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a><br/>
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
25 <br/>
8 26 </div>
9 27
10 <h2>searching for #query|escape#</h2>
11
12 <form action="{url}log">
13 {sessionvars%hiddenformentry}
14 search:
15 <input name="rev" type="text" width="30" value="#query|escape#">
16 </form>
28 <div class="title">searching for #query|escape#</div>
17 29
18 30 #entries#
19 31
20 <form action="{url}log">
21 {sessionvars%hiddenformentry}
22 search:
23 <input type="hidden" name="style" value="gitweb">
24 <input name="rev" type="text" width="30">
25 </form>
26
27 32 #footer#
@@ -1,34 +1,38 b''
1 1 #header#
2 2 <title>#repo|escape#: Shortlog</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog
10 10 </div>
11 11
12 12 <form action="{url}log">
13 13 {sessionvars%hiddenformentry}
14 14 <div class="search">
15 15 <input type="text" name="rev" />
16 16 </div>
17 17 </form>
18 18 </div>
19 19 <div class="page_nav">
20 20 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
21 21 shortlog |
22 22 <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> |
23 23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#<br/>
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
25 25 <br/>
26
27 26 #changenav%navshortentry#<br/>
28 27 </div>
29 28
29 <div class="title">&nbsp;</div>
30 30 <table cellspacing="0">
31 31 #entries%shortlogentry#
32 32 </table>
33 33
34 <div class="page_nav">
35 #changenav%navshortentry#
36 </div>
37
34 38 #footer#
@@ -1,46 +1,54 b''
1 1 #header#
2 2 <title>#repo|escape#: Summary</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary
10
11 <form action="{url}log">
12 {sessionvars%hiddenformentry}
13 <div class="search">
14 <input type="text" name="rev" />
10 15 </div>
16 </form>
17 </div>
18
11 19 <div class="page_nav">
12 20 summary |
13 21 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 22 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
17 25 <br/>
18 26 </div>
19 27
20 28 <div class="title">&nbsp;</div>
21 29 <table cellspacing="0">
22 30 <tr><td>description</td><td>#desc#</td></tr>
23 31 <tr><td>owner</td><td>#owner|escape#</td></tr>
24 32 <tr><td>last change</td><td>#lastchange|rfc822date#</td></tr>
25 33 </table>
26 34
27 35 <div><a class="title" href="{url}log{sessionvars%urlparameter}">changes</a></div>
28 36 <table cellspacing="0">
29 37 #shortlog#
30 <tr class="light"><td colspan="3"><a class="list" href="{url}log{sessionvars%urlparameter}">...</a></td></tr>
38 <tr class="light"><td colspan="4"><a class="list" href="{url}log{sessionvars%urlparameter}">...</a></td></tr>
31 39 </table>
32 40
33 41 <div><a class="title" href="{url}tags{sessionvars%urlparameter}">tags</a></div>
34 42 <table cellspacing="0">
35 43 #tags#
36 44 <tr class="light"><td colspan="3"><a class="list" href="{url}tags{sessionvars%urlparameter}">...</a></td></tr>
37 45 </table>
38 46
39 47 <div><a class="title" href="#">branches</a></div>
40 48 <table cellspacing="0">
41 49 {branches%branchentry}
42 50 <tr class="light">
43 <td colspan="3"><a class="list" href="#">...</a></td>
51 <td colspan="4"><a class="list" href="#">...</a></td>
44 52 </tr>
45 53 </table>
46 54 #footer#
@@ -1,25 +1,26 b''
1 1 #header#
2 2 <title>#repo|escape#: Tags</title>
3 3 <link rel="alternate" type="application/rss+xml"
4 4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 5 </head>
6 6 <body>
7 7
8 8 <div class="page_header">
9 9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags
10 10 </div>
11 11
12 12 <div class="page_nav">
13 13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 16 tags |
17 17 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>
18 18 <br/>
19 19 </div>
20 20
21 <div class="title">&nbsp;</div>
21 22 <table cellspacing="0">
22 23 #entries%tagentry#
23 24 </table>
24 25
25 26 #footer#
@@ -1,25 +1,25 b''
1 1 #header#
2 2 <title>#repo|escape#: manifest for changeset #node|short#</title>
3 3 </head>
4 4 <body>
5 5
6 6 <div class="buttons">
7 7 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
8 8 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
9 9 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
10 10 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
11 11 #archives%archiveentry#
12 12 </div>
13 13
14 14 <h2>manifest for changeset #node|short#: #path|escape#</h2>
15 15
16 16 <table cellpadding="0" cellspacing="0">
17 <tr class="parity1">
17 <tr class="parity#upparity#">
18 18 <td><tt>drwxr-xr-x</tt>&nbsp;
19 19 <td>&nbsp;
20 20 <td><a href="#url#file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a>
21 21 </tr>
22 22 #dentries%manifestdirentry#
23 23 #fentries%manifestfileentry#
24 24 </table>
25 25 #footer#
@@ -1,33 +1,34 b''
1 1 #header#
2 2 <title>#repo|escape#: searching for #query|escape#</title>
3 3 </head>
4 4 <body>
5 5
6 6 <div class="buttons">
7 7 <a href="#url#log{sessionvars%urlparameter}">changelog</a>
8 8 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
9 9 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
10 10 <a href="#url#file/#node|short#{sessionvars%urlparameter}">manifest</a>
11 #archives%archiveentry#
11 12 </div>
12 13
13 14 <h2>searching for #query|escape#</h2>
14 15
15 16 <form>
16 17 {sessionvars%hiddenformentry}
17 18 <p>
18 19 search:
19 20 <input name="rev" type="text" width="30" value="#query|escape#">
20 21 </p>
21 22 </form>
22 23
23 24 #entries#
24 25
25 26 <form>
26 27 {sessionvars%hiddenformentry}
27 28 <p>
28 29 search:
29 30 <input name="rev" type="text" width="30" value="#query|escape#">
30 31 </p>
31 32 </form>
32 33
33 34 #footer#
@@ -1,26 +1,26 b''
1 1 #!/bin/sh
2 2
3 3 hg init rep
4 4 cd rep
5 5 mkdir dir
6 6 touch foo dir/bar
7 7 hg -v addremove
8 8 hg -v commit -m "add 1" -d "1000000 0"
9 9 cd dir/
10 10 touch ../foo_2 bar_2
11 11 hg -v addremove
12 12 hg -v commit -m "add 2" -d "1000000 0"
13 13
14 14 cd ..
15 15 hg init sim
16 16 cd sim
17 17 echo a > a
18 18 echo a >> a
19 19 echo a >> a
20 20 echo c > c
21 21 hg commit -Ama
22 22 mv a b
23 23 rm c
24 24 echo d > d
25 hg addremove -s 0.5
25 hg addremove -s 50
26 26 hg commit -mb
@@ -1,37 +1,43 b''
1 1 #!/bin/sh
2 2
3 3 hg init rep; cd rep
4 4
5 5 touch empty-file
6 6 python -c 'for x in range(10000): print x' > large-file
7 7
8 8 hg addremove
9 9
10 10 hg commit -m A
11 11
12 12 rm large-file empty-file
13 13 python -c 'for x in range(10,10000): print x' > another-file
14 14
15 15 hg addremove -s50
16 16
17 17 hg commit -m B
18 18
19 echo % comparing two empty files caused ZeroDivisionError in the past
20 hg update -C 0
21 rm empty-file
22 touch another-empty-file
23 hg addremove -s50
24
19 25 cd ..
20 26
21 27 hg init rep2; cd rep2
22 28
23 29 python -c 'for x in range(10000): print x' > large-file
24 30 python -c 'for x in range(50): print x' > tiny-file
25 31
26 32 hg addremove
27 33
28 34 hg commit -m A
29 35
30 36 python -c 'for x in range(70): print x' > small-file
31 37 rm tiny-file
32 38 rm large-file
33 39
34 40 hg addremove -s50
35 41
36 42 hg commit -m B
37 43
@@ -1,12 +1,16 b''
1 1 adding empty-file
2 2 adding large-file
3 3 adding another-file
4 4 removing empty-file
5 5 removing large-file
6 6 recording removal of large-file as rename to another-file (99% similar)
7 % comparing two empty files caused ZeroDivisionError in the past
8 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
9 adding another-empty-file
10 removing empty-file
7 11 adding large-file
8 12 adding tiny-file
9 13 adding small-file
10 14 removing large-file
11 15 removing tiny-file
12 16 recording removal of tiny-file as rename to small-file (82% similar)
@@ -1,347 +1,348 b''
1 1 adding foo
2 2 checking changesets
3 3 checking manifests
4 4 crosschecking files in changesets and manifests
5 5 checking files
6 6 1 files, 9 changesets, 9 total revisions
7 7 comparing with http://localhost:20059/
8 8 changeset: 0:9cb21d99fe27
9 9 user: test
10 10 date: Mon Jan 12 13:46:40 1970 +0000
11 11 summary: 0
12 12
13 13 changeset: 1:d717f5dfad6a
14 14 user: test
15 15 date: Mon Jan 12 13:46:40 1970 +0000
16 16 summary: 1
17 17
18 18 changeset: 2:c0d6b86da426
19 19 user: test
20 20 date: Mon Jan 12 13:46:40 1970 +0000
21 21 summary: 2
22 22
23 23 changeset: 3:dfacbd43b3fe
24 24 user: test
25 25 date: Mon Jan 12 13:46:40 1970 +0000
26 26 summary: 3
27 27
28 28 changeset: 4:1f3a964b6022
29 29 user: test
30 30 date: Mon Jan 12 13:46:40 1970 +0000
31 31 summary: 4
32 32
33 33 changeset: 5:c028bcc7a28a
34 34 user: test
35 35 date: Mon Jan 12 13:46:40 1970 +0000
36 36 summary: 5
37 37
38 38 changeset: 6:a0c0095f3389
39 39 user: test
40 40 date: Mon Jan 12 13:46:40 1970 +0000
41 41 summary: 6
42 42
43 43 changeset: 7:d4be65f4e891
44 44 user: test
45 45 date: Mon Jan 12 13:46:40 1970 +0000
46 46 summary: 7
47 47
48 48 changeset: 8:92b83e334ef8
49 49 tag: tip
50 50 user: test
51 51 date: Mon Jan 12 13:46:40 1970 +0000
52 52 summary: 8
53 53
54 54 comparing with http://localhost:20059/
55 55 changeset: 0:9cb21d99fe27
56 56 user: test
57 57 date: Mon Jan 12 13:46:40 1970 +0000
58 58 summary: 0
59 59
60 60 changeset: 1:d717f5dfad6a
61 61 user: test
62 62 date: Mon Jan 12 13:46:40 1970 +0000
63 63 summary: 1
64 64
65 65 changeset: 2:c0d6b86da426
66 66 user: test
67 67 date: Mon Jan 12 13:46:40 1970 +0000
68 68 summary: 2
69 69
70 70 changeset: 3:dfacbd43b3fe
71 71 user: test
72 72 date: Mon Jan 12 13:46:40 1970 +0000
73 73 summary: 3
74 74
75 75 changeset: 4:1f3a964b6022
76 tag: tip
76 77 user: test
77 78 date: Mon Jan 12 13:46:40 1970 +0000
78 79 summary: 4
79 80
80 81 comparing with test
81 82 changeset: 0:9cb21d99fe27
82 83 user: test
83 84 date: Mon Jan 12 13:46:40 1970 +0000
84 85 summary: 0
85 86
86 87 changeset: 1:d717f5dfad6a
87 88 user: test
88 89 date: Mon Jan 12 13:46:40 1970 +0000
89 90 summary: 1
90 91
91 92 changeset: 2:c0d6b86da426
92 93 user: test
93 94 date: Mon Jan 12 13:46:40 1970 +0000
94 95 summary: 2
95 96
96 97 changeset: 3:dfacbd43b3fe
97 98 user: test
98 99 date: Mon Jan 12 13:46:40 1970 +0000
99 100 summary: 3
100 101
101 102 changeset: 4:1f3a964b6022
102 103 user: test
103 104 date: Mon Jan 12 13:46:40 1970 +0000
104 105 summary: 4
105 106
106 107 changeset: 5:c028bcc7a28a
107 108 user: test
108 109 date: Mon Jan 12 13:46:40 1970 +0000
109 110 summary: 5
110 111
111 112 changeset: 6:a0c0095f3389
112 113 user: test
113 114 date: Mon Jan 12 13:46:40 1970 +0000
114 115 summary: 6
115 116
116 117 changeset: 7:d4be65f4e891
117 118 user: test
118 119 date: Mon Jan 12 13:46:40 1970 +0000
119 120 summary: 7
120 121
121 122 changeset: 8:92b83e334ef8
122 123 tag: tip
123 124 user: test
124 125 date: Mon Jan 12 13:46:40 1970 +0000
125 126 summary: 8
126 127
127 128 comparing with test
128 129 changeset: 0:9cb21d99fe27
129 130 user: test
130 131 date: Mon Jan 12 13:46:40 1970 +0000
131 132 summary: 0
132 133
133 134 changeset: 1:d717f5dfad6a
134 135 user: test
135 136 date: Mon Jan 12 13:46:40 1970 +0000
136 137 summary: 1
137 138
138 139 changeset: 2:c0d6b86da426
139 140 user: test
140 141 date: Mon Jan 12 13:46:40 1970 +0000
141 142 summary: 2
142 143
143 144 changeset: 3:dfacbd43b3fe
144 145 user: test
145 146 date: Mon Jan 12 13:46:40 1970 +0000
146 147 summary: 3
147 148
148 149 changeset: 4:1f3a964b6022
149 150 user: test
150 151 date: Mon Jan 12 13:46:40 1970 +0000
151 152 summary: 4
152 153
153 154 comparing with http://localhost:20059/
154 155 changeset: 0:9cb21d99fe27
155 156 user: test
156 157 date: Mon Jan 12 13:46:40 1970 +0000
157 158 summary: 0
158 159
159 160 changeset: 1:d717f5dfad6a
160 161 user: test
161 162 date: Mon Jan 12 13:46:40 1970 +0000
162 163 summary: 1
163 164
164 165 changeset: 2:c0d6b86da426
165 166 user: test
166 167 date: Mon Jan 12 13:46:40 1970 +0000
167 168 summary: 2
168 169
169 170 changeset: 3:dfacbd43b3fe
170 171 user: test
171 172 date: Mon Jan 12 13:46:40 1970 +0000
172 173 summary: 3
173 174
174 175 changeset: 4:1f3a964b6022
175 176 user: test
176 177 date: Mon Jan 12 13:46:40 1970 +0000
177 178 summary: 4
178 179
179 180 changeset: 5:c028bcc7a28a
180 181 user: test
181 182 date: Mon Jan 12 13:46:40 1970 +0000
182 183 summary: 5
183 184
184 185 changeset: 6:a0c0095f3389
185 186 user: test
186 187 date: Mon Jan 12 13:46:40 1970 +0000
187 188 summary: 6
188 189
189 190 changeset: 7:d4be65f4e891
190 191 user: test
191 192 date: Mon Jan 12 13:46:40 1970 +0000
192 193 summary: 7
193 194
194 195 changeset: 8:92b83e334ef8
195 196 tag: tip
196 197 user: test
197 198 date: Mon Jan 12 13:46:40 1970 +0000
198 199 summary: 8
199 200
200 201 comparing with test
201 202 changeset: 0:9cb21d99fe27
202 203 user: test
203 204 date: Mon Jan 12 13:46:40 1970 +0000
204 205 summary: 0
205 206
206 207 changeset: 1:d717f5dfad6a
207 208 user: test
208 209 date: Mon Jan 12 13:46:40 1970 +0000
209 210 summary: 1
210 211
211 212 changeset: 2:c0d6b86da426
212 213 user: test
213 214 date: Mon Jan 12 13:46:40 1970 +0000
214 215 summary: 2
215 216
216 217 changeset: 3:dfacbd43b3fe
217 218 user: test
218 219 date: Mon Jan 12 13:46:40 1970 +0000
219 220 summary: 3
220 221
221 222 changeset: 4:1f3a964b6022
222 223 user: test
223 224 date: Mon Jan 12 13:46:40 1970 +0000
224 225 summary: 4
225 226
226 227 changeset: 5:c028bcc7a28a
227 228 user: test
228 229 date: Mon Jan 12 13:46:40 1970 +0000
229 230 summary: 5
230 231
231 232 changeset: 6:a0c0095f3389
232 233 user: test
233 234 date: Mon Jan 12 13:46:40 1970 +0000
234 235 summary: 6
235 236
236 237 changeset: 7:d4be65f4e891
237 238 user: test
238 239 date: Mon Jan 12 13:46:40 1970 +0000
239 240 summary: 7
240 241
241 242 changeset: 8:92b83e334ef8
242 243 tag: tip
243 244 user: test
244 245 date: Mon Jan 12 13:46:40 1970 +0000
245 246 summary: 8
246 247
247 248 adding changesets
248 249 adding manifests
249 250 adding file changes
250 251 added 9 changesets with 9 changes to 1 files
251 252 (run 'hg update' to get a working copy)
252 253 adding changesets
253 254 adding manifests
254 255 adding file changes
255 256 added 9 changesets with 9 changes to 1 files
256 257 (run 'hg update' to get a working copy)
257 258 changeset: 8:92b83e334ef8
258 259 tag: tip
259 260 user: test
260 261 date: Mon Jan 12 13:46:40 1970 +0000
261 262 summary: 8
262 263
263 264 changeset: 8:92b83e334ef8
264 265 tag: tip
265 266 user: test
266 267 date: Mon Jan 12 13:46:40 1970 +0000
267 268 summary: 8
268 269
269 270 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
270 271 checking changesets
271 272 checking manifests
272 273 crosschecking files in changesets and manifests
273 274 checking files
274 275 1 files, 14 changesets, 14 total revisions
275 276 comparing with test
276 277 searching for changes
277 278 changeset: 9:3741c3ad1096
278 279 user: test
279 280 date: Mon Jan 12 13:46:40 1970 +0000
280 281 summary: 9
281 282
282 283 changeset: 10:de4143c8d9a5
283 284 user: test
284 285 date: Mon Jan 12 13:46:40 1970 +0000
285 286 summary: 10
286 287
287 288 changeset: 11:0e1c188b9a7a
288 289 user: test
289 290 date: Mon Jan 12 13:46:40 1970 +0000
290 291 summary: 11
291 292
292 293 changeset: 12:251354d0fdd3
293 294 user: test
294 295 date: Mon Jan 12 13:46:40 1970 +0000
295 296 summary: 12
296 297
297 298 changeset: 13:bdaadd969642
298 299 tag: tip
299 300 user: test
300 301 date: Mon Jan 12 13:46:40 1970 +0000
301 302 summary: 13
302 303
303 304 comparing with http://localhost:20059/
304 305 searching for changes
305 306 changeset: 9:3741c3ad1096
306 307 user: test
307 308 date: Mon Jan 12 13:46:40 1970 +0000
308 309 summary: 9
309 310
310 311 changeset: 10:de4143c8d9a5
311 312 user: test
312 313 date: Mon Jan 12 13:46:40 1970 +0000
313 314 summary: 10
314 315
315 316 changeset: 11:0e1c188b9a7a
316 317 user: test
317 318 date: Mon Jan 12 13:46:40 1970 +0000
318 319 summary: 11
319 320
320 321 changeset: 12:251354d0fdd3
321 322 user: test
322 323 date: Mon Jan 12 13:46:40 1970 +0000
323 324 summary: 12
324 325
325 326 changeset: 13:bdaadd969642
326 327 tag: tip
327 328 user: test
328 329 date: Mon Jan 12 13:46:40 1970 +0000
329 330 summary: 13
330 331
331 332 comparing with http://localhost:20059/
332 333 searching for changes
333 334 changeset: 9:3741c3ad1096
334 335 user: test
335 336 date: Mon Jan 12 13:46:40 1970 +0000
336 337 summary: 9
337 338
338 339 changeset: 10:de4143c8d9a5
339 340 user: test
340 341 date: Mon Jan 12 13:46:40 1970 +0000
341 342 summary: 10
342 343
343 344 changeset: 11:0e1c188b9a7a
344 345 user: test
345 346 date: Mon Jan 12 13:46:40 1970 +0000
346 347 summary: 11
347 348
@@ -1,99 +1,130 b''
1 1 #!/bin/sh
2 2
3 3 cat <<EOF >> $HGRCPATH
4 4 [extensions]
5 5 hgext.purge=
6 6 EOF
7 7
8 8 echo % init
9 9 hg init t
10 10 cd t
11 11
12 12 echo % setup
13 13 echo r1 > r1
14 14 hg ci -qAmr1 -d'0 0'
15 15 mkdir directory
16 16 echo r2 > directory/r2
17 17 hg ci -qAmr2 -d'1 0'
18 18 echo 'ignored' > .hgignore
19 19 hg ci -qAmr3 -d'2 0'
20 20
21 21 echo % delete an empty directory
22 22 mkdir empty_dir
23 23 hg purge -p
24 24 hg purge -v
25 25 ls
26 26
27 27 echo % delete an untracked directory
28 28 mkdir untracked_dir
29 29 touch untracked_dir/untracked_file1
30 30 touch untracked_dir/untracked_file2
31 31 hg purge -p
32 32 hg purge -v
33 33 ls
34 34
35 35 echo % delete an untracked file
36 36 touch untracked_file
37 37 hg purge -p
38 38 hg purge -v
39 39 ls
40 40
41 41 echo % delete an untracked file in a tracked directory
42 42 touch directory/untracked_file
43 43 hg purge -p
44 44 hg purge -v
45 45 ls
46 46
47 47 echo % delete nested directories
48 48 mkdir -p untracked_directory/nested_directory
49 49 hg purge -p
50 50 hg purge -v
51 51 ls
52 52
53 53 echo % delete nested directories from a subdir
54 54 mkdir -p untracked_directory/nested_directory
55 55 cd directory
56 56 hg purge -p
57 57 hg purge -v
58 58 cd ..
59 59 ls
60 60
61 61 echo % delete only part of the tree
62 62 mkdir -p untracked_directory/nested_directory
63 63 touch directory/untracked_file
64 64 cd directory
65 65 hg purge -p ../untracked_directory
66 66 hg purge -v ../untracked_directory
67 67 cd ..
68 68 ls
69 69 ls directory/untracked_file
70 70 rm directory/untracked_file
71 71
72 72 echo % delete ignored files
73 73 touch ignored
74 74 hg purge -p
75 75 hg purge -v
76 76 ls
77 77
78 78 echo % abort with missing files until we support name mangling filesystems
79 79 touch untracked_file
80 80 rm r1
81 81 # hide error messages to avoid changing the output when the text changes
82 82 hg purge -p 2> /dev/null
83 83 if [ $? -ne 0 ]; then
84 84 echo "refused to run"
85 85 fi
86 86 if [ -f untracked_file ]; then
87 87 echo "untracked_file still around"
88 88 fi
89 89 hg purge -p --force
90 90 hg purge -v 2> /dev/null
91 91 if [ $? -ne 0 ]; then
92 92 echo "refused to run"
93 93 fi
94 94 if [ -f untracked_file ]; then
95 95 echo "untracked_file still around"
96 96 fi
97 97 hg purge -v --force
98 98 hg revert --all --quiet
99 99 ls
100
101 echo % skip excluded files
102 touch excluded_file
103 hg purge -p -X excluded_file
104 hg purge -v -X excluded_file
105 ls
106 rm excluded_file
107
108 echo % skip files in excluded dirs
109 mkdir excluded_dir
110 touch excluded_dir/file
111 hg purge -p -X excluded_dir
112 hg purge -v -X excluded_dir
113 ls
114 ls excluded_dir
115 rm -R excluded_dir
116
117 echo % skip excluded empty dirs
118 mkdir excluded_dir
119 hg purge -p -X excluded_dir
120 hg purge -v -X excluded_dir
121 ls
122 rmdir excluded_dir
123
124 echo % skip patterns
125 mkdir .svn
126 touch .svn/foo
127 mkdir directory/.svn
128 touch directory/.svn/foo
129 hg purge -p -X .svn -X '*/.svn'
130 hg purge -p -X re:.*.svn
@@ -1,58 +1,72 b''
1 1 % init
2 2 % setup
3 3 % delete an empty directory
4 4 empty_dir
5 5 Removing directory empty_dir
6 6 directory
7 7 r1
8 8 % delete an untracked directory
9 9 untracked_dir/untracked_file1
10 10 untracked_dir/untracked_file2
11 11 Removing file untracked_dir/untracked_file1
12 12 Removing file untracked_dir/untracked_file2
13 13 Removing directory untracked_dir
14 14 directory
15 15 r1
16 16 % delete an untracked file
17 17 untracked_file
18 18 Removing file untracked_file
19 19 directory
20 20 r1
21 21 % delete an untracked file in a tracked directory
22 22 directory/untracked_file
23 23 Removing file directory/untracked_file
24 24 directory
25 25 r1
26 26 % delete nested directories
27 27 untracked_directory/nested_directory
28 28 Removing directory untracked_directory/nested_directory
29 29 Removing directory untracked_directory
30 30 directory
31 31 r1
32 32 % delete nested directories from a subdir
33 33 untracked_directory/nested_directory
34 34 Removing directory untracked_directory/nested_directory
35 35 Removing directory untracked_directory
36 36 directory
37 37 r1
38 38 % delete only part of the tree
39 39 untracked_directory/nested_directory
40 40 Removing directory untracked_directory/nested_directory
41 41 Removing directory untracked_directory
42 42 directory
43 43 r1
44 44 directory/untracked_file
45 45 % delete ignored files
46 46 ignored
47 47 Removing file ignored
48 48 directory
49 49 r1
50 50 % abort with missing files until we support name mangling filesystems
51 51 refused to run
52 52 untracked_file still around
53 53 untracked_file
54 54 refused to run
55 55 untracked_file still around
56 56 Removing file untracked_file
57 57 directory
58 58 r1
59 % skip excluded files
60 directory
61 excluded_file
62 r1
63 % skip files in excluded dirs
64 directory
65 excluded_dir
66 r1
67 file
68 % skip excluded empty dirs
69 directory
70 excluded_dir
71 r1
72 % skip patterns
General Comments 0
You need to be logged in to leave comments. Login now