##// END OF EJS Templates
Removed trailing whitespace and tabs from python files
Thomas Arendsen Hein -
r4516:96d8a56d default
parent child Browse files
Show More
@@ -1,722 +1,722 b''
1 1 # convert.py Foreign SCM converter
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys, os, zlib, sha, time, re, locale, socket
9 9 from mercurial import hg, ui, util, commands
10 10
# register "convert" as a command that can run outside an existing repository
commands.norepo += " convert"
12 12
class NoRepo(Exception):
    """Raised by a converter when a path is not a repository it can
    handle, so probing can fall through to the next converter type."""
    pass
14 14
class commit(object):
    """Simple container for changeset metadata.

    All keyword arguments become attributes; author, date, desc and
    parents are mandatory.
    """
    def __init__(self, **parts):
        for required in ("author", "date", "desc", "parents"):
            if required not in parts:
                raise util.Abort("commit missing field %s\n" % required)
        self.__dict__.update(parts)
21 21
def recode(s):
    """Re-encode s as UTF-8.

    Tries a strict UTF-8 round-trip first, then Latin-1; as a last
    resort decodes UTF-8 with replacement characters so the result is
    always valid UTF-8.  Only codec failures are caught -- the original
    bare except clauses could mask unrelated errors.
    """
    try:
        return s.decode("utf-8").encode("utf-8")
    except UnicodeError:
        try:
            return s.decode("latin-1").encode("utf-8")
        except UnicodeError:
            # give up on exact fidelity rather than aborting the conversion
            return s.decode("utf-8", "replace").encode("utf-8")
30 30
class converter_source(object):
    """Abstract base class for conversion sources.

    Concrete sources (CVS, git, ...) override every method below; each
    stub only raises NotImplementedError.
    """

    def __init__(self, ui, path):
        """Open the source at 'path'; raise NoRepo("message") if it is
        not a valid repository of this source's type."""
        raise NotImplementedError()

    def getheads(self):
        """Return the list of this repository's head revisions."""
        raise NotImplementedError()

    def getfile(self, name, rev):
        """Return the contents of 'name' at 'rev' as a string."""
        raise NotImplementedError()

    def getmode(self, name, rev):
        """Return the file mode: '' (regular), 'x' (exec) or 'l' (link)."""
        raise NotImplementedError()

    def getchanges(self, version):
        """Return a sorted list of (filename, id) tuples for all files
        changed in 'version'.

        id just tells us which revision to return in getfile(), e.g. in
        git it's an object hash."""
        raise NotImplementedError()

    def getcommit(self, version):
        """Return the commit object describing 'version'."""
        raise NotImplementedError()

    def gettags(self):
        """Return a {tagname: revision} dictionary."""
        raise NotImplementedError()
65 65
class converter_sink(object):
    """Abstract base class for conversion sinks (targets).

    Concrete sinks (Mercurial) override every method below; each stub
    only raises NotImplementedError.
    """

    def __init__(self, ui, path):
        """Open the sink at 'path'; raise NoRepo("message") if it is
        not a valid repository of this sink's type."""
        raise NotImplementedError()

    def getheads(self):
        """Return the list of this repository's head revisions."""
        raise NotImplementedError()

    def mapfile(self):
        """Return the path of a file that will contain lines of the form

            source_rev_id sink_rev_id

        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError()

    def putfile(self, f, e, data):
        """Stage a file for the next putcommit().
        f: path to file
        e: '', 'x', or 'l' (regular file, executable, or symlink)
        data: file contents"""
        raise NotImplementedError()

    def delfile(self, f):
        """Stage deletion of file f for the next putcommit()."""
        raise NotImplementedError()

    def putcommit(self, files, parents, commit):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object containing
        at a minimum the author, date, and message for this changeset.
        Called after putfile() and delfile() calls. Note that the sink
        repository is not told to update itself to a particular revision
        (or even what that revision would be) before it receives the
        file data."""
        raise NotImplementedError()

    def puttags(self, tags):
        """Record tags in the sink.
        tags: {tagname: sink_rev_id, ...}"""
        raise NotImplementedError()
110 110
111 111
# CVS conversion code inspired by hg-cvs-import and git-cvsimport
class convert_cvs(converter_source):
    """Read changesets out of a CVS checkout.

    History is reconstructed by running cvsps over the checkout; file
    contents are then fetched over the CVS client/server protocol
    (pserver, rsh/ext or local).
    """

    def __init__(self, ui, path):
        self.path = path
        self.ui = ui
        cvs = os.path.join(path, "CVS")
        if not os.path.exists(cvs):
            raise NoRepo("couldn't open CVS repo %s" % path)

        self.changeset = {}       # patchset id -> commit object
        self.files = {}           # patchset id -> {file: revision}
        self.tags = {}            # tag name -> patchset id
        self.lastbranch = {}      # branch name -> last patchset id seen
        self.parent = {}          # patchset id -> parent patchset id
        self.socket = None
        # CVS/Root and CVS/Repository end in a newline; strip it
        self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
        self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
        self.encoding = locale.getpreferredencoding()
        self._parse()
        self._connect()

    def _parse(self):
        """Run cvsps and build changeset/files/tags/parent maps from its
        output (a small line-oriented state machine)."""
        if self.changeset:
            return

        d = os.getcwd()
        try:
            os.chdir(self.path)
            id = None
            state = 0
            for l in os.popen("cvsps -A -u --cvs-direct -q"):
                if state == 0: # header
                    if l.startswith("PatchSet"):
                        id = l[9:-2]
                    elif l.startswith("Date"):
                        date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
                        date = util.datestr(date)
                    elif l.startswith("Branch"):
                        branch = l[8:-1]
                        self.parent[id] = self.lastbranch.get(branch, 'bad')
                        self.lastbranch[branch] = id
                    elif l.startswith("Ancestor branch"):
                        ancestor = l[17:-1]
                        self.parent[id] = self.lastbranch[ancestor]
                    elif l.startswith("Author"):
                        author = self.recode(l[8:-1])
                    elif l.startswith("Tag: "):
                        t = l[5:-1].rstrip()
                        if t != "(none)":
                            self.tags[t] = id
                    elif l.startswith("Log:"):
                        # switch to log-message accumulation
                        state = 1
                        log = ""
                elif state == 1: # log
                    if l == "Members: \n":
                        files = {}
                        log = self.recode(log[:-1])
                        if log.isspace():
                            log = "*** empty log message ***\n"
                        state = 2
                    else:
                        log += l
                elif state == 2: # members
                    if l == "\n": # patchset complete
                        state = 0
                        p = [self.parent[id]]
                        if id == "1":
                            # the very first patchset has no parent
                            p = []
                        c = commit(author=author, date=date, parents=p,
                                   desc=log, branch=branch)
                        self.changeset[id] = c
                        self.files[id] = files
                    else:
                        # member lines look like "\tfile:1.1->1.2"
                        colon = l.rfind(':')
                        file = l[1:colon]
                        rev = l[colon + 1:-2]
                        rev = rev.split("->")[1]
                        files[file] = rev

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)

    def _connect(self):
        """Open a CVS server session (pserver, local or rsh/ext) and
        perform the initial protocol handshake."""
        root = self.cvsroot
        conntype = None
        user, host = None, None
        cmd = ['cvs', 'server']

        self.ui.status("connecting to %s\n" % root)

        if root.startswith(":pserver:"):
            root = root[9:]
            m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
                         root)
            if m:
                conntype = "pserver"
                user, passw, serv, port, root = m.groups()
                if not user:
                    user = "anonymous"
                rr = ":pserver:" + user + "@" + serv + ":" + root
                if port:
                    rr2, port = "-", int(port)
                else:
                    rr2, port = rr, 2401
                rr += str(port)

                if not passw:
                    # look the scrambled password up in ~/.cvspass
                    passw = "A"
                    pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
                    for l in pf:
                        # :pserver:cvs@mea.tmt.tele.fi:/cvsroot/zmailer Ah<Z
                        m = re.match(r'(/\d+\s+/)?(.*)', l)
                        l = m.group(2)
                        w, p = l.split(' ', 1)
                        if w in [rr, rr2]:
                            passw = p
                            break
                    pf.close()

                sck = socket.socket()
                sck.connect((serv, port))
                sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                    "END AUTH REQUEST", ""]))
                if sck.recv(128) != "I LOVE YOU\n":
                    raise NoRepo("CVS pserver authentication failed")

                self.writep = self.readp = sck.makefile('r+')

        if not conntype and root.startswith(":local:"):
            conntype = "local"
            root = root[7:]

        if not conntype:
            # :ext:user@host/home/user/path/to/cvsroot
            if root.startswith(":ext:"):
                root = root[5:]
            m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
            if not m:
                conntype = "local"
            else:
                conntype = "rsh"
                user, host, root = m.group(1), m.group(2), m.group(3)

        if conntype != "pserver":
            if conntype == "rsh":
                # fixed: was os.environ.get("CVS_RSH" or "rsh"), which
                # looked up "CVS_RSH" with no default and yielded None
                # when the variable was unset
                rsh = os.environ.get("CVS_RSH", "rsh")
                if user:
                    cmd = [rsh, '-l', user, host] + cmd
                else:
                    cmd = [rsh, host] + cmd

            self.writep, self.readp = os.popen2(cmd)

        self.realroot = root

        self.writep.write("Root %s\n" % root)
        self.writep.write("Valid-responses ok error Valid-requests Mode"
                          " M Mbinary E Checked-in Created Updated"
                          " Merged Removed\n")
        self.writep.write("valid-requests\n")
        self.writep.flush()
        r = self.readp.readline()
        if not r.startswith("Valid-requests"):
            raise util.Abort("server sucks\n")
        if "UseUnchanged" in r:
            self.writep.write("UseUnchanged\n")
            self.writep.flush()
            r = self.readp.readline()

    def getheads(self):
        return self.heads

    def _getfile(self, name, rev):
        """Check one file revision out over the server protocol; return
        (data, mode).  Raises IOError for dead revisions."""
        if rev.endswith("(DEAD)"):
            raise IOError

        args = ("-N -P -kk -r %s --" % rev).split()
        args.append(os.path.join(self.cvsrepo, name))
        for x in args:
            self.writep.write("Argument %s\n" % x)
        self.writep.write("Directory .\n%s\nco\n" % self.realroot)
        self.writep.flush()

        data = ""
        while 1:
            line = self.readp.readline()
            if line.startswith("Created ") or line.startswith("Updated "):
                self.readp.readline() # path
                self.readp.readline() # entries
                mode = self.readp.readline()[:-1]
                count = int(self.readp.readline()[:-1])
                data = self.readp.read(count)
            elif line.startswith(" "):
                # continuation of inline file data
                data += line[1:]
            elif line.startswith("M "):
                pass
            elif line.startswith("Mbinary "):
                count = int(self.readp.readline()[:-1])
                data = self.readp.read(count)
            else:
                if line == "ok\n":
                    return (data, "x" in mode and "x" or "")
                elif line.startswith("E "):
                    self.ui.warn("cvs server: %s\n" % line[2:])
                elif line.startswith("Remove"):
                    l = self.readp.readline()
                    l = self.readp.readline()
                    if l != "ok\n":
                        raise util.Abort("unknown CVS response: %s\n" % l)
                else:
                    raise util.Abort("unknown CVS response: %s\n" % line)

    def getfile(self, file, rev):
        data, mode = self._getfile(file, rev)
        # remember the mode so a later getmode() call can answer cheaply
        self.modecache[(file, rev)] = mode
        return data

    def getmode(self, file, rev):
        return self.modecache[(file, rev)]

    def getchanges(self, rev):
        self.modecache = {}
        files = self.files[rev]
        cl = files.items()
        cl.sort()
        return cl

    def recode(self, text):
        # cvsps emits text in the local encoding; store UTF-8
        return text.decode(self.encoding, "replace").encode("utf-8")

    def getcommit(self, rev):
        return self.changeset[rev]

    def gettags(self):
        return self.tags
346 346
class convert_git(converter_source):
    """Read changesets out of a git object store by shelling out to the
    git plumbing commands (git-rev-parse, git-cat-file, git-diff-tree,
    git-ls-remote) via os.popen."""

    def __init__(self, ui, path):
        # accept either a working tree (containing .git) or a bare repo dir
        if os.path.isdir(path + "/.git"):
            path += "/.git"
        self.path = path
        self.ui = ui
        if not os.path.exists(path + "/objects"):
            raise NoRepo("couldn't open GIT repo %s" % path)

    def getheads(self):
        # HEAD is the only head this source converts from
        fh = os.popen("GIT_DIR=%s git-rev-parse --verify HEAD" % self.path)
        return [fh.read()[:-1]]

    def catfile(self, rev, type):
        """Return raw object contents; IOError for the all-zero null id."""
        if rev == "0" * 40: raise IOError()
        fh = os.popen("GIT_DIR=%s git-cat-file %s %s 2>/dev/null" % (self.path, type, rev))
        return fh.read()

    def getfile(self, name, rev):
        # 'rev' here is the blob hash recorded by getchanges(), not a commit
        return self.catfile(rev, "blob")

    def getmode(self, name, rev):
        # modes were cached by the preceding getchanges() call
        return self.modecache[(name, rev)]

    def getchanges(self, version):
        """Return (file, blob-hash) pairs touched in 'version', caching
        each file's mode for getmode()."""
        self.modecache = {}
        fh = os.popen("GIT_DIR=%s git-diff-tree --root -m -r %s" % (self.path, version))
        changes = []
        for l in fh:
            if "\t" not in l: continue
            m, f = l[:-1].split("\t")
            m = m.split()
            h = m[3]                  # blob hash of the new version
            p = (m[1] == "100755")    # executable?
            s = (m[1] == "120000")    # symlink?
            self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
            changes.append((f, h))
        return changes

    def getcommit(self, version):
        """Parse a raw git commit object into a commit instance."""
        c = self.catfile(version, "commit") # read the commit hash
        end = c.find("\n\n")
        message = c[end+2:]
        message = recode(message)
        l = c[:end].splitlines()
        manifest = l[0].split()[1]
        parents = []
        for e in l[1:]:
            n,v = e.split(" ", 1)
            if n == "author":
                p = v.split()
                tm, tz = p[-2:]
                author = " ".join(p[:-2])
                if author[0] == "<": author = author[1:-1]
                author = recode(author)
            if n == "committer":
                p = v.split()
                tm, tz = p[-2:]
                committer = " ".join(p[:-2])
                if committer[0] == "<": committer = committer[1:-1]
                committer = recode(committer)
                # git tracks committer separately; append it to the message
                message += "\ncommitter: %s\n" % committer
            if n == "parent": parents.append(v)

        # convert git's "[+-]HHMM" timezone into a seconds offset with
        # the sign flipped (note: tz comes from the last author/committer
        # line parsed above)
        tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
        tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
        date = tm + " " + str(tz)

        c = commit(parents=parents, date=date, author=author, desc=message)
        return c

    def gettags(self):
        """Map tag names to the commit ids of dereferenced tag objects."""
        tags = {}
        fh = os.popen('git-ls-remote --tags "%s" 2>/dev/null' % self.path)
        prefix = 'refs/tags/'
        for line in fh:
            line = line.strip()
            # only the dereferenced "tag^{}" entries point at commits
            if not line.endswith("^{}"):
                continue
            node, tag = line.split(None, 1)
            if not tag.startswith(prefix):
                continue
            tag = tag[len(prefix):-3]
            tags[tag] = node

        return tags
433 433
class convert_mercurial(converter_sink):
    """Commit converted changesets into a local Mercurial repository."""

    def __init__(self, ui, path):
        self.path = path
        self.ui = ui
        try:
            self.repo = hg.repository(self.ui, path)
        except Exception:
            # whatever hg.repository raised just means "not usable as hg";
            # fixed message typo ("could open" -> "couldn't open")
            raise NoRepo("couldn't open hg repo %s" % path)

    def mapfile(self):
        # keep the source->sink revision map inside the repo's .hg directory
        return os.path.join(self.path, ".hg", "shamap")

    def getheads(self):
        h = self.repo.changelog.heads()
        return [ hg.hex(x) for x in h ]

    def putfile(self, f, e, data):
        """Write f into the working dir and mark it added if unknown."""
        self.repo.wwrite(f, data, e)
        if self.repo.dirstate.state(f) == '?':
            self.repo.dirstate.update([f], "a")

    def delfile(self, f):
        """Best-effort removal of f from the working directory."""
        try:
            os.unlink(self.repo.wjoin(f))
            #self.repo.remove([f])
        except OSError:
            # file already gone; narrowed from a bare except
            pass

    def putcommit(self, files, parents, commit):
        """Commit 'files' with metadata from 'commit'.

        Octopus merges (more than two parents) are folded into a chain
        of two-parent fixup merges.  Returns the hex id of the resulting
        tip changeset."""
        # drop duplicate parents, preserving order
        seen = {}
        pl = []
        for p in parents:
            if p not in seen:
                pl.append(p)
                seen[p] = 1
        parents = pl

        # pad the parent list to two entries with the null id
        if len(parents) < 2: parents.append("0" * 40)
        if len(parents) < 2: parents.append("0" * 40)
        p2 = parents.pop(0)

        text = commit.desc
        extra = {}
        try:
            extra["branch"] = commit.branch
        except AttributeError:
            # source didn't record a branch for this changeset
            pass

        while parents:
            p1 = p2
            p2 = parents.pop(0)
            a = self.repo.rawcommit(files, text, commit.author, commit.date,
                                    hg.bin(p1), hg.bin(p2), extra=extra)
            text = "(octopus merge fixup)\n"
            p2 = hg.hex(self.repo.changelog.tip())

        return p2

    def puttags(self, tags):
        """Rewrite .hgtags from 'tags' and commit it; return the new
        tip's hex id, or None when nothing changed."""
        try:
            old = self.repo.wfile(".hgtags").read()
            oldlines = old.splitlines(1)
            oldlines.sort()
        except IOError:
            # no .hgtags yet; narrowed from a bare except
            oldlines = []

        k = tags.keys()
        k.sort()
        newlines = []
        for tag in k:
            newlines.append("%s %s\n" % (tags[tag], tag))

        newlines.sort()

        if newlines != oldlines:
            self.ui.status("updating tags\n")
            f = self.repo.wfile(".hgtags", "w")
            f.write("".join(newlines))
            f.close()
            if not oldlines: self.repo.add([".hgtags"])
            date = "%s 0" % int(time.mktime(time.gmtime()))
            self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
                                date, self.repo.changelog.tip(), hg.nullid)
            return hg.hex(self.repo.changelog.tip())
518 518
# probe order for repository detection
converters = [convert_cvs, convert_git, convert_mercurial]

def converter(ui, path):
    """Instantiate and return the first converter that accepts 'path'.

    Each candidate signals rejection by raising NoRepo; aborts when no
    converter recognizes the directory.
    """
    if not os.path.isdir(path):
        raise util.Abort("%s: not a directory\n" % path)
    for candidate in converters:
        try:
            return candidate(ui, path)
        except NoRepo:
            pass
    raise util.Abort("%s: unknown repository type\n" % path)
530 530
class convert(object):
    """Drive one conversion run: walk the unconverted part of the source
    graph, topologically sort it, copy each changeset into the sink and
    record every source->sink id pair in the map file."""

    def __init__(self, ui, source, dest, mapfile, opts):

        self.source = source
        self.dest = dest
        self.ui = ui
        self.mapfile = mapfile
        self.opts = opts
        self.commitcache = {}   # source rev -> commit object

        # previously converted revisions: source id -> sink id
        self.map = {}
        try:
            for l in file(self.mapfile):
                sv, dv = l[:-1].split()
                self.map[sv] = dv
        except IOError:
            # no map file yet: fresh conversion
            pass

    def walktree(self, heads):
        """Collect all unconverted ancestors of 'heads'.

        Returns a {rev: [parent, ...]} mapping; commit objects are
        cached in self.commitcache along the way."""
        visit = heads
        known = {}
        parents = {}
        while visit:
            n = visit.pop(0)
            if n in known or n in self.map: continue
            known[n] = 1
            self.commitcache[n] = self.source.getcommit(n)
            cp = self.commitcache[n].parents
            for p in cp:
                parents.setdefault(n, []).append(p)
                visit.append(p)

        return parents

    def toposort(self, parents):
        """Return the revisions so that every rev comes after all of its
        unconverted parents (optionally re-sorted by date)."""
        visit = parents.keys()
        seen = {}
        children = {}

        # first pass: invert the parent map into a child map
        while visit:
            n = visit.pop(0)
            if n in seen: continue
            seen[n] = 1
            pc = 0
            if n in parents:
                for p in parents[n]:
                    if p not in self.map: pc += 1
                    visit.append(p)
                    children.setdefault(p, []).append(n)
            if not pc: root = n

        # second pass: emit a rev once all its unconverted parents have
        # been emitted; revs whose parents are pending are re-queued
        s = []
        removed = {}
        visit = children.keys()
        while visit:
            n = visit.pop(0)
            if n in removed: continue
            dep = 0
            if n in parents:
                for p in parents[n]:
                    if p in self.map: continue
                    if p not in removed:
                        # we're still dependent
                        visit.append(n)
                        dep = 1
                        break

            if not dep:
                # all n's parents are in the list
                removed[n] = 1
                if n not in self.map:
                    s.append(n)
                if n in children:
                    for c in children[n]:
                        visit.insert(0, c)

        if self.opts.get('datesort'):
            # re-sort by (ancestor depth, date) so parents still precede
            # children while roughly following date order
            depth = {}
            for n in s:
                depth[n] = 0
                pl = [p for p in self.commitcache[n].parents if p not in self.map]
                if pl:
                    depth[n] = max([depth[p] for p in pl]) + 1

            s = [(depth[n], self.commitcache[n].date, n) for n in s]
            s.sort()
            s = [e[2] for e in s]

        return s

    def copy(self, rev):
        """Convert a single revision: transfer its files into the sink,
        commit, and append the new id pair to the map file."""
        c = self.commitcache[rev]
        files = self.source.getchanges(rev)

        for f,v in files:
            try:
                data = self.source.getfile(f, v)
            except IOError, inst:
                # file is gone in this revision (e.g. a CVS dead state)
                self.dest.delfile(f)
            else:
                e = self.source.getmode(f, v)
                self.dest.putfile(f, e, data)

        r = [self.map[v] for v in c.parents]
        f = [f for f,v in files]
        self.map[rev] = self.dest.putcommit(f, r, c)
        # persist immediately so an interrupted run can be resumed
        file(self.mapfile, "a").write("%s %s\n" % (rev, self.map[rev]))

    def convert(self):
        """Run the full scan / sort / copy / tags pipeline."""
        self.ui.status("scanning source...\n")
        heads = self.source.getheads()
        parents = self.walktree(heads)
        self.ui.status("sorting...\n")
        t = self.toposort(parents)
        num = len(t)
        c = None

        self.ui.status("converting...\n")
        for c in t:
            num -= 1
            desc = self.commitcache[c].desc
            if "\n" in desc:
                desc = desc.splitlines()[0]
            self.ui.status("%d %s\n" % (num, desc))
            self.copy(c)

        # translate source tag targets into sink revision ids
        tags = self.source.gettags()
        ctags = {}
        for k in tags:
            v = tags[k]
            if v in self.map:
                ctags[k] = self.map[v]

        if c and ctags:
            nrev = self.dest.puttags(ctags)
            # write another hash correspondence to override the previous
            # one so we don't end up with extra tag heads
            if nrev:
                file(self.mapfile, "a").write("%s %s\n" % (c, nrev))
670 670
def _convert(ui, src, dest=None, mapfile=None, **opts):
    '''Convert a foreign SCM repository to a Mercurial one.

    Accepted source formats:
    - GIT
    - CVS

    Accepted destination formats:
    - Mercurial

    If destination isn't given, a new Mercurial repo named <src>-hg will
    be created. If <mapfile> isn't given, it will be put in a default
    location (<dest>/.hg/shamap by default)

    The <mapfile> is a simple text file that maps each source commit ID to
    the destination ID for that revision, like so:

    <source ID> <destination ID>

    If the file doesn't exist, it's automatically created. It's updated
    on each commit copied, so convert-repo can be interrupted and can
    be run repeatedly to copy new commits.
    '''

    srcc = converter(ui, src)
    if not hasattr(srcc, "getcommit"):
        raise util.Abort("%s: can't read from this repo type\n" % src)

    if not dest:
        dest = src + "-hg"
        ui.status("assuming destination %s\n" % dest)
    if not os.path.isdir(dest):
        ui.status("creating repository %s\n" % dest)
        # NOTE(review): dest reaches the shell unquoted; paths containing
        # shell metacharacters will misbehave
        os.system("hg init " + dest)
    destc = converter(ui, dest)
    if not hasattr(destc, "putcommit"):
        # fixed: this message is about the destination, but formatted src
        raise util.Abort("%s: can't write to this repo type\n" % dest)

    if not mapfile:
        try:
            mapfile = destc.mapfile()
        except NotImplementedError:
            # sink has no preferred location; default next to the
            # destination repository.  Fixed: was os.path.join(destc, "map"),
            # which passed the sink *object* instead of its path.
            mapfile = os.path.join(dest, "map")

    c = convert(ui, srcc, destc, mapfile, opts)
    c.convert()
717 717
# command table picked up by Mercurial's extension loader:
# command name -> (callback, option list, synopsis)
cmdtable = {
    "convert": (_convert,
                [('', 'datesort', None, 'try to sort changesets by date')],
                'hg convert [OPTIONS] <src> [dst [map]]'),
}
@@ -1,266 +1,266 b''
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys
9 9 from mercurial.cmdutil import revrange, show_changeset
10 10 from mercurial.i18n import _
11 11 from mercurial.node import nullid, nullrev
12 12 from mercurial.util import Abort
13 13
def revision_grapher(repo, start_rev, stop_rev):
    """incremental revision grapher

    This generator function walks through the revision history from
    revision start_rev to revision stop_rev (which must be less than
    or equal to start_rev) and for each revision emits tuples with the
    following elements:

      - Current revision.
      - Current node.
      - Column of the current node in the set of ongoing edges.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    assert start_rev >= stop_rev
    curr_rev = start_rev
    revs = []   # ongoing edges, identified by the rev each one leads to
    while curr_rev >= stop_rev:
        node = repo.changelog.node(curr_rev)

        # Compute revs and next_revs.
        if curr_rev not in revs:
            # New head.
            revs.append(curr_rev)
        rev_index = revs.index(curr_rev)
        next_revs = revs[:]

        # Add parents to next_revs.
        parents = get_rev_parents(repo, curr_rev)
        parents_to_add = []
        for parent in parents:
            if parent not in next_revs:
                parents_to_add.append(parent)
        parents_to_add.sort()
        # the current rev's column is replaced by its new parents' columns
        next_revs[rev_index:rev_index + 1] = parents_to_add

        edges = []
        for parent in parents:
            edges.append((rev_index, next_revs.index(parent)))

        n_columns_diff = len(next_revs) - len(revs)
        yield (curr_rev, node, rev_index, edges, len(revs), n_columns_diff)

        revs = next_revs
        curr_rev -= 1
65 65
def get_rev_parents(repo, rev):
    """Return rev's parent revisions with the null revision filtered out."""
    return [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
68 68
def fix_long_right_edges(edges):
    """Stretch each rightward-pointing edge by one column, in place."""
    for idx, (col, next_col) in enumerate(edges):
        if next_col > col:
            edges[idx] = (col, next_col + 1)
73 73
def draw_edges(edges, nodeline, interline):
    """Render the edges between a node and its parents.

    Mutates nodeline (the row holding the node) and interline (the row
    below it) in place; each graph column occupies two cells.
    """
    for (src, dst) in edges:
        if src == dst + 1:
            # parent sits one column to the left
            interline[2 * dst + 1] = "/"
        elif src == dst - 1:
            # parent sits one column to the right
            interline[2 * src + 1] = "\\"
        elif src == dst:
            interline[2 * src] = "|"
        else:
            # distant parent: horizontal connector on the node line
            nodeline[2 * dst] = "+"
            lo, hi = (dst, src) if src > dst else (src, dst)
            for i in range(2 * lo + 1, 2 * hi):
                if nodeline[i] != "+":
                    nodeline[i] = "-"
89 89
def format_line(line, level, logstr):
    """Join the graph cells, pad to the graph width, and append logstr."""
    graph = "".join(line)
    text = "%-*s %s" % (2 * level, graph, logstr)
    return text.rstrip() + "\n"
93 93
def get_nodeline_edges_tail(
    node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """Return the cells continuing the edges to the right of the node.

    Edges slant ("/" or "\\") when two consecutive rows shrink or grow
    in the same direction; otherwise they stay vertical ("|").
    """
    remaining = n_columns - node_index - 1
    if not (fix_tail and n_columns_diff == p_diff and n_columns_diff != 0):
        # plain vertical continuation
        return ["|", " "] * remaining
    if n_columns_diff == -1:
        # still shrinking: verticals up to the pivot column, then slants
        start = max(node_index + 1, p_node_index)
        tail = ["|", " "] * (start - node_index - 1)
        tail.extend(["/", " "] * (n_columns - start))
        return tail
    # still growing
    return ["\\", " "] * remaining
107 107
def get_padding_line(ni, n_columns, edges):
    """Build the padding row inserted below a node with long edges.

    The cell at column ni is a "|" when an edge lands on that column or
    the one just left of it, otherwise a blank:

        (ni, ni - 1)      (ni, ni)
        | | | |           | | | |
        +---o |           | o---+
        | | c |           | | c |
        | |/ /            | |/ /
        | | |             | | |
    """
    center = "|" if (ni, ni - 1) in edges or (ni, ni) in edges else " "
    row = ["|", " "] * ni
    row.extend([center, " "])
    row.extend(["|", " "] * (n_columns - ni - 1))
    return row
124 124
def get_limit(limit_opt):
    """Parse the --limit option into a positive int (sys.maxint when unset)."""
    if not limit_opt:
        return sys.maxint
    try:
        limit = int(limit_opt)
    except ValueError:
        raise Abort(_("limit must be a positive integer"))
    if limit <= 0:
        raise Abort(_("limit must be positive"))
    return limit
136 136
def get_revs(repo, rev_opt):
    """Return the (start_rev, stop_rev) bounds for the graph walk."""
    if rev_opt:
        revs = revrange(repo, rev_opt)
        return (max(revs), min(revs))
    # default: the whole history, newest revision first
    return (repo.changelog.count() - 1, 0)
143 143
def graphlog(ui, repo, *args, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as a . character are parents of the working
    directory.
    """

    limit = get_limit(opts["limit"])
    (start_rev, stop_rev) = get_revs(repo, opts["rev"])
    stop_rev = max(stop_rev, start_rev - limit + 1)
    if start_rev == nullrev:
        # empty repository: nothing to draw
        return
    cs_printer = show_changeset(ui, repo, opts)
    grapher = revision_grapher(repo, start_rev, stop_rev)
    repo_parents = repo.dirstate.parents()
    prev_n_columns_diff = 0
    prev_node_index = 0

    for (rev, node, node_index, edges, n_columns, n_columns_diff) in grapher:
        # log_strings is the list of all log strings to draw alongside
        # the graph.
        ui.pushbuffer()
        cs_printer.show(rev, node)
        log_strings = ui.popbuffer().split("\n")[:-1]

        if n_columns_diff == -1:
            # Transform
            #
            #     | | |        | | |
            #     o | |  into  o---+
            #     |X /         |/ /
            #     | |          | |
            fix_long_right_edges(edges)

        # add_padding_line says whether to rewrite
        #
        #     | | | |        | | | |
        #     | o---+  into  | o---+
        #     |  / /         |   | |  # <--- padding line
        #     o | |          |  / /
        #                    o | |
        add_padding_line = \
            len(log_strings) > 2 and \
            n_columns_diff == -1 and \
            [x for (x, y) in edges if x + 1 < y]

        # fix_nodeline_tail says whether to rewrite
        #
        #     | | o | |        | | o | |
        #     | | |/ /         | | |/ /
        #     | o | |    into  | o / /  # <--- fixed nodeline tail
        #     | |/ /           | |/ /
        #     o | |            o | |
        fix_nodeline_tail = len(log_strings) <= 2 and not add_padding_line

        # nodeline is the line containing the node character (. or o).
        nodeline = ["|", " "] * node_index
        if node in repo_parents:
            node_ch = "."
        else:
            node_ch = "o"
        nodeline.extend([node_ch, " "])

        nodeline.extend(
            get_nodeline_edges_tail(
                node_index, prev_node_index, n_columns, n_columns_diff,
                prev_n_columns_diff, fix_nodeline_tail))

        # shift_interline is the line containing the non-vertical
        # edges between this entry and the next.
        shift_interline = ["|", " "] * node_index
        if n_columns_diff == -1:
            n_spaces = 1
            edge_ch = "/"
        elif n_columns_diff == 0:
            n_spaces = 2
            edge_ch = "|"
        else:
            n_spaces = 3
            edge_ch = "\\"
        shift_interline.extend(n_spaces * [" "])
        shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))

        # Draw edges from the current node to its parents.
        draw_edges(edges, nodeline, shift_interline)

        # lines is the list of all graph lines to print.
        lines = [nodeline]
        if add_padding_line:
            lines.append(get_padding_line(node_index, n_columns, edges))
        lines.append(shift_interline)

        # Make sure that there are as many graph lines as there are
        # log strings.
        while len(log_strings) < len(lines):
            log_strings.append("")
        if len(lines) < len(log_strings):
            extra_interline = ["|", " "] * (n_columns + n_columns_diff)
            while len(lines) < len(log_strings):
                lines.append(extra_interline)

        # Print lines.
        indentation_level = max(n_columns, n_columns + n_columns_diff)
        for (line, logstr) in zip(lines, log_strings):
            ui.write(format_line(line, indentation_level, logstr))

        # ...and start over.
        prev_node_index = node_index
        prev_n_columns_diff = n_columns_diff
256 256
# command table picked up by Mercurial's extension loader:
# command name -> (callback, option list, synopsis)
cmdtable = {
    "glog":
    (graphlog,
     [("l", "limit", "", _("limit number of changes displayed")),
      ("p", "patch", False, _("show patch")),
      ("r", "rev", [], _("show the specified revision or range")),
      ("", "style", "", _("display using template map file")),
      ("", "template", "", _("display with template"))],
     "hg glog [OPTIONS]"),
}
@@ -1,164 +1,164 b''
1 1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 2 #
3 3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 4 # that removes files not known to mercurial
5 5 #
6 6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 7 # (http://www.red-bean.com/cvsutils/).
8 8 #
9 9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 10 # [extensions]
11 11 # hgext.purge =
12 12 #
13 13 # For help on the usage of "hg purge" use:
14 14 # hg help purge
15 15 #
16 16 # This program is free software; you can redistribute it and/or modify
17 17 # it under the terms of the GNU General Public License as published by
18 18 # the Free Software Foundation; either version 2 of the License, or
19 19 # (at your option) any later version.
20 20 #
21 21 # This program is distributed in the hope that it will be useful,
22 22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 24 # GNU General Public License for more details.
25 25 #
26 26 # You should have received a copy of the GNU General Public License
27 27 # along with this program; if not, write to the Free Software
28 28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29 29
30 30 from mercurial import hg, util
31 31 from mercurial.i18n import _
32 32 import os
33 33
def dopurge(ui, repo, dirs=None, act=True, abort_on_err=False, eol='\n',
            force=False, include=None, exclude=None):
    """Delete (or, when act is False, just list) files unknown to
    mercurial and empty directories in the working copy.

    dirs limits the walk to the given directories; include/exclude are
    match patterns; eol terminates each printed name when not acting.
    """

    def report(msg):
        # Either abort hard or merely warn, depending on abort_on_err.
        if abort_on_err:
            raise util.Abort(msg)
        ui.warn(_('warning: %s\n') % msg)

    def purge_entry(remove_func, name):
        # In --print mode only emit the name; otherwise try to delete it.
        if not act:
            ui.write('%s%s' % (name, eol))
            return
        try:
            remove_func(os.path.join(repo.root, name))
        except OSError:
            report(_('%s cannot be removed') % name)

    directories = []
    unknown = []
    missing = []
    roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs,
                                            include, exclude)
    # Classify everything the dirstate walk reports: directories ('d'),
    # tracked-but-absent files ('m'), and on-disk files not in the
    # dirstate ('f') which are the purge candidates.
    for kind, name, st in repo.dirstate.statwalk(files=roots, match=match,
                                                 ignored=True,
                                                 directories=True):
        if kind == 'd':
            directories.append(name)
        elif kind == 'm':
            missing.append(name)
        elif kind == 'f' and name not in repo.dirstate:
            unknown.append(name)

    _check_missing(ui, repo, missing, force)

    directories.sort()

    for name in unknown:
        if name not in repo.dirstate:
            ui.note(_('Removing file %s\n') % name)
            purge_entry(os.remove, name)

    # Walk directories deepest-first so emptied parents are removable too.
    for name in directories[::-1]:
        if match(name) and not os.listdir(repo.wjoin(name)):
            ui.note(_('Removing directory %s\n') % name)
            purge_entry(os.rmdir, name)
78 78
def _check_missing(ui, repo, missing, force=False):
    """Abort if there is the chance of having problems with name-mangling fs

    On a name-mangling filesystem (e.g. a case-insensitive one)
    dirstate.walk() can yield filenames different from the ones stored
    in the dirstate; with purge this may cause data loss.  The force
    option lets the user skip the check when he knows it is safe, but
    even then we abort if any "missing" file is still present in the
    working dir, since that points at an underlying filesystem problem."""

    # Nothing tracked is missing from the walk: nothing to worry about.
    if not missing:
        return

    present = [f for f in missing if util.lexists(repo.wjoin(f))]

    if present:
        if not ui.quiet:
            ui.warn(_("The following tracked files weren't listed by the "
                      "filesystem, but could still be found:\n"))
            for f in present:
                ui.warn("%s\n" % f)
            if util.checkfolding(repo.path):
                ui.warn(_("This is probably due to a case-insensitive "
                          "filesystem\n"))
        # Unconditional even with --force: the fs itself looks broken.
        raise util.Abort(_("purging on name mangling filesystems is not "
                           "yet fully supported"))

    if not force:
        raise util.Abort(_("there are missing files in the working dir and "
                           "purge still has problems with them due to name "
                           "mangling filesystems. "
                           "Use --force if you know what you are doing"))
115 115
116 116
def purge(ui, repo, *dirs, **opts):
    '''removes files not tracked by mercurial

    Delete files not known to mercurial, this is useful to test local and
    uncommitted changes in the otherwise clean source tree.

    This means that purge will delete:
     - Unknown files: files marked with "?" by "hg status"
     - Ignored files: files usually ignored by Mercurial because they match
       a pattern in a ".hgignore" file
     - Empty directories: in fact Mercurial ignores directories unless they
       contain files under source control managment
    But it will leave untouched:
     - Unmodified tracked files
     - Modified tracked files
     - New files added to the repository (with "hg add")

    If directories are given on the command line, only files in these
    directories are considered.

    Be careful with purge, you could irreversibly delete some files you
    forgot to add to the repository. If you only want to print the list of
    files that this program would delete use the --print option.
    '''
    # Decode the command-line options into dopurge() arguments.
    if opts['print0']:
        # --print0 implies --print: list names NUL-terminated, delete nothing.
        act = False
        eol = '\0'
    else:
        act = not opts['print']
        eol = '\n'
    dopurge(ui, repo, dirs, act, bool(opts['abort_on_err']), eol,
            bool(opts['force']), opts['include'], opts['exclude'])
151 151
152 152
# Command table registering "hg purge" with Mercurial's dispatcher:
# maps command name -> (callback, options list, synopsis).
cmdtable = {
    'purge':
        (purge,
         [('a', 'abort-on-err', None, _('abort if an error occurs')),
          ('f', 'force', None, _('purge even when missing files are detected')),
          ('p', 'print', None, _('print the file names instead of deleting them')),
          ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
                                  ' (implies -p)')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg purge [OPTION]... [DIR]...'))
}
@@ -1,593 +1,593 b''
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 import os, tempfile
10 10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
11 11 from mercurial import patch, revlog, util
12 12
13 13 '''patch transplanting tool
14 14
15 15 This extension allows you to transplant patches from another branch.
16 16
17 17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 18 from a changeset hash to its hash in the source repository.
19 19 '''
20 20
class transplantentry:
    """One transplant record: local changeset node paired with the node
    it was transplanted from in the source repository."""
    def __init__(self, lnode, rnode):
        self.lnode, self.rnode = lnode, rnode
25 25
class transplants:
    """Persistent collection of transplantentry records, stored one per
    line as "localhex:remotehex" in transplantfile under path."""

    def __init__(self, path=None, transplantfile=None, opener=None):
        self.path = path
        self.transplantfile = transplantfile
        # Fall back to a default opener rooted at path when none is given.
        self.opener = opener or util.opener(self.path)
        self.transplants = []
        self.dirty = False
        self.read()

    def read(self):
        # Load previously recorded transplants, if the file exists.
        abspath = os.path.join(self.path, self.transplantfile)
        if not (self.transplantfile and os.path.exists(abspath)):
            return
        for entry in self.opener(self.transplantfile).read().splitlines():
            lnode, rnode = map(revlog.bin, entry.split(':'))
            self.transplants.append(transplantentry(lnode, rnode))

    def write(self):
        # Persist the records, but only when something actually changed.
        if not (self.dirty and self.transplantfile):
            return
        if not os.path.isdir(self.path):
            os.mkdir(self.path)
        fp = self.opener(self.transplantfile, 'w')
        for entry in self.transplants:
            fp.write('%s:%s\n' % (revlog.hex(entry.lnode),
                                  revlog.hex(entry.rnode)))
        fp.close()
        self.dirty = False

    def get(self, rnode):
        # All records originating from the given source-repo node.
        return [entry for entry in self.transplants if entry.rnode == rnode]

    def set(self, lnode, rnode):
        self.transplants.append(transplantentry(lnode, rnode))
        self.dirty = True

    def remove(self, transplant):
        # transplantentry defines no __eq__, so this matches by identity.
        self.transplants.remove(transplant)
        self.dirty = True
66 66
class transplanter:
    """Drives the transplant operation: applies changesets from a source
    repository onto the working directory, records them in a transplants
    map, and journals enough state to resume after a failed merge."""

    def __init__(self, ui, repo):
        self.ui = ui
        # Per-repo state directory (.hg/transplant) holding the
        # transplants map, the pending 'series' and the crash 'journal'.
        self.path = repo.join('transplant')
        self.opener = util.opener(self.path)
        self.transplants = transplants(self.path, 'transplants', opener=self.opener)

    def applied(self, repo, node, parent):
        '''returns True if a node is already an ancestor of parent
        or has already been transplanted'''
        if hasnode(repo, node):
            if node in repo.changelog.reachable(parent, stop=node):
                return True
        for t in self.transplants.get(node):
            # it might have been stripped
            if not hasnode(repo, t.lnode):
                self.transplants.remove(t)
                return False
            if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
                return True
        return False

    # NOTE(review): mutable default opts={}; it is only read via .get()
    # here, so it is harmless, but a None default would be safer.
    def apply(self, repo, source, revmap, merges, opts={}):
        '''apply the revisions in revmap one by one in revision order'''
        revs = revmap.keys()
        revs.sort()

        p1, p2 = repo.dirstate.parents()
        pulls = []
        diffopts = patch.diffopts(self.ui, opts)
        diffopts.git = True

        # Both locks are held for the whole run; the finally clause below
        # always saves remaining work so --continue can pick it up.
        lock = repo.lock()
        wlock = repo.wlock()
        try:
            for rev in revs:
                node = revmap[rev]
                revstr = '%s:%s' % (rev, revlog.short(node))

                if self.applied(repo, node, p1):
                    self.ui.warn(_('skipping already applied revision %s\n') %
                                 revstr)
                    continue

                parents = source.changelog.parents(node)
                if not opts.get('filter'):
                    # If the changeset parent is the same as the wdir's parent,
                    # just pull it.
                    if parents[0] == p1:
                        pulls.append(node)
                        p1 = node
                        continue
                    if pulls:
                        # Flush any queued straight pulls before patching.
                        if source != repo:
                            repo.pull(source, heads=pulls, lock=lock)
                        merge.update(repo, pulls[-1], False, False, None,
                                     wlock=wlock)
                        p1, p2 = repo.dirstate.parents()
                        pulls = []

                domerge = False
                if node in merges:
                    # pulling all the merge revs at once would mean we couldn't
                    # transplant after the latest even if transplants before them
                    # fail.
                    domerge = True
                    if not hasnode(repo, node):
                        repo.pull(source, heads=[node], lock=lock)

                if parents[1] != revlog.nullid:
                    # Merge changesets cannot be turned into a plain patch.
                    self.ui.note(_('skipping merge changeset %s:%s\n')
                                 % (rev, revlog.short(node)))
                    patchfile = None
                else:
                    # Export the changeset as a git-style patch to a temp file.
                    fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
                    fp = os.fdopen(fd, 'w')
                    patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
                    fp.close()

                # Remove from revmap first: on failure, saveseries() below
                # records only the revisions not yet attempted.
                del revmap[rev]
                if patchfile or domerge:
                    try:
                        n = self.applyone(repo, node, source.changelog.read(node),
                                          patchfile, merge=domerge,
                                          log=opts.get('log'),
                                          filter=opts.get('filter'),
                                          lock=lock, wlock=wlock)
                        if n and domerge:
                            self.ui.status(_('%s merged at %s\n') % (revstr,
                                      revlog.short(n)))
                        elif n:
                            self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
                                      revlog.short(n)))
                    finally:
                        if patchfile:
                            os.unlink(patchfile)
            if pulls:
                # Trailing pulls queued after the last patched revision.
                repo.pull(source, heads=pulls, lock=lock)
                merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
        finally:
            self.saveseries(revmap, merges)
            self.transplants.write()

    # NOTE(review): the 'filter' parameter shadows the builtin; kept
    # as-is since it is part of the established internal interface.
    def filter(self, filter, changelog, patchfile):
        '''arbitrarily rewrite changeset before applying it'''

        self.ui.status('filtering %s\n' % patchfile)
        # changelog tuple layout: [1]=user, [2]=date, [4]=description.
        user, date, msg = (changelog[1], changelog[2], changelog[4])

        # Write the metadata header the filter program will edit ($1).
        fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
        fp = os.fdopen(fd, 'w')
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % user)
        fp.write("# Date %d %d\n" % date)
        fp.write(changelog[4])
        fp.close()

        try:
            # Run the user filter with header ($1) and patch ($2).
            util.system('%s %s %s' % (filter, util.shellquote(headerfile),
                                   util.shellquote(patchfile)),
                        environ={'HGUSER': changelog[1]},
                        onerr=util.Abort, errprefix=_('filter failed'))
            # Re-read the (possibly modified) metadata back.
            user, date, msg = self.parselog(file(headerfile))[1:4]
        finally:
            os.unlink(headerfile)

        return (user, date, msg)

    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None, lock=None, wlock=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, cl, patchfile)

        if log:
            message += '\n(transplanted from %s)' % revlog.hex(node)

        self.ui.status(_('applying %s\n') % revlog.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise util.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = {}
                try:
                    fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
                                       files=files)
                    if not files:
                        self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
                        return None
                finally:
                    # Sync the dirstate with whatever the patch touched,
                    # even if patching failed part-way.
                    files = patch.updatedir(self.ui, repo, files, wlock=wlock)
            except Exception, inst:
                # Patch application failed: journal enough metadata that
                # the user can fix up and run 'transplant --continue'.
                if filter:
                    os.unlink(patchfile)
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.parents()[0]
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
        else:
            files = None
        if merge:
            # Record the transplanted node as second parent of the commit.
            p1, p2 = repo.dirstate.parents()
            repo.dirstate.setparents(p1, node)

        n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
                        extra=extra)
        if not merge:
            self.transplants.set(n, node)

        return n

    def resume(self, repo, source, opts=None):
        '''recover last transaction and apply remaining changesets'''
        # A journal means a transplant died mid-commit: finish it first.
        if os.path.exists(os.path.join(self.path, 'journal')):
            n, node = self.recover(repo)
            self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
                                                           revlog.short(n)))
        seriespath = os.path.join(self.path, 'series')
        if not os.path.exists(seriespath):
            self.transplants.write()
            return
        # Rebuild the revision map from the saved series and keep going.
        nodes, merges = self.readseries()
        revmap = {}
        for n in nodes:
            revmap[source.changelog.rev(n)] = n
        os.unlink(seriespath)

        self.apply(repo, source, revmap, merges, opts)

    def recover(self, repo):
        '''commit working directory using journal metadata'''
        node, user, date, message, parents = self.readlog()
        merge = len(parents) == 2

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_('transplant log file is corrupt'))

        extra = {'transplant_source': node}
        wlock = repo.wlock()
        p1, p2 = repo.dirstate.parents()
        if p1 != parents[0]:
            raise util.Abort(_('working dir not at transplant parent %s') %
                             revlog.hex(parents[0]))
        if merge:
            repo.dirstate.setparents(p1, parents[1])
        n = repo.commit(None, message, user, date, wlock=wlock, extra=extra)
        if not n:
            raise util.Abort(_('commit failed'))
        if not merge:
            self.transplants.set(n, node)
        self.unlog()

        return n, node

    def readseries(self):
        # Parse 'series': plain nodes first, then nodes after a
        # '# Merges' separator line.
        nodes = []
        merges = []
        cur = nodes
        for line in self.opener('series').read().splitlines():
            if line.startswith('# Merges'):
                cur = merges
                continue
            cur.append(revlog.bin(line))

        return (nodes, merges)

    def saveseries(self, revmap, merges):
        # Persist the not-yet-applied revisions so resume() can retry them.
        if not revmap:
            return

        if not os.path.isdir(self.path):
            os.mkdir(self.path)
        series = self.opener('series', 'w')
        revs = revmap.keys()
        revs.sort()
        for rev in revs:
            series.write(revlog.hex(revmap[rev]) + '\n')
        if merges:
            series.write('# Merges\n')
            for m in merges:
                series.write(revlog.hex(m) + '\n')
        series.close()

    def parselog(self, fp):
        # Parse a journal/header file: '# User/Date/Node ID/Parent' lines
        # followed by the free-form commit message.
        parents = []
        message = []
        node = revlog.nullid
        inmsg = False
        for line in fp.read().splitlines():
            if inmsg:
                message.append(line)
            elif line.startswith('# User '):
                user = line[7:]
            elif line.startswith('# Date '):
                date = line[7:]
            elif line.startswith('# Node ID '):
                node = revlog.bin(line[10:])
            elif line.startswith('# Parent '):
                parents.append(revlog.bin(line[9:]))
            elif not line.startswith('#'):
                inmsg = True
                message.append(line)
        # NOTE(review): user/date stay unbound if their header lines are
        # absent; recover() relies on readlog() input being well-formed.
        return (node, user, date, '\n'.join(message), parents)

    def log(self, user, date, message, p1, p2, merge=False):
        '''journal changelog metadata for later recover'''

        if not os.path.isdir(self.path):
            os.mkdir(self.path)
        fp = self.opener('journal', 'w')
        fp.write('# User %s\n' % user)
        fp.write('# Date %s\n' % date)
        fp.write('# Node ID %s\n' % revlog.hex(p2))
        fp.write('# Parent ' + revlog.hex(p1) + '\n')
        if merge:
            fp.write('# Parent ' + revlog.hex(p2) + '\n')
        fp.write(message.rstrip() + '\n')
        fp.close()

    def readlog(self):
        # Inverse of log(): recover the journaled commit metadata.
        return self.parselog(self.opener('journal'))

    def unlog(self):
        '''remove changelog journal'''
        absdst = os.path.join(self.path, 'journal')
        if os.path.exists(absdst):
            os.unlink(absdst)

    def transplantfilter(self, repo, source, root):
        # Build a predicate selecting source nodes still worth offering:
        # not applied, not merges, and not transplants of applied nodes.
        def matchfn(node):
            if self.applied(repo, node, root):
                return False
            if source.changelog.parents(node)[1] != revlog.nullid:
                return False
            extra = source.changelog.read(node)[5]
            cnode = extra.get('transplant_source')
            if cnode and self.applied(repo, cnode, root):
                return False
            return True

        return matchfn
377 377
def hasnode(repo, node):
    """Return True if node exists in repo's changelog, False otherwise.

    changelog.rev() raises RevlogError for unknown nodes; any returned
    revision number (including 0) counts as present.
    """
    try:
        # PEP 8: compare to None with 'is not', never '!=' (the original
        # used '!= None', which invokes __ne__ instead of identity).
        return repo.changelog.rev(node) is not None
    except revlog.RevlogError:
        return False
383 383
def browserevs(ui, repo, nodes, opts):
    '''interactively transplant changesets

    Shows each candidate node and prompts the user; returns the tuple
    (transplants, merges) of selected nodes.  'q' cancels, returning
    two empty tuples; 'c' stops early keeping selections so far.'''
    def browsehelp(ui):
        # Printed for '?' and implicitly re-prompted after.
        ui.write('y: transplant this changeset\n'
                 'n: skip this changeset\n'
                 'm: merge at this changeset\n'
                 'p: show patch\n'
                 'c: commit selected changesets\n'
                 'q: cancel transplant\n'
                 '?: show this help\n')

    displayer = cmdutil.show_changeset(ui, repo, opts)
    transplants = []
    merges = []
    for node in nodes:
        displayer.show(changenode=node)
        action = None
        # Loop until a definitive y/n/m/c/q answer; '?' and 'p' only
        # produce output and re-prompt.
        while not action:
            action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
            if action == '?':
                browsehelp(ui)
                action = None
            elif action == 'p':
                parent = repo.changelog.parents(node)[0]
                patch.diff(repo, parent, node)
                action = None
            elif action not in ('y', 'n', 'm', 'c', 'q'):
                ui.write('no such option\n')
                action = None
        if action == 'y':
            transplants.append(node)
        elif action == 'm':
            merges.append(node)
        elif action == 'c':
            break
        elif action == 'q':
            transplants = ()
            merges = ()
            break
    return (transplants, merges)
424 424
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. If --log is
    specified, log messages will have a comment appended of the form:

    (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message
    as $1 and the patch as $2.

    If --source is specified, selects changesets from the named
    repository. If --branch is specified, selects changesets from the
    branch holding the named revision, up to that revision. If --all
    is specified, all changesets on the branch will be transplanted,
    otherwise you will be prompted to select the changesets you want.

    hg transplant --branch REVISION --all will rebase the selected branch
    (up to the named revision) onto your current working directory.

    You can optionally mark selected transplanted changesets as
    merge changesets. You will not be prompted to transplant any
    ancestors of a merged transplant, and you can merge descendants
    of them normally instead of transplanting them.

    If no merges or revisions are provided, hg transplant will start
    an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand and
    then resume where you left off by calling hg transplant --continue.
    '''
    # NOTE(review): defined but never called in this function body.
    def getoneitem(opts, item, errmsg):
        val = opts.get(item)
        if val:
            if len(val) > 1:
                raise util.Abort(errmsg)
            else:
                return val[0]

    def getremotechanges(repo, url):
        # Find incoming changes; for non-local sources, wrap them in a
        # temporary bundle repository so they can be read locally.
        sourcerepo = ui.expandpath(url)
        source = hg.repository(ui, sourcerepo)
        incoming = repo.findincoming(source, force=True)
        if not incoming:
            return (source, None, None)

        bundle = None
        if not source.local():
            cg = source.changegroup(incoming, 'incoming')
            bundle = changegroup.writebundle(cg, None, 'HG10UN')
            source = bundlerepo.bundlerepository(ui, repo.root, bundle)

        return (source, incoming, bundle)

    def incwalk(repo, incoming, branches, match=util.always):
        # Yield matching incoming nodes, optionally limited to branches.
        if not branches:
            branches=None
        for node in repo.changelog.nodesbetween(incoming, branches)[0]:
            if match(node):
                yield node

    def transplantwalk(repo, root, branches, match=util.always):
        # Yield matching local candidates between the common ancestors
        # of root and each branch head.
        if not branches:
            branches = repo.heads()
        ancestors = []
        for branch in branches:
            ancestors.append(repo.changelog.ancestor(root, branch))
        for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # Validate mutually exclusive option combinations up front.
        if opts.get('continue'):
            if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
                raise util.Abort(_('--continue is incompatible with branch, all or merge'))
            return
        if not (opts.get('source') or revs or
                opts.get('merge') or opts.get('branch')):
            raise util.Abort(_('no source URL, branch tag or revision list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise util.Abort(_('--all requires a branch revision'))
            if revs:
                raise util.Abort(_('--all is incompatible with a revision list'))

    checkopts(opts, revs)

    # hgrc [transplant] section provides defaults for --log/--filter.
    if not opts.get('log'):
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo)

    # Refuse to run on a dirty or merged working directory (unless
    # we are only resuming a previous run).
    p1, p2 = repo.dirstate.parents()
    if p1 == revlog.nullid:
        raise util.Abort(_('no revision checked out'))
    if not opts.get('continue'):
        if p2 != revlog.nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise util.Abort(_('outstanding local changes'))

    bundle = None
    source = opts.get('source')
    if source:
        (source, incoming, bundle) = getremotechanges(repo, source)
    else:
        source = repo

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        # Build the rev -> node map of changesets to transplant, from
        # an explicit rev list, --all, or the interactive browser.
        tf=tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = [source.lookup(r)
                     for r in cmdutil.revrange(source, opts.get('prune'))]
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        branches = map(source.lookup, opts.get('branch', ()))
        merges = map(source.lookup, opts.get('merge', ()))
        revmap = {}
        if revs:
            for r in cmdutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, incoming, branches, match=matchfn)
            else:
                alltransplants = transplantwalk(source, p1, branches, match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
            for r in revs:
                revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        revs = revmap.keys()
        revs.sort()
        # NOTE(review): 'pulls' is assigned here but never used below.
        pulls = []

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        # Clean up the temporary bundle repo created for remote sources.
        if bundle:
            source.close()
            os.unlink(bundle)
580 580
# Command table registering "hg transplant" with Mercurial's dispatcher:
# maps command name -> (callback, options list, synopsis).
cmdtable = {
    "transplant":
        (transplant,
         [('s', 'source', '', _('pull patches from REPOSITORY')),
          ('b', 'branch', [], _('pull patches from branch BRANCH')),
          ('a', 'all', None, _('pull all changesets up to BRANCH')),
          ('p', 'prune', [], _('skip over REV')),
          ('m', 'merge', [], _('merge at REV')),
          ('', 'log', None, _('append transplant info to log message')),
          ('c', 'continue', None, _('continue last transplant session after repair')),
          ('', 'filter', '', _('filter changesets through FILTER'))],
         _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
}
@@ -1,3409 +1,3409 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import traceback, errno, version, atexit, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
def bail_if_changed(repo):
    """Abort with a user-facing error if the working dir has local changes.

    Only the first four status lists (modified, added, removed, deleted)
    are consulted; unknown/ignored files do not block the operation.
    """
    changes = repo.status()[:4]
    if changes[0] or changes[1] or changes[2] or changes[3]:
        raise util.Abort(_("outstanding uncommitted changes"))
26 26
def logmessage(opts):
    """ get the log message according to -m and -l option """
    # -m/--message and -l/--logfile both end up in opts; exactly one (or
    # neither) may be given.
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            # '-' means read the commit message from standard input
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
45 45
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # Propagate the --ssh and --remotecmd command-line overrides into the
    # [ui] configuration section so lower layers pick them up.
    for optname in ('ssh', 'remotecmd'):
        value = opts.get(optname)
        if value:
            ui.setconfig("ui", optname, value)
52 52
53 53 # Commands start here, listed alphabetically
54 54
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            # explicitly named file: add it even if already tracked elsewhere;
            # only mention it when verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            # pattern match: only pick up files unknown to the dirstate
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
77 77
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # --similarity arrives as a string (or None); normalize to a float
    # percentage and hand the 0.0-1.0 ratio to cmdutil.addremove.
    similarity = float(opts.get('similarity') or 0)
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=similarity / 100.)
96 96
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # memoize date formatting: many lines share the same changeset
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, formatter) pairs; a column is emitted for each option
    # the user enabled, in this order
    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    # default to showing revision numbers when no column was requested
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                # render the column and right-align it to its widest entry
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            # annotated lines keep their own trailing newline (l[1])
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
144 144
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    # expand %-escapes (e.g. %h) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # writing to stdout only makes sense for single-stream formats
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
184 184
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''
    # the revision may arrive positionally (node) or via -r (rev); not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge is ambiguous: the user must pick which parent
        # to revert towards with --parent
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    # update to the changeset being backed out, then revert the working dir
    # to its parent and commit the result
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # the working dir moved; either auto-merge back or remind the user
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
254 254
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if label:
        if not opts.get('force') and label in repo.branchtags():
            # reusing a parent's branch name is always allowed; shadowing an
            # unrelated branch needs --force
            if label not in [p.branch() for p in repo.workingctx().parents()]:
                raise util.Abort(_('a branch of the same name already exists'
                                   ' (use --force to override)'))
        # branch names are stored in UTF-8; convert from the local encoding
        repo.dirstate.setbranch(util.fromlocal(label))
    else:
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
273 273
def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    # sort by descending revision (negated rev sorts ascending -> newest first)
    entries = [(-repo.changelog.rev(node), node, tag)
               for tag, node in repo.branchtags().items()]
    entries.sort()
    for negrev, node, tag in entries:
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            padding = " " * (30 - util.locallen(tag))
            ui.write("%s%s %s:%s\n" % (tag, padding, -negrev, hexfunc(node)))
289 289
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            # fix: error message previously misspelled "specifying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # walk from the requested heads towards the roots, collecting (in o)
        # every node not reachable from a --base node
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                # a root of the outgoing set; prepend to keep topological order
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: ask the destination repository what it is missing
        setremoteconfig(ui, opts)
        dest, revs = cmdutil.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    # HG10BZ: bundle format v1, bzip2-compressed
    changegroup.writebundle(cg, fname, "HG10BZ")
349 349
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        # make_file returns stdout when no -o template was given
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())
370 370
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # honor --ssh/--remotecmd overrides, then delegate the real work to hg.clone
    setremoteconfig(ui, opts)
    hg.clone(ui, source, dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])
416 416
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        # lazily-built sorted copy of files, used for directory prefix checks
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                # a named file with no pending change: diagnose precisely why
                rf = repo.wjoin(f)
                if f in unknown:
                    raise util.Abort(_("file %s not tracked!") % rf)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    # directory: accept it only if some changed file lives
                    # under it (binary search on the sorted file list)
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not stat.S_ISREG(mode):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
    else:
        # no patterns: empty list tells repo.commit to take all changes
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))
468 468
def docopy(ui, repo, pats, opts, wlock):
    """Shared worker for the copy and rename commands.

    Expands pats into (source, destination) pairs, validates each source,
    copies the file contents (unless --after/--dry-run) and records the
    copy in the dirstate.  Returns (error count, list of copied files).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}

    # abs: hgsep
    # rel: ossep
    # return: hgsep
    def okaytocopy(abs, rel, exact):
        reasons = {'?': _('is not managed'),
                   'a': _('has been marked for add'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate.state(abs)
        reason = reasons.get(state)
        if reason:
            if state == 'a':
                # a file added as the result of a copy: chase the chain back
                # to the original source
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            return abs

    # origsrc: hgsep
    # abssrc: hgsep
    # relsrc: ossep
    # target: ossep
    def copy(origsrc, abssrc, relsrc, target, exact):
        abstarget = util.canonpath(repo.root, cwd, target)
        reltarget = util.pathto(repo.root, cwd, abstarget)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            # two sources mapping onto the same destination in this run
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, util.localpath(abssrc),
                     util.localpath(prevsrc)))
            return
        if (not opts['after'] and os.path.exists(reltarget) or
            opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(reltarget)
        if opts['after']:
            # --after: only record copies whose target already exists on disk
            if not os.path.exists(reltarget):
                return
        else:
            targetdir = os.path.dirname(reltarget) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
        try:
            # if the target was scheduled for removal, resurrect it first and
            # undo that on failure
            restore = repo.dirstate.state(abstarget) == 'r'
            if restore and not opts.get('dry_run'):
                repo.undelete([abstarget], wlock)
            try:
                if not opts.get('dry_run'):
                    util.copyfile(relsrc, reltarget)
                restore = False
            finally:
                if restore:
                    repo.remove([abstarget], wlock=wlock)
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                ui.warn(_('%s: deleted in working copy\n') % relsrc)
            else:
                ui.warn(_('%s: cannot copy - %s\n') %
                        (relsrc, inst.strerror))
                # NOTE(review): this assignment makes 'errors' local to copy(),
                # so reaching this branch raises UnboundLocalError and the
                # outer error counter is never incremented -- TODO confirm/fix
                errors += 1
                return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc and not opts.get('dry_run'):
            repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            # copying a directory: strip the common prefix from each source
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest when
                    # stripped at this length; used to pick the better prefix
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                                 os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; everything before it is a source
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
                                                       globbed=True):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied
654 654
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # NOTE(review): the write lock is held via the wlock local and released
    # when it is garbage-collected, not explicitly -- presumably intentional
    # in this codebase; confirm against other callers of repo.wlock()
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs
672 672
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the revlog index directly, bypassing path auditing
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    ancestor = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(ancestor), hex(ancestor)))
678 678
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # --options: list the flags (global + command-specific) instead of
        # the command names
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    clist = findpossible(ui, cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))
699 699
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # default to tip when no revision is given
    if rev == "":
        rev = repo.changelog.tip()
    ctx = repo.changectx(rev)
    files = ctx.manifest()
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)
708 708
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of both
    working-directory parents and aborts if any inconsistency is found.
    """
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    # dirstate states: n=normal, a=added, r=removed, m=merged
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            # fix: message previously lacked the trailing newline that every
            # sibling warning in this function has
            ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
739 739
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name argument is allowed
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # bare section name: print every item in that section
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    # exact section.name match: print the value alone
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))
765 765
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # an omitted second parent means "no merge in progress" (null revision)
    if not rev2:
        rev2 = hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        wlock.release()
781 781
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    # each dirstate entry is (state, mode, size, mtime)
    for file_ in keys:
        if dc[file_][3] == -1:
            # mtime of -1 means "unset"; pad the marker to the width of a
            # locale-formatted timestamp so columns line up
            # Pad or slice to locale representation
            locale_len = len(time.strftime("%x %X", time.localtime(0)))
            timestr = 'unset'
            timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
        else:
            timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
801 801
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # swap the '.d' data-file suffix for '.i' to open the matching index
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
809 809
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # --extended: accept additional, less common date formats
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    # internal form is a (unixtime, tz offset) pair
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))
821 821
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    # one row per revision: position, size, delta base, link and parents
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
833 833
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    # emit one edge per parent link; omit edges to the null revision
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
845 845
846 846 def debuginstall(ui):
847 847 '''test Mercurial installation'''
848 848
849 849 def writetemp(contents):
850 850 (fd, name) = tempfile.mkstemp()
851 851 f = os.fdopen(fd, "wb")
852 852 f.write(contents)
853 853 f.close()
854 854 return name
855 855
856 856 problems = 0
857 857
858 858 # encoding
859 859 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
860 860 try:
861 861 util.fromlocal("test")
862 862 except util.Abort, inst:
863 863 ui.write(" %s\n" % inst)
864 864 ui.write(_(" (check that your locale is properly set)\n"))
865 865 problems += 1
866 866
867 867 # compiled modules
868 868 ui.status(_("Checking extensions...\n"))
869 869 try:
870 870 import bdiff, mpatch, base85
871 871 except Exception, inst:
872 872 ui.write(" %s\n" % inst)
873 873 ui.write(_(" One or more extensions could not be found"))
874 874 ui.write(_(" (check that you compiled the extensions)\n"))
875 875 problems += 1
876 876
877 877 # templates
878 878 ui.status(_("Checking templates...\n"))
879 879 try:
880 880 import templater
881 881 t = templater.templater(templater.templatepath("map-cmdline.default"))
882 882 except Exception, inst:
883 883 ui.write(" %s\n" % inst)
884 884 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
885 885 problems += 1
886 886
887 887 # patch
888 888 ui.status(_("Checking patch...\n"))
889 889 patcher = ui.config('ui', 'patch')
890 890 patcher = ((patcher and util.find_exe(patcher)) or
891 891 util.find_exe('gpatch') or
892 892 util.find_exe('patch'))
893 893 if not patcher:
894 894 ui.write(_(" Can't find patch or gpatch in PATH\n"))
895 895 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
896 896 problems += 1
897 897 else:
898 898 # actually attempt a patch here
899 899 a = "1\n2\n3\n4\n"
900 900 b = "1\n2\n3\ninsert\n4\n"
901 901 fa = writetemp(a)
902 902 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
903 903 fd = writetemp(d)
904
904
905 905 files = {}
906 906 try:
907 907 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
908 908 except util.Abort, e:
909 909 ui.write(_(" patch call failed:\n"))
910 910 ui.write(" " + str(e) + "\n")
911 911 problems += 1
912 else:
912 else:
913 913 if list(files) != [os.path.basename(fa)]:
914 914 ui.write(_(" unexpected patch output!"))
915 915 ui.write(_(" (you may have an incompatible version of patch)\n"))
916 916 problems += 1
917 917 a = file(fa).read()
918 918 if a != b:
919 919 ui.write(_(" patch test failed!"))
920 920 ui.write(_(" (you may have an incompatible version of patch)\n"))
921 921 problems += 1
922
922
923 923 os.unlink(fa)
924 924 os.unlink(fd)
925 925
926 926 # merge helper
927 927 ui.status(_("Checking merge helper...\n"))
928 928 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
929 929 or "hgmerge")
930 930 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
931 931 if not cmdpath:
932 932 if cmd == 'hgmerge':
933 933 ui.write(_(" No merge helper set and can't find default"
934 934 " hgmerge script in PATH\n"))
935 935 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
936 936 else:
937 937 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
938 938 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
939 939 problems += 1
940 940 else:
941 941 # actually attempt a patch here
942 942 fa = writetemp("1\n2\n3\n4\n")
943 943 fl = writetemp("1\n2\n3\ninsert\n4\n")
944 944 fr = writetemp("begin\n1\n2\n3\n4\n")
945 945 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
946 946 if r:
947 947 ui.write(_(" got unexpected merge error %d!") % r)
948 948 problems += 1
949 949 m = file(fl).read()
950 950 if m != "begin\n1\n2\n3\ninsert\n4\n":
951 951 ui.write(_(" got unexpected merge results!") % r)
952 952 ui.write(_(" (your merge helper may have the"
953 953 " wrong argument order)\n"))
954 954 ui.write(m)
955 955 os.unlink(fa)
956 956 os.unlink(fl)
957 957 os.unlink(fr)
958 958
959 959 # editor
960 960 ui.status(_("Checking commit editor...\n"))
961 961 editor = (os.environ.get("HGEDITOR") or
962 962 ui.config("ui", "editor") or
963 963 os.environ.get("EDITOR", "vi"))
964 964 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
965 965 if not cmdpath:
966 966 if editor == 'vi':
967 967 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
968 968 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
969 969 else:
970 970 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
971 971 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
972 972 problems += 1
973 973
974 974 # check username
975 975 ui.status(_("Checking username...\n"))
976 976 user = os.environ.get("HGUSER")
977 977 if user is None:
978 978 user = ui.config("ui", "username")
979 979 if user is None:
980 980 user = os.environ.get("EMAIL")
981 981 if not user:
982 982 ui.warn(" ")
983 983 ui.username()
984 984 ui.write(_(" (specify a username in your .hgrc file)\n"))
985 985
986 986 if not problems:
987 987 ui.status(_("No problems detected\n"))
988 988 else:
989 989 ui.write(_("%s problems detected,"
990 990 " please check your install!\n") % problems)
991 991
992 992 return problems
993 993
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # default to tip when no --rev is given
    ctx = repo.changectx(opts.get('rev', 'tip'))
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abs, rel, exact in walker:
        renamed = ctx.filectx(abs).renamed()
        if not renamed:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (rel, renamed[0], hex(renamed[1])))
1005 1005
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # size the two name columns to the longest abs/rel paths seen
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s  %%-%ds  %%-%ds  %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        row = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % row.rstrip())
1017 1017
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the (up to two) revisions named by --rev
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
1045 1045
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # announce singular vs. plural before emitting any patch data
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    diffopts = patch.diffopts(ui, opts)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'], opts=diffopts)
1086 1086
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    # --print0 switches both the field and line separators to NUL
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache of filelog objects, keyed by file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp match
        # in body; linenum is 1-based, columns are relative to the line
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # one matched line; equality is on line text only so that
        # difflib can detect match-status changes between revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> {fn -> [linestate, ...]} for the current window
    # copies:  rev -> {fn -> copy-source} when --follow is active
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) for lines whose match status changed
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: fn -> last revision in which fn was displayed
    prev = {}
    def display(fn, rev, states, prevstates):
        # print one output line per match (or per file with -l);
        # returns True when anything was printed
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # -l: emit each (file, rev) pair at most once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    # fstate: fn -> match states from the previously visited revision
    # skip:   fn -> True once a file needs no further output
    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs emits 'window' / 'add' / 'iter' events; matches are
    # collected on 'add' and compared/printed on 'iter'
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # file not in this revision's manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    # propagate skip through rename chains
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush states left over from the last visited revision
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    # exit status: 0 if something matched, 1 otherwise
    return (not found and 1) or 0
1246 1246
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # with --rev, restrict to heads reachable from that revision
    rev = opts['rev']
    if rev:
        nodes = repo.heads(repo.lookup(rev))
    else:
        nodes = repo.heads()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node)
1263 1263
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # (title, options) pairs collected by the helpers below and
    # rendered at the very end of this function
    option_lists = []

    def addglobalopts(aliases):
        # append the global-options section (verbose) or a one-line hint
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # help for a single command; raises UnknownCommand via findcmd
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(select=None):
        # list commands from the global table, optionally filtered by
        # the select predicate (used for extension command lists)
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                # shortlist shows only commands marked with a leading "^"
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # help for a named topic from help.helptable; keys are
        # '|'-separated alias lists whose last element is the header
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # topics may be callables producing their text lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # help for an extension: its docstring plus its command table
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ui.status(_('no commands defined\n'))
            return

        ui.status(_('list of commands:\n\n'))
        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # try command, then topic, then extension; re-raise the last
        # UnknownCommand only if all three helpers failed
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands:\n\n'))
        else:
            ui.status(_('list of commands:\n\n'))

        helplist()

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        # align descriptions on the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s  %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1441 1441
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    # debug mode prints full 40-char hashes, otherwise the short form
    hexfunc = ui.debugflag and hex or short
    modified, added, removed, deleted = repo.status()[:4]
    dirty = (modified or added or removed or deleted) and "+" or ""
    output = ['+'.join([hexfunc(p) for p in parents]) + dirty]

    if not ui.quiet:

        branch = util.tolocal(repo.workingctx().branch())
        if branch != 'default':
            output.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1476 1476
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    # --exact always requires a clean working directory; otherwise
    # only when -f/--force was not given
    if opts.get('exact') or not opts['force']:
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    # hold both the working-dir and repository locks for the whole run
    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            # commit message priority: -m/-l option, then the message
            # embedded in the patch, then fall back to the editor
            cmdline_message = logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            wp = repo.workingctx().parents()
            if opts.get('exact'):
                # --exact: position the working dir on the parents
                # recorded in the patch before applying
                if not nodeid or not p1:
                    raise util.Abort(_('not a mercurial patch'))
                p1 = repo.lookup(p1)
                p2 = repo.lookup(p2 or hex(nullid))

                if p1 != wp[0].node():
                    hg.clean(repo, p1, wlock=wlock)
                repo.dirstate.setparents(p1, p2)
                repo.dirstate.setbranch(branch or 'default')
            elif p2:
                # best effort: adopt the patch's parents when they are
                # known and match the working directory
                try:
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2)
                    if p1 == wp[0].node():
                        repo.dirstate.setparents(p1, p2)
                except hg.RepoError:
                    pass

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                # record adds/removes even if the patch partially failed
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
            if opts.get('exact'):
                # verify the resulting changeset matches the recorded id,
                # rolling back the commit if it does not
                if hex(n) != nodeid:
                    repo.rollback(wlock=wlock, lock=lock)
                    raise util.Abort(_('patch is damaged or loses information'))
        finally:
            os.unlink(tmpname)
1574 1574
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        # restricting to --rev requires the remote to support lookup
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # nothing to report; remove a stale --bundle file if present
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # cleanup holds the path of a temporary bundle to delete on exit
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # close the (possibly bundle-backed) remote and drop the temp file
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1641 1641
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # NOTE(review): presumably propagates remote-access options (ssh,
    # remotecmd) into ui before dest is contacted -- confirm in helper
    setremoteconfig(ui, opts)
    # create=1 asks hg.repository to initialize rather than open dest
    hg.repository(ui, dest, create=1)
1656 1656
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 terminates each name with NUL instead of newline
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    found = False
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=util.always,
                                             default='relglob'):
        # skip bad matches and, without a revision, unknown files
        if src == 'b':
            continue
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            out = os.path.join(repo.root, abs)
        else:
            out = (pats and rel) or abs
        ui.write(out, end)
        found = True

    # exit status 0 when at least one file was printed
    return (found and 0) or 1
1696 1696
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoize changeset data: the same rev may be consulted several
    # times below (date filter, keyword filter, copies).
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # no --limit: effectively unbounded
        limit = sys.maxint
    count = 0

    # endrev bounds how far the rename-lookup cache below needs to scan.
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}   # fn -> {changerev: rename info}
    ncache = {}   # fn -> {filenode: rename info}
    dcache = []   # one-entry cache: [manifest node, its readdelta()]
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first sight of fn: scan its whole filelog once
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        # linkrev != rev (e.g. the file was touched in several csets):
        # fall back to looking the filenode up in the manifest.
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # walkchangerevs yields ('window', ...), ('add', rev, fns),
    # ('iter', rev, None) events; we render on 'add', flush on 'iter'.
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # keep the rev only if every keyword matches user,
                # description or the files list (case-insensitive)
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1818 1818
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """
    # Emit the manifest entries in sorted filename order, with optional
    # hash (--debug) and permission (-v) columns prepended.
    m = repo.changectx(rev).manifest()
    names = m.keys()
    names.sort()
    for name in names:
        if ui.debugflag:
            ui.write("%40s " % hex(m[name]))
        if ui.verbose:
            perms = m.execf(name) and "755" or "644"
            ui.write("%3s " % perms)
        ui.write("%s\n" % name)
1843 1843
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """
    # --rev and a positional revision are mutually exclusive.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    node = node or rev

    if not node:
        # No revision given: merge with the single other head, if any.
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # pick whichever of the two heads is not the working dir parent
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1879 1879
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # Resolve the destination (default-push falls back to default) and
    # split off any #rev fragment into revs.
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    # restrict the outgoing set to ancestors of the requested revs
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
1910 1910
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    # With --rev, show the parents of that revision (optionally scoped
    # to a file's history); otherwise show the dirstate parents.
    rev = opts.get('rev')
    if not rev:
        pnodes = repo.dirstate.parents()
    else:
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        pnodes = [pctx.node() for pctx in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for pnode in pnodes:
        if pnode != nullid:
            displayer.show(changenode=pnode)
1930 1930
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    pathitems = ui.configitems("paths")
    if not search:
        # no argument: list every configured path
        for name, path in pathitems:
            ui.write("%s = %s\n" % (name, path))
        return
    # look up the single requested name
    for name, path in pathitems:
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
1950 1950
def postincoming(ui, repo, modheads, optupdate):
    # After a pull/unbundle added modheads new heads, either update the
    # working dir (single new head and --update) or hint at the next step.
    if modheads == 0:
        return
    if optupdate and modheads == 1:
        return hg.update(repo, repo.changelog.tip()) # update
    if optupdate:
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1963 1963
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

    local/filesystem/path (or file://local/filesystem/path)
    http://[user@]host[:port]/[path]
    https://[user@]host[:port]/[path]
    ssh://[user@]host[:port]/[path]
    static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
    Use an extra slash at the start of a path to specify an absolute path:
    ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
    to do is to configure it in your ~/.ssh/config, e.g.:
    Host *.mylocalnetwork.example.com
    Compression no
    Host *
    Compression yes
    Alternatively specify "ssh -C" as your ssh command in your hgrc or
    with the --ssh command line option.
    """
    # Split an optional #rev fragment off the source URL.
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    if revs:
        # translating symbolic revs to nodes needs the remote's help
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
2019 2019
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

    local/filesystem/path (or file://local/filesystem/path)
    ssh://[user@]host[:port]/[path]
    http://[user@]host[:port]/[path]
    https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # Resolve destination (default-push falls back to default) and split
    # off any #rev fragment.
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # repo.push returns 0 on success; invert to a shell-style exit code
    return r == 0
2060 2060
2061 2061 def rawcommit(ui, repo, *pats, **opts):
2062 2062 """raw commit interface (DEPRECATED)
2063 2063
2064 2064 (DEPRECATED)
2065 2065 Lowlevel commit, for use in helper scripts.
2066 2066
2067 2067 This command is not intended to be used by normal users, as it is
2068 2068 primarily useful for importing from other SCMs.
2069 2069
2070 2070 This command is now deprecated and will be removed in a future
2071 2071 release, please use debugsetparents and commit instead.
2072 2072 """
2073 2073
2074 2074 ui.warn(_("(the rawcommit command is deprecated)\n"))
2075 2075
2076 2076 message = logmessage(opts)
2077 2077
2078 2078 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2079 2079 if opts['files']:
2080 2080 files += open(opts['files']).read().splitlines()
2081 2081
2082 2082 parents = [repo.lookup(p) for p in opts['parent']]
2083 2083
2084 2084 try:
2085 2085 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2086 2086 except ValueError, inst:
2087 2087 raise util.Abort(str(inst))
2088 2088
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Nothing to recover: report failure; otherwise verify the result.
    if not repo.recover():
        return 1
    return hg.verify(repo)
2100 2100
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    # status() tuple is unpacked into per-state membership dicts
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    # NOTE: the loop variable 'exact' below shadows the dict above;
    # inside the loop it is the per-file exact-match flag from walk().
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            # -f on an added file drops the add instead of removing
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            continue
        elif abs in removed:
            continue
        if reason:
            # only warn when the user named the file explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2151 2151
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # A rename is a copy followed by removal of the copy sources.
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
    names = [abs for abs, rel, exact in copied]
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock=wlock)
    return errs
2176 2176
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    # --date is translated into a --rev; the two are exclusive.
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    # pmf (parent manifest) is loaded lazily further below; when the
    # target IS the parent, the target manifest doubles as it.
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}        # abs -> (rel, exact) for every name seen
    target_only = {}  # names present only in the target revision

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    def badmatch(path):
        # suppress "not found" noise for names (or directories of names)
        # already collected from the dirstate walk
        if path in names:
            return True
        path_ = path + '/'
        for f in names:
            if f.startswith(path_):
                return True
        return False

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=badmatch):
        if abs in names or src == 'b':
            continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    # each action is (list of files, status message template)
    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            # record the action and optionally back up the working copy
            xlist[0].append(abs)
            update[abs] = 1
            if (dobackup and not opts['no_backup'] and
                (os.path.islink(rel) or os.path.exists(rel))):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
                break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.changectx(parent).manifest()
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        # apply the collected actions to dirstate and working dir
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r
2341 2341
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work happens in localrepository.rollback()
    repo.rollback()
2370 2370
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2377 2377
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: run the ssh wire protocol over stdin/stdout instead of HTTP
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # forward command-line web options into the [web] config section
    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    # small adapter object consumed by cmdutil.service (init + run hooks)
    class service:
        def init(self):
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            # omit the port in the URL when serving on the default port 80
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/\n') %
                          (self.httpd.addr, self.httpd.port))
            else:
                ui.status(_('listening at http://%s/\n') % self.httpd.addr)

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2426 2426
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    # ignored/clean lists are only computed when requested (or with -A)
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # --print0 terminates entries with NUL for xargs -0 consumption
    end = opts['print0'] and '\0' or '\n'

    # show only the explicitly requested categories, or all defaults
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % util.pathto(repo.root, cwd, f))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                # show the copy source on the following line
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
                                        end))
2490 2490
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # these names have fixed meanings and may never be tag names
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        # positional REV is the old calling convention; still accepted
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # removal is recorded as tagging the null revision
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2537 2537
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    # full hashes in debug mode, short ones otherwise
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            # compute hn before the rev lookup so the except clause can
            # still show the hash when the node is missing
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # node not in the changelog: no revision number to show;
            # '?' padded to line up with the %5d field above
            r = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            # pad tag names to a 30-column field (locale-aware width)
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # the tip is the highest revision: nullrev (-1) + changeset count
    tiprev = nullrev + repo.changelog.count()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(tiprev)
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    if os.path.exists(fname):
        f = open(fname, "rb")
    else:
        # non-local names are fetched as a URL
        f = urllib.urlopen(fname)
    # close the bundle stream once it has been fully consumed by
    # addchangegroup (previously it was leaked)
    try:
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        f.close()
    return postincoming(ui, repo, modheads, opts['update'])
2581 2581
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # 'node' is the positional argument, 'rev' the -r option; at most
    # one may be given and they are folded into a single value
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not rev:
        rev = node

    if date:
        # a date spec and an explicit revision are mutually exclusive
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    # -C discards local modifications; plain update refuses to
    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2613 2613
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all the work is done in mercurial.hg; its result is the exit status
    return hg.verify(repo)
2625 2625
def version_(ui):
    """output version and copyright information"""
    banner = _("Mercurial Distributed SCM (version %s)\n")
    ui.write(banner % version.get_version())
    notice = _(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    # ui.status() suppresses the copyright notice in quiet mode
    ui.status(notice)
2636 2636
# Command options and aliases are listed here, alphabetically

# Each option tuple is (short name, long name, default value, help text).
# A default of None means a boolean flag; [] collects repeated values.

# options accepted by every command
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option groups, spliced into individual command entries below

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
# The command table maps "name|alias|..." keys to entries of the form
# (function, option list, synopsis string).  A leading '^' on the key
# marks the command for the short help list; it is not part of the name.
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts,
         _('hg backout [OPTION]... [-r] REV')),
    "branch": (branch,
               [('f', 'force', None,
                 _('set branch name even if it shadows an existing branch'))],
               _('hg branch [NAME]')),
    "branches": (branches, [], _('hg branches')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
         ] + remoteopts,
         _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as commiter')),
         ] + walkopts + commitopts,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debuginstall": (debuginstall, [], _('debuginstall')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
         ] + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-r REV]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated'))] + commitopts,
         _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
          ('', 'template', '', _('display with template')),
         ] + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list'))
         ] + commitopts,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove that has already occurred')),
          ('f', 'force', None, _('remove file even if modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "^revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('showconfig [-u] [NAME]...')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('m', 'message', '', _('message for tag commit log entry')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as commiter')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag'))],
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
3029 3029
# commands that never need a repository
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall")
# commands that use a repository when present but do not require one
optionalrepo = ("paths serve showconfig")
3033 3033
def findpossible(ui, cmd):
    """Map each matching command name to (aliases, command table entry).

    Exact alias matches always count; unless the ui.strict config option
    is set, alias prefixes match too.  Debug commands (or their aliases)
    are only returned when no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for entry in table.keys():
        aliases = entry.lstrip("^").split("|")
        match = None
        if cmd in aliases:
            match = cmd
        elif not ui.config("ui", "strict"):
            for alias in aliases:
                if alias.startswith(cmd):
                    match = alias
                    break
        if match is None:
            continue
        if aliases[0].startswith("debug") or match.startswith("debug"):
            debugchoice[match] = (aliases, table[entry])
        else:
            choice[match] = (aliases, table[entry])

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice
3062 3062
def findcmd(ui, cmd):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand if cmd is a prefix of several commands and
    UnknownCommand if it matches nothing.
    """
    choice = findpossible(ui, cmd)

    # exact match wins even when other commands share the prefix
    # (idiom fix: 'in' instead of the deprecated dict.has_key())
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)
3079 3079
def catchterm(*args):
    # signal handler: turn termination signals into a Mercurial exception
    raise util.SignalInterrupt
3082 3082
def run():
    """Command-line entry point: dispatch argv and exit with its status."""
    sys.exit(dispatch(sys.argv[1:]))
3085 3085
class ParseError(Exception):
    """Exception raised on errors in parsing the command line."""
    # args is (command name or None, error message/GetoptError)
3088 3088
def parse(ui, args):
    """Parse the command line into (cmd, func, args, options, cmdoptions).

    cmd is the canonical command name (None when no command was given),
    func its table function (or None), args the remaining positional
    arguments, options the global option values and cmdoptions the
    command-specific option values.  Raises ParseError on bad options;
    findcmd() raises for unknown/ambiguous command names.
    """
    options = {}
    cmdoptions = {}

    try:
        # peel off global options appearing before the command name
        args = fancyopts.fancyopts(args, globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = findcmd(ui, cmd)
        cmd = aliases[0]
        # the [defaults] config section can inject extra arguments
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = shlex.split(defaults) + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3126 3126
# loaded extensions: maps extension name -> module name in sys.modules
external = {}
3128 3128
def findext(name):
    '''return module with given extension name'''
    # fast path: exact name registered and its module is loaded
    modname = external.get(name)
    if modname is not None and modname in sys.modules:
        return sys.modules[modname]
    # slow path: match by path suffix or module name
    for path, mod in external.items():
        if path.endswith('.' + name) or path.endswith('/' + name) or mod == name:
            return sys.modules[mod]
    raise KeyError(name)
3138 3138
def load_extensions(ui):
    """Import every extension enabled in ui's configuration.

    Newly imported extensions are recorded in 'external', their
    uisetup()/reposetup() hooks run/registered, and their cmdtable
    merged into the global command table.  Import failures are only
    fatal when ui.print_exc() says so (traceback mode).
    """
    added = []
    for ext_name, load_from_name in ui.extensions():
        if ext_name in external:
            continue
        try:
            if load_from_name:
                # the module will be loaded in sys.modules
                # choose a unique name so that it doesn't
                # conflict with other modules
                module_name = "hgext_%s" % ext_name.replace('.', '_')
                mod = imp.load_source(module_name, load_from_name)
            else:
                # helper: __import__ returns the top package; walk down
                # to the actual submodule
                def importh(name):
                    mod = __import__(name)
                    components = name.split('.')
                    for comp in components[1:]:
                        mod = getattr(mod, comp)
                    return mod
                # prefer the bundled hgext namespace, then the bare name
                try:
                    mod = importh("hgext.%s" % ext_name)
                except ImportError:
                    mod = importh(ext_name)
            external[ext_name] = mod.__name__
            added.append((mod, ext_name))
        except (util.SignalInterrupt, KeyboardInterrupt):
            raise
        except Exception, inst:
            ui.warn(_("*** failed to import extension %s: %s\n") %
                    (ext_name, inst))
            if ui.print_exc():
                return 1

    # run setup hooks only after all extensions imported successfully
    for mod, name in added:
        uisetup = getattr(mod, 'uisetup', None)
        if uisetup:
            uisetup(ui)
        reposetup = getattr(mod, 'reposetup', None)
        if reposetup:
            hg.repo_setup_hooks.append(reposetup)
        cmdtable = getattr(mod, 'cmdtable', {})
        overrides = [cmd for cmd in cmdtable if cmd in table]
        if overrides:
            ui.warn(_("extension '%s' overrides commands: %s\n")
                    % (name, " ".join(overrides)))
        table.update(cmdtable)
3185 3185
def parseconfig(config):
    """parse the --config options from the command line

    Each item must look like 'section.name=value'; the parsed
    (section, name, value) triples are returned as a list.
    """
    result = []
    for item in config:
        try:
            fullname, val = item.split('=', 1)
            sect, key = fullname.split('.', 1)
        except ValueError:
            raise util.Abort(_('malformed --config option: %s') % item)
        if not sect or not key:
            raise util.Abort(_('malformed --config option: %s') % item)
        result.append((sect, key, val))
    return result
3199 3199
3200 3200 def dispatch(args):
3201 3201 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3202 3202 num = getattr(signal, name, None)
3203 3203 if num: signal.signal(num, catchterm)
3204 3204
3205 3205 try:
3206 3206 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3207 3207 except util.Abort, inst:
3208 3208 sys.stderr.write(_("abort: %s\n") % inst)
3209 3209 return -1
3210 3210
3211 3211 load_extensions(u)
3212 3212 u.addreadhook(load_extensions)
3213 3213
3214 3214 try:
3215 3215 cmd, func, args, options, cmdoptions = parse(u, args)
3216 3216 if options["encoding"]:
3217 3217 util._encoding = options["encoding"]
3218 3218 if options["encodingmode"]:
3219 3219 util._encodingmode = options["encodingmode"]
3220 3220 if options["time"]:
3221 3221 def get_times():
3222 3222 t = os.times()
3223 3223 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3224 3224 t = (t[0], t[1], t[2], t[3], time.clock())
3225 3225 return t
3226 3226 s = get_times()
3227 3227 def print_time():
3228 3228 t = get_times()
3229 3229 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3230 3230 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3231 3231 atexit.register(print_time)
3232 3232
3233 3233 # enter the debugger before command execution
3234 3234 if options['debugger']:
3235 3235 pdb.set_trace()
3236 3236
3237 3237 try:
3238 3238 if options['cwd']:
3239 3239 os.chdir(options['cwd'])
3240 3240
3241 3241 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3242 3242 not options["noninteractive"], options["traceback"],
3243 3243 parseconfig(options["config"]))
3244 3244
3245 3245 path = u.expandpath(options["repository"]) or ""
3246 3246 repo = path and hg.repository(u, path=path) or None
3247 3247 if repo and not repo.local():
3248 3248 raise util.Abort(_("repository '%s' is not local") % path)
3249 3249
3250 3250 if options['help']:
3251 3251 return help_(u, cmd, options['version'])
3252 3252 elif options['version']:
3253 3253 return version_(u)
3254 3254 elif not cmd:
3255 3255 return help_(u, 'shortlist')
3256 3256
3257 3257 if cmd not in norepo.split():
3258 3258 try:
3259 3259 if not repo:
3260 3260 repo = hg.repository(u, path=path)
3261 3261 u = repo.ui
3262 3262 except hg.RepoError:
3263 3263 if cmd not in optionalrepo.split():
3264 3264 raise
3265 3265 d = lambda: func(u, repo, *args, **cmdoptions)
3266 3266 else:
3267 3267 d = lambda: func(u, *args, **cmdoptions)
3268 3268
3269 3269 try:
3270 3270 if options['profile']:
3271 3271 import hotshot, hotshot.stats
3272 3272 prof = hotshot.Profile("hg.prof")
3273 3273 try:
3274 3274 try:
3275 3275 return prof.runcall(d)
3276 3276 except:
3277 3277 try:
3278 3278 u.warn(_('exception raised - generating '
3279 3279 'profile anyway\n'))
3280 3280 except:
3281 3281 pass
3282 3282 raise
3283 3283 finally:
3284 3284 prof.close()
3285 3285 stats = hotshot.stats.load("hg.prof")
3286 3286 stats.strip_dirs()
3287 3287 stats.sort_stats('time', 'calls')
3288 3288 stats.print_stats(40)
3289 3289 elif options['lsprof']:
3290 3290 try:
3291 3291 from mercurial import lsprof
3292 3292 except ImportError:
3293 3293 raise util.Abort(_(
3294 3294 'lsprof not available - install from '
3295 3295 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3296 3296 p = lsprof.Profiler()
3297 3297 p.enable(subcalls=True)
3298 3298 try:
3299 3299 return d()
3300 3300 finally:
3301 3301 p.disable()
3302 3302 stats = lsprof.Stats(p.getstats())
3303 3303 stats.sort()
3304 3304 stats.pprint(top=10, file=sys.stderr, climit=5)
3305 3305 else:
3306 3306 return d()
3307 3307 finally:
3308 3308 u.flush()
3309 3309 except:
3310 3310 # enter the debugger when we hit an exception
3311 3311 if options['debugger']:
3312 3312 pdb.post_mortem(sys.exc_info()[2])
3313 3313 u.print_exc()
3314 3314 raise
3315 3315 except ParseError, inst:
3316 3316 if inst.args[0]:
3317 3317 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3318 3318 help_(u, inst.args[0])
3319 3319 else:
3320 3320 u.warn(_("hg: %s\n") % inst.args[1])
3321 3321 help_(u, 'shortlist')
3322 3322 except AmbiguousCommand, inst:
3323 3323 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3324 3324 (inst.args[0], " ".join(inst.args[1])))
3325 3325 except UnknownCommand, inst:
3326 3326 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3327 3327 help_(u, 'shortlist')
3328 3328 except hg.RepoError, inst:
3329 3329 u.warn(_("abort: %s!\n") % inst)
3330 3330 except lock.LockHeld, inst:
3331 3331 if inst.errno == errno.ETIMEDOUT:
3332 3332 reason = _('timed out waiting for lock held by %s') % inst.locker
3333 3333 else:
3334 3334 reason = _('lock held by %s') % inst.locker
3335 3335 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3336 3336 except lock.LockUnavailable, inst:
3337 3337 u.warn(_("abort: could not lock %s: %s\n") %
3338 3338 (inst.desc or inst.filename, inst.strerror))
3339 3339 except revlog.RevlogError, inst:
3340 3340 u.warn(_("abort: %s!\n") % inst)
3341 3341 except util.SignalInterrupt:
3342 3342 u.warn(_("killed!\n"))
3343 3343 except KeyboardInterrupt:
3344 3344 try:
3345 3345 u.warn(_("interrupted!\n"))
3346 3346 except IOError, inst:
3347 3347 if inst.errno == errno.EPIPE:
3348 3348 if u.debugflag:
3349 3349 u.warn(_("\nbroken pipe\n"))
3350 3350 else:
3351 3351 raise
3352 3352 except socket.error, inst:
3353 3353 u.warn(_("abort: %s\n") % inst[1])
3354 3354 except IOError, inst:
3355 3355 if hasattr(inst, "code"):
3356 3356 u.warn(_("abort: %s\n") % inst)
3357 3357 elif hasattr(inst, "reason"):
3358 3358 try: # usually it is in the form (errno, strerror)
3359 3359 reason = inst.reason.args[1]
3360 3360 except: # it might be anything, for example a string
3361 3361 reason = inst.reason
3362 3362 u.warn(_("abort: error: %s\n") % reason)
3363 3363 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3364 3364 if u.debugflag:
3365 3365 u.warn(_("broken pipe\n"))
3366 3366 elif getattr(inst, "strerror", None):
3367 3367 if getattr(inst, "filename", None):
3368 3368 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3369 3369 else:
3370 3370 u.warn(_("abort: %s\n") % inst.strerror)
3371 3371 else:
3372 3372 raise
3373 3373 except OSError, inst:
3374 3374 if getattr(inst, "filename", None):
3375 3375 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3376 3376 else:
3377 3377 u.warn(_("abort: %s\n") % inst.strerror)
3378 3378 except util.UnexpectedOutput, inst:
3379 3379 u.warn(_("abort: %s") % inst[0])
3380 3380 if not isinstance(inst[1], basestring):
3381 3381 u.warn(" %r\n" % (inst[1],))
3382 3382 elif not inst[1]:
3383 3383 u.warn(_(" empty string\n"))
3384 3384 else:
3385 3385 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3386 3386 except util.Abort, inst:
3387 3387 u.warn(_("abort: %s\n") % inst)
3388 3388 except TypeError, inst:
3389 3389 # was this an argument error?
3390 3390 tb = traceback.extract_tb(sys.exc_info()[2])
3391 3391 if len(tb) > 2: # no
3392 3392 raise
3393 3393 u.debug(inst, "\n")
3394 3394 u.warn(_("%s: invalid arguments\n") % cmd)
3395 3395 help_(u, cmd)
3396 3396 except SystemExit, inst:
3397 3397 # Commands shouldn't sys.exit directly, but give a return code.
3398 3398 # Just in case catch this and and pass exit code to caller.
3399 3399 return inst.code
3400 3400 except:
3401 3401 u.warn(_("** unknown exception encountered, details follow\n"))
3402 3402 u.warn(_("** report bug details to "
3403 3403 "http://www.selenic.com/mercurial/bts\n"))
3404 3404 u.warn(_("** or mercurial@selenic.com\n"))
3405 3405 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3406 3406 % version.get_version())
3407 3407 raise
3408 3408
3409 3409 return -1
@@ -1,243 +1,243 b''
1 1 # hgweb/server.py - The standalone hg web server.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
10 10 from mercurial import ui, hg, util, templater
11 11 from hgweb_mod import hgweb
12 12 from hgwebdir_mod import hgwebdir
13 13 from request import wsgiapplication
14 14 from mercurial.i18n import gettext as _
15 15
def _splitURI(uri):
    """Split *uri* into (path, query) the way a CGI environment would.

    The path component is URL-unquoted (PATH_INFO convention); the query
    string is returned exactly as received (QUERY_STRING convention).
    """
    if '?' not in uri:
        return urllib.unquote(uri), ''
    path, query = uri.split('?', 1)
    return urllib.unquote(path), query
27 27
28 28 class _error_logger(object):
29 29 def __init__(self, handler):
30 30 self.handler = handler
31 31 def flush(self):
32 32 pass
33 33 def write(self, str):
34 34 self.writelines(str.split('\n'))
35 35 def writelines(self, seq):
36 36 for msg in seq:
37 37 self.handler.log_error("HG error: %s", msg)
38 38
class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
    """Request handler bridging BaseHTTPServer to the hgweb WSGI app.

    Builds a CGI/WSGI environ from the raw HTTP request, invokes
    self.server.reqmaker (a wsgiapplication), and streams the response
    back, emulating start_response()/write() by hand.
    """
    def __init__(self, *args, **kargs):
        # HTTP/1.1 so keep-alive works when a Content-Length is sent.
        self.protocol_version = 'HTTP/1.1'
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)

    def log_error(self, format, *args):
        # Common-log-style line into the server's configured error log.
        errorlog = self.server.errorlog
        errorlog.write("%s - - [%s] %s\n" % (self.client_address[0],
                                             self.log_date_time_string(),
                                             format % args))

    def log_message(self, format, *args):
        # Common-log-style line into the server's configured access log.
        accesslog = self.server.accesslog
        accesslog.write("%s - - [%s] %s\n" % (self.client_address[0],
                                              self.log_date_time_string(),
                                              format % args))

    def do_POST(self):
        """Run the WSGI app; swallow client EPIPE, 500 on anything else."""
        try:
            try:
                self.do_hgweb()
            except socket.error, inst:
                # A client hanging up mid-response is not an error worth
                # reporting; anything else propagates to the outer handler.
                if inst[0] != errno.EPIPE:
                    raise
        except StandardError, inst:
            self._start_response("500 Internal Server Error", [])
            self._write("Internal Server Error")
            tb = "".join(traceback.format_exception(*sys.exc_info()))
            self.log_error("Exception happened during processing request '%s':\n%s",
                           self.path, tb)

    def do_GET(self):
        # GET and POST are handled identically by the WSGI app.
        self.do_POST()

    def do_hgweb(self):
        """Assemble the WSGI environ and drive one request through the app."""
        path_info, query = _splitURI(self.path)

        env = {}
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['REQUEST_METHOD'] = self.command
        env['SERVER_NAME'] = self.server.server_name
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_URI'] = self.path
        env['PATH_INFO'] = path_info
        env['REMOTE_HOST'] = self.client_address[0]
        env['REMOTE_ADDR'] = self.client_address[0]
        if query:
            env['QUERY_STRING'] = query

        # mimetools message: typeheader is the raw header, type the parsed one.
        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        # Remaining headers become HTTP_* environ keys, per CGI convention.
        for header in [h for h in self.headers.keys() \
                       if h not in ('content-type', 'content-length')]:
            hkey = 'HTTP_' + header.replace('-', '_').upper()
            hval = self.headers.getheader(header)
            hval = hval.replace('\n', '').strip()
            if hval:
                env[hkey] = hval
        env['SERVER_PROTOCOL'] = self.request_version
        env['wsgi.version'] = (1, 0)
        env['wsgi.url_scheme'] = 'http'
        env['wsgi.input'] = self.rfile
        env['wsgi.errors'] = _error_logger(self)
        env['wsgi.multithread'] = isinstance(self.server,
                                             SocketServer.ThreadingMixIn)
        env['wsgi.multiprocess'] = isinstance(self.server,
                                              SocketServer.ForkingMixIn)
        env['wsgi.run_once'] = 0

        # Response bookkeeping used by _start_response/_write/send_headers.
        self.close_connection = True
        self.saved_status = None
        self.saved_headers = []
        self.sent_headers = False
        self.length = None
        req = self.server.reqmaker(env, self._start_response)
        for data in req:
            if data:
                self._write(data)

    def send_headers(self):
        """Flush the status line and headers saved by _start_response."""
        if not self.saved_status:
            raise AssertionError("Sending headers before start_response() called")
        saved_status = self.saved_status.split(None, 1)
        saved_status[0] = int(saved_status[0])
        self.send_response(*saved_status)
        # Without a Content-Length we cannot keep the connection alive.
        should_close = True
        for h in self.saved_headers:
            self.send_header(*h)
            if h[0].lower() == 'content-length':
                should_close = False
                self.length = int(h[1])
        # The value of the Connection header is a list of case-insensitive
        # tokens separated by commas and optional whitespace.
        if 'close' in [token.strip().lower() for token in
                       self.headers.get('connection', '').split(',')]:
            should_close = True
        if should_close:
            self.send_header('Connection', 'close')
        self.close_connection = should_close
        self.end_headers()
        self.sent_headers = True

    def _start_response(self, http_status, headers, exc_info=None):
        """WSGI start_response: record status/headers, return the writer."""
        code, msg = http_status.split(None, 1)
        code = int(code)
        self.saved_status = http_status
        # Hop-by-hop headers are managed by this handler, not the app.
        bad_headers = ('connection', 'transfer-encoding')
        self.saved_headers = [ h for h in headers \
                               if h[0].lower() not in bad_headers ]
        return self._write

    def _write(self, data):
        """WSGI write callable; sends headers lazily on first body byte."""
        if not self.saved_status:
            raise AssertionError("data written before start_response() called")
        elif not self.sent_headers:
            self.send_headers()
        if self.length is not None:
            # Enforce the advertised Content-Length to keep keep-alive sane.
            if len(data) > self.length:
                raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
            self.length = self.length - len(data)
        self.wfile.write(data)
        self.wfile.flush()
166 166
def create_server(ui, repo):
    """Build and return an HTTP(S is not handled here) server for hgweb.

    Reads the [web] section of *ui* for address/port/ipv6/webdir_conf and
    log destinations.  Serves a single repo, or many via webdir_conf.
    Raises util.Abort if the socket cannot be bound.
    """
    use_threads = True

    def openlog(opt, default):
        # '-' or empty means "use the default stream" (stdout/stderr).
        if opt and opt != '-':
            return open(opt, 'w')
        return default

    address = ui.config("web", "address", "")
    port = int(ui.config("web", "port", 8000))
    use_ipv6 = ui.configbool("web", "ipv6")
    webdir_conf = ui.config("web", "webdir_conf")
    accesslog = openlog(ui.config("web", "accesslog", "-"), sys.stdout)
    errorlog = openlog(ui.config("web", "errorlog", "-"), sys.stderr)

    # Fall back to forking (or serial) service if threading is unavailable.
    if use_threads:
        try:
            from threading import activeCount
        except ImportError:
            use_threads = False

    if use_threads:
        _mixin = SocketServer.ThreadingMixIn
    else:
        if hasattr(os, "fork"):
            _mixin = SocketServer.ForkingMixIn
        else:
            # No threads, no fork: plain single-request-at-a-time server.
            class _mixin:
                pass

    class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):

        # SO_REUSEADDR has broken semantics on windows
        if os.name == 'nt':
            allow_reuse_address = 0

        def __init__(self, *args, **kargs):
            BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
            self.accesslog = accesslog
            self.errorlog = errorlog
            self.daemon_threads = True
            # Deferred hgweb construction: the handler is built per server,
            # reopening the repository so each server has its own view.
            def make_handler():
                if webdir_conf:
                    hgwebobj = hgwebdir(webdir_conf, ui)
                elif repo is not None:
                    hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
                else:
                    raise hg.RepoError(_("There is no Mercurial repository here"
                                         " (.hg not found)"))
                return hgwebobj
            self.reqmaker = wsgiapplication(make_handler)

            # Resolve a printable hostname for the bound address.
            addr, port = self.socket.getsockname()[:2]
            if addr in ('0.0.0.0', '::'):
                addr = socket.gethostname()
            else:
                try:
                    addr = socket.gethostbyaddr(addr)[0]
                except socket.error:
                    pass
            self.addr, self.port = addr, port

    class IPv6HTTPServer(MercurialHTTPServer):
        address_family = getattr(socket, 'AF_INET6', None)

        def __init__(self, *args, **kwargs):
            if self.address_family is None:
                raise hg.RepoError(_('IPv6 not available on this system'))
            super(IPv6HTTPServer, self).__init__(*args, **kwargs)

    try:
        if use_ipv6:
            return IPv6HTTPServer((address, port), _hgwebhandler)
        else:
            return MercurialHTTPServer((address, port), _hgwebhandler)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: %s') % inst.args[1])
@@ -1,402 +1,402 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from remoterepo import *
11 11 from i18n import _
12 12 import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
13 13 import errno, keepalive, tempfile, socket, changegroup
14 14
class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
    """Password manager that falls back to prompting the user.

    Behaves like the stock urllib2 manager, but when no stored
    credentials match it asks interactively on the ui (or aborts if the
    ui is non-interactive), then caches the answer for the realm.
    """
    def __init__(self, ui):
        urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
        self.ui = ui

    def find_user_password(self, realm, authuri):
        # First try whatever was registered via add_password().
        authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
            self, realm, authuri)
        user, passwd = authinfo
        if user and passwd:
            return (user, passwd)

        # Cannot prompt: fail loudly instead of hanging on stdin.
        if not self.ui.interactive:
            raise util.Abort(_('http authorization required'))

        self.ui.write(_("http authorization required\n"))
        self.ui.status(_("realm: %s\n") % realm)
        if user:
            self.ui.status(_("user: %s\n") % user)
        else:
            user = self.ui.prompt(_("user:"), default=None)

        if not passwd:
            passwd = self.ui.getpass()

        # Remember the answer so retries within this session don't re-prompt.
        self.add_password(realm, authuri, user, passwd)
        return (user, passwd)
42 42
def netlocsplit(netloc):
    '''split [user[:passwd]@]host[:port] into 4-tuple.'''

    # Peel off the optional credentials before the '@'; each piece is
    # URL-unquoted.  Missing pieces come back as None.
    user = passwd = None
    at = netloc.find('@')
    if at != -1:
        userpass = netloc[:at]
        netloc = netloc[at+1:]
        colon = userpass.find(':')
        if colon == -1:
            user = urllib.unquote(userpass)
        else:
            user = urllib.unquote(userpass[:colon])
            passwd = urllib.unquote(userpass[colon+1:])

    # Whatever remains is host[:port]; the port stays a string.
    colon = netloc.find(':')
    if colon == -1:
        return netloc, None, user, passwd
    return netloc[:colon], netloc[colon+1:], user, passwd
63 63
def netlocunsplit(host, port, user=None, passwd=None):
    '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
    # Inverse of netlocsplit(): quote credentials, re-join host and port.
    if port:
        hostport = host + ':' + port
    else:
        hostport = host
    if not user:
        return hostport
    userpass = urllib.quote(user)
    if passwd:
        userpass = userpass + ':' + urllib.quote(passwd)
    return userpass + '@' + hostport
77 77
# work around a bug in Python < 2.4.2
# (it leaves a "\n" at the end of Proxy-authorization headers)
class request(urllib2.Request):
    # Strip the stray trailing newline those interpreters append before
    # the header is stored; all other headers pass through untouched.
    def add_header(self, key, val):
        if key.lower() == 'proxy-authorization':
            val = val.strip()
        return urllib2.Request.add_header(self, key, val)
85 85
class httpsendfile(file):
    # File object that knows its own size, so urllib2 can set a
    # Content-Length header when the body is streamed from disk.
    def __len__(self):
        return os.fstat(self.fileno()).st_size
89 89
def _gen_sendfile(connection):
    """Return a send() method for *connection* that can stream files.

    Plain data is passed straight through; an httpsendfile body is
    rewound and sent in chunks so large bundles never sit in memory.
    """
    def _sendfile(self, data):
        if not isinstance(data, httpsendfile):
            connection.send(self, data)
            return
        # if auth required, some data sent twice, so rewind here
        data.seek(0)
        for chunk in util.filechunkiter(data):
            connection.send(self, chunk)
    return _sendfile
101 101
class httpconnection(keepalive.HTTPConnection):
    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection)
105 105
class basehttphandler(keepalive.HTTPHandler):
    # Route plain http:// opens through the streaming-capable connection.
    def http_open(self, req):
        return self.do_open(httpconnection, req)
109 109
# HTTPS support is optional: Python may be built without SSL, in which
# case urllib2 has no HTTPSHandler and we fall back to http-only.
has_https = hasattr(urllib2, 'HTTPSHandler')
if has_https:
    class httpsconnection(httplib.HTTPSConnection):
        response_class = keepalive.HTTPResponse
        # must be able to send big bundle as stream.
        send = _gen_sendfile(httplib.HTTPSConnection)

    class httphandler(basehttphandler, urllib2.HTTPSHandler):
        def https_open(self, req):
            return self.do_open(httpsconnection, req)
else:
    # No SSL available: httphandler only handles http://.
    class httphandler(basehttphandler):
        pass
123 123
def zgenerator(f):
    """Yield the zlib-decompressed contents of file-like object *f*.

    A dropped HTTP connection mid-stream surfaces as an IOError rather
    than an httplib exception, so callers see a uniform failure mode.
    """
    decomp = zlib.decompressobj()
    try:
        for piece in util.filechunkiter(f):
            yield decomp.decompress(piece)
    except httplib.HTTPException:
        raise IOError(None, _('connection ended unexpectedly'))
    yield decomp.flush()
132 132
class httprepository(remoterepository):
    """Proxy for a remote repository reached over HTTP.

    Commands are encoded as query parameters on GET/POST requests; the
    constructor wires up proxy and authentication handlers into a global
    urllib2 opener.
    """
    def __init__(self, ui, path):
        self.path = path
        self.caps = None          # lazily fetched capability list
        self.handler = None       # keepalive handler, closed in __del__
        scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
        if query or frag:
            raise util.Abort(_('unsupported URL component: "%s"') %
                             (query or frag))
        if not urlpath: urlpath = '/'
        host, port, user, passwd = netlocsplit(netloc)

        # urllib cannot handle URLs with embedded user or passwd
        self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
                                         urlpath, '', ''))
        self.ui = ui

        proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
        # XXX proxyauthinfo = None
        self.handler = httphandler()
        handlers = [self.handler]

        if proxyurl:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            snpqf = urlparse.urlsplit(proxyurl)
            proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
            hpup = netlocsplit(proxynetloc)

            proxyhost, proxyport, proxyuser, proxypasswd = hpup
            if not proxyuser:
                proxyuser = ui.config("http_proxy", "user")
                proxypasswd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            no_list = [ "localhost", "127.0.0.1" ]
            no_list.extend([p.lower() for
                            p in ui.configlist("http_proxy", "no")])
            no_list.extend([p.strip().lower() for
                            p in os.getenv("no_proxy", '').split(',')
                            if p.strip()])
            # "http_proxy.always" config is for running tests on localhost
            if (not ui.configbool("http_proxy", "always") and
                host.lower() in no_list):
                ui.debug(_('disabling proxy for %s\n') % host)
            else:
                proxyurl = urlparse.urlunsplit((
                    proxyscheme, netlocunsplit(proxyhost, proxyport,
                                               proxyuser, proxypasswd or ''),
                    proxypath, proxyquery, proxyfrag))
                handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
                ui.debug(_('proxying through http://%s:%s\n') %
                         (proxyhost, proxyport))

        # urllib2 takes proxy values from the environment and those
        # will take precedence if found, so drop them
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if os.environ.has_key(env):
                    del os.environ[env]
            except OSError:
                pass

        passmgr = passwordmgr(ui)
        if user:
            ui.debug(_('http auth: user %s, password %s\n') %
                     (user, passwd and '*' * len(passwd) or 'not set'))
            passmgr.add_password(None, host, user, passwd or '')

        handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
                         urllib2.HTTPDigestAuthHandler(passmgr)))
        opener = urllib2.build_opener(*handlers)

        # 1.0 here is the _protocol_ version
        opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
        urllib2.install_opener(opener)

    def __del__(self):
        # Close any kept-alive connections owned by our handler.
        if self.handler:
            self.handler.close_all()
            self.handler = None

    def url(self):
        return self.path

    # look up capabilities only when needed

    def get_caps(self):
        # Cache the server's capability list; empty tuple on failure.
        if self.caps is None:
            try:
                self.caps = self.do_read('capabilities').split()
            except hg.RepoError:
                self.caps = ()
            self.ui.debug(_('capabilities: %s\n') %
                          (' '.join(self.caps or ['none'])))
        return self.caps

    capabilities = property(get_caps)

    def lock(self):
        raise util.Abort(_('operation not supported over http'))

    def do_cmd(self, cmd, **args):
        """Issue command *cmd* to the server; return the open response.

        'data' and 'headers' kwargs are the request body and extra
        headers; every other kwarg becomes a query parameter.  Raises
        util.Abort on auth failure and hg.RepoError when the response
        does not look like it came from an hg server.
        """
        data = args.pop('data', None)
        headers = args.pop('headers', {})
        self.ui.debug(_("sending %s command\n") % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (self._url, qs)
        try:
            if data:
                self.ui.debug(_("sending %s bytes\n") %
                              headers.get('content-length', 'X'))
            resp = urllib2.urlopen(request(cu, data, headers))
        except urllib2.HTTPError, inst:
            if inst.code == 401:
                raise util.Abort(_('authorization failed'))
            raise
        except httplib.HTTPException, inst:
            self.ui.debug(_('http error while sending %s command\n') % cmd)
            self.ui.print_exc()
            raise IOError(None, inst)
        except IndexError:
            # this only happens with Python 2.3, later versions raise URLError
            raise util.Abort(_('http error, possibly caused by proxy setting'))
        # record the url we got redirected to
        resp_url = resp.geturl()
        if resp_url.endswith(qs):
            resp_url = resp_url[:-len(qs)]
        if self._url != resp_url:
            self.ui.status(_('real URL is %s\n') % resp_url)
            self._url = resp_url
        try:
            proto = resp.getheader('content-type')
        except AttributeError:
            proto = resp.headers['content-type']

        # accept old "text/plain" and "application/hg-changegroup" for now
        if not proto.startswith('application/mercurial-') and \
               not proto.startswith('text/plain') and \
               not proto.startswith('application/hg-changegroup'):
            raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
                               self._url)

        if proto.startswith('application/mercurial-'):
            try:
                version = proto.split('-', 1)[1]
                version_info = tuple([int(n) for n in version.split('.')])
            except ValueError:
                raise hg.RepoError(_("'%s' sent a broken Content-type "
                                     "header (%s)") % (self._url, proto))
            if version_info > (0, 1):
                raise hg.RepoError(_("'%s' uses newer protocol %s") %
                                   (self._url, version))

        return resp

    def do_read(self, cmd, **args):
        """Run *cmd* and return the whole response body as a string."""
        fp = self.do_cmd(cmd, **args)
        try:
            return fp.read()
        finally:
            # if using keepalive, allow connection to be reused
            fp.close()

    def lookup(self, key):
        # Response line is "<success> <data>"; data is a hex node or an
        # error message depending on the success flag.
        d = self.do_cmd("lookup", key = key).read()
        success, data = d[:-1].split(' ', 1)
        if int(success):
            return bin(data)
        raise hg.RepoError(data)

    def heads(self):
        # Space-separated hex nodes, trailing newline stripped.
        d = self.do_read("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def branches(self, nodes):
        # One branch tuple per line, each a space-separated hex node list.
        n = " ".join(map(hex, nodes))
        d = self.do_read("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def between(self, pairs):
        # Pairs are sent as "hex-hex" lines; each response line is a
        # (possibly empty) space-separated node list.
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_read("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def changegroup(self, nodes, kind):
        # Server streams a zlib-compressed changegroup for the given roots.
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return util.chunkbuffer(zgenerator(f))

    def changegroupsubset(self, bases, heads, source):
        baselst = " ".join([hex(n) for n in bases])
        headlst = " ".join([hex(n) for n in heads])
        f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
        return util.chunkbuffer(zgenerator(f))

    def unbundle(self, cg, heads, source):
        """Push changegroup *cg* to the server; return the server's int reply."""
        # have to stream bundle to a temp file because we do not have
        # http 1.1 chunked transfer.

        type = ""
        types = self.capable('unbundle')
        # servers older than d1b16a746db6 will send 'unbundle' as a
        # boolean capability
        try:
            types = types.split(',')
        except AttributeError:
            types = [""]
        if types:
            # pick the first bundle type both sides understand
            for x in types:
                if x in changegroup.bundletypes:
                    type = x
                    break

        tempname = changegroup.writebundle(cg, None, type)
        fp = httpsendfile(tempname, "rb")
        try:
            try:
                rfp = self.do_cmd(
                    'unbundle', data=fp,
                    headers={'content-type': 'application/octet-stream'},
                    heads=' '.join(map(hex, heads)))
                try:
                    # first line is the return code, rest is server output
                    ret = int(rfp.readline())
                    self.ui.write(rfp.read())
                    return ret
                finally:
                    rfp.close()
            except socket.error, err:
                if err[0] in (errno.ECONNRESET, errno.EPIPE):
                    raise util.Abort(_('push failed: %s') % err[1])
                raise util.Abort(err[1])
        finally:
            # always clean up the temporary bundle file
            fp.close()
            os.unlink(tempname)

    def stream_out(self):
        return self.do_cmd('stream_out')
386 386
class httpsrepository(httprepository):
    """httprepository variant for https:// URLs; aborts without SSL support."""
    def __init__(self, ui, path):
        if not has_https:
            raise util.Abort(_('Python support for SSL and HTTPS '
                               'is not installed'))
        httprepository.__init__(self, ui, path)
393 393
def instance(ui, path, create):
    """Repository factory for http://, https:// and legacy hg:// URLs.

    Creating a new repository over HTTP is not possible, so *create*
    always aborts.  hg:// is rewritten to http:// with a deprecation
    warning.
    """
    if create:
        raise util.Abort(_('cannot create new http repository'))
    if path.startswith('hg:'):
        ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
        path = 'http:' + path[3:]
    klass = httprepository
    if path.startswith('https:'):
        klass = httpsrepository
    return klass(ui, path)
@@ -1,1953 +1,1953 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util
14 14
class localrepository(repo.repository):
    # wire-protocol capabilities this repository supports
    capabilities = ('lookup', 'changegroupsubset')
    # on-disk format requirements this class understands
    supported = ('revlogv1', 'store')

    def __del__(self):
        # drop the transaction handle reference on teardown
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create, initialize) the repository at ``path``.

        With no path, search upward from the cwd for a '.hg' directory.
        Raises repo.RepoError when no repository is found, when creating
        over an existing one, or when a format requirement is unsupported.
        """
        repo.repository.__init__(self)
        if not path:
            # walk up the directory tree looking for .hg
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    raise repo.RepoError(_("There is no Mercurial repository"
                                           " here (.hg not found)"))
            path = p

        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: with the "store" requirement, revlogs live under
        # .hg/store with encoded filenames
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            # a repository without an hgrc is fine
            pass

        self.changelog = changelog.changelog(self.sopener)
        self.sopener.defversion = self.changelog.version
        self.manifest = manifest.manifest(self.sopener)

        fallback = self.ui.config('ui', 'fallbackencoding')
        if fallback:
            util._fallbackencoding = fallback

        # lazily-populated caches
        self.tagscache = None
        self.branchcache = None
        self.nodetagscache = None
        self.filterpats = {}
        self.transhandle = None

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
106 106
107 107 def url(self):
108 108 return 'file:' + self.root
109 109
    def hook(self, name, throw=False, **args):
        """Run every configured hook for event ``name``.

        args reach python hooks as keywords and shell hooks as HG_*
        environment variables.  Returns the last truthy hook result;
        with throw=True a failing hook raises util.Abort instead.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            obj = funcname
            if not callable(obj):
                # resolve a dotted "module.attr" path to a callable
                d = funcname.rfind('.')
                if d == -1:
                    raise util.Abort(_('%s hook is invalid ("%s" not in '
                                       'a module)') % (hname, funcname))
                modname = funcname[:d]
                try:
                    obj = __import__(modname)
                except ImportError:
                    try:
                        # extensions are loaded with hgext_ prefix
                        obj = __import__("hgext_%s" % modname)
                    except ImportError:
                        raise util.Abort(_('%s hook is invalid '
                                           '(import of "%s" failed)') %
                                         (hname, modname))
                try:
                    for p in funcname.split('.')[1:]:
                        obj = getattr(obj, p)
                except AttributeError, err:
                    raise util.Abort(_('%s hook is invalid '
                                       '("%s" is not defined)') %
                                     (hname, funcname))
                if not callable(obj):
                    raise util.Abort(_('%s hook is invalid '
                                       '("%s" is not callable)') %
                                     (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                # show the traceback but treat it as a plain hook failure
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # shell hook: args are exported as HG_* environment variables
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # hooks for this event, in stable (sorted) order
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if callable(cmd):
                r = callhook(hname, cmd) or r
            elif cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
194 194
    # characters that may never appear in a tag name
    tag_disallowed = ':\r\n'

    def _tag(self, name, node, message, local, user, date, parent=None):
        """Shared worker for tag(): record tag ``name`` for ``node``."""
        use_dirstate = parent is None

        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        if local:
            # local tags are stored in the current charset
            self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        # committed tags are stored in UTF-8
        line = '%s %s\n' % (hex(node), util.fromlocal(name))
        if use_dirstate:
            self.wfile('.hgtags', 'ab').write(line)
        else:
            # base .hgtags on the explicit parent revision's copy
            ntags = self.filectx('.hgtags', parent).data()
            self.wfile('.hgtags', 'ab').write(ntags + line)
        if use_dirstate and self.dirstate.state('.hgtags') == '?':
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)

        self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
227 227
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # refuse to tag while .hgtags itself has uncommitted changes
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self._tag(name, node, message, local, user, date)
253 253
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        globaltags = {}

        def readtags(lines, fn):
            # parse "<hex-node> <tag>" lines from fn, merging them into
            # globaltags; fn is only used for warning messages
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # remember the node history of each tag within this file
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k,nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    continue
                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if bn != an and an in bh and \
                   (bn not in ah or len(bh) > len(ah)):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f)

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags")
        except IOError:
            pass

        # a tag resolving to nullid means "deleted"; drop those entries
        self.tagscache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
332 332
    def _hgtagsnodes(self):
        """Return [(rev, node, fnode)] for each head carrying .hgtags.

        Heads are visited in reverse order; when several heads share the
        same .hgtags file node, only the last occurrence is kept.
        """
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # drop the earlier entry for the same .hgtags revision
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
350 350
351 351 def tagslist(self):
352 352 '''return a list of tags ordered by revision'''
353 353 l = []
354 354 for t, n in self.tags().items():
355 355 try:
356 356 r = self.changelog.rev(n)
357 357 except:
358 358 r = -2 # sort to the beginning of the list if unknown
359 359 l.append((r, t, n))
360 360 l.sort()
361 361 return [(t, n) for r, t, n in l]
362 362
363 363 def nodetags(self, node):
364 364 '''return the tags associated with a node'''
365 365 if not self.nodetagscache:
366 366 self.nodetagscache = {}
367 367 for t, n in self.tags().items():
368 368 self.nodetagscache.setdefault(n, []).append(t)
369 369 return self.nodetagscache.get(node, [])
370 370
    def _branchtags(self):
        """Return the branch->node map, refreshing branch.cache on disk
        when new changesets have appeared since it was written."""
        partial, last, lrev = self._readbranchcache()

        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            # scan only the revisions the cache has not seen yet
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
380 380
    def branchtags(self):
        # return {branch name: node}, memoized on self.branchcache
        if self.branchcache is not None:
            return self.branchcache

        self.branchcache = {} # avoid recursion in changectx
        partial = self._branchtags()

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        return self.branchcache
393 393
    def _readbranchcache(self):
        """Read branch.cache; return (branchmap, tipnode, tiprev).

        Any read or parse problem yields an empty/invalid cache result
        rather than an error.
        """
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            # first line records the tip the cache was valid for
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('Invalid branch cache: unknown tip')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # corrupt cache: fall back to an empty one
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
421 421
    def _writebranchcache(self, branches, tip, tiprev):
        # best effort: a failed cache write must never abort the caller
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
431 431
432 432 def _updatebranchcache(self, partial, start, end):
433 433 for r in xrange(start, end):
434 434 c = self.changectx(r)
435 435 b = c.branch()
436 436 partial[b] = c.node()
437 437
    def lookup(self, key):
        """Resolve ``key`` ('.', 'null', node, tag, branch or partial
        node, tried in that order) to a binary node.

        Raises repo.RepoError when nothing matches.
        """
        if key == '.':
            key, second = self.dirstate.parents()
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
            if second != nullid:
                self.ui.warn(_("warning: working directory has two parents, "
                               "tag '.' uses the first\n"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        raise repo.RepoError(_("unknown revision '%s'") % key)
459 459
    def dev(self):
        # device number of the .hg directory
        return os.lstat(self.path).st_dev

    def local(self):
        # this is a local (not remote) repository
        return True

    def join(self, f):
        # path of f inside .hg
        return os.path.join(self.path, f)

    def sjoin(self, f):
        # path of f inside the store, with filename encoding applied
        f = self.encodefn(f)
        return os.path.join(self.spath, f)

    def wjoin(self, f):
        # path of f inside the working directory
        return os.path.join(self.root, f)

    def file(self, f):
        # return the filelog for tracked file f (leading '/' stripped)
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)
480 480
    def changectx(self, changeid=None):
        # context object for changeset ``changeid``
        return context.changectx(self, changeid)

    def workingctx(self):
        # context object for the working directory
        return context.workingctx(self)
486 486
487 487 def parents(self, changeid=None):
488 488 '''
489 489 get list of changectxs for parents of changeid or working directory
490 490 '''
491 491 if changeid is None:
492 492 pl = self.dirstate.parents()
493 493 else:
494 494 n = self.changelog.lookup(changeid)
495 495 pl = self.changelog.parents(n)
496 496 if pl[1] == nullid:
497 497 return [self.changectx(pl[0])]
498 498 return [self.changectx(pl[0]), self.changectx(pl[1])]
499 499
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        # current working directory, as reported by the dirstate
        return self.dirstate.getcwd()

    def wfile(self, f, mode='r'):
        # open file f from the working directory
        return self.wopener(f, mode)

    def _link(self, f):
        # is working-directory file f a symlink?
        return os.path.islink(self.wjoin(f))
513 513
    def _filter(self, filter, filename, data):
        """Run ``data`` through the first configured command in config
        section ``filter`` whose pattern matches ``filename``."""
        if filter not in self.filterpats:
            # compile and cache the (matcher, command) list for this section
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.filterpats[filter] = l

        for mf, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                # only the first matching filter applies
                break

        return data
529 529
    def wread(self, filename):
        # read working-directory data (the link target for symlinks),
        # applying the "encode" filters
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)
536 536
    def wwrite(self, filename, data, flags):
        """Write ``data`` into the working directory, applying the
        "decode" filters and honoring the 'l' (symlink) and 'x'
        (executable) flags."""
        data = self._filter("decode", filename, data)
        if "l" in flags:
            # materialize as a symlink whose target is the file data
            f = self.wjoin(filename)
            try:
                os.unlink(f)
            except OSError:
                pass
            d = os.path.dirname(f)
            if not os.path.exists(d):
                os.makedirs(d)
            os.symlink(data, f)
        else:
            try:
                # replace a stale symlink with a regular file
                if self._link(filename):
                    os.unlink(self.wjoin(filename))
            except OSError:
                pass
            self.wopener(filename, 'w').write(data)
            util.set_exec(self.wjoin(filename), "x" in flags)
557 557
558 558 def wwritedata(self, filename, data):
559 559 return self._filter("decode", filename, data)
560 560
    def transaction(self):
        """Start (or nest into) a repository transaction."""
        tr = self.transhandle
        if tr != None and tr.running():
            # reuse the transaction that is already in progress
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        # on close, journal files become the undo files used by rollback()
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self.transhandle = tr
        return tr
580 580
    def recover(self):
        """Roll back an interrupted transaction, if one exists.

        Returns True when a rollback was performed, False otherwise.
        """
        l = self.lock()
        if os.path.exists(self.sjoin("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"))
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
591 591
    def rollback(self, wlock=None, lock=None):
        """Undo the last completed transaction using the undo files."""
        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            # restore the pre-transaction dirstate as well
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))
605 605
    def wreload(self):
        # re-read the dirstate from disk
        self.dirstate.reload()

    def reload(self):
        # re-read changelog/manifest and invalidate the tag caches
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None
614 614
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire ``lockname``; when held and ``wait`` is set, retry
        with the configured (default 600s) timeout."""
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
630 630
    def lock(self, wait=1):
        # store (repository) lock; reloads metadata on acquisition
        return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)

    def wlock(self, wait=1):
        # working directory lock; writes the dirstate back on release
        return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
639 639
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the new file node (or the existing one when the file is
        unmodified); appends fn to changelist when a new revision is added.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
699 699
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
        # commit with explicit parents, bypassing the dirstate-derived ones
        # NOTE(review): the mutable {} default is shared across calls;
        # commit() copies it before mutating, so this is currently benign
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, wlock=wlock, extra=extra)
705 705
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):
        """Commit changes to the repository.

        Returns the new changeset node, or None when there is nothing to
        commit or the edited message is empty.
        """

        commit = []
        remove = []
        changed = []
        use_dirstate = (p1 is None) # not rawcommit
        extra = extra.copy()

        if use_dirstate:
            if files:
                # commit only the named files, classified by dirstate state
                for f in files:
                    s = self.dirstate.state(f)
                    if s in 'nmai':
                        commit.append(f)
                    elif s == 'r':
                        remove.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
            else:
                changes = self.status(match=match)[:5]
                modified, added, removed, deleted, unknown = changes
                commit = modified + added
                remove = removed
        else:
            commit = files

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True
        else:
            p1, p2 = p1, p2 or nullid
            update_dirstate = (self.dirstate.parents()[0] == p1)

        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        if use_dirstate:
            branchname = self.workingctx().branch()
            try:
                # branch names must be valid UTF-8
                branchname = branchname.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
        else:
            branchname = ""

        if use_dirstate:
            oldname = c1[5].get("branch") # stored in UTF-8
            if not commit and not remove and not force and p2 == nullid and \
               branchname == oldname:
                self.ui.status(_("nothing changed\n"))
                return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        is_exec = util.execfunc(self.root, m1.execf)
        is_link = util.linkfunc(self.root, m1.linkf)
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, is_exec(f), is_link(f))
            except (OSError, IOError):
                if use_dirstate:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                else:
                    # rawcommit: treat an unreadable file as removed
                    remove.append(f)

        # update manifest
        m1.update(new)
        remove.sort()
        removed = []

        for f in remove:
            if f in m1:
                del m1[f]
                removed.append(f)
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            # build the HG: commented template and run the user's editor
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            edittext.append("HG: user: %s" % user)
            if p2 != nullid:
                edittext.append("HG: branch merge")
            if branchname:
                edittext.append("HG: branch %s" % util.tolocal(branchname))
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in removed])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        # strip trailing whitespace and leading blank lines; an empty
        # message aborts the commit
        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        if self.branchcache and "branch" in extra:
            # keep the in-memory branch cache current
            self.branchcache[util.tolocal(extra["branch"])] = n

        if use_dirstate or update_dirstate:
            self.dirstate.setparents(n)
            if use_dirstate:
                self.dirstate.update(new, "n")
                self.dirstate.forget(removed)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
854 854
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was requested but not in the manifest
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.root, self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
896 896
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns (modified, added, removed, deleted, unknown, ignored,
        clean), each a sorted list of filenames.
        """

        def fcmp(fn, getnode):
            # compare working-dir contents of fn against its stored revision
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            # manifest of node restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        mywlock = False

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mnode = self.changelog.read(self.dirstate.parents()[0])[0]
                    getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
                                          nullid)
                    for f in lookup:
                        if fcmp(f, getnode):
                            modified.append(f)
                        else:
                            clean.append(f)
                            # opportunistically mark the file clean in the
                            # dirstate so future status calls skip it;
                            # grab the wlock lazily, and only if free
                            if not wlock and not mywlock:
                                mywlock = True
                                try:
                                    wlock = self.wlock(wait=0)
                                except lock.LockException:
                                    pass
                            if wlock:
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

            if mywlock and wlock:
                wlock.release()
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
                                                fcmp(fn, getnode))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever remains in mf1 was not in mf2: removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
1005 1005
    def add(self, list, wlock=None):
        """Schedule the given files for addition to the repository.

        list  - filenames relative to the repository root
        wlock - optional working-directory lock; one is acquired here
                if not supplied

        Files that do not exist, are not regular files or symlinks, or
        are already tracked are skipped with a warning on ui.
        """
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            # lstat so a symlink itself is examined, not its target
            islink = os.path.islink(p)
            size = os.lstat(p).st_size
            # warn about very large files, but still add them below
            if size > 10000000:
                self.ui.warn(_("%s: files over 10MB may cause memory and"
                               " performance problems\n"
                               "(use 'hg revert %s' to unadd the file)\n")
                             % (f, f))
            if not islink and not os.path.exists(p):
                self.ui.warn(_("%s does not exist!\n") % f)
            elif not islink and not os.path.isfile(p):
                self.ui.warn(_("%s not added: only files and symlinks "
                               "supported currently\n") % f)
            elif self.dirstate.state(f) in 'an':
                # 'a' = already scheduled for add, 'n' = already tracked
                self.ui.warn(_("%s already tracked!\n") % f)
            else:
                self.dirstate.update([f], "a")
1027 1027
1028 1028 def forget(self, list, wlock=None):
1029 1029 if not wlock:
1030 1030 wlock = self.wlock()
1031 1031 for f in list:
1032 1032 if self.dirstate.state(f) not in 'ai':
1033 1033 self.ui.warn(_("%s not added!\n") % f)
1034 1034 else:
1035 1035 self.dirstate.forget([f])
1036 1036
    def remove(self, list, unlink=False, wlock=None):
        """Schedule the given files for removal from the repository.

        unlink - if True, also delete the files from the working
                 directory (best effort; already-missing files are fine)
        wlock  - optional working-directory lock; acquired if missing
        """
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    # a file that is already gone is not an error
                    if inst.errno != errno.ENOENT:
                        raise
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if unlink and os.path.exists(self.wjoin(f)):
                # the unlink above did not take effect (e.g. permissions)
                self.ui.warn(_("%s still exists!\n") % f)
            elif self.dirstate.state(f) == 'a':
                # never committed: dropping the pending add is enough
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn(_("%s not tracked!\n") % f)
            else:
                # mark as removed for the next commit
                self.dirstate.update([f], "r")
1056 1056
1057 1057 def undelete(self, list, wlock=None):
1058 1058 p = self.dirstate.parents()[0]
1059 1059 mn = self.changelog.read(p)[0]
1060 1060 m = self.manifest.read(mn)
1061 1061 if not wlock:
1062 1062 wlock = self.wlock()
1063 1063 for f in list:
1064 1064 if self.dirstate.state(f) not in "r":
1065 1065 self.ui.warn("%s not removed!\n" % f)
1066 1066 else:
1067 1067 t = self.file(f).read(m[f])
1068 1068 self.wwrite(f, t, m.flags(f))
1069 1069 self.dirstate.update([f], "n")
1070 1070
1071 1071 def copy(self, source, dest, wlock=None):
1072 1072 p = self.wjoin(dest)
1073 1073 if not (os.path.exists(p) or os.path.islink(p)):
1074 1074 self.ui.warn(_("%s does not exist!\n") % dest)
1075 1075 elif not (os.path.isfile(p) or os.path.islink(p)):
1076 1076 self.ui.warn(_("copy failed: %s is not a file or a "
1077 1077 "symbolic link\n") % dest)
1078 1078 else:
1079 1079 if not wlock:
1080 1080 wlock = self.wlock()
1081 1081 if self.dirstate.state(dest) == '?':
1082 1082 self.dirstate.update([dest], "a")
1083 1083 self.dirstate.copy(source, dest)
1084 1084
1085 1085 def heads(self, start=None):
1086 1086 heads = self.changelog.heads(start)
1087 1087 # sort the output in rev descending order
1088 1088 heads = [(-self.changelog.rev(h), h) for h in heads]
1089 1089 heads.sort()
1090 1090 return [n for (r, n) in heads]
1091 1091
1092 1092 def branches(self, nodes):
1093 1093 if not nodes:
1094 1094 nodes = [self.changelog.tip()]
1095 1095 b = []
1096 1096 for n in nodes:
1097 1097 t = n
1098 1098 while 1:
1099 1099 p = self.changelog.parents(n)
1100 1100 if p[1] != nullid or p[0] == nullid:
1101 1101 b.append((t, n, p[0], p[1]))
1102 1102 break
1103 1103 n = p[0]
1104 1104 return b
1105 1105
1106 1106 def between(self, pairs):
1107 1107 r = []
1108 1108
1109 1109 for top, bottom in pairs:
1110 1110 n, l, i = top, [], 0
1111 1111 f = 1
1112 1112
1113 1113 while n != bottom:
1114 1114 p = self.changelog.parents(n)[0]
1115 1115 if i == f:
1116 1116 l.append(n)
1117 1117 f = f * 2
1118 1118 n = p
1119 1119 i += 1
1120 1120
1121 1121 r.append(l)
1122 1122
1123 1123 return r
1124 1124
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []       # (head, root) branch ranges still to bisect
        fetch = {}        # earliest-unknown nodes (the result set)
        seen = {}         # branch heads already examined
        seenbranch = {}   # branch tuples already scheduled for search
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # local repo is empty: everything remote has is missing here
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            # we already have every remote head: nothing incoming
            return []

        req = dict.fromkeys(unknown)  # nodes already requested from remote
        reqcnt = 0                    # round-trip counter (for debug output)

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # both parents known: the root is the earliest
                            # unknown node on this branch
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            for p in n[2:4]:
                                if p in m:
                                    base[p] = 1 # latest known

                    # queue unknown parents for the next batch of
                    # remote.branches() calls
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                            (reqcnt, " ".join(map(short, r))))
                # batch parent lookups ten at a time to bound request size
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most two: p is the earliest unknown
                        self.ui.debug(_("found new branch changeset %s\n") %
                                          short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        # narrow to the (p, i) subrange and bisect again
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            # only the null revision in common: unrelated repositories
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1265 1265
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        # start with every local node and carve away what remote has
        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
                if heads:
                    if p1 in heads:
                        updated_heads[p1] = True
                    if p2 in heads:
                        updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
1313 1313
    def pull(self, remote, heads=None, force=False, lock=None):
        """Pull changesets from remote into this repository.

        heads - optional remote heads to limit the pull to; requires the
                remote to advertise the 'changegroupsubset' capability
        force - proceed even if the repositories look unrelated
        lock  - optional repository lock; one is acquired (and released)
                here when not supplied

        Returns 0 when there is nothing to pull, otherwise the result
        of addchangegroup().
        """
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            # only release a lock we acquired ourselves
            if mylock:
                lock.release()
1339 1339
1340 1340 def push(self, remote, force=False, revs=None):
1341 1341 # there are two ways to push to remote repo:
1342 1342 #
1343 1343 # addchangegroup assumes local user can lock remote
1344 1344 # repo (local filesystem, old ssh servers).
1345 1345 #
1346 1346 # unbundle assumes local user cannot lock remote repo (new ssh
1347 1347 # servers, http servers).
1348 1348
1349 1349 if remote.capable('unbundle'):
1350 1350 return self.push_unbundle(remote, force, revs)
1351 1351 return self.push_addchangegroup(remote, force, revs)
1352 1352
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push to remote.

        Returns (changegroup, remote_heads) on success, or (None, 1)
        when there is nothing to push or the push would create new
        remote heads without force.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # pushing into an empty repo can never add heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            # no outgoing head descends from r, so r
                            # stays a head after the push
                            newheads.append(r)
                    else:
                        # remote head unknown locally: still a head
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1408 1408
1409 1409 def push_addchangegroup(self, remote, force, revs):
1410 1410 lock = remote.lock()
1411 1411
1412 1412 ret = self.prepush(remote, force, revs)
1413 1413 if ret[0] is not None:
1414 1414 cg, remote_heads = ret
1415 1415 return remote.addchangegroup(cg, 'push', self.url())
1416 1416 return ret[1]
1417 1417
1418 1418 def push_unbundle(self, remote, force, revs):
1419 1419 # local repo finds heads on server, finds out what revs it
1420 1420 # must push. once revs transferred, if server finds it has
1421 1421 # different heads (someone else won commit/push race), server
1422 1422 # aborts.
1423 1423
1424 1424 ret = self.prepush(remote, force, revs)
1425 1425 if ret[0] is not None:
1426 1426 cg, remote_heads = ret
1427 1427 if force: remote_heads = ['force']
1428 1428 return remote.unbundle(cg, remote_heads, 'push')
1429 1429 return ret[1]
1430 1430
1431 1431 def changegroupinfo(self, nodes):
1432 1432 self.ui.note(_("%d changesets found\n") % len(nodes))
1433 1433 if self.ui.debugflag:
1434 1434 self.ui.debug(_("List of changesets:\n"))
1435 1435 for node in nodes:
1436 1436 self.ui.debug("%s\n" % hex(node))
1437 1437
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns a util.chunkbuffer wrapping a generator of changegroup
        chunks; 'source' is passed to the preoutgoing/outgoing hooks."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1709 1709
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns a util.chunkbuffer wrapping a generator of changegroup
        chunks; 'source' is passed to the preoutgoing/outgoing hooks."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # all nodes descending from basenodes are going out
        nodes = cl.nodesbetween(basenodes, None)[0]
        # ersatz set of outgoing changelog revision numbers
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        # a changeset 'belongs' to itself, so its lookup is identity
        def identity(x):
            return x

        # yield the nodes of a revlog whose linked changeset is outgoing
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # collector callback: record every file touched by a changeset
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # map a revlog node to the changelog node that introduced it
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            # changelog chunks first (collecting changed files as we go)
            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            # then manifest chunks
            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            # finally one group per changed file, in sorted order
            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1776 1776
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            # progress callback for the changelog group: report each
            # changeset and return the revision number the following
            # manifest/file revisions will be linked to
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            # map a changelog node to its revision number (the linkrev
            # recorded in manifest and filelog entries)
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        cor = cl.count() - 1  # last revision before the group
        chunkiter = changegroup.chunkiter(source)
        if cl.addgroup(chunkiter, csmap, tr, 1) is None:
            raise util.Abort(_("received changelog group is empty"))
        cnr = cl.count() - 1  # last revision after the group
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        chunkiter = changegroup.chunkiter(source)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifest.addgroup(chunkiter, revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            f = changegroup.getchunk(source)
            if not f:
                # an empty chunk terminates the stream
                break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            chunkiter = changegroup.chunkiter(source)
            if fl.addgroup(chunkiter, revmap, tr) is None:
                raise util.Abort(_("received file revlog group is empty"))
            revisions += fl.count() - o
            files += 1

        # make changelog see real files again
        cl.finalize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            # pretxnchangegroup can still veto: it runs before tr.close()
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1873 1873
1874 1874
1875 1875 def stream_in(self, remote):
1876 1876 fp = remote.stream_out()
1877 1877 l = fp.readline()
1878 1878 try:
1879 1879 resp = int(l)
1880 1880 except ValueError:
1881 1881 raise util.UnexpectedOutput(
1882 1882 _('Unexpected response from remote server:'), l)
1883 1883 if resp == 1:
1884 1884 raise util.Abort(_('operation forbidden by server'))
1885 1885 elif resp == 2:
1886 1886 raise util.Abort(_('locking the remote repository failed'))
1887 1887 elif resp != 0:
1888 1888 raise util.Abort(_('the server sent an unknown error code'))
1889 1889 self.ui.status(_('streaming all changes\n'))
1890 1890 l = fp.readline()
1891 1891 try:
1892 1892 total_files, total_bytes = map(int, l.split(' ', 1))
1893 1893 except ValueError, TypeError:
1894 1894 raise util.UnexpectedOutput(
1895 1895 _('Unexpected response from remote server:'), l)
1896 1896 self.ui.status(_('%d files to transfer, %s of data\n') %
1897 1897 (total_files, util.bytecount(total_bytes)))
1898 1898 start = time.time()
1899 1899 for i in xrange(total_files):
1900 1900 # XXX doesn't support '\n' or '\r' in filenames
1901 1901 l = fp.readline()
1902 1902 try:
1903 1903 name, size = l.split('\0', 1)
1904 1904 size = int(size)
1905 1905 except ValueError, TypeError:
1906 1906 raise util.UnexpectedOutput(
1907 1907 _('Unexpected response from remote server:'), l)
1908 1908 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1909 1909 ofp = self.sopener(name, 'w')
1910 1910 for chunk in util.filechunkiter(fp, limit=size):
1911 1911 ofp.write(chunk)
1912 1912 ofp.close()
1913 1913 elapsed = time.time() - start
1914 1914 if elapsed <= 0:
1915 1915 elapsed = 0.001
1916 1916 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1917 1917 (util.bytecount(total_bytes), elapsed,
1918 1918 util.bytecount(total_bytes / elapsed)))
1919 1919 self.reload()
1920 1920 return len(self.heads()) + 1
1921 1921
1922 1922 def clone(self, remote, heads=[], stream=False):
1923 1923 '''clone remote repository.
1924 1924
1925 1925 keyword arguments:
1926 1926 heads: list of revs to clone (forces use of pull)
1927 1927 stream: use streaming clone if possible'''
1928 1928
1929 1929 # now, all clients that can request uncompressed clones can
1930 1930 # read repo formats supported by all servers that can serve
1931 1931 # them.
1932 1932
1933 1933 # if revlog format changes, client will have to check version
1934 1934 # and format flags on "stream" capability, and use
1935 1935 # uncompressed only if compatible.
1936 1936
1937 1937 if stream and not heads and remote.capable('stream'):
1938 1938 return self.stream_in(remote)
1939 1939 return self.pull(remote, heads)
1940 1940
def aftertrans(files):
    """Return a callback that renames every (src, dest) pair in files.

    The pairs are copied into a fresh list of plain tuples up front so
    the returned closure keeps no reference to the caller's objects
    (used to avoid circular references so destructors work).
    """
    pending = []
    for pair in files:
        pending.append(tuple(pair))

    def renameall():
        for source, destination in pending:
            util.rename(source, destination)
    return renameall
1948 1948
def instance(ui, path, create):
    # repository factory: strip an optional 'file:' scheme and open
    # (or, when create is true, create) a localrepository at that path
    return localrepository(ui, util.drop_scheme('file', path), create)
1951 1951
def islocal(path):
    """Report whether this repository type is local; it always is."""
    return True
@@ -1,674 +1,674 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 from node import *
10 10 import base85, cmdutil, mdiff, util, context, revlog
11 11 import cStringIO, email.Parser, os, popen2, re, sha
12 12 import sys, tempfile, zlib
13 13
14 14 # helper functions
15 15
def copyfile(src, dst, basedir=None):
    """Copy src to dst, both interpreted relative to basedir (cwd by
    default).  Creates missing parent directories of the destination
    and aborts rather than overwrite an existing destination."""
    base = basedir or os.getcwd()
    abssrc = os.path.join(base, src)
    absdst = os.path.join(base, dst)

    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    targetdir = os.path.dirname(absdst)
    if not os.path.isdir(targetdir):
        os.makedirs(targetdir)

    util.copyfile(abssrc, absdst)
30 30
31 31 # public functions
32 32
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''
    # (docstring fixed: the returned tuple has 8 items including branch)

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        message = msg['Subject']
        user = msg['From']
        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if message:
            if message.startswith('[PATCH'):
                # strip a leading "[PATCH ...]" tag from the subject
                pend = message.find(']')
                if pend >= 0:
                    message = message[pend+1:].lstrip()
            message = message.replace('\n\t', ' ')
            ui.debug('Subject: %s\n' % message)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')

        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                ignoretext = False

                ui.debug(_('found patch at byte %d\n') % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                if message:
                    cfp.write(message)
                    cfp.write('\n')
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch'):
                        ui.debug(_('patch generated by hg export\n'))
                        hgpatch = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                    elif hgpatch:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[10:])
                    elif line == '---' and 'git-send-email' in (
                        msg['X-Mailer'] or ''):
                        # fix: msg['X-Mailer'] is None when the header is
                        # missing, and "'x' in None" raises TypeError;
                        # fall back to '' so the test just fails
                        ignoretext = True
                    if not line.startswith('# ') and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except:
        # clean up the temp file before re-raising whatever went wrong
        tmpfp.close()
        os.unlink(tmpname)
        raise

    tmpfp.close()
    if not diffs_seen:
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None
    p1 = parents and parents.pop(0) or None
    p2 = parents and parents.pop(0) or None
    return tmpname, message, user, date, branch, nodeid, p1, p2
133 133
# bitmask values combined into the "dopatch" result of readgitpatch(),
# telling patch() what work the git patch requires
GP_PATCH = 1 << 0 # we have to run patch
GP_FILTER = 1 << 1 # there's some copy/rename operation
GP_BINARY = 1 << 2 # there's a binary patch
137 137
def readgitpatch(patchname):
    """extract git-style metadata about patches from <patchname>

    Returns (dopatch, gitpatches): dopatch is a bitmask of
    GP_PATCH/GP_FILTER/GP_BINARY, gitpatches a list of gitpatch
    objects in file order.
    """
    class gitpatch:
        "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
        def __init__(self, path):
            self.path = path        # destination path of this entry
            self.oldpath = None     # source path for COPY/RENAME
            self.mode = None        # mode from "new file/new mode" lines
            self.op = 'MODIFY'
            self.copymod = False    # True: copy must happen before patching
            self.lineno = 0         # line number of the "diff --git" header
            self.binary = False     # True for "GIT binary patch" hunks

    # Filter patch for git information
    gitre = re.compile('diff --git a/(.*) b/(.*)')
    pf = file(patchname)
    gp = None
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in pf:
        lineno += 1
        if line.startswith('diff --git'):
            m = gitre.match(line)
            if m:
                # a new file entry begins; flush the previous one
                if gp:
                    gitpatches.append(gp)
                src, dst = m.group(1, 2)
                gp = gitpatch(dst)
                gp.lineno = lineno
        elif gp:
            if line.startswith('--- '):
                # start of the unified hunk: metadata for this entry ends
                if gp.op in ('COPY', 'RENAME'):
                    gp.copymod = True
                    dopatch |= GP_FILTER
                gitpatches.append(gp)
                gp = None
                dopatch |= GP_PATCH
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                gp.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                gp.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                # last three octal digits carry the permission bits
                gp.mode = int(line.rstrip()[-3:], 8)
            elif line.startswith('new mode '):
                gp.mode = int(line.rstrip()[-3:], 8)
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    if not gitpatches:
        # not a git patch at all: a plain patch(1) run is all we need
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
206 206
def dogitpatch(patchname, gitpatches, cwd=None):
    """Preprocess git patch so that vanilla patch can handle it

    Binary hunks are decoded and written straight to their destination
    files; copies are performed here and their hunks rewritten to apply
    to the destination path.  Everything else is copied through to a
    new temporary patch file whose name is returned (caller unlinks).
    """
    def extractbin(fp):
        # decode one "GIT binary patch" literal section from fp;
        # returns (decoded text or None, number of lines consumed)
        i = [0] # yuck
        def readline():
            i[0] += 1
            return fp.readline().rstrip()
        line = readline()
        while line and not line.startswith('literal '):
            line = readline()
        if not line:
            return None, i[0]
        size = int(line[8:])
        dec = []
        line = readline()
        while line:
            # first char encodes the chunk length: 'A'-'Z' => 1..26,
            # 'a'-'z' => 27..52; the rest of the line is base85 data
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:])[:l])
            line = readline()
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise util.Abort(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        return text, i[0]

    pf = file(patchname)
    pfline = 1  # current line position in pf (1-based, matches gp.lineno)

    fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')

    try:
        for i in xrange(len(gitpatches)):
            p = gitpatches[i]
            if not p.copymod and not p.binary:
                continue

            # rewrite patch hunk
            while pfline < p.lineno:
                tmpfp.write(pf.readline())
                pfline += 1

            if p.binary:
                # write the decoded contents directly to the destination;
                # nothing is left for patch(1) to do for this file
                text, delta = extractbin(pf)
                if not text:
                    raise util.Abort(_('binary patch extraction failed'))
                pfline += delta
                if not cwd:
                    cwd = os.getcwd()
                absdst = os.path.join(cwd, p.path)
                basedir = os.path.dirname(absdst)
                if not os.path.isdir(basedir):
                    os.makedirs(basedir)
                out = file(absdst, 'wb')
                out.write(text)
                out.close()
            elif p.copymod:
                # perform the copy now, then rewrite the hunk so it
                # applies to the destination path only
                copyfile(p.oldpath, p.path, basedir=cwd)
                tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
                line = pf.readline()
                pfline += 1
                while not line.startswith('--- a/'):
                    tmpfp.write(line)
                    line = pf.readline()
                    pfline += 1
                tmpfp.write('--- a/%s\n' % p.path)

        # copy the rest of the patch through unchanged
        line = pf.readline()
        while line:
            tmpfp.write(line)
            line = pf.readline()
    except:
        # clean up the temp file before re-raising
        tmpfp.close()
        os.unlink(patchname)
        raise

    tmpfp.close()
    return patchname
289 289
def patch(patchname, ui, strip=1, cwd=None, files=None):
    """apply the patch <patchname> to the working directory.

    files, when given, is a dict the caller can inspect afterwards:
    it is filled with path -> (op, gitpatch-or-None) entries for every
    touched file.  Returns whether any hunk applied with fuzz.
    """
    # fix: the old default files={} was a shared mutable default that
    # this function mutates (files.setdefault / files[gp.path] = ...),
    # leaking entries between unrelated calls; use a None sentinel
    if files is None:
        files = {}

    # helper function
    def __patch(patchname):
        """patch and updates the files and fuzz variables"""
        fuzz = False

        args = []
        # honor an explicit patch program from hgrc, else gpatch/patch
        patcher = ui.config('ui', 'patch')
        patcher = ((patcher and util.find_exe(patcher)) or
                   util.find_exe('gpatch') or
                   util.find_exe('patch'))
        if not patcher:
            raise util.Abort(_('no patch command found in hgrc or PATH'))
        if util.needbinarypatch():
            args.append('--binary')

        if cwd:
            args.append('-d %s' % util.shellquote(cwd))
        fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                           util.shellquote(patchname)))

        # scrape patch(1)'s output for per-file status and fuzz reports
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parse_patch_output(line)
                printed_file = False
                files.setdefault(pf, (None, None))
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
        code = fp.close()
        if code:
            raise util.Abort(_("patch command failed: %s") %
                             util.explain_exit(code)[0])
        return fuzz

    (dopatch, gitpatches) = readgitpatch(patchname)
    for gp in gitpatches:
        files[gp.path] = (gp.op, gp)

    fuzz = False
    if dopatch:
        filterpatch = dopatch & (GP_FILTER | GP_BINARY)
        if filterpatch:
            # rewrite git copies/binary hunks into vanilla patch form
            patchname = dogitpatch(patchname, gitpatches, cwd=cwd)
        try:
            if dopatch & GP_PATCH:
                fuzz = __patch(patchname)
        finally:
            # the rewritten patch is a temp file we own
            if filterpatch:
                os.unlink(patchname)

    return fuzz
357 357
def diffopts(ui, opts=None, untrusted=False):
    """Build an mdiff.diffopts from command-line options and config.

    Each flag is taken from opts when set there, otherwise from the
    [diff] section of the configuration (read with the given trust
    level).
    """
    # fix: avoid a mutable default argument; None means "no options"
    if opts is None:
        opts = {}
    def get(key, name=None):
        return (opts.get(key) or
                ui.configbool('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
370 370
def updatedir(ui, repo, patches, wlock=None):
    '''Update dirstate after patch application according to metadata

    patches maps path -> (op, gitpatch-or-None) as filled in by
    patch().  Records copies/renames/removals in the repo, propagates
    executable bits, and returns the sorted list of affected files.
    '''
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        # addremove below wants paths relative to the current directory
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            copies.append((gp.oldpath, gp.path, gp.copymod))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path, gp.copymod))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    for src, dst, after in copies:
        if not after:
            # the patch itself did not copy the file, so do it now
            copyfile(src, dst, repo.root)
        repo.copy(src, dst, wlock=wlock)
    removes = removes.keys()
    if removes:
        removes.sort()
        repo.remove(removes, True, wlock=wlock)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            # propagate the executable bit from the git metadata
            x = gp.mode & 0100 != 0
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', x and 'x' or '')
            else:
                util.set_exec(dst, x)
    cmdutil.addremove(repo, cfiles, wlock=wlock)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    files.sort()

    return files
414 414
def b85diff(fp, to, tn):
    '''print base85-encoded binary diff'''
    # NOTE(review): the fp argument is unused here; it is kept so the
    # signature matches the call sites that pass the output file.

    def gitindex(text):
        # blob id exactly as git computes it: sha1("blob <len>\0<data>")
        if not text:
            return '0' * 40
        s = sha.new('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # git prefixes each data line with a length letter:
        # 'A'..'Z' for 1..26 bytes, 'a'..'z' for 27..52 bytes
        n = len(line)
        if n <= 26:
            prefix = chr(ord('A') + n - 1)
        else:
            prefix = chr(n - 26 + ord('a') - 1)
        return '%c%s\n' % (prefix, base85.b85encode(line, True))

    def chunk(text, csize=52):
        for start in xrange(0, len(text), csize):
            yield text[start:start + csize]

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        return ""

    # TODO: deltas
    pieces = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
              (tohash, tnhash, len(tn))]
    for data in chunk(zlib.compress(tn)):
        pieces.append(fmtline(data))
    pieces.append('\n')
    return ''.join(pieces)
452 452
def diff(repo, node1=None, node2=None, files=None, match=util.always,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # memoize changectx and filelog lookups: the rename search below
    # may revisit the same revisions for many files
    ccache = {}
    def getctx(r):
        if r not in ccache:
            ccache[r] = context.changectx(repo, r)
        return ccache[r]

    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = context.changectx(repo, node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, files, match=match)[:5]
    modified, added, removed, deleted, unknown = changes

    if not modified and not added and not removed:
        return

    if node2:
        ctx2 = context.changectx(repo, node2)
        execf2 = ctx2.manifest().execf
    else:
        ctx2 = context.workingctx(repo)
        execf2 = util.execfunc(repo.root, None)
        if execf2 is None:
            # filesystem without exec bit support: fall back to the
            # modes recorded in the parent manifest
            execf2 = ctx2.parents()[0].manifest().copy().execf

    # returns False if there was no rename between ctx1 and ctx2
    # returns None if the file was created between ctx1 and ctx2
    # returns the (file, node) present in ctx1 that was renamed to f in ctx2
    def renamed(f):
        startrev = ctx1.rev()
        c = ctx2
        crev = c.rev()
        if crev is None:
            # working directory: pretend it follows the tip
            crev = repo.changelog.count()
        orig = f
        # walk first parents back to ctx1, following rename sources
        while crev > startrev:
            if f in c.files():
                try:
                    src = getfilectx(f, c).renamed()
                except revlog.LookupError:
                    return None
                if src:
                    f = src[0]
            crev = c.parents()[0].rev()
            # try to reuse
            c = getctx(crev)
        if f not in man1:
            return None
        if f == orig:
            return False
        return f

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # precompute copy/rename sources for all added files
        copied = {}
        for f in added:
            src = renamed(f)
            if src:
                copied[f] = src
        srcs = [x[1] for x in copied.items()]

    all = modified + added + removed
    all.sort()
    gone = {}

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        if opts.git:
            def gitmode(x):
                return x and '100755' or '100644'
            def addmodehdr(header, omode, nmode):
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            a, b = f, f
            if f in added:
                mode = gitmode(execf2(f))
                if f in copied:
                    a = copied[f]
                    omode = gitmode(man1.execf(a))
                    addmodehdr(header, omode, mode)
                    # first use of a removed source is a rename, any
                    # further use is a copy
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                if f in srcs:
                    # covered by a rename header elsewhere; no hunk
                    dodiff = False
                else:
                    mode = gitmode(man1.execf(f))
                    header.append('deleted file mode %s\n' % mode)
            else:
                omode = gitmode(man1.execf(f))
                nmode = gitmode(execf2(f))
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(fp, to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     f, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
611 611
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    template names the per-revision output file (via cmdutil.make_file)
    unless an explicit fp is given; switch_parent diffs a merge against
    its second parent instead of the first; opts are diff options.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # emit one changeset: header lines, description, then the diff
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        # file-name sequence numbers are 1-based
        single(rev, seqno+1, fp)
652 652
def diffstat(patchlines):
    """Return diffstat(1) output for the given patch lines, or None.

    Returns None when diffstat is not installed or exits nonzero.
    The trailing "N files changed ..." summary line is moved to the
    front of the returned text.  Raises ValueError when the patch
    touches no files (historic behavior, preserved).
    """
    if not util.find_exe('diffstat'):
        return
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    # fix: adopt the descriptor immediately; the old code only called
    # os.fdopen after p.wait() succeeded, leaking fd on every early
    # return.  Our read offset stays at 0 while the shell writes the
    # file by name, so reading after wait() sees the full output.
    fp = os.fdopen(fd, 'r')
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        for line in patchlines:
            # equivalent of the old "print >> p.tochild, line"
            p.tochild.write(str(line) + '\n')
        p.tochild.close()
        if p.wait():
            return
        stat = []
        for line in fp:
            stat.append(line.lstrip())
        # move the summary line from the end to the front
        last = stat.pop()
        stat.insert(0, last)
        stat = ''.join(stat)
        if stat.startswith('0 files'):
            raise ValueError
        return stat
    finally:
        fp.close()
        # best-effort cleanup of the temp file
        try:
            os.unlink(name)
        except:
            pass
@@ -1,1524 +1,1524 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specfic implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 17 import os, threading, time, calendar, ConfigParser, locale, glob
18 18
19 19 try:
20 20 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
21 21 or "ascii"
22 22 except locale.Error:
23 23 _encoding = 'ascii'
24 24 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
25 25 _fallbackencoding = 'ISO-8859-1'
26 26
def tolocal(s):
    """
    Convert a string from internal UTF-8 to local encoding

    All internal strings should be UTF-8 but some repos before the
    implementation of locale support may contain latin1 or possibly
    other character sets. We attempt to decode everything strictly
    using UTF-8, then Latin-1, and failing that, we use UTF-8 and
    replace unknown characters.
    """
    for e in ('UTF-8', _fallbackencoding):
        try:
            u = s.decode(e) # attempt strict decoding
            return u.encode(_encoding, "replace")
        except LookupError, k:
            # _encoding itself names a codec python doesn't know about
            raise Abort(_("%s, please check your locale settings") % k)
        except UnicodeDecodeError:
            # not valid in this encoding; try the next candidate
            pass
    u = s.decode("utf-8", "replace") # last ditch
    return u.encode(_encoding, "replace")
47 47
def fromlocal(s):
    """
    Convert a string from the local character encoding to UTF-8

    We attempt to decode strings using the encoding mode set by
    HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
    characters will cause an error message. Other modes include
    'replace', which replaces unknown characters with a special
    Unicode character, and 'ignore', which drops the character.
    """
    try:
        return s.decode(_encoding, _encodingmode).encode("utf-8")
    except UnicodeDecodeError, inst:
        # show ~10 bytes of context around the offending byte
        sub = s[max(0, inst.start-10):inst.start+10]
        raise Abort("decoding near '%s': %s!" % (sub, inst))
    except LookupError, k:
        raise Abort(_("%s, please check your locale settings") % k)
65 65
def locallen(s):
    """Return the length in characters (not bytes) of a local string."""
    decoded = s.decode(_encoding, "replace")
    return len(decoded)
69 69
70 70 def localsub(s, a, b=None):
71 71 try:
72 72 u = s.decode(_encoding, _encodingmode)
73 73 if b is not None:
74 74 u = u[a:b]
75 75 else:
76 76 u = u[:a]
77 77 return u.encode(_encoding, _encodingmode)
78 78 except UnicodeDecodeError, inst:
79 79 sub = s[max(0, inst.start-10), inst.start+10]
80 80 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
81 81
# used by parsedate
# Formats are tried in order; 12-hour forms always pair %I with an
# explicit %p marker.
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )
108 108
# lower-precision formats accepted in addition to the defaults;
# presumably used by range-style date queries -- confirm with the
# parsedate callers
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
115 115
# NOTE(review): presumably raised from signal handlers installed by the
# command dispatcher; kept distinct from KeyboardInterrupt -- confirm
class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP."""
118 118
# differences from SafeConfigParser:
# - case-sensitive keys
# - allows values that are not strings (this means that you may not
#   be able to save the configuration to a file)
class configparser(ConfigParser.SafeConfigParser):
    def optionxform(self, optionstr):
        # identity transform: the base class lowercases option names,
        # we keep them case-sensitive
        return optionstr

    def set(self, section, option, value):
        # bypass SafeConfigParser.set, which type-checks for strings
        return ConfigParser.ConfigParser.set(self, section, option, value)

    def _interpolate(self, section, option, rawval, vars):
        # non-string values cannot contain %-interpolations; pass through
        if not isinstance(rawval, basestring):
            return rawval
        return ConfigParser.SafeConfigParser._interpolate(self, section,
                                                          option, rawval, vars)
135 135
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    # NOTE(review): the cache grows without bound; acceptable only for
    # short-lived processes -- confirm at call sites
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
154 154
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    (pout, pin) = popen2.popen2(cmd, -1, 'b')
    def writer():
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            if inst.errno != errno.EPIPE:
                # EPIPE only means the command stopped reading early
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    # (writing from a second thread avoids deadlocking when the pipe
    # buffers fill in both directions)
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f
174 174
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        # best-effort cleanup of both temp files
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass
201 201
# maps a filter-command prefix to the implementation handling it
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
206 206
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # NB: shadows the filter() builtin within this module
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    # no explicit scheme prefix: treat cmd as a shell pipe
    return pipefilter(s, cmd)
213 213
def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    # diff's rule: any NUL byte within the first 4k marks the data binary
    return bool(s) and '\0' in s[:4096]
219 219
def unique(g):
    """return the uniq elements of iterable g"""
    seen = {}
    out = []
    for item in g:
        if item in seen:
            continue
        seen[item] = 1
        out.append(item)
    return out
229 229
# the module's standard "clean failure" exception
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""
232 232
# Abort subclass: carries a fragment of unexpected command/server output
class UnexpectedOutput(Abort):
    """Raised to print an error with part of output and exit."""
235 235
def always(fn):
    """Match predicate accepting every filename."""
    return True

def never(fn):
    """Match predicate rejecting every filename."""
    return False
238 238
def expand_glob(pats):
    '''On Windows, expand the implicit globs in a list of patterns'''
    if os.name != 'nt':
        # posix shells expand globs before we ever see them
        return list(pats)
    expanded = []
    for pat in pats:
        kind, name = patkind(pat, None)
        if kind is not None:
            # explicitly-typed patterns are never expanded
            expanded.append(pat)
            continue
        matches = glob.glob(name)
        if matches:
            expanded.extend(matches)
        else:
            # if we couldn't expand the glob, just keep it around
            expanded.append(pat)
    return expanded
254 254
def patkind(name, dflt_pat='glob'):
    """Split a string into an optional pattern kind prefix and the
    actual pattern."""
    for kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'):
        if name.startswith(kind + ':'):
            # NB: returns a 2-element list here (split result) but a
            # tuple below; callers only ever unpack the pair
            return name.split(':', 1)
    return dflt_pat, name
261 261
def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    i, n = 0, len(pat)
    res = ''
    group = False
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' matches across directory separators
                i += 1
                res += '.*'
            else:
                # single '*' stops at '/'
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # character class: scan ahead for the closing bracket
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat '[' as a literal
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation '!' becomes regexp negation '^'
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation opens a non-capturing group
            group = True
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group = False
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # backslash escapes the next character, if any
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail
313 313
# characters whose presence marks a pattern component as a glob
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
315 315
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1: return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (windows): no relative path exists
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = n1.split(os.sep), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join((['..'] * len(a)) + b)
340 340
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    # interpret relative names against root/cwd
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        # fast path: name is lexically inside root
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)
388 388
def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
    # convenience wrapper around _matcher with untyped patterns as globs
    # (the [] defaults are shared but never mutated here)
    return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
391 391
def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
               globbed=False, default=None):
    # like matcher(), but for command-line arguments: expand_glob handles
    # the implicit globs windows shells leave unexpanded
    default = default or 'relpath'
    if default == 'relpath' and not globbed:
        names = expand_glob(names)
    return _matcher(canonroot, cwd, names, inc, exc, default, src)
398 398
def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            return re.compile(pat).match
        except re.error:
            # joined pattern failed to compile; recompile one at a time
            # to identify and report the faulty input pattern
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        # canonicalize each pattern and collect walk roots
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)
526 526
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # NOTE(review): extra quotes appear to work around cmd.exe
        # quote stripping -- confirm before touching
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # a ui-like object has warn(); otherwise treat onerr as
                # an exception class to raise
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        # restore every environment variable and the working directory
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
573 573
# os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    except OSError:
        # narrowed from a bare except: only stat failures mean
        # "does not exist"; anything else should propagate
        return False
    return True
582 582
def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError, err:
        # NOTE(review): 'err' is unused; any OSError takes the fallback path
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
600 600
def unlink(f):
    """Remove file f, then prune any directories its removal left empty."""
    os.unlink(f)
    parent = os.path.dirname(f)
    try:
        # removedirs walks upward, deleting each now-empty directory
        os.removedirs(parent)
    except OSError:
        # parent not empty (or already gone): leave it alone
        pass
609 609
def copyfile(src, dest):
    "copy a file, preserving mode"
    if os.path.islink(src):
        # preserve symlinks as symlinks rather than copying the target
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copymode(src, dest)
        except shutil.Error, inst:
            # surface copy failures as user-level errors
            raise Abort(str(inst))
624 624
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # only try hardlinks when src and dst live on the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if not os.path.isdir(src):
        # single file: link it, falling back to a plain copy
        if not hardlink:
            shutil.copy(src, dst)
            return
        try:
            os_link(src, dst)
        except (IOError, OSError):
            hardlink = False
            shutil.copy(src, dst)
        return

    # directory: recreate it and recurse over its entries
    os.mkdir(dst)
    for entry in os.listdir(src):
        copyfiles(os.path.join(src, entry), os.path.join(dst, entry),
                  hardlink)
647 647
def audit_path(path):
    """Abort if path contains dangerous components"""
    parts = os.path.normcase(path).split(os.sep)
    # reject drive-qualified paths, paths rooted at '.hg' or at the
    # filesystem root (empty first component), and any '..' component
    dangerous = (os.path.splitdrive(path)[0]
                 or parts[0] in ('.hg', '')
                 or os.pardir in parts)
    if dangerous:
        raise Abort(_("path contains illegal component: %s\n") % path)
654 654
655 655 def _makelock_file(info, pathname):
656 656 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
657 657 os.write(ld, info)
658 658 os.close(ld)
659 659
def _readlock_file(pathname):
    # lock files are tiny; read the whole contents
    return posixfile(pathname).read()
662 662
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    # lstat: count links of the entry itself, not a symlink's target
    st = os.lstat(pathname)
    return st.st_nlink
666 666
# use the real os.link where available; otherwise install a stub that
# always fails with a translated error
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
672 672
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # object has no fileno(); fall back to stat'ing it by name
        return os.stat(fp.name)
679 679
# plain file objects suffice on posix; the win32 branch below rebinds
# this to posixfile_nt when util_win32 is importable
posixfile = file
681 681
def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # field 3 of getwindowsversion() is the platform id; 1 is the
        # 9x/me product line
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # non-windows python: guess from os.name and the shell name
        return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
688 688
# hook used by getuser(); presumably installed by win32 support code
# when the pwd module is unavailable -- confirm
getuser_fallback = None
690 690
def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        if getuser_fallback:
            return getuser_fallback()
        # raised if win32api not available
        raise Abort(_('user name not available - set USERNAME '
                      'environment variable'))
702 702
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
    except ImportError:
        # no pwd module (e.g. windows): user name unknown
        return None
    if uid is None:
        uid = os.getuid()
    try:
        return pwd.getpwuid(uid)[0]
    except KeyError:
        # unknown uid: fall back to its numeric string
        return str(uid)
717 717
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
    except ImportError:
        # no grp module (e.g. windows): group name unknown
        return None
    if gid is None:
        gid = os.getgid()
    try:
        return grp.getgrgid(gid)[0]
    except KeyError:
        # unknown gid: fall back to its numeric string
        return str(gid)
732 732
733 733 # File system features
734 734
def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        # identical stat under a case-folded name => case-insensitive fs
        if s2 == s1:
            return False
        return True
    except OSError:
        # folded variant doesn't exist: filesystem is case-sensitive
        # (narrowed from a bare except, which also hid real errors)
        return True
754 754
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """
    # create a scratch file, flip its exec bits, and see whether the
    # change is actually reflected by the filesystem
    fh, fn = tempfile.mkstemp("", "", path)
    os.close(fh)
    m = os.stat(fn).st_mode
    os.chmod(fn, m ^ 0111)
    r = (os.stat(fn).st_mode != m)
    os.unlink(fn)
    return r
768 768
def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    if not checkexec(path):
        # filesystem can't track the exec bit; use the caller's fallback
        return fallback
    return lambda x: is_exec(os.path.join(path, x))
774 774
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    probe = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", probe)
        os.unlink(probe)
        return True
    except (OSError, AttributeError):
        # filesystem refused, or this platform lacks os.symlink entirely
        return False
786 786
def linkfunc(path, fallback):
    '''return an is_link() function with default to fallback'''
    if not checklink(path):
        # no symlink support here; defer to the caller's fallback
        return fallback
    return lambda x: os.path.islink(os.path.join(path, x))
792 792
# capture the process umask; os.umask can only read by writing, so set a
# throwaway value and immediately restore the original
_umask = os.umask(0)
os.umask(_umask)
795 795
def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    # only windows needs binary-mode patching by default
    return os.name == 'nt'
799 799
800 800 # Platform specific variants
801 801 if os.name == 'nt':
802 802 import msvcrt
803 803 nulldev = 'NUL:'
804 804
805 805 class winstdout:
806 806 '''stdout on windows misbehaves if sent through a pipe'''
807 807
808 808 def __init__(self, fp):
809 809 self.fp = fp
810 810
811 811 def __getattr__(self, key):
812 812 return getattr(self.fp, key)
813 813
814 814 def close(self):
815 815 try:
816 816 self.fp.close()
817 817 except: pass
818 818
819 819 def write(self, s):
820 820 try:
821 821 return self.fp.write(s)
822 822 except IOError, inst:
823 823 if inst.errno != 0: raise
824 824 self.close()
825 825 raise IOError(errno.EPIPE, 'Broken pipe')
826
826
827 827 def flush(self):
828 828 try:
829 829 return self.fp.flush()
830 830 except IOError, inst:
831 831 if inst.errno != errno.EINVAL: raise
832 832 self.close()
833 833 raise IOError(errno.EPIPE, 'Broken pipe')
834 834
835 835 sys.stdout = winstdout(sys.stdout)
836 836
837 837 def system_rcpath():
838 838 try:
839 839 return system_rcpath_win32()
840 840 except:
841 841 return [r'c:\mercurial\mercurial.ini']
842 842
843 843 def user_rcpath():
844 844 '''return os-specific hgrc search path to the user dir'''
845 845 try:
846 846 userrc = user_rcpath_win32()
847 847 except:
848 848 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
849 849 path = [userrc]
850 850 userprofile = os.environ.get('USERPROFILE')
851 851 if userprofile:
852 852 path.append(os.path.join(userprofile, 'mercurial.ini'))
853 853 return path
854 854
855 855 def parse_patch_output(output_line):
856 856 """parses the output produced by patch and returns the file name"""
857 857 pf = output_line[14:]
858 858 if pf[0] == '`':
859 859 pf = pf[1:-1] # Remove the quotes
860 860 return pf
861 861
862 862 def testpid(pid):
863 863 '''return False if pid dead, True if running or not known'''
864 864 return True
865 865
866 866 def set_exec(f, mode):
867 867 pass
868 868
869 869 def set_link(f, mode):
870 870 pass
871 871
872 872 def set_binary(fd):
873 873 msvcrt.setmode(fd.fileno(), os.O_BINARY)
874 874
875 875 def pconvert(path):
876 876 return path.replace("\\", "/")
877 877
878 878 def localpath(path):
879 879 return path.replace('/', '\\')
880 880
881 881 def normpath(path):
882 882 return pconvert(os.path.normpath(path))
883 883
884 884 makelock = _makelock_file
885 885 readlock = _readlock_file
886 886
887 887 def samestat(s1, s2):
888 888 return False
889 889
890 890 # A sequence of backslashes is special iff it precedes a double quote:
891 891 # - if there's an even number of backslashes, the double quote is not
892 892 # quoted (i.e. it ends the quoted region)
893 893 # - if there's an odd number of backslashes, the double quote is quoted
894 894 # - in both cases, every pair of backslashes is unquoted into a single
895 895 # backslash
896 896 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
897 897 # So, to quote a string, we must surround it in double quotes, double
898 898 # the number of backslashes that preceed double quotes and add another
899 899 # backslash before every double quote (being careful with the double
900 900 # quote we've appended to the end)
901 901 _quotere = None
902 902 def shellquote(s):
903 903 global _quotere
904 904 if _quotere is None:
905 905 _quotere = re.compile(r'(\\*)("|\\$)')
906 906 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
907 907
908 908 def explain_exit(code):
909 909 return _("exited with status %d") % code, code
910 910
911 911 # if you change this stub into a real check, please try to implement the
912 912 # username and groupname functions above, too.
913 913 def isowner(fp, st=None):
914 914 return True
915
915
916 916 def find_in_path(name, path, default=None):
917 917 '''find name in search path. path can be string (will be split
918 918 with os.pathsep), or iterable thing that returns strings. if name
919 919 found, return path to name. else return default. name is looked up
920 920 using cmd.exe rules, using PATHEXT.'''
921 921 if isinstance(path, str):
922 922 path = path.split(os.pathsep)
923
923
924 924 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
925 925 pathext = pathext.lower().split(os.pathsep)
926 926 isexec = os.path.splitext(name)[1].lower() in pathext
927
927
928 928 for p in path:
929 929 p_name = os.path.join(p, name)
930
930
931 931 if isexec and os.path.exists(p_name):
932 932 return p_name
933
933
934 934 for ext in pathext:
935 935 p_name_ext = p_name + ext
936 936 if os.path.exists(p_name_ext):
937 937 return p_name_ext
938 938 return default
939 939
940 940 try:
941 941 # override functions with win32 versions if possible
942 942 from util_win32 import *
943 943 if not is_win_9x():
944 944 posixfile = posixfile_nt
945 945 except ImportError:
946 946 pass
947 947
948 948 else:
949 949 nulldev = '/dev/null'
950 950
951 951 def rcfiles(path):
952 952 rcs = [os.path.join(path, 'hgrc')]
953 953 rcdir = os.path.join(path, 'hgrc.d')
954 954 try:
955 955 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
956 956 if f.endswith(".rc")])
957 957 except OSError:
958 958 pass
959 959 return rcs
960 960
961 961 def system_rcpath():
962 962 path = []
963 963 # old mod_python does not set sys.argv
964 964 if len(getattr(sys, 'argv', [])) > 0:
965 965 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
966 966 '/../etc/mercurial'))
967 967 path.extend(rcfiles('/etc/mercurial'))
968 968 return path
969 969
970 970 def user_rcpath():
971 971 return [os.path.expanduser('~/.hgrc')]
972 972
973 973 def parse_patch_output(output_line):
974 974 """parses the output produced by patch and returns the file name"""
975 975 pf = output_line[14:]
976 976 if pf.startswith("'") and pf.endswith("'") and " " in pf:
977 977 pf = pf[1:-1] # Remove the quotes
978 978 return pf
979 979
980 980 def is_exec(f):
981 981 """check whether a file is executable"""
982 982 return (os.lstat(f).st_mode & 0100 != 0)
983 983
def set_exec(f, mode):
    # Make the executable state of f match the boolean `mode`;
    # no-op when the owner-execute bit already agrees.
    s = os.lstat(f).st_mode
    if (s & 0100 != 0) == mode:
        return
    if mode:
        # Turn on +x for every +r bit when making a file executable
        # and obey umask.
        os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
    else:
        # strip all execute bits, keep read/write bits
        os.chmod(f, s & 0666)
994 994
def set_link(f, mode):
    """make a file a symbolic link/regular file

    if a file is changed to a link, its contents become the link data
    if a link is changed to a file, its link data become its contents
    """

    # already in the requested state? then do nothing
    m = os.path.islink(f)
    if m == bool(mode):
        return

    if mode: # switch file to link
        # file() is the Python 2 builtin open()
        data = file(f).read()
        os.unlink(f)
        os.symlink(data, f)
    else:
        data = os.readlink(f)
        os.unlink(f)
        file(f, "w").write(data)
1014 1014
def set_binary(fd):
    # No-op on POSIX: there is no text/binary file-mode distinction.
    pass

def pconvert(path):
    # Convert an OS path to '/'-separated form; identity on POSIX.
    return path

def localpath(path):
    # Convert a repository path to an OS-local path; identity on POSIX.
    return path

# direct aliases for the platform-native implementations
normpath = os.path.normpath
samestat = os.path.samestat
1026 1026
def makelock(info, pathname):
    # Take a lock by atomically creating a symlink whose target is `info`;
    # fall back to a plain lock file on filesystems without symlinks.
    try:
        os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            # lock already held by someone else
            raise
        else:
            _makelock_file(info, pathname)
1035 1035
def readlock(pathname):
    # Read back the lock info written by makelock(); EINVAL means the
    # lock is a regular file (the non-symlink fallback), not a symlink.
    try:
        return os.readlink(pathname)
    except OSError, why:
        if why.errno == errno.EINVAL:
            return _readlock_file(pathname)
        else:
            raise
1044 1044
def shellquote(s):
    """Quote *s* so it is safe as a single word in a POSIX shell."""
    # close the quote, emit an escaped literal quote, reopen the quote
    escaped = s.replace("'", "'\\''")
    return "'" + escaped + "'"
1047 1047
def testpid(pid):
    '''return False if pid dead, True if running or not sure'''
    # Signal 0 probes existence without delivering anything.  Any error
    # other than ESRCH (e.g. EPERM: alive but owned by another user)
    # counts as "not sure", hence True.
    try:
        os.kill(pid, 0)
        return True
    except OSError, inst:
        return inst.errno != errno.ESRCH
1055 1055
def explain_exit(code):
    """return a 2-tuple (desc, code) describing a process's status"""
    # `code` is a raw os.wait()-style status word (POSIX only); the
    # description is localized via the gettext helper _().
    if os.WIFEXITED(code):
        val = os.WEXITSTATUS(code)
        return _("exited with status %d") % val, val
    elif os.WIFSIGNALED(code):
        val = os.WTERMSIG(code)
        return _("killed by signal %d") % val, val
    elif os.WIFSTOPPED(code):
        val = os.WSTOPSIG(code)
        return _("stopped by signal %d") % val, val
    raise ValueError(_("invalid exit code"))
1068 1068
def isowner(fp, st=None):
    """Return True if the file object f belongs to the current user.

    The return value of a util.fstat(f) may be passed as the st argument.
    """
    stat_result = st if st is not None else fstat(fp)
    return stat_result.st_uid == os.getuid()
1077
1077
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for directory in path:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return default
1089 1089
def find_exe(name, default=None):
    '''find path of an executable.
    if name contains a path component, return it as is. otherwise,
    use normal executable search path.'''

    if os.sep in name:
        # don't check the executable bit. if the file isn't
        # executable, whoever tries to actually run it will give a
        # much more useful error message.
        return name
    search_path = os.environ.get('PATH', '')
    return find_in_path(name, search_path, default=default)
1101 1101
def _buildencodefun():
    # Build a reversible store-filename encoding: Windows-reserved
    # characters and non-printable bytes become "~XX" hex escapes, and
    # uppercase letters (plus '_' itself) become '_' followed by the
    # lowercase letter, keeping names safe on case-folding filesystems.
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    # identity mapping for the plain printable range, then overrides
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    # inverse table for decoding
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        # greedily match 1- to 3-character encoded tokens at position i
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                # no token matched: the input is not a valid encoding
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

# module-level encode/decode pair produced once at import time
encodefilename, decodefilename = _buildencodefun()
1129 1129
def encodedopener(openerfn, fn):
    """Wrap opener *openerfn* so each path is first mapped through *fn*."""
    def wrapped(path, *args, **kw):
        encoded = fn(path)
        return openerfn(encoded, *args, **kw)
    return wrapped
1134 1134
def opener(base, audit=True):
    """
    return a function that opens files relative to base

    this function is used to hide the details of COW semantics and
    remote file access from higher level code.

    base: directory all returned paths are resolved against
    audit: when true, every path is checked with audit_path() before use
    """
    p = base
    audit_p = audit

    def mktempcopy(name, emptyok=False):
        # Create a temp file next to `name`; unless emptyok, copy
        # name's contents into it.  Returns the temp file's path.
        d, fn = os.path.split(name)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
        os.close(fd)
        # Temporary files are created with mode 0600, which is usually not
        # what we want.  If the original file already exists, just copy
        # its mode.  Otherwise, manually obey umask.
        try:
            st_mode = os.lstat(name).st_mode
        except OSError, inst:
            if inst.errno != errno.ENOENT:
                raise
            st_mode = 0666 & ~_umask
        os.chmod(temp, st_mode)
        if emptyok:
            return temp
        try:
            try:
                ifp = posixfile(name, "rb")
            except IOError, inst:
                # original file missing: the empty temp file will do
                if inst.errno == errno.ENOENT:
                    return temp
                if not getattr(inst, 'filename', None):
                    inst.filename = name
                raise
            ofp = posixfile(temp, "wb")
            for chunk in filechunkiter(ifp):
                ofp.write(chunk)
            ifp.close()
            ofp.close()
        except:
            # best effort: never leave a stray temp file behind
            try: os.unlink(temp)
            except: pass
            raise
        return temp

    class atomictempfile(posixfile):
        """the file will only be copied when rename is called"""
        def __init__(self, name, mode):
            self.__name = name
            self.temp = mktempcopy(name, emptyok=('w' in mode))
            posixfile.__init__(self, self.temp, mode)
        def rename(self):
            # commit: close and move the temp file over the real name
            if not self.closed:
                posixfile.close(self)
                rename(self.temp, localpath(self.__name))
        def __del__(self):
            # abandoned without rename(): discard the temp file
            if not self.closed:
                try:
                    os.unlink(self.temp)
                except: pass
                posixfile.close(self)

    def o(path, mode="r", text=False, atomictemp=False):
        if audit_p:
            audit_path(path)
        f = os.path.join(p, path)

        if not text:
            mode += "b" # for that other OS

        if mode[0] != "r":
            # writing: break hardlinks (copy-on-write) and make sure the
            # parent directory exists
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
            d = os.path.dirname(f)
            if not os.path.isdir(d):
                os.makedirs(d)
            if atomictemp:
                return atomictempfile(f, mode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        return posixfile(f, mode)

    return o
1221 1221
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        # set once in_iter is exhausted, so we never pull from it again
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        # buffer() keeps the unread remainder without copying (Python 2)
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
1266 1266
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        # a zero-byte request short-circuits without touching the file
        chunk = nbytes and f.read(nbytes)
        if not chunk:
            break
        if limit:
            limit -= len(chunk)
        yield chunk
1283 1283
def makedate():
    # Return (timestamp, tzoffset) for "now".  tzoffset is seconds west
    # of UTC; altzone is used when DST is currently in effect (lt[8]).
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz
1291 1291
def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    formatted = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        hours = -tz / 3600
        minutes = (-tz % 3600) / 60
        formatted += " %+03d%02d" % (hours, minutes)
    return formatted
1302 1302
def strdate(string, format, defaults):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        # recognize a trailing "+HHMM"/"-HHMM" numeric zone, or the
        # names GMT/UTC; return seconds west of UTC, or None if absent
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            tz = int(tz)
            # NOTE(review): relies on Python 2 floor division semantics
            # of `/` for negative offsets -- confirm before porting.
            offset = - 3600 * (tz / 100) - 60 * (tz % 100)
            return offset
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        # strip the zone token; it is handled numerically above
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1337 1337
def parsedate(string, formats=None, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    The date may be a "unixtime offset" string or in one of the specified
    formats.

    Raises Abort when no format matches or the result is out of range.
    """
    if not string:
        return 0, 0
    if not formats:
        formats = defaultdateformats
    string = string.strip()
    try:
        # fast path: the internal "unixtime offset" representation
        when, offset = map(int, string.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                elif part[0] in "dm":
                    defaults[part] = "1"
                else:
                    # missing year/month group: default to "now"
                    defaults[part] = datestr(now, "%" + part[0], False)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(string, format, defaults)
            except ValueError:
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % string)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1381 1381
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within the given number of days of now

    '{date} to {date}' inclusive range

    """

    def lower(date):
        # earliest timestamp the (possibly partial) date can mean
        return parsedate(date, extendeddateformats)[0]

    def upper(date):
        # latest timestamp: fill missing fields with their maxima; try
        # month lengths 31, 30, 29 and settle on 28 if none parses
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1429 1429
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # strip, in order: the mail domain, anything up to and including a
    # '<', anything from the first space on, anything from the first dot
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    space = user.find(' ')
    if space >= 0:
        user = user[:space]
    dot = user.find('.')
    if dot >= 0:
        user = user[:dot]
    return user
1445 1445
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) > maxlength:
        # reserve three characters for the "..." marker
        return "%s..." % text[:maxlength - 3]
    return text
1452 1452
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # only errors on the root itself are fatal; unreadable
        # subdirectories are silently skipped
        if err.filename == path:
            raise err

    for dirpath, subdirs, _files in os.walk(path, onerror=errhandler):
        if '.hg' in subdirs:
            yield dirpath
            # a repository found: do not descend into it
            subdirs[:] = []
1465 1465
# memoized result of rcpath(); populated on first call
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    # normalize every entry so duplicates/relative segments compare sanely
    path = [os.path.normpath(f) for f in path]
    return path
1474 1474
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    # computed once per process and memoized in module-level _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    # a directory entry contributes all of its *.rc files
                    for f in os.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath
1496 1496
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # (threshold multiplier, divisor, localized format); ordered largest
    # first so the first matching row picks the unit and precision
    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    # nbytes == 0 falls through every threshold; format as plain bytes
    return units[-1][2] % nbytes
1517 1517
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' prefix (and an optional '//') from path.

    Paths without the prefix are returned unchanged.
    """
    prefix = scheme + ':'
    if path.startswith(prefix):
        path = path[len(prefix):]
        if path.startswith('//'):
            path = path[2:]
    return path
@@ -1,409 +1,409 b''
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, write to the Free Software
15 15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
16 16
17 17 import os
18 18 import unittest
19 19 from unittest import TestCase
20 20 import imp
21 21 import shutil
22 22 from mercurial import util
23 23
24 24 # copy simplemerge to the cwd to avoid creating a .pyc file in the source tree
25 25 shutil.copyfile(os.path.join(os.environ['TESTDIR'], os.path.pardir,
26 26 'contrib', 'simplemerge'),
27 27 'simplemerge.py')
28 28 simplemerge = imp.load_source('simplemerge', 'simplemerge.py')
29 29 Merge3 = simplemerge.Merge3
30 30 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
31 31
def split_lines(t):
    # Return *t* as a list of lines, each keeping its line ending
    # (cStringIO is Python 2's C-accelerated StringIO).
    from cStringIO import StringIO
    return StringIO(t).readlines()
35 35
36 36 ############################################################
37 37 # test case data from the gnu diffutils manual
38 38 # common base
39 39 TZU = split_lines(""" The Nameless is the origin of Heaven and Earth;
40 40 The named is the mother of all things.
41
41
42 42 Therefore let there always be non-being,
43 43 so we may see their subtlety,
44 44 And let there always be being,
45 45 so we may see their outcome.
46 46 The two are the same,
47 47 But after they are produced,
48 48 they have different names.
49 49 They both may be called deep and profound.
50 50 Deeper and more profound,
51 51 The door of all subtleties!
52 52 """)
53 53
54 54 LAO = split_lines(""" The Way that can be told of is not the eternal Way;
55 55 The name that can be named is not the eternal name.
56 56 The Nameless is the origin of Heaven and Earth;
57 57 The Named is the mother of all things.
58 58 Therefore let there always be non-being,
59 59 so we may see their subtlety,
60 60 And let there always be being,
61 61 so we may see their outcome.
62 62 The two are the same,
63 63 But after they are produced,
64 64 they have different names.
65 65 """)
66 66
67 67
68 68 TAO = split_lines(""" The Way that can be told of is not the eternal Way;
69 69 The name that can be named is not the eternal name.
70 70 The Nameless is the origin of Heaven and Earth;
71 71 The named is the mother of all things.
72
72
73 73 Therefore let there always be non-being,
74 74 so we may see their subtlety,
75 75 And let there always be being,
76 76 so we may see their result.
77 77 The two are the same,
78 78 But after they are produced,
79 79 they have different names.
80
80
81 81 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
82 82
83 83 """)
84 84
85 85 MERGED_RESULT = split_lines(""" The Way that can be told of is not the eternal Way;
86 86 The name that can be named is not the eternal name.
87 87 The Nameless is the origin of Heaven and Earth;
88 88 The Named is the mother of all things.
89 89 Therefore let there always be non-being,
90 90 so we may see their subtlety,
91 91 And let there always be being,
92 92 so we may see their result.
93 93 The two are the same,
94 94 But after they are produced,
95 95 they have different names.
96 96 <<<<<<< LAO
97 97 =======
98
98
99 99 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
100 100
101 101 >>>>>>> TAO
102 102 """)
103 103
class TestMerge3(TestCase):
    """Unit tests for simplemerge's Merge3 three-way merge engine."""

    def log(self, msg):
        # swallow debug output produced during merging
        pass

    def test_no_changes(self):
        """No conflicts because nothing changed"""
        m3 = Merge3(['aaa', 'bbb'],
                    ['aaa', 'bbb'],
                    ['aaa', 'bbb'])

        self.assertEquals(m3.find_unconflicted(),
                          [(0, 2)])

        self.assertEquals(list(m3.find_sync_regions()),
                          [(0, 2,
                            0, 2,
                            0, 2),
                           (2,2, 2,2, 2,2)])

        self.assertEquals(list(m3.merge_regions()),
                          [('unchanged', 0, 2)])

        self.assertEquals(list(m3.merge_groups()),
                          [('unchanged', ['aaa', 'bbb'])])

    def test_front_insert(self):
        m3 = Merge3(['zz'],
                    ['aaa', 'bbb', 'zz'],
                    ['zz'])

        # todo: should use a sentinel at end as from get_matching_blocks
        # to match without zz
        self.assertEquals(list(m3.find_sync_regions()),
                          [(0,1, 2,3, 0,1),
                           (1,1, 3,3, 1,1),])

        self.assertEquals(list(m3.merge_regions()),
                          [('a', 0, 2),
                           ('unchanged', 0, 1)])

        self.assertEquals(list(m3.merge_groups()),
                          [('a', ['aaa', 'bbb']),
                           ('unchanged', ['zz'])])

    def test_null_insert(self):
        m3 = Merge3([],
                    ['aaa', 'bbb'],
                    [])
        # todo: should use a sentinel at end as from get_matching_blocks
        # to match without zz
        self.assertEquals(list(m3.find_sync_regions()),
                          [(0,0, 2,2, 0,0)])

        self.assertEquals(list(m3.merge_regions()),
                          [('a', 0, 2)])

        self.assertEquals(list(m3.merge_lines()),
                          ['aaa', 'bbb'])

    def test_no_conflicts(self):
        """No conflicts because only one side changed"""
        m3 = Merge3(['aaa', 'bbb'],
                    ['aaa', '111', 'bbb'],
                    ['aaa', 'bbb'])

        self.assertEquals(m3.find_unconflicted(),
                          [(0, 1), (1, 2)])

        self.assertEquals(list(m3.find_sync_regions()),
                          [(0,1, 0,1, 0,1),
                           (1,2, 2,3, 1,2),
                           (2,2, 3,3, 2,2),])

        self.assertEquals(list(m3.merge_regions()),
                          [('unchanged', 0, 1),
                           ('a', 1, 2),
                           ('unchanged', 1, 2),])

    def test_append_a(self):
        m3 = Merge3(['aaa\n', 'bbb\n'],
                    ['aaa\n', 'bbb\n', '222\n'],
                    ['aaa\n', 'bbb\n'])

        self.assertEquals(''.join(m3.merge_lines()),
                          'aaa\nbbb\n222\n')

    def test_append_b(self):
        m3 = Merge3(['aaa\n', 'bbb\n'],
                    ['aaa\n', 'bbb\n'],
                    ['aaa\n', 'bbb\n', '222\n'])

        self.assertEquals(''.join(m3.merge_lines()),
                          'aaa\nbbb\n222\n')

    def test_append_agreement(self):
        # both sides appended the same line: no conflict
        m3 = Merge3(['aaa\n', 'bbb\n'],
                    ['aaa\n', 'bbb\n', '222\n'],
                    ['aaa\n', 'bbb\n', '222\n'])

        self.assertEquals(''.join(m3.merge_lines()),
                          'aaa\nbbb\n222\n')

    def test_append_clash(self):
        # both sides appended different lines: conflict markers expected
        m3 = Merge3(['aaa\n', 'bbb\n'],
                    ['aaa\n', 'bbb\n', '222\n'],
                    ['aaa\n', 'bbb\n', '333\n'])

        ml = m3.merge_lines(name_a='a',
                            name_b='b',
                            start_marker='<<',
                            mid_marker='--',
                            end_marker='>>')
        self.assertEquals(''.join(ml),
                          '''\
aaa
bbb
<< a
222
--
333
>> b
''')

    def test_insert_agreement(self):
        m3 = Merge3(['aaa\n', 'bbb\n'],
                    ['aaa\n', '222\n', 'bbb\n'],
                    ['aaa\n', '222\n', 'bbb\n'])

        ml = m3.merge_lines(name_a='a',
                            name_b='b',
                            start_marker='<<',
                            mid_marker='--',
                            end_marker='>>')
        self.assertEquals(''.join(ml), 'aaa\n222\nbbb\n')


    def test_insert_clash(self):
        """Both try to insert lines in the same place."""
        m3 = Merge3(['aaa\n', 'bbb\n'],
                    ['aaa\n', '111\n', 'bbb\n'],
                    ['aaa\n', '222\n', 'bbb\n'])

        self.assertEquals(m3.find_unconflicted(),
                          [(0, 1), (1, 2)])

        self.assertEquals(list(m3.find_sync_regions()),
                          [(0,1, 0,1, 0,1),
                           (1,2, 2,3, 2,3),
                           (2,2, 3,3, 3,3),])

        self.assertEquals(list(m3.merge_regions()),
                          [('unchanged', 0,1),
                           ('conflict', 1,1, 1,2, 1,2),
                           ('unchanged', 1,2)])

        self.assertEquals(list(m3.merge_groups()),
                          [('unchanged', ['aaa\n']),
                           ('conflict', [], ['111\n'], ['222\n']),
                           ('unchanged', ['bbb\n']),
                           ])

        ml = m3.merge_lines(name_a='a',
                            name_b='b',
                            start_marker='<<',
                            mid_marker='--',
                            end_marker='>>')
        self.assertEquals(''.join(ml),
                          '''aaa
<< a
111
--
222
>> b
bbb
''')

    def test_replace_clash(self):
        """Both sides replace the same base line with different text."""
        m3 = Merge3(['aaa', '000', 'bbb'],
                    ['aaa', '111', 'bbb'],
                    ['aaa', '222', 'bbb'])

        self.assertEquals(m3.find_unconflicted(),
                          [(0, 1), (2, 3)])

        self.assertEquals(list(m3.find_sync_regions()),
                          [(0,1, 0,1, 0,1),
                           (2,3, 2,3, 2,3),
                           (3,3, 3,3, 3,3),])

    def test_replace_multi(self):
        """Replacement with regions of different size."""
        m3 = Merge3(['aaa', '000', '000', 'bbb'],
                    ['aaa', '111', '111', '111', 'bbb'],
                    ['aaa', '222', '222', '222', '222', 'bbb'])

        self.assertEquals(m3.find_unconflicted(),
                          [(0, 1), (3, 4)])


        self.assertEquals(list(m3.find_sync_regions()),
                          [(0,1, 0,1, 0,1),
                           (3,4, 4,5, 5,6),
                           (4,4, 5,5, 6,6),])

    def test_merge_poem(self):
        """Test case from diff3 manual"""
        m3 = Merge3(TZU, LAO, TAO)
        ml = list(m3.merge_lines('LAO', 'TAO'))
        self.log('merge result:')
        self.log(''.join(ml))
        self.assertEquals(ml, MERGED_RESULT)

    def test_minimal_conflicts_common(self):
        """Reprocessing"""
        base_text = ("a\n" * 20).splitlines(True)
        this_text = ("a\n"*10+"b\n" * 10).splitlines(True)
        other_text = ("a\n"*10+"c\n"+"b\n" * 8 + "c\n").splitlines(True)
        m3 = Merge3(base_text, other_text, this_text)
        m_lines = m3.merge_lines('OTHER', 'THIS', reprocess=True)
        merged_text = "".join(list(m_lines))
        optimal_text = ("a\n" * 10 + "<<<<<<< OTHER\nc\n=======\n"
            + ">>>>>>> THIS\n"
            + 8* "b\n" + "<<<<<<< OTHER\nc\n=======\n"
            + 2* "b\n" + ">>>>>>> THIS\n")
        self.assertEquals(optimal_text, merged_text)

    def test_minimal_conflicts_unique(self):
        def add_newline(s):
            """Add a newline to each entry in the string"""
            return [(x+'\n') for x in s]

        base_text = add_newline("abcdefghijklm")
        this_text = add_newline("abcdefghijklmNOPQRSTUVWXYZ")
        other_text = add_newline("abcdefghijklm1OPQRSTUVWXY2")
        m3 = Merge3(base_text, other_text, this_text)
        m_lines = m3.merge_lines('OTHER', 'THIS', reprocess=True)
        merged_text = "".join(list(m_lines))
        optimal_text = ''.join(add_newline("abcdefghijklm")
            + ["<<<<<<< OTHER\n1\n=======\nN\n>>>>>>> THIS\n"]
            + add_newline('OPQRSTUVWXY')
            + ["<<<<<<< OTHER\n2\n=======\nZ\n>>>>>>> THIS\n"]
            )
        self.assertEquals(optimal_text, merged_text)

    def test_minimal_conflicts_nonunique(self):
        def add_newline(s):
            """Add a newline to each entry in the string"""
            return [(x+'\n') for x in s]

        base_text = add_newline("abacddefgghij")
        this_text = add_newline("abacddefgghijkalmontfprz")
        other_text = add_newline("abacddefgghijknlmontfprd")
        m3 = Merge3(base_text, other_text, this_text)
        m_lines = m3.merge_lines('OTHER', 'THIS', reprocess=True)
        merged_text = "".join(list(m_lines))
        optimal_text = ''.join(add_newline("abacddefgghijk")
            + ["<<<<<<< OTHER\nn\n=======\na\n>>>>>>> THIS\n"]
            + add_newline('lmontfpr')
            + ["<<<<<<< OTHER\nd\n=======\nz\n>>>>>>> THIS\n"]
            )
        self.assertEquals(optimal_text, merged_text)

    def test_reprocess_and_base(self):
        """Reprocessing and showing base breaks correctly"""
        base_text = ("a\n" * 20).splitlines(True)
        this_text = ("a\n"*10+"b\n" * 10).splitlines(True)
        other_text = ("a\n"*10+"c\n"+"b\n" * 8 + "c\n").splitlines(True)
        m3 = Merge3(base_text, other_text, this_text)
        m_lines = m3.merge_lines('OTHER', 'THIS', reprocess=True,
                                 base_marker='|||||||')
        self.assertRaises(CantReprocessAndShowBase, list, m_lines)

    def test_binary(self):
        # NUL bytes mark the input as binary; Merge3 must refuse it
        self.assertRaises(util.Abort, Merge3, ['\x00'], ['a'], ['b'])

    def test_dos_text(self):
        base_text = 'a\r\n'
        this_text = 'b\r\n'
        other_text = 'c\r\n'
        m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True),
                    this_text.splitlines(True))
        m_lines = m3.merge_lines('OTHER', 'THIS')
        self.assertEqual('<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
                         '>>>>>>> THIS\r\n'.splitlines(True), list(m_lines))

    def test_mac_text(self):
        base_text = 'a\r'
        this_text = 'b\r'
        other_text = 'c\r'
        m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True),
                    this_text.splitlines(True))
        m_lines = m3.merge_lines('OTHER', 'THIS')
        self.assertEqual('<<<<<<< OTHER\rc\r=======\rb\r'
                         '>>>>>>> THIS\r'.splitlines(True), list(m_lines))
399 399
if __name__ == '__main__':
    # hide the timer: freeze time.time so unittest's elapsed-time line
    # is identical on every run, keeping the test's expected output stable
    import time
    orig = time.time
    try:
        time.time = lambda: 0
        unittest.main()
    finally:
        time.time = orig
@@ -1,207 +1,207 b''
1 1 # Since it's not easy to write a test that portably deals
2 2 # with files from different users/groups, we cheat a bit by
3 3 # monkey-patching some functions in the util module
4 4
5 5 import os
6 6 from mercurial import ui, util
7 7
8 8 hgrc = os.environ['HGRCPATH']
9 9
10 10 def testui(user='foo', group='bar', tusers=(), tgroups=(),
11 11 cuser='foo', cgroup='bar', debug=False, silent=False):
12 12 # user, group => owners of the file
13 13 # tusers, tgroups => trusted users/groups
14 14 # cuser, cgroup => user/group of the current process
15 15
16 16 # write a global hgrc with the list of trusted users/groups and
17 17 # some setting so that we can be sure it was read
18 18 f = open(hgrc, 'w')
19 19 f.write('[paths]\n')
20 20 f.write('global = /some/path\n\n')
21 21
22 22 if tusers or tgroups:
23 23 f.write('[trusted]\n')
24 24 if tusers:
25 25 f.write('users = %s\n' % ', '.join(tusers))
26 26 if tgroups:
27 27 f.write('groups = %s\n' % ', '.join(tgroups))
28 28 f.close()
29 29
30 30 # override the functions that give names to uids and gids
31 31 def username(uid=None):
32 32 if uid is None:
33 33 return cuser
34 34 return user
35 35 util.username = username
36 36
37 37 def groupname(gid=None):
38 38 if gid is None:
39 39 return 'bar'
40 40 return group
41 41 util.groupname = groupname
42 42
43 43 def isowner(fp, st=None):
44 44 return user == cuser
45 45 util.isowner = isowner
46 46
47 47 # try to read everything
48 48 #print '# File belongs to user %s, group %s' % (user, group)
49 49 #print '# trusted users = %s; trusted groups = %s' % (tusers, tgroups)
50 50 kind = ('different', 'same')
51 51 who = ('', 'user', 'group', 'user and the group')
52 52 trusted = who[(user in tusers) + 2*(group in tgroups)]
53 53 if trusted:
54 54 trusted = ', but we trust the ' + trusted
55 55 print '# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup],
56 56 trusted)
57 57
58 58 parentui = ui.ui()
59 59 parentui.updateopts(debug=debug)
60 60 u = ui.ui(parentui=parentui)
61 61 u.readconfig('.hg/hgrc')
62 62 if silent:
63 63 return u
64 64 print 'trusted'
65 65 for name, path in u.configitems('paths'):
66 66 print ' ', name, '=', path
67 67 print 'untrusted'
68 68 for name, path in u.configitems('paths', untrusted=True):
69 69 print '.',
70 70 u.config('paths', name) # warning with debug=True
71 71 print '.',
72 72 u.config('paths', name, untrusted=True) # no warnings
73 73 print name, '=', path
74 74 print
75 75
76 76 return u
77 77
78 78 os.mkdir('repo')
79 79 os.chdir('repo')
80 80 os.mkdir('.hg')
81 81 f = open('.hg/hgrc', 'w')
82 82 f.write('[paths]\n')
83 83 f.write('local = /another/path\n\n')
84 84 f.write('interpolated = %(global)s%(local)s\n\n')
85 85 f.close()
86 86
87 87 #print '# Everything is run by user foo, group bar\n'
88 88
89 89 # same user, same group
90 90 testui()
91 91 # same user, different group
92 92 testui(group='def')
93 93 # different user, same group
94 94 testui(user='abc')
95 95 # ... but we trust the group
96 96 testui(user='abc', tgroups=['bar'])
97 97 # different user, different group
98 98 testui(user='abc', group='def')
99 99 # ... but we trust the user
100 100 testui(user='abc', group='def', tusers=['abc'])
101 101 # ... but we trust the group
102 102 testui(user='abc', group='def', tgroups=['def'])
103 103 # ... but we trust the user and the group
104 104 testui(user='abc', group='def', tusers=['abc'], tgroups=['def'])
105 105 # ... but we trust all users
106 106 print '# we trust all users'
107 107 testui(user='abc', group='def', tusers=['*'])
108 108 # ... but we trust all groups
109 109 print '# we trust all groups'
110 110 testui(user='abc', group='def', tgroups=['*'])
111 111 # ... but we trust the whole universe
112 112 print '# we trust all users and groups'
113 113 testui(user='abc', group='def', tusers=['*'], tgroups=['*'])
114 114 # ... check that users and groups are in different namespaces
115 115 print "# we don't get confused by users and groups with the same name"
116 116 testui(user='abc', group='def', tusers=['def'], tgroups=['abc'])
117 117 # ... lists of user names work
118 118 print "# list of user names"
119 119 testui(user='abc', group='def', tusers=['foo', 'xyz', 'abc', 'bleh'],
120 120 tgroups=['bar', 'baz', 'qux'])
121 121 # ... lists of group names work
122 122 print "# list of group names"
123 123 testui(user='abc', group='def', tusers=['foo', 'xyz', 'bleh'],
124 124 tgroups=['bar', 'def', 'baz', 'qux'])
125 125
126 126 print "# Can't figure out the name of the user running this process"
127 127 testui(user='abc', group='def', cuser=None)
128 128
129 129 print "# prints debug warnings"
130 130 u = testui(user='abc', group='def', cuser='foo', debug=True)
131 131
132 132 print "# ui.readsections"
133 133 filename = 'foobar'
134 134 f = open(filename, 'w')
135 135 f.write('[foobar]\n')
136 136 f.write('baz = quux\n')
137 137 f.close()
138 138 u.readsections(filename, 'foobar')
139 139 print u.config('foobar', 'baz')
140 140
141 141 print
142 142 print "# read trusted, untrusted, new ui, trusted"
143 143 u = ui.ui()
144 144 u.updateopts(debug=True)
145 145 u.readconfig(filename)
146 146 u2 = ui.ui(parentui=u)
147 147 def username(uid=None):
148 148 return 'foo'
149 149 util.username = username
150 150 u2.readconfig('.hg/hgrc')
151 151 print 'trusted:'
152 152 print u2.config('foobar', 'baz')
153 153 print u2.config('paths', 'interpolated')
154 154 print 'untrusted:'
155 155 print u2.config('foobar', 'baz', untrusted=True)
156 156 print u2.config('paths', 'interpolated', untrusted=True)
157 157
158 print
158 print
159 159 print "# error handling"
160 160
161 161 def assertraises(f, exc=util.Abort):
162 162 try:
163 163 f()
164 164 except exc, inst:
165 165 print 'raised', inst.__class__.__name__
166 166 else:
167 167 print 'no exception?!'
168 168
169 169 print "# file doesn't exist"
170 170 os.unlink('.hg/hgrc')
171 171 assert not os.path.exists('.hg/hgrc')
172 172 testui(debug=True, silent=True)
173 173 testui(user='abc', group='def', debug=True, silent=True)
174 174
175 175 print
176 176 print "# parse error"
177 177 f = open('.hg/hgrc', 'w')
178 178 f.write('foo = bar')
179 179 f.close()
180 180 testui(user='abc', group='def', silent=True)
181 181 assertraises(lambda: testui(debug=True, silent=True))
182 182
183 183 print
184 184 print "# interpolation error"
185 185 f = open('.hg/hgrc', 'w')
186 186 f.write('[foo]\n')
187 187 f.write('bar = %(')
188 188 f.close()
189 189 u = testui(debug=True, silent=True)
190 190 print '# regular config:'
191 191 print ' trusted',
192 192 assertraises(lambda: u.config('foo', 'bar'))
193 193 print 'untrusted',
194 194 assertraises(lambda: u.config('foo', 'bar', untrusted=True))
195 195
196 196 u = testui(user='abc', group='def', debug=True, silent=True)
197 197 print ' trusted ',
198 198 print u.config('foo', 'bar')
199 199 print 'untrusted',
200 200 assertraises(lambda: u.config('foo', 'bar', untrusted=True))
201 201
202 202 print '# configitems:'
203 203 print ' trusted ',
204 204 print u.configitems('foo')
205 205 print 'untrusted',
206 206 assertraises(lambda: u.configitems('foo', untrusted=True))
207 207
General Comments 0
You need to be logged in to leave comments. Login now