pep-0008 cleanup...
benoit.boissinot@ens-lyon.fr
r1062:6d5a62a5 default
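The changeset below touches only whitespace in commands.py: default parameter values and keyword arguments lose the spaces around '=', as PEP 8 recommends ("pats = []" becomes "pats=[]", "match = matchfn" becomes "match=matchfn"). A minimal sketch of the convention being applied, assuming an illustrative function that is not part of the diff:

# PEP 8: no spaces around '=' when it marks a default value or a
# keyword argument. The signature mirrors the ones changed below
# (the mutable defaults are kept only to match the original code).
def example(repo=None, pats=[], opts={}, head=''):
    # the same rule holds for keyword arguments at the call site
    return dict(pats=pats, opts=opts, head=head)

example(head='(?:.*/|)')   # not: example(head = '(?:.*/|)')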
@@ -1,1838 +1,1838 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 demandload(globals(), "os re sys signal shutil")
10 10 demandload(globals(), "fancyopts ui hg util lock")
11 11 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
12 12 demandload(globals(), "errno socket version struct atexit sets")
13 13
14 14 class UnknownCommand(Exception):
15 15 """Exception raised if command is not in the command table."""
16 16
17 17 def filterfiles(filters, files):
18 18 l = [x for x in files if x in filters]
19 19
20 20 for t in filters:
21 21 if t and t[-1] != "/":
22 22 t += "/"
23 23 l += [x for x in files if x.startswith(t)]
24 24 return l
25 25
26 26 def relpath(repo, args):
27 27 cwd = repo.getcwd()
28 28 if cwd:
29 29 return [util.normpath(os.path.join(cwd, x)) for x in args]
30 30 return args
31 31
32 def matchpats(repo, cwd, pats = [], opts = {}, head = ''):
32 def matchpats(repo, cwd, pats=[], opts={}, head=''):
33 33 return util.matcher(repo, cwd, pats or ['.'], opts.get('include'),
34 34 opts.get('exclude'), head)
35 35
36 def makewalk(repo, pats, opts, head = ''):
36 def makewalk(repo, pats, opts, head=''):
37 37 cwd = repo.getcwd()
38 38 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
39 39 exact = dict(zip(files, files))
40 40 def walk():
41 for src, fn in repo.walk(files = files, match = matchfn):
41 for src, fn in repo.walk(files=files, match=matchfn):
42 42 yield src, fn, util.pathto(cwd, fn), fn in exact
43 43 return files, matchfn, walk()
44 44
45 def walk(repo, pats, opts, head = ''):
45 def walk(repo, pats, opts, head=''):
46 46 files, matchfn, results = makewalk(repo, pats, opts, head)
47 47 for r in results: yield r
48 48
49 49 def walkchangerevs(ui, repo, cwd, pats, opts):
50 50 # This code most commonly needs to iterate backwards over the
51 51 # history it is interested in. Doing so has awful
52 52 # (quadratic-looking) performance, so we use iterators in a
53 53 # "windowed" way. Walk forwards through a window of revisions,
54 54 # yielding them in the desired order, and walk the windows
55 55 # themselves backwards.
56 56 cwd = repo.getcwd()
57 57 if not pats and cwd:
58 58 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
59 59 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
60 60 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
61 61 pats, opts)
62 62 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
63 63 wanted = {}
64 64 slowpath = anypats
65 65 window = 300
66 66 fncache = {}
67 67 if not slowpath and not files:
68 68 # No files, no patterns. Display all revs.
69 69 wanted = dict(zip(revs, revs))
70 70 if not slowpath:
71 71 # Only files, no patterns. Check the history of each file.
72 72 def filerevgen(filelog):
73 73 for i in xrange(filelog.count() - 1, -1, -window):
74 74 revs = []
75 75 for j in xrange(max(0, i - window), i + 1):
76 76 revs.append(filelog.linkrev(filelog.node(j)))
77 77 revs.reverse()
78 78 for rev in revs:
79 79 yield rev
80 80
81 81 minrev, maxrev = min(revs), max(revs)
82 82 for file in files:
83 83 filelog = repo.file(file)
84 84 # A zero count may be a directory or deleted file, so
85 85 # try to find matching entries on the slow path.
86 86 if filelog.count() == 0:
87 87 slowpath = True
88 88 break
89 89 for rev in filerevgen(filelog):
90 90 if rev <= maxrev:
91 91 if rev < minrev: break
92 92 fncache.setdefault(rev, [])
93 93 fncache[rev].append(file)
94 94 wanted[rev] = 1
95 95 if slowpath:
96 96 # The slow path checks files modified in every changeset.
97 97 def changerevgen():
98 98 for i in xrange(repo.changelog.count() - 1, -1, -window):
99 99 for j in xrange(max(0, i - window), i + 1):
100 100 yield j, repo.changelog.read(repo.lookup(str(j)))[3]
101 101
102 102 for rev, changefiles in changerevgen():
103 103 matches = filter(matchfn, changefiles)
104 104 if matches:
105 105 fncache[rev] = matches
106 106 wanted[rev] = 1
107 107
108 108 for i in xrange(0, len(revs), window):
109 109 yield 'window', revs[0] < revs[-1], revs[-1]
110 110 nrevs = [rev for rev in revs[i : min(i + window, len(revs))]
111 111 if rev in wanted]
112 112 srevs = list(nrevs)
113 113 srevs.sort()
114 114 for rev in srevs:
115 115 fns = fncache.get(rev)
116 116 if not fns:
117 117 fns = repo.changelog.read(repo.lookup(str(rev)))[3]
118 118 fns = filter(matchfn, fns)
119 119 yield 'add', rev, fns
120 120 for rev in nrevs:
121 121 yield 'iter', rev, None
122 122
123 123 revrangesep = ':'
124 124
125 125 def revrange(ui, repo, revs, revlog=None):
126 126 if revlog is None:
127 127 revlog = repo.changelog
128 128 revcount = revlog.count()
129 129 def fix(val, defval):
130 130 if not val:
131 131 return defval
132 132 try:
133 133 num = int(val)
134 134 if str(num) != val:
135 135 raise ValueError
136 136 if num < 0:
137 137 num += revcount
138 138 if not (0 <= num < revcount):
139 139 raise ValueError
140 140 except ValueError:
141 141 try:
142 142 num = repo.changelog.rev(repo.lookup(val))
143 143 except KeyError:
144 144 try:
145 145 num = revlog.rev(revlog.lookup(val))
146 146 except KeyError:
147 147 raise util.Abort('invalid revision identifier %s', val)
148 148 return num
149 149 for spec in revs:
150 150 if spec.find(revrangesep) >= 0:
151 151 start, end = spec.split(revrangesep, 1)
152 152 start = fix(start, 0)
153 153 end = fix(end, revcount - 1)
154 154 if end > start:
155 155 end += 1
156 156 step = 1
157 157 else:
158 158 end -= 1
159 159 step = -1
160 160 for rev in xrange(start, end, step):
161 161 yield str(rev)
162 162 else:
163 163 yield str(fix(spec, None))
164 164
165 165 def make_filename(repo, r, pat, node=None,
166 166 total=None, seqno=None, revwidth=None):
167 167 node_expander = {
168 168 'H': lambda: hg.hex(node),
169 169 'R': lambda: str(r.rev(node)),
170 170 'h': lambda: hg.short(node),
171 171 }
172 172 expander = {
173 173 '%': lambda: '%',
174 174 'b': lambda: os.path.basename(repo.root),
175 175 }
176 176
177 177 try:
178 178 if node:
179 179 expander.update(node_expander)
180 180 if node and revwidth is not None:
181 181 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
182 182 if total is not None:
183 183 expander['N'] = lambda: str(total)
184 184 if seqno is not None:
185 185 expander['n'] = lambda: str(seqno)
186 186 if total is not None and seqno is not None:
187 187 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
188 188
189 189 newname = []
190 190 patlen = len(pat)
191 191 i = 0
192 192 while i < patlen:
193 193 c = pat[i]
194 194 if c == '%':
195 195 i += 1
196 196 c = pat[i]
197 197 c = expander[c]()
198 198 newname.append(c)
199 199 i += 1
200 200 return ''.join(newname)
201 201 except KeyError, inst:
202 202 raise util.Abort("invalid format spec '%%%s' in output file name",
203 203 inst.args[0])
204 204
205 205 def make_file(repo, r, pat, node=None,
206 206 total=None, seqno=None, revwidth=None, mode='wb'):
207 207 if not pat or pat == '-':
208 208 if 'w' in mode: return sys.stdout
209 209 else: return sys.stdin
210 210 if hasattr(pat, 'write') and 'w' in mode:
211 211 return pat
212 212 if hasattr(pat, 'read') and 'r' in mode:
213 213 return pat
214 214 return open(make_filename(repo, r, pat, node, total, seqno, revwidth),
215 215 mode)
216 216
217 217 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
218 218 changes=None, text=False):
219 219 def date(c):
220 220 return time.asctime(time.gmtime(float(c[2].split(' ')[0])))
221 221
222 222 if not changes:
223 (c, a, d, u) = repo.changes(node1, node2, files, match = match)
223 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
224 224 else:
225 225 (c, a, d, u) = changes
226 226 if files:
227 227 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
228 228
229 229 if not c and not a and not d:
230 230 return
231 231
232 232 if node2:
233 233 change = repo.changelog.read(node2)
234 234 mmap2 = repo.manifest.read(change[0])
235 235 date2 = date(change)
236 236 def read(f):
237 237 return repo.file(f).read(mmap2[f])
238 238 else:
239 239 date2 = time.asctime()
240 240 if not node1:
241 241 node1 = repo.dirstate.parents()[0]
242 242 def read(f):
243 243 return repo.wfile(f).read()
244 244
245 245 if ui.quiet:
246 246 r = None
247 247 else:
248 248 hexfunc = ui.verbose and hg.hex or hg.short
249 249 r = [hexfunc(node) for node in [node1, node2] if node]
250 250
251 251 change = repo.changelog.read(node1)
252 252 mmap = repo.manifest.read(change[0])
253 253 date1 = date(change)
254 254
255 255 for f in c:
256 256 to = None
257 257 if f in mmap:
258 258 to = repo.file(f).read(mmap[f])
259 259 tn = read(f)
260 260 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
261 261 for f in a:
262 262 to = None
263 263 tn = read(f)
264 264 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
265 265 for f in d:
266 266 to = repo.file(f).read(mmap[f])
267 267 tn = None
268 268 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
269 269
270 270 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
271 271 """show a single changeset or file revision"""
272 272 log = repo.changelog
273 273 if changenode is None:
274 274 changenode = log.node(rev)
275 275 elif not rev:
276 276 rev = log.rev(changenode)
277 277
278 278 if ui.quiet:
279 279 ui.write("%d:%s\n" % (rev, hg.short(changenode)))
280 280 return
281 281
282 282 changes = log.read(changenode)
283 283
284 284 t, tz = changes[2].split(' ')
285 285 # a conversion tool was sticking non-integer offsets into repos
286 286 try:
287 287 tz = int(tz)
288 288 except ValueError:
289 289 tz = 0
290 290 date = time.asctime(time.localtime(float(t))) + " %+05d" % (int(tz)/-36)
291 291
292 292 parents = [(log.rev(p), ui.verbose and hg.hex(p) or hg.short(p))
293 293 for p in log.parents(changenode)
294 294 if ui.debugflag or p != hg.nullid]
295 295 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
296 296 parents = []
297 297
298 298 if ui.verbose:
299 299 ui.write("changeset: %d:%s\n" % (rev, hg.hex(changenode)))
300 300 else:
301 301 ui.write("changeset: %d:%s\n" % (rev, hg.short(changenode)))
302 302
303 303 for tag in repo.nodetags(changenode):
304 304 ui.status("tag: %s\n" % tag)
305 305 for parent in parents:
306 306 ui.write("parent: %d:%s\n" % parent)
307 307
308 308 if brinfo and changenode in brinfo:
309 309 br = brinfo[changenode]
310 310 ui.write("branch: %s\n" % " ".join(br))
311 311
312 312 ui.debug("manifest: %d:%s\n" % (repo.manifest.rev(changes[0]),
313 313 hg.hex(changes[0])))
314 314 ui.status("user: %s\n" % changes[1])
315 315 ui.status("date: %s\n" % date)
316 316
317 317 if ui.debugflag:
318 318 files = repo.changes(log.parents(changenode)[0], changenode)
319 319 for key, value in zip(["files:", "files+:", "files-:"], files):
320 320 if value:
321 321 ui.note("%-12s %s\n" % (key, " ".join(value)))
322 322 else:
323 323 ui.note("files: %s\n" % " ".join(changes[3]))
324 324
325 325 description = changes[4].strip()
326 326 if description:
327 327 if ui.verbose:
328 328 ui.status("description:\n")
329 329 ui.status(description)
330 330 ui.status("\n\n")
331 331 else:
332 332 ui.status("summary: %s\n" % description.splitlines()[0])
333 333 ui.status("\n")
334 334
335 335 def show_version(ui):
336 336 """output version and copyright information"""
337 337 ui.write("Mercurial Distributed SCM (version %s)\n"
338 338 % version.get_version())
339 339 ui.status(
340 340 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
341 341 "This is free software; see the source for copying conditions. "
342 342 "There is NO\nwarranty; "
343 343 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
344 344 )
345 345
346 346 def help_(ui, cmd=None, with_version=False):
347 347 """show help for a given command or all commands"""
348 348 option_lists = []
349 349 if cmd and cmd != 'shortlist':
350 350 if with_version:
351 351 show_version(ui)
352 352 ui.write('\n')
353 353 key, i = find(cmd)
354 354 # synopsis
355 355 ui.write("%s\n\n" % i[2])
356 356
357 357 # description
358 358 doc = i[0].__doc__
359 359 if ui.quiet:
360 360 doc = doc.splitlines(0)[0]
361 361 ui.write("%s\n" % doc.rstrip())
362 362
363 363 if not ui.quiet:
364 364 # aliases
365 365 aliases = ', '.join(key.split('|')[1:])
366 366 if aliases:
367 367 ui.write("\naliases: %s\n" % aliases)
368 368
369 369 # options
370 370 if i[1]:
371 371 option_lists.append(("options", i[1]))
372 372
373 373 else:
374 374 # program name
375 375 if ui.verbose or with_version:
376 376 show_version(ui)
377 377 else:
378 378 ui.status("Mercurial Distributed SCM\n")
379 379 ui.status('\n')
380 380
381 381 # list of commands
382 382 if cmd == "shortlist":
383 383 ui.status('basic commands (use "hg help" '
384 384 'for the full list or option "-v" for details):\n\n')
385 385 elif ui.verbose:
386 386 ui.status('list of commands:\n\n')
387 387 else:
388 388 ui.status('list of commands (use "hg help -v" '
389 389 'to show aliases and global options):\n\n')
390 390
391 391 h = {}
392 392 cmds = {}
393 393 for c, e in table.items():
394 394 f = c.split("|")[0]
395 395 if cmd == "shortlist" and not f.startswith("^"):
396 396 continue
397 397 f = f.lstrip("^")
398 398 if not ui.debugflag and f.startswith("debug"):
399 399 continue
400 400 d = ""
401 401 if e[0].__doc__:
402 402 d = e[0].__doc__.splitlines(0)[0].rstrip()
403 403 h[f] = d
404 404 cmds[f]=c.lstrip("^")
405 405
406 406 fns = h.keys()
407 407 fns.sort()
408 408 m = max(map(len, fns))
409 409 for f in fns:
410 410 if ui.verbose:
411 411 commands = cmds[f].replace("|",", ")
412 412 ui.write(" %s:\n %s\n"%(commands,h[f]))
413 413 else:
414 414 ui.write(' %-*s %s\n' % (m, f, h[f]))
415 415
416 416 # global options
417 417 if ui.verbose:
418 418 option_lists.append(("global options", globalopts))
419 419
420 420 # list all option lists
421 421 opt_output = []
422 422 for title, options in option_lists:
423 423 opt_output.append(("\n%s:\n" % title, None))
424 424 for shortopt, longopt, default, desc in options:
425 425 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
426 426 longopt and " --%s" % longopt),
427 427 "%s%s" % (desc,
428 428 default and " (default: %s)" % default
429 429 or "")))
430 430
431 431 if opt_output:
432 432 opts_len = max([len(line[0]) for line in opt_output if line[1]])
433 433 for first, second in opt_output:
434 434 if second:
435 435 ui.write(" %-*s %s\n" % (opts_len, first, second))
436 436 else:
437 437 ui.write("%s\n" % first)
438 438
439 439 # Commands start here, listed alphabetically
440 440
441 441 def add(ui, repo, *pats, **opts):
442 442 '''add the specified files on the next commit'''
443 443 names = []
444 444 for src, abs, rel, exact in walk(repo, pats, opts):
445 445 if exact:
446 446 names.append(abs)
447 447 elif repo.dirstate.state(abs) == '?':
448 448 ui.status('adding %s\n' % rel)
449 449 names.append(abs)
450 450 repo.add(names)
451 451
452 452 def addremove(ui, repo, *pats, **opts):
453 453 """add all new files, delete all missing files"""
454 454 add, remove = [], []
455 455 for src, abs, rel, exact in walk(repo, pats, opts):
456 456 if src == 'f' and repo.dirstate.state(abs) == '?':
457 457 add.append(abs)
458 458 if not exact: ui.status('adding ', rel, '\n')
459 459 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
460 460 remove.append(abs)
461 461 if not exact: ui.status('removing ', rel, '\n')
462 462 repo.add(add)
463 463 repo.remove(remove)
464 464
465 465 def annotate(ui, repo, *pats, **opts):
466 466 """show changeset information per file line"""
467 467 def getnode(rev):
468 468 return hg.short(repo.changelog.node(rev))
469 469
470 470 def getname(rev):
471 471 try:
472 472 return bcache[rev]
473 473 except KeyError:
474 474 cl = repo.changelog.read(repo.changelog.node(rev))
475 475 name = cl[1]
476 476 f = name.find('@')
477 477 if f >= 0:
478 478 name = name[:f]
479 479 f = name.find('<')
480 480 if f >= 0:
481 481 name = name[f+1:]
482 482 bcache[rev] = name
483 483 return name
484 484
485 485 if not pats:
486 486 raise util.Abort('at least one file name or pattern required')
487 487
488 488 bcache = {}
489 489 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
490 490 if not opts['user'] and not opts['changeset']:
491 491 opts['number'] = 1
492 492
493 493 if opts['rev']:
494 494 node = repo.changelog.lookup(opts['rev'])
495 495 else:
496 496 node = repo.dirstate.parents()[0]
497 497 change = repo.changelog.read(node)
498 498 mmap = repo.manifest.read(change[0])
499 499
500 500 for src, abs, rel, exact in walk(repo, pats, opts):
501 501 if abs not in mmap:
502 502 ui.warn("warning: %s is not in the repository!\n" % rel)
503 503 continue
504 504
505 505 f = repo.file(abs)
506 506 if not opts['text'] and util.binary(f.read(mmap[abs])):
507 507 ui.write("%s: binary file\n" % rel)
508 508 continue
509 509
510 510 lines = f.annotate(mmap[abs])
511 511 pieces = []
512 512
513 513 for o, f in opmap:
514 514 if opts[o]:
515 515 l = [f(n) for n, dummy in lines]
516 516 if l:
517 517 m = max(map(len, l))
518 518 pieces.append(["%*s" % (m, x) for x in l])
519 519
520 520 if pieces:
521 521 for p, l in zip(zip(*pieces), lines):
522 522 ui.write("%s: %s" % (" ".join(p), l[1]))
523 523
524 524 def cat(ui, repo, file1, rev=None, **opts):
525 525 """output the latest or given revision of a file"""
526 526 r = repo.file(relpath(repo, [file1])[0])
527 527 if rev:
528 528 try:
529 529 # assume all revision numbers are for changesets
530 530 n = repo.lookup(rev)
531 531 change = repo.changelog.read(n)
532 532 m = repo.manifest.read(change[0])
533 533 n = m[relpath(repo, [file1])[0]]
534 534 except hg.RepoError, KeyError:
535 535 n = r.lookup(rev)
536 536 else:
537 537 n = r.tip()
538 538 fp = make_file(repo, r, opts['output'], node=n)
539 539 fp.write(r.read(n))
540 540
541 541 def clone(ui, source, dest=None, **opts):
542 542 """make a copy of an existing repository"""
543 543 if dest is None:
544 544 dest = os.path.basename(os.path.normpath(source))
545 545
546 546 if os.path.exists(dest):
547 547 ui.warn("abort: destination '%s' already exists\n" % dest)
548 548 return 1
549 549
550 550 dest = os.path.realpath(dest)
551 551
552 552 class Dircleanup:
553 553 def __init__(self, dir_):
554 554 self.rmtree = shutil.rmtree
555 555 self.dir_ = dir_
556 556 os.mkdir(dir_)
557 557 def close(self):
558 558 self.dir_ = None
559 559 def __del__(self):
560 560 if self.dir_:
561 561 self.rmtree(self.dir_, True)
562 562
563 563 if opts['ssh']:
564 564 ui.setconfig("ui", "ssh", opts['ssh'])
565 565 if opts['remotecmd']:
566 566 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
567 567
568 568 d = Dircleanup(dest)
569 569 source = ui.expandpath(source)
570 570 abspath = source
571 571 other = hg.repository(ui, source)
572 572
573 573 if other.dev() != -1:
574 574 abspath = os.path.abspath(source)
575 575 copyfile = (os.stat(dest).st_dev == other.dev()
576 576 and getattr(os, 'link', None) or shutil.copy2)
577 577 if copyfile is not shutil.copy2:
578 578 ui.note("cloning by hardlink\n")
579 579 # we use a lock here because because we're not nicely ordered
580 580 l = lock.lock(os.path.join(source, ".hg", "lock"))
581 581
582 582 util.copytree(os.path.join(source, ".hg"), os.path.join(dest, ".hg"),
583 583 copyfile)
584 584 try:
585 585 os.unlink(os.path.join(dest, ".hg", "dirstate"))
586 586 except OSError:
587 587 pass
588 588
589 589 repo = hg.repository(ui, dest)
590 590
591 591 else:
592 592 repo = hg.repository(ui, dest, create=1)
593 593 repo.pull(other)
594 594
595 595 f = repo.opener("hgrc", "w")
596 596 f.write("[paths]\n")
597 597 f.write("default = %s\n" % abspath)
598 598
599 599 if not opts['noupdate']:
600 600 update(ui, repo)
601 601
602 602 d.close()
603 603
604 604 def commit(ui, repo, *pats, **opts):
605 605 """commit the specified files or all outstanding changes"""
606 606 if opts['text']:
607 607 ui.warn("Warning: -t and --text is deprecated,"
608 608 " please use -m or --message instead.\n")
609 609 message = opts['message'] or opts['text']
610 610 logfile = opts['logfile']
611 611 if not message and logfile:
612 612 try:
613 613 if logfile == '-':
614 614 message = sys.stdin.read()
615 615 else:
616 616 message = open(logfile).read()
617 617 except IOError, why:
618 618 ui.warn("Can't read commit message %s: %s\n" % (logfile, why))
619 619
620 620 if opts['addremove']:
621 621 addremove(ui, repo, *pats, **opts)
622 622 cwd = repo.getcwd()
623 623 if not pats and cwd:
624 624 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
625 625 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
626 626 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
627 627 pats, opts)
628 628 if pats:
629 c, a, d, u = repo.changes(files = fns, match = match)
629 c, a, d, u = repo.changes(files=fns, match=match)
630 630 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
631 631 else:
632 632 files = []
633 633 repo.commit(files, message, opts['user'], opts['date'], match)
634 634
635 635 def copy(ui, repo, source, dest):
636 636 """mark a file as copied or renamed for the next commit"""
637 637 return repo.copy(*relpath(repo, (source, dest)))
638 638
639 639 def debugcheckstate(ui, repo):
640 640 """validate the correctness of the current dirstate"""
641 641 parent1, parent2 = repo.dirstate.parents()
642 642 repo.dirstate.read()
643 643 dc = repo.dirstate.map
644 644 keys = dc.keys()
645 645 keys.sort()
646 646 m1n = repo.changelog.read(parent1)[0]
647 647 m2n = repo.changelog.read(parent2)[0]
648 648 m1 = repo.manifest.read(m1n)
649 649 m2 = repo.manifest.read(m2n)
650 650 errors = 0
651 651 for f in dc:
652 652 state = repo.dirstate.state(f)
653 653 if state in "nr" and f not in m1:
654 654 ui.warn("%s in state %s, but not in manifest1\n" % (f, state))
655 655 errors += 1
656 656 if state in "a" and f in m1:
657 657 ui.warn("%s in state %s, but also in manifest1\n" % (f, state))
658 658 errors += 1
659 659 if state in "m" and f not in m1 and f not in m2:
660 660 ui.warn("%s in state %s, but not in either manifest\n" %
661 661 (f, state))
662 662 errors += 1
663 663 for f in m1:
664 664 state = repo.dirstate.state(f)
665 665 if state not in "nrm":
666 666 ui.warn("%s in manifest1, but listed as state %s" % (f, state))
667 667 errors += 1
668 668 if errors:
669 669 raise util.Abort(".hg/dirstate inconsistent with current parent's manifest")
670 670
671 671 def debugconfig(ui):
672 672 """show combined config settings from all hgrc files"""
673 673 try:
674 674 repo = hg.repository(ui)
675 675 except: pass
676 676 for section, name, value in ui.walkconfig():
677 677 ui.write('%s.%s=%s\n' % (section, name, value))
678 678
679 679 def debugstate(ui, repo):
680 680 """show the contents of the current dirstate"""
681 681 repo.dirstate.read()
682 682 dc = repo.dirstate.map
683 683 keys = dc.keys()
684 684 keys.sort()
685 685 for file_ in keys:
686 686 ui.write("%c %3o %10d %s %s\n"
687 687 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
688 688 time.strftime("%x %X",
689 689 time.localtime(dc[file_][3])), file_))
690 690
691 691 def debugdata(ui, file_, rev):
692 692 """dump the contents of an data file revision"""
693 693 r = hg.revlog(hg.opener(""), file_[:-2] + ".i", file_)
694 694 ui.write(r.revision(r.lookup(rev)))
695 695
696 696 def debugindex(ui, file_):
697 697 """dump the contents of an index file"""
698 698 r = hg.revlog(hg.opener(""), file_, "")
699 699 ui.write(" rev offset length base linkrev" +
700 700 " nodeid p1 p2\n")
701 701 for i in range(r.count()):
702 702 e = r.index[i]
703 703 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
704 704 i, e[0], e[1], e[2], e[3],
705 705 hg.short(e[6]), hg.short(e[4]), hg.short(e[5])))
706 706
707 707 def debugindexdot(ui, file_):
708 708 """dump an index DAG as a .dot file"""
709 709 r = hg.revlog(hg.opener(""), file_, "")
710 710 ui.write("digraph G {\n")
711 711 for i in range(r.count()):
712 712 e = r.index[i]
713 713 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
714 714 if e[5] != hg.nullid:
715 715 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
716 716 ui.write("}\n")
717 717
718 718 def debugwalk(ui, repo, *pats, **opts):
719 719 """show how files match on given patterns"""
720 720 items = list(walk(repo, pats, opts))
721 721 if not items: return
722 722 fmt = '%%s %%-%ds %%-%ds %%s\n' % (
723 723 max([len(abs) for (src, abs, rel, exact) in items]),
724 724 max([len(rel) for (src, abs, rel, exact) in items]))
725 725 exactly = {True: 'exact', False: ''}
726 726 for src, abs, rel, exact in items:
727 727 ui.write(fmt % (src, abs, rel, exactly[exact]))
728 728
729 729 def diff(ui, repo, *pats, **opts):
730 730 """diff working directory (or selected files)"""
731 731 node1, node2 = None, None
732 732 revs = [repo.lookup(x) for x in opts['rev']]
733 733
734 734 if len(revs) > 0:
735 735 node1 = revs[0]
736 736 if len(revs) > 1:
737 737 node2 = revs[1]
738 738 if len(revs) > 2:
739 739 raise util.Abort("too many revisions to diff")
740 740
741 741 files = []
742 742 match = util.always
743 743 if pats:
744 744 roots, match, results = makewalk(repo, pats, opts)
745 745 for src, abs, rel, exact in results:
746 746 files.append(abs)
747 747
748 748 dodiff(sys.stdout, ui, repo, node1, node2, files, match=match,
749 749 text=opts['text'])
750 750
751 751 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
752 752 node = repo.lookup(changeset)
753 753 prev, other = repo.changelog.parents(node)
754 754 change = repo.changelog.read(node)
755 755
756 756 fp = make_file(repo, repo.changelog, opts['output'],
757 757 node=node, total=total, seqno=seqno,
758 758 revwidth=revwidth)
759 759 if fp != sys.stdout:
760 760 ui.note("%s\n" % fp.name)
761 761
762 762 fp.write("# HG changeset patch\n")
763 763 fp.write("# User %s\n" % change[1])
764 764 fp.write("# Node ID %s\n" % hg.hex(node))
765 765 fp.write("# Parent %s\n" % hg.hex(prev))
766 766 if other != hg.nullid:
767 767 fp.write("# Parent %s\n" % hg.hex(other))
768 768 fp.write(change[4].rstrip())
769 769 fp.write("\n\n")
770 770
771 771 dodiff(fp, ui, repo, prev, node, text=opts['text'])
772 772 if fp != sys.stdout: fp.close()
773 773
774 774 def export(ui, repo, *changesets, **opts):
775 775 """dump the header and diffs for one or more changesets"""
776 776 if not changesets:
777 777 raise util.Abort("export requires at least one changeset")
778 778 seqno = 0
779 779 revs = list(revrange(ui, repo, changesets))
780 780 total = len(revs)
781 781 revwidth = max(len(revs[0]), len(revs[-1]))
782 782 ui.note(len(revs) > 1 and "Exporting patches:\n" or "Exporting patch:\n")
783 783 for cset in revs:
784 784 seqno += 1
785 785 doexport(ui, repo, cset, seqno, total, revwidth, opts)
786 786
787 787 def forget(ui, repo, *pats, **opts):
788 788 """don't add the specified files on the next commit"""
789 789 forget = []
790 790 for src, abs, rel, exact in walk(repo, pats, opts):
791 791 if repo.dirstate.state(abs) == 'a':
792 792 forget.append(abs)
793 793 if not exact: ui.status('forgetting ', rel, '\n')
794 794 repo.forget(forget)
795 795
796 def grep(ui, repo, pattern = None, *pats, **opts):
796 def grep(ui, repo, pattern=None, *pats, **opts):
797 797 """search for a pattern in specified files and revisions"""
798 798 if pattern is None: pattern = opts['regexp']
799 799 if not pattern: raise util.Abort('no pattern to search for')
800 800 reflags = 0
801 801 if opts['ignore_case']: reflags |= re.I
802 802 regexp = re.compile(pattern, reflags)
803 803 sep, end = ':', '\n'
804 804 if opts['null'] or opts['print0']: sep = end = '\0'
805 805
806 806 fcache = {}
807 807 def getfile(fn):
808 808 if fn not in fcache:
809 809 fcache[fn] = repo.file(fn)
810 810 return fcache[fn]
811 811
812 812 def matchlines(body):
813 813 begin = 0
814 814 linenum = 0
815 815 while True:
816 816 match = regexp.search(body, begin)
817 817 if not match: break
818 818 mstart, mend = match.span()
819 819 linenum += body.count('\n', begin, mstart) + 1
820 820 lstart = body.rfind('\n', begin, mstart) + 1 or begin
821 821 lend = body.find('\n', mend)
822 822 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
823 823 begin = lend + 1
824 824
825 825 class linestate:
826 826 def __init__(self, line, linenum, colstart, colend):
827 827 self.line = line
828 828 self.linenum = linenum
829 829 self.colstart = colstart
830 830 self.colend = colend
831 831 def __eq__(self, other): return self.line == other.line
832 832 def __hash__(self): return hash(self.line)
833 833
834 834 matches = {}
835 835 def grepbody(fn, rev, body):
836 836 matches[rev].setdefault(fn, {})
837 837 m = matches[rev][fn]
838 838 for lnum, cstart, cend, line in matchlines(body):
839 839 s = linestate(line, lnum, cstart, cend)
840 840 m[s] = s
841 841
842 842 prev = {}
843 843 def display(fn, rev, states, prevstates):
844 844 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
845 845 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
846 846 for l in diff:
847 847 if incrementing:
848 848 change = ((l in prevstates) and '-') or '+'
849 849 r = rev
850 850 else:
851 851 change = ((l in states) and '-') or '+'
852 852 r = prev[fn]
853 853 ui.write('%s:%s:%s:%s%s\n' % (fn, r, l.linenum, change, l.line))
854 854
855 855 fstate = {}
856 856 for st, rev, fns in walkchangerevs(ui, repo, repo.getcwd(), pats, opts):
857 857 if st == 'window':
858 858 incrementing = rev
859 859 matches.clear()
860 860 elif st == 'add':
861 861 change = repo.changelog.read(repo.lookup(str(rev)))
862 862 mf = repo.manifest.read(change[0])
863 863 matches[rev] = {}
864 864 for fn in fns:
865 865 fstate.setdefault(fn, {})
866 866 try:
867 867 grepbody(fn, rev, getfile(fn).read(mf[fn]))
868 868 except KeyError:
869 869 pass
870 870 elif st == 'iter':
871 871 states = matches[rev].items()
872 872 states.sort()
873 873 for fn, m in states:
874 874 if incrementing or fstate[fn]:
875 875 display(fn, rev, m, fstate[fn])
876 876 fstate[fn] = m
877 877 prev[fn] = rev
878 878
879 879 if not incrementing:
880 880 fstate = fstate.items()
881 881 fstate.sort()
882 882 for fn, state in fstate:
883 883 display(fn, rev, {}, state)
884 884
885 885 def heads(ui, repo, **opts):
886 886 """show current repository heads"""
887 887 heads = repo.changelog.heads()
888 888 br = None
889 889 if opts['branches']:
890 890 br = repo.branchlookup(heads)
891 891 for n in repo.changelog.heads():
892 892 show_changeset(ui, repo, changenode=n, brinfo=br)
893 893
894 894 def identify(ui, repo):
895 895 """print information about the working copy"""
896 896 parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
897 897 if not parents:
898 898 ui.write("unknown\n")
899 899 return
900 900
901 901 hexfunc = ui.verbose and hg.hex or hg.short
902 902 (c, a, d, u) = repo.changes()
903 903 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
904 904 (c or a or d) and "+" or "")]
905 905
906 906 if not ui.quiet:
907 907 # multiple tags for a single parent separated by '/'
908 908 parenttags = ['/'.join(tags)
909 909 for tags in map(repo.nodetags, parents) if tags]
910 910 # tags for multiple parents separated by ' + '
911 911 if parenttags:
912 912 output.append(' + '.join(parenttags))
913 913
914 914 ui.write("%s\n" % ' '.join(output))
915 915
916 916 def import_(ui, repo, patch1, *patches, **opts):
917 917 """import an ordered set of patches"""
918 918 patches = (patch1,) + patches
919 919
920 920 if not opts['force']:
921 921 (c, a, d, u) = repo.changes()
922 922 if c or a or d:
923 923 ui.warn("abort: outstanding uncommitted changes!\n")
924 924 return 1
925 925
926 926 d = opts["base"]
927 927 strip = opts["strip"]
928 928
929 929 for patch in patches:
930 930 ui.status("applying %s\n" % patch)
931 931 pf = os.path.join(d, patch)
932 932
933 933 message = []
934 934 user = None
935 935 hgpatch = False
936 936 for line in file(pf):
937 937 line = line.rstrip()
938 938 if line.startswith("--- ") or line.startswith("diff -r"):
939 939 break
940 940 elif hgpatch:
941 941 # parse values when importing the result of an hg export
942 942 if line.startswith("# User "):
943 943 user = line[7:]
944 944 ui.debug('User: %s\n' % user)
945 945 elif not line.startswith("# ") and line:
946 946 message.append(line)
947 947 hgpatch = False
948 948 elif line == '# HG changeset patch':
949 949 hgpatch = True
950 950 message = [] # We may have collected garbage
951 951 else:
952 952 message.append(line)
953 953
954 954 # make sure message isn't empty
955 955 if not message:
956 956 message = "imported patch %s\n" % patch
957 957 else:
958 958 message = "%s\n" % '\n'.join(message)
959 959 ui.debug('message:\n%s\n' % message)
960 960
961 961 f = os.popen("patch -p%d < '%s'" % (strip, pf))
962 962 files = []
963 963 for l in f.read().splitlines():
964 964 l.rstrip('\r\n');
965 965 ui.status("%s\n" % l)
966 966 if l.startswith('patching file '):
967 967 pf = l[14:]
968 968 if pf not in files:
969 969 files.append(pf)
970 970 patcherr = f.close()
971 971 if patcherr:
972 972 raise util.Abort("patch failed")
973 973
974 974 if len(files) > 0:
975 975 addremove(ui, repo, *files)
976 976 repo.commit(files, message, user)
977 977
978 978 def incoming(ui, repo, source="default"):
979 979 """show new changesets found in source"""
980 980 source = ui.expandpath(source)
981 981 other = hg.repository(ui, source)
982 982 if not other.local():
983 983 ui.warn("abort: incoming doesn't work for remote"
984 984 + " repositories yet, sorry!\n")
985 985 return 1
986 986 o = repo.findincoming(other)
987 987 if not o:
988 988 return
989 989 o = other.newer(o)
990 990 o.reverse()
991 991 for n in o:
992 992 show_changeset(ui, other, changenode=n)
993 993
994 994 def init(ui, dest="."):
995 995 """create a new repository in the given directory"""
996 996 if not os.path.exists(dest):
997 997 os.mkdir(dest)
998 998 hg.repository(ui, dest, create=1)
999 999
1000 1000 def locate(ui, repo, *pats, **opts):
1001 1001 """locate files matching specific patterns"""
1002 1002 end = '\n'
1003 1003 if opts['print0']: end = '\0'
1004 1004
1005 1005 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1006 1006 if repo.dirstate.state(abs) == '?': continue
1007 1007 if opts['fullpath']:
1008 1008 ui.write(os.path.join(repo.root, abs), end)
1009 1009 else:
1010 1010 ui.write(rel, end)
1011 1011
1012 1012 def log(ui, repo, *pats, **opts):
1013 1013 """show revision history of entire repository or files"""
1014 1014 class dui:
1015 1015 # Implement and delegate some ui protocol. Save hunks of
1016 1016 # output for later display in the desired order.
1017 1017 def __init__(self, ui):
1018 1018 self.ui = ui
1019 1019 self.hunk = {}
1020 1020 def bump(self, rev):
1021 1021 self.rev = rev
1022 1022 self.hunk[rev] = []
1023 1023 def note(self, *args):
1024 1024 if self.verbose: self.write(*args)
1025 1025 def status(self, *args):
1026 1026 if not self.quiet: self.write(*args)
1027 1027 def write(self, *args):
1028 1028 self.hunk[self.rev].append(args)
1029 1029 def __getattr__(self, key):
1030 1030 return getattr(self.ui, key)
1031 1031 cwd = repo.getcwd()
1032 1032 if not pats and cwd:
1033 1033 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1034 1034 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1035 1035 for st, rev, fns in walkchangerevs(ui, repo, (pats and cwd) or '', pats,
1036 1036 opts):
1037 1037 if st == 'window':
1038 1038 du = dui(ui)
1039 1039 elif st == 'add':
1040 1040 du.bump(rev)
1041 1041 show_changeset(du, repo, rev)
1042 1042 if opts['patch']:
1043 1043 changenode = repo.changelog.node(rev)
1044 1044 prev, other = repo.changelog.parents(changenode)
1045 1045 dodiff(du, du, repo, prev, changenode, fns)
1046 1046 du.write("\n\n")
1047 1047 elif st == 'iter':
1048 1048 for args in du.hunk[rev]:
1049 1049 ui.write(*args)
1050 1050
1051 1051 def manifest(ui, repo, rev=None):
1052 1052 """output the latest or given revision of the project manifest"""
1053 1053 if rev:
1054 1054 try:
1055 1055 # assume all revision numbers are for changesets
1056 1056 n = repo.lookup(rev)
1057 1057 change = repo.changelog.read(n)
1058 1058 n = change[0]
1059 1059 except hg.RepoError:
1060 1060 n = repo.manifest.lookup(rev)
1061 1061 else:
1062 1062 n = repo.manifest.tip()
1063 1063 m = repo.manifest.read(n)
1064 1064 mf = repo.manifest.readflags(n)
1065 1065 files = m.keys()
1066 1066 files.sort()
1067 1067
1068 1068 for f in files:
1069 1069 ui.write("%40s %3s %s\n" % (hg.hex(m[f]), mf[f] and "755" or "644", f))
1070 1070
1071 1071 def outgoing(ui, repo, dest="default-push"):
1072 1072 """show changesets not found in destination"""
1073 1073 dest = ui.expandpath(dest)
1074 1074 other = hg.repository(ui, dest)
1075 1075 o = repo.findoutgoing(other)
1076 1076 o = repo.newer(o)
1077 1077 o.reverse()
1078 1078 for n in o:
1079 1079 show_changeset(ui, repo, changenode=n)
1080 1080
1081 1081 def parents(ui, repo, rev=None):
1082 1082 """show the parents of the working dir or revision"""
1083 1083 if rev:
1084 1084 p = repo.changelog.parents(repo.lookup(rev))
1085 1085 else:
1086 1086 p = repo.dirstate.parents()
1087 1087
1088 1088 for n in p:
1089 1089 if n != hg.nullid:
1090 1090 show_changeset(ui, repo, changenode=n)
1091 1091
1092 def paths(ui, search = None):
1092 def paths(ui, search=None):
1093 1093 """show definition of symbolic path names"""
1094 1094 try:
1095 1095 repo = hg.repository(ui=ui)
1096 1096 except:
1097 1097 pass
1098 1098
1099 1099 if search:
1100 1100 for name, path in ui.configitems("paths"):
1101 1101 if name == search:
1102 1102 ui.write("%s\n" % path)
1103 1103 return
1104 1104 ui.warn("not found!\n")
1105 1105 return 1
1106 1106 else:
1107 1107 for name, path in ui.configitems("paths"):
1108 1108 ui.write("%s = %s\n" % (name, path))
1109 1109
1110 1110 def pull(ui, repo, source="default", **opts):
1111 1111 """pull changes from the specified source"""
1112 1112 source = ui.expandpath(source)
1113 1113 ui.status('pulling from %s\n' % (source))
1114 1114
1115 1115 if opts['ssh']:
1116 1116 ui.setconfig("ui", "ssh", opts['ssh'])
1117 1117 if opts['remotecmd']:
1118 1118 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1119 1119
1120 1120 other = hg.repository(ui, source)
1121 1121 r = repo.pull(other)
1122 1122 if not r:
1123 1123 if opts['update']:
1124 1124 return update(ui, repo)
1125 1125 else:
1126 1126 ui.status("(run 'hg update' to get a working copy)\n")
1127 1127
1128 1128 return r
1129 1129
1130 1130 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1131 1131 """push changes to the specified destination"""
1132 1132 dest = ui.expandpath(dest)
1133 1133 ui.status('pushing to %s\n' % (dest))
1134 1134
1135 1135 if ssh:
1136 1136 ui.setconfig("ui", "ssh", ssh)
1137 1137 if remotecmd:
1138 1138 ui.setconfig("ui", "remotecmd", remotecmd)
1139 1139
1140 1140 other = hg.repository(ui, dest)
1141 1141 r = repo.push(other, force)
1142 1142 return r
1143 1143
1144 1144 def rawcommit(ui, repo, *flist, **rc):
1145 1145 "raw commit interface"
1146 1146 if rc['text']:
1147 1147 ui.warn("Warning: -t and --text is deprecated,"
1148 1148 " please use -m or --message instead.\n")
1149 1149 message = rc['message'] or rc['text']
1150 1150 if not message and rc['logfile']:
1151 1151 try:
1152 1152 message = open(rc['logfile']).read()
1153 1153 except IOError:
1154 1154 pass
1155 1155 if not message and not rc['logfile']:
1156 1156 ui.warn("abort: missing commit message\n")
1157 1157 return 1
1158 1158
1159 1159 files = relpath(repo, list(flist))
1160 1160 if rc['files']:
1161 1161 files += open(rc['files']).read().splitlines()
1162 1162
1163 1163 rc['parent'] = map(repo.lookup, rc['parent'])
1164 1164
1165 1165 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1166 1166
1167 1167 def recover(ui, repo):
1168 1168 """roll back an interrupted transaction"""
1169 1169 repo.recover()
1170 1170
1171 1171 def remove(ui, repo, file1, *files):
1172 1172 """remove the specified files on the next commit"""
1173 1173 repo.remove(relpath(repo, (file1,) + files))
1174 1174
1175 1175 def revert(ui, repo, *names, **opts):
1176 1176 """revert modified files or dirs back to their unmodified states"""
1177 1177 node = opts['rev'] and repo.lookup(opts['rev']) or \
1178 1178 repo.dirstate.parents()[0]
1179 1179 root = os.path.realpath(repo.root)
1180 1180
1181 1181 def trimpath(p):
1182 1182 p = os.path.realpath(p)
1183 1183 if p.startswith(root):
1184 1184 rest = p[len(root):]
1185 1185 if not rest:
1186 1186 return rest
1187 1187 if p.startswith(os.sep):
1188 1188 return rest[1:]
1189 1189 return p
1190 1190
1191 1191 relnames = map(trimpath, names or [os.getcwd()])
1192 1192 chosen = {}
1193 1193
1194 1194 def choose(name):
1195 1195 def body(name):
1196 1196 for r in relnames:
1197 1197 if not name.startswith(r):
1198 1198 continue
1199 1199 rest = name[len(r):]
1200 1200 if not rest:
1201 1201 return r, True
1202 1202 depth = rest.count(os.sep)
1203 1203 if not r:
1204 1204 if depth == 0 or not opts['nonrecursive']:
1205 1205 return r, True
1206 1206 elif rest[0] == os.sep:
1207 1207 if depth == 1 or not opts['nonrecursive']:
1208 1208 return r, True
1209 1209 return None, False
1210 1210 relname, ret = body(name)
1211 1211 if ret:
1212 1212 chosen[relname] = 1
1213 1213 return ret
1214 1214
1215 1215 r = repo.update(node, False, True, choose, False)
1216 1216 for n in relnames:
1217 1217 if n not in chosen:
1218 1218 ui.warn('error: no matches for %s\n' % n)
1219 1219 r = 1
1220 1220 sys.stdout.flush()
1221 1221 return r
1222 1222
1223 1223 def root(ui, repo):
1224 1224 """print the root (top) of the current working dir"""
1225 1225 ui.write(repo.root + "\n")
1226 1226
1227 1227 def serve(ui, repo, **opts):
1228 1228 """export the repository via HTTP"""
1229 1229
1230 1230 if opts["stdio"]:
1231 1231 fin, fout = sys.stdin, sys.stdout
1232 1232 sys.stdout = sys.stderr
1233 1233
1234 1234 def getarg():
1235 1235 argline = fin.readline()[:-1]
1236 1236 arg, l = argline.split()
1237 1237 val = fin.read(int(l))
1238 1238 return arg, val
1239 1239 def respond(v):
1240 1240 fout.write("%d\n" % len(v))
1241 1241 fout.write(v)
1242 1242 fout.flush()
1243 1243
1244 1244 lock = None
1245 1245
1246 1246 while 1:
1247 1247 cmd = fin.readline()[:-1]
1248 1248 if cmd == '':
1249 1249 return
1250 1250 if cmd == "heads":
1251 1251 h = repo.heads()
1252 1252 respond(" ".join(map(hg.hex, h)) + "\n")
1253 1253 if cmd == "lock":
1254 1254 lock = repo.lock()
1255 1255 respond("")
1256 1256 if cmd == "unlock":
1257 1257 if lock:
1258 1258 lock.release()
1259 1259 lock = None
1260 1260 respond("")
1261 1261 elif cmd == "branches":
1262 1262 arg, nodes = getarg()
1263 1263 nodes = map(hg.bin, nodes.split(" "))
1264 1264 r = []
1265 1265 for b in repo.branches(nodes):
1266 1266 r.append(" ".join(map(hg.hex, b)) + "\n")
1267 1267 respond("".join(r))
1268 1268 elif cmd == "between":
1269 1269 arg, pairs = getarg()
1270 1270 pairs = [map(hg.bin, p.split("-")) for p in pairs.split(" ")]
1271 1271 r = []
1272 1272 for b in repo.between(pairs):
1273 1273 r.append(" ".join(map(hg.hex, b)) + "\n")
1274 1274 respond("".join(r))
1275 1275 elif cmd == "changegroup":
1276 1276 nodes = []
1277 1277 arg, roots = getarg()
1278 1278 nodes = map(hg.bin, roots.split(" "))
1279 1279
1280 1280 cg = repo.changegroup(nodes)
1281 1281 while 1:
1282 1282 d = cg.read(4096)
1283 1283 if not d:
1284 1284 break
1285 1285 fout.write(d)
1286 1286
1287 1287 fout.flush()
1288 1288
1289 1289 elif cmd == "addchangegroup":
1290 1290 if not lock:
1291 1291 respond("not locked")
1292 1292 continue
1293 1293 respond("")
1294 1294
1295 1295 r = repo.addchangegroup(fin)
1296 1296 respond("")
1297 1297
1298 1298 optlist = "name templates style address port ipv6 accesslog errorlog"
1299 1299 for o in optlist.split():
1300 1300 if opts[o]:
1301 1301 ui.setconfig("web", o, opts[o])
1302 1302
1303 1303 httpd = hgweb.create_server(repo)
1304 1304
1305 1305 if ui.verbose:
1306 1306 addr, port = httpd.socket.getsockname()
1307 1307 if addr == '0.0.0.0':
1308 1308 addr = socket.gethostname()
1309 1309 else:
1310 1310 try:
1311 1311 addr = socket.gethostbyaddr(addr)[0]
1312 1312 except socket.error:
1313 1313 pass
1314 1314 if port != 80:
1315 1315 ui.status('listening at http://%s:%d/\n' % (addr, port))
1316 1316 else:
1317 1317 ui.status('listening at http://%s/\n' % addr)
1318 1318 httpd.serve_forever()
1319 1319
1320 1320 def status(ui, repo, *pats, **opts):
1321 1321 '''show changed files in the working directory
1322 1322
1323 1323 M = modified
1324 1324 A = added
1325 1325 R = removed
1326 1326 ? = not tracked
1327 1327 '''
1328 1328
1329 1329 cwd = repo.getcwd()
1330 1330 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1331 1331 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1332 1332 for n in repo.changes(files=files, match=matchfn)]
1333 1333
1334 1334 changetypes = [('modified', 'M', c),
1335 1335 ('added', 'A', a),
1336 1336 ('removed', 'R', d),
1337 1337 ('unknown', '?', u)]
1338 1338
1339 1339 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1340 1340 or changetypes):
1341 1341 for f in changes:
1342 1342 ui.write("%s %s\n" % (char, f))
1343 1343
1344 1344 def tag(ui, repo, name, rev=None, **opts):
1345 1345 """add a tag for the current tip or a given revision"""
1346 1346 if opts['text']:
1347 1347 ui.warn("Warning: -t and --text is deprecated,"
1348 1348 " please use -m or --message instead.\n")
1349 1349 if name == "tip":
1350 1350 ui.warn("abort: 'tip' is a reserved name!\n")
1351 1351 return -1
1352 1352 if rev:
1353 1353 r = hg.hex(repo.lookup(rev))
1354 1354 else:
1355 1355 r = hg.hex(repo.changelog.tip())
1356 1356
1357 1357 if name.find(revrangesep) >= 0:
1358 1358 ui.warn("abort: '%s' cannot be used in a tag name\n" % revrangesep)
1359 1359 return -1
1360 1360
1361 1361 if opts['local']:
1362 1362 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1363 1363 return
1364 1364
1365 1365 (c, a, d, u) = repo.changes()
1366 1366 for x in (c, a, d, u):
1367 1367 if ".hgtags" in x:
1368 1368 ui.warn("abort: working copy of .hgtags is changed!\n")
1369 1369 ui.status("(please commit .hgtags manually)\n")
1370 1370 return -1
1371 1371
1372 1372 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1373 1373 if repo.dirstate.state(".hgtags") == '?':
1374 1374 repo.add([".hgtags"])
1375 1375
1376 1376 message = (opts['message'] or opts['text'] or
1377 1377 "Added tag %s for changeset %s" % (name, r))
1378 1378 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1379 1379
1380 1380 def tags(ui, repo):
1381 1381 """list repository tags"""
1382 1382
1383 1383 l = repo.tagslist()
1384 1384 l.reverse()
1385 1385 for t, n in l:
1386 1386 try:
1387 1387 r = "%5d:%s" % (repo.changelog.rev(n), hg.hex(n))
1388 1388 except KeyError:
1389 1389 r = " ?:?"
1390 1390 ui.write("%-30s %s\n" % (t, r))
1391 1391
1392 1392 def tip(ui, repo):
1393 1393 """show the tip revision"""
1394 1394 n = repo.changelog.tip()
1395 1395 show_changeset(ui, repo, changenode=n)
1396 1396
1397 1397 def undo(ui, repo):
1398 1398 """undo the last commit or pull
1399 1399
1400 1400 Roll back the last pull or commit transaction on the
1401 1401 repository, restoring the project to its earlier state.
1402 1402
1403 1403 This command should be used with care. There is only one level of
1404 1404 undo and there is no redo.
1405 1405
1406 1406 This command is not intended for use on public repositories. Once
1407 1407 a change is visible for pull by other users, undoing it locally is
1408 1408 ineffective.
1409 1409 """
1410 1410 repo.undo()
1411 1411
1412 1412 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1413 1413 '''update or merge working directory
1414 1414
1415 1415 If there are no outstanding changes in the working directory and
1416 1416 there is a linear relationship between the current version and the
1417 1417 requested version, the result is the requested version.
1418 1418
1419 1419 Otherwise the result is a merge between the contents of the
1420 1420 current working directory and the requested version. Files that
1421 1421 changed between either parent are marked as changed for the next
1422 1422 commit and a commit must be performed before any further updates
1423 1423 are allowed.
1424 1424 '''
1425 1425 if branch:
1426 1426 br = repo.branchlookup(branch=branch)
1427 1427 found = []
1428 1428 for x in br:
1429 1429 if branch in br[x]:
1430 1430 found.append(x)
1431 1431 if len(found) > 1:
1432 1432 ui.warn("Found multiple heads for %s\n" % branch)
1433 1433 for x in found:
1434 1434 show_changeset(ui, repo, changenode=x, brinfo=br)
1435 1435 return 1
1436 1436 if len(found) == 1:
1437 1437 node = found[0]
1438 1438 ui.warn("Using head %s for branch %s\n" % (hg.short(node), branch))
1439 1439 else:
1440 1440 ui.warn("branch %s not found\n" % (branch))
1441 1441 return 1
1442 1442 else:
1443 1443 node = node and repo.lookup(node) or repo.changelog.tip()
1444 1444 return repo.update(node, allow=merge, force=clean)
1445 1445
1446 1446 def verify(ui, repo):
1447 1447 """verify the integrity of the repository"""
1448 1448 return repo.verify()
1449 1449
1450 1450 # Command options and aliases are listed here, alphabetically
1451 1451
1452 1452 table = {
1453 1453 "^add":
1454 1454 (add,
1455 1455 [('I', 'include', [], 'include path in search'),
1456 1456 ('X', 'exclude', [], 'exclude path from search')],
1457 1457 "hg add [OPTION]... [FILE]..."),
1458 1458 "addremove":
1459 1459 (addremove,
1460 1460 [('I', 'include', [], 'include path in search'),
1461 1461 ('X', 'exclude', [], 'exclude path from search')],
1462 1462 "hg addremove [OPTION]... [FILE]..."),
1463 1463 "^annotate":
1464 1464 (annotate,
1465 1465 [('r', 'rev', '', 'revision'),
1466 1466 ('a', 'text', None, 'treat all files as text'),
1467 1467 ('u', 'user', None, 'show user'),
1468 1468 ('n', 'number', None, 'show revision number'),
1469 1469 ('c', 'changeset', None, 'show changeset'),
1470 1470 ('I', 'include', [], 'include path in search'),
1471 1471 ('X', 'exclude', [], 'exclude path from search')],
1472 1472 'hg annotate [OPTION]... FILE...'),
1473 1473 "cat":
1474 1474 (cat,
1475 1475 [('o', 'output', "", 'output to file')],
1476 1476 'hg cat [-o OUTFILE] FILE [REV]'),
1477 1477 "^clone":
1478 1478 (clone,
1479 1479 [('U', 'noupdate', None, 'skip update after cloning'),
1480 1480 ('e', 'ssh', "", 'ssh command'),
1481 1481 ('', 'remotecmd', "", 'remote hg command')],
1482 1482 'hg clone [OPTION]... SOURCE [DEST]'),
1483 1483 "^commit|ci":
1484 1484 (commit,
1485 1485 [('A', 'addremove', None, 'run add/remove during commit'),
1486 1486 ('I', 'include', [], 'include path in search'),
1487 1487 ('X', 'exclude', [], 'exclude path from search'),
1488 1488 ('m', 'message', "", 'commit message'),
1489 1489 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1490 1490 ('l', 'logfile', "", 'commit message file'),
1491 1491 ('d', 'date', "", 'date code'),
1492 1492 ('u', 'user', "", 'user')],
1493 1493 'hg commit [OPTION]... [FILE]...'),
1494 1494 "copy": (copy, [], 'hg copy SOURCE DEST'),
1495 1495 "debugcheckstate": (debugcheckstate, [], 'debugcheckstate'),
1496 1496 "debugconfig": (debugconfig, [], 'debugconfig'),
1497 1497 "debugstate": (debugstate, [], 'debugstate'),
1498 1498 "debugdata": (debugdata, [], 'debugdata FILE REV'),
1499 1499 "debugindex": (debugindex, [], 'debugindex FILE'),
1500 1500 "debugindexdot": (debugindexdot, [], 'debugindexdot FILE'),
1501 1501 "debugwalk":
1502 1502 (debugwalk,
1503 1503 [('I', 'include', [], 'include path in search'),
1504 1504 ('X', 'exclude', [], 'exclude path from search')],
1505 1505 'debugwalk [OPTION]... [FILE]...'),
1506 1506 "^diff":
1507 1507 (diff,
1508 1508 [('r', 'rev', [], 'revision'),
1509 1509 ('a', 'text', None, 'treat all files as text'),
1510 1510 ('I', 'include', [], 'include path in search'),
1511 1511 ('X', 'exclude', [], 'exclude path from search')],
1512 1512 'hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...'),
1513 1513 "^export":
1514 1514 (export,
1515 1515 [('o', 'output', "", 'output to file'),
1516 1516 ('a', 'text', None, 'treat all files as text')],
1517 1517 "hg export [-a] [-o OUTFILE] REV..."),
1518 1518 "forget":
1519 1519 (forget,
1520 1520 [('I', 'include', [], 'include path in search'),
1521 1521 ('X', 'exclude', [], 'exclude path from search')],
1522 1522 "hg forget [OPTION]... FILE..."),
1523 1523 "grep": (grep,
1524 1524 [('0', 'print0', None, 'terminate file names with NUL'),
1525 1525 ('I', 'include', [], 'include path in search'),
1526 1526 ('X', 'exclude', [], 'include path in search'),
1527 1527 ('Z', 'null', None, 'terminate file names with NUL'),
1528 1528 ('a', 'all-revs', '', 'search all revs'),
1529 1529 ('e', 'regexp', '', 'pattern to search for'),
1530 1530 ('f', 'full-path', None, 'print complete paths'),
1531 1531 ('i', 'ignore-case', None, 'ignore case when matching'),
1532 1532 ('l', 'files-with-matches', None, 'print names of files with matches'),
1533 1533 ('n', 'line-number', '', 'print line numbers'),
1534 1534 ('r', 'rev', [], 'search in revision rev'),
1535 1535 ('s', 'no-messages', None, 'do not print error messages'),
1536 1536 ('v', 'invert-match', None, 'select non-matching lines')],
1537 1537 "hg grep [options] [pat] [files]"),
1538 1538 "heads":
1539 1539 (heads,
1540 1540 [('b', 'branches', None, 'find branch info')],
1541 1541 'hg heads [-b]'),
1542 1542 "help": (help_, [], 'hg help [COMMAND]'),
1543 1543 "identify|id": (identify, [], 'hg identify'),
1544 1544 "import|patch":
1545 1545 (import_,
1546 1546 [('p', 'strip', 1, 'path strip'),
1547 1547 ('f', 'force', None, 'skip check for outstanding changes'),
1548 1548 ('b', 'base', "", 'base path')],
1549 1549 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
1550 1550 "incoming|in": (incoming, [], 'hg incoming [SOURCE]'),
1551 1551 "^init": (init, [], 'hg init [DEST]'),
1552 1552 "locate":
1553 1553 (locate,
1554 1554 [('r', 'rev', '', 'revision'),
1555 1555 ('0', 'print0', None, 'end records with NUL'),
1556 1556 ('f', 'fullpath', None, 'print complete paths'),
1557 1557 ('I', 'include', [], 'include path in search'),
1558 1558 ('X', 'exclude', [], 'exclude path from search')],
1559 1559 'hg locate [OPTION]... [PATTERN]...'),
1560 1560 "^log|history":
1561 1561 (log,
1562 1562 [('I', 'include', [], 'include path in search'),
1563 1563 ('X', 'exclude', [], 'exclude path from search'),
1564 1564 ('r', 'rev', [], 'revision'),
1565 1565 ('p', 'patch', None, 'show patch')],
1566 1566 'hg log [-I] [-X] [-r REV]... [-p] [FILE]'),
1567 1567 "manifest": (manifest, [], 'hg manifest [REV]'),
1568 1568 "outgoing|out": (outgoing, [], 'hg outgoing [DEST]'),
1569 1569 "parents": (parents, [], 'hg parents [REV]'),
1570 1570 "paths": (paths, [], 'hg paths [NAME]'),
1571 1571 "^pull":
1572 1572 (pull,
1573 1573 [('u', 'update', None, 'update working directory'),
1574 1574 ('e', 'ssh', "", 'ssh command'),
1575 1575 ('', 'remotecmd', "", 'remote hg command')],
1576 1576 'hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]'),
1577 1577 "^push":
1578 1578 (push,
1579 1579 [('f', 'force', None, 'force push'),
1580 1580 ('e', 'ssh', "", 'ssh command'),
1581 1581 ('', 'remotecmd', "", 'remote hg command')],
1582 1582 'hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]'),
1583 1583 "rawcommit":
1584 1584 (rawcommit,
1585 1585 [('p', 'parent', [], 'parent'),
1586 1586 ('d', 'date', "", 'date code'),
1587 1587 ('u', 'user', "", 'user'),
1588 1588 ('F', 'files', "", 'file list'),
1589 1589 ('m', 'message', "", 'commit message'),
1590 1590 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1591 1591 ('l', 'logfile', "", 'commit message file')],
1592 1592 'hg rawcommit [OPTION]... [FILE]...'),
1593 1593 "recover": (recover, [], "hg recover"),
1594 1594 "^remove|rm": (remove, [], "hg remove FILE..."),
1595 1595 "^revert":
1596 1596 (revert,
1597 1597 [("n", "nonrecursive", None, "don't recurse into subdirs"),
1598 1598 ("r", "rev", "", "revision")],
1599 1599 "hg revert [-n] [-r REV] [NAME]..."),
1600 1600 "root": (root, [], "hg root"),
1601 1601 "^serve":
1602 1602 (serve,
1603 1603 [('A', 'accesslog', '', 'access log file'),
1604 1604 ('E', 'errorlog', '', 'error log file'),
1605 1605 ('p', 'port', 0, 'listen port'),
1606 1606 ('a', 'address', '', 'interface address'),
1607 1607 ('n', 'name', "", 'repository name'),
1608 1608 ('', 'stdio', None, 'for remote clients'),
1609 1609 ('t', 'templates', "", 'template directory'),
1610 1610 ('', 'style', "", 'template style'),
1611 1611 ('6', 'ipv6', None, 'use IPv6 in addition to IPv4')],
1612 1612 "hg serve [OPTION]..."),
1613 1613 "^status":
1614 1614 (status,
1615 1615 [('m', 'modified', None, 'show only modified files'),
1616 1616 ('a', 'added', None, 'show only added files'),
1617 1617 ('r', 'removed', None, 'show only removed files'),
1618 1618 ('u', 'unknown', None, 'show only unknown (not tracked) files'),
1619 1619 ('I', 'include', [], 'include path in search'),
1620 1620 ('X', 'exclude', [], 'exclude path from search')],
1621 1621 "hg status [OPTION]... [FILE]..."),
1622 1622 "tag":
1623 1623 (tag,
1624 1624 [('l', 'local', None, 'make the tag local'),
1625 1625 ('m', 'message', "", 'commit message'),
1626 1626 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1627 1627 ('d', 'date', "", 'date code'),
1628 1628 ('u', 'user', "", 'user')],
1629 1629 'hg tag [OPTION]... NAME [REV]'),
1630 1630 "tags": (tags, [], 'hg tags'),
1631 1631 "tip": (tip, [], 'hg tip'),
1632 1632 "undo": (undo, [], 'hg undo'),
1633 1633 "^update|up|checkout|co":
1634 1634 (update,
1635 1635 [('b', 'branch', "", 'checkout the head of a specific branch'),
1636 1636 ('m', 'merge', None, 'allow merging of conflicts'),
1637 1637 ('C', 'clean', None, 'overwrite locally modified files')],
1638 1638 'hg update [-b TAG] [-m] [-C] [REV]'),
1639 1639 "verify": (verify, [], 'hg verify'),
1640 1640 "version": (show_version, [], 'hg version'),
1641 1641 }
1642 1642
1643 1643 globalopts = [
1644 1644 ('R', 'repository', "", 'repository root directory'),
1645 1645 ('', 'cwd', '', 'change working directory'),
1646 1646 ('y', 'noninteractive', None, 'run non-interactively'),
1647 1647 ('q', 'quiet', None, 'quiet mode'),
1648 1648 ('v', 'verbose', None, 'verbose mode'),
1649 1649 ('', 'debug', None, 'debug mode'),
1650 1650 ('', 'traceback', None, 'print traceback on exception'),
1651 1651 ('', 'time', None, 'time how long the command takes'),
1652 1652 ('', 'profile', None, 'profile'),
1653 1653 ('', 'version', None, 'output version information and exit'),
1654 1654 ('h', 'help', None, 'display help and exit'),
1655 1655 ]
1656 1656
1657 1657 norepo = "clone init version help debugconfig debugdata" + \
1658 1658 " debugindex debugindexdot paths"
1659 1659
1660 1660 def find(cmd):
1661 1661 for e in table.keys():
1662 1662 if re.match("(%s)$" % e, cmd):
1663 1663 return e, table[e]
1664 1664
1665 1665 raise UnknownCommand(cmd)
1666 1666
1667 1667 class SignalInterrupt(Exception):
1668 1668 """Exception raised on SIGTERM and SIGHUP."""
1669 1669
1670 1670 def catchterm(*args):
1671 1671 raise SignalInterrupt
1672 1672
1673 1673 def run():
1674 1674 sys.exit(dispatch(sys.argv[1:]))
1675 1675
1676 1676 class ParseError(Exception):
1677 1677 """Exception raised on errors in parsing the command line."""
1678 1678
1679 1679 def parse(args):
1680 1680 options = {}
1681 1681 cmdoptions = {}
1682 1682
1683 1683 try:
1684 1684 args = fancyopts.fancyopts(args, globalopts, options)
1685 1685 except fancyopts.getopt.GetoptError, inst:
1686 1686 raise ParseError(None, inst)
1687 1687
1688 1688 if args:
1689 1689 cmd, args = args[0], args[1:]
1690 1690 i = find(cmd)[1]
1691 1691 c = list(i[1])
1692 1692 else:
1693 1693 cmd = None
1694 1694 c = []
1695 1695
1696 1696 # combine global options into local
1697 1697 for o in globalopts:
1698 1698 c.append((o[0], o[1], options[o[1]], o[3]))
1699 1699
1700 1700 try:
1701 1701 args = fancyopts.fancyopts(args, c, cmdoptions)
1702 1702 except fancyopts.getopt.GetoptError, inst:
1703 1703 raise ParseError(cmd, inst)
1704 1704
1705 1705 # separate global options back out
1706 1706 for o in globalopts:
1707 1707 n = o[1]
1708 1708 options[n] = cmdoptions[n]
1709 1709 del cmdoptions[n]
1710 1710
1711 1711 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
1712 1712
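# illustrative sketch (not from the original source): parse() above, given a
# command line roughly like ["log", "-v", "-r", "tip"], returns something like
#   ('log', <the log function>, [], {'verbose': 1, ...}, {'rev': ['tip'], ...})
# i.e. (command name, handler, remaining args, global opts, command opts)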
1713 1713 def dispatch(args):
1714 1714 signal.signal(signal.SIGTERM, catchterm)
1715 1715 try:
1716 1716 signal.signal(signal.SIGHUP, catchterm)
1717 1717 except AttributeError:
1718 1718 pass
1719 1719
1720 1720 try:
1721 1721 cmd, func, args, options, cmdoptions = parse(args)
1722 1722 except ParseError, inst:
1723 1723 u = ui.ui()
1724 1724 if inst.args[0]:
1725 1725 u.warn("hg %s: %s\n" % (inst.args[0], inst.args[1]))
1726 1726 help_(u, inst.args[0])
1727 1727 else:
1728 1728 u.warn("hg: %s\n" % inst.args[1])
1729 1729 help_(u, 'shortlist')
1730 1730 sys.exit(-1)
1731 1731 except UnknownCommand, inst:
1732 1732 u = ui.ui()
1733 1733 u.warn("hg: unknown command '%s'\n" % inst.args[0])
1734 1734 help_(u, 'shortlist')
1735 1735 sys.exit(1)
1736 1736
1737 1737 if options["time"]:
1738 1738 def get_times():
1739 1739 t = os.times()
1740 1740 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
1741 1741 t = (t[0], t[1], t[2], t[3], time.clock())
1742 1742 return t
1743 1743 s = get_times()
1744 1744 def print_time():
1745 1745 t = get_times()
1746 1746 u = ui.ui()
1747 1747 u.warn("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n" %
1748 1748 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
1749 1749 atexit.register(print_time)
1750 1750
1751 1751 u = ui.ui(options["verbose"], options["debug"], options["quiet"],
1752 1752 not options["noninteractive"])
1753 1753
1754 1754 try:
1755 1755 try:
1756 1756 if options['help']:
1757 1757 help_(u, cmd, options['version'])
1758 1758 sys.exit(0)
1759 1759 elif options['version']:
1760 1760 show_version(u)
1761 1761 sys.exit(0)
1762 1762 elif not cmd:
1763 1763 help_(u, 'shortlist')
1764 1764 sys.exit(0)
1765 1765
1766 1766 if options['cwd']:
1767 1767 try:
1768 1768 os.chdir(options['cwd'])
1769 1769 except OSError, inst:
1770 1770 u.warn('abort: %s: %s\n' % (options['cwd'], inst.strerror))
1771 1771 sys.exit(1)
1772 1772
1773 1773 if cmd not in norepo.split():
1774 1774 path = options["repository"] or ""
1775 1775 repo = hg.repository(ui=u, path=path)
1776 1776 d = lambda: func(u, repo, *args, **cmdoptions)
1777 1777 else:
1778 1778 d = lambda: func(u, *args, **cmdoptions)
1779 1779
1780 1780 if options['profile']:
1781 1781 import hotshot, hotshot.stats
1782 1782 prof = hotshot.Profile("hg.prof")
1783 1783 r = prof.runcall(d)
1784 1784 prof.close()
1785 1785 stats = hotshot.stats.load("hg.prof")
1786 1786 stats.strip_dirs()
1787 1787 stats.sort_stats('time', 'calls')
1788 1788 stats.print_stats(40)
1789 1789 return r
1790 1790 else:
1791 1791 return d()
1792 1792 except:
1793 1793 if options['traceback']:
1794 1794 traceback.print_exc()
1795 1795 raise
1796 1796 except hg.RepoError, inst:
1797 1797 u.warn("abort: ", inst, "!\n")
1798 1798 except SignalInterrupt:
1799 1799 u.warn("killed!\n")
1800 1800 except KeyboardInterrupt:
1801 1801 try:
1802 1802 u.warn("interrupted!\n")
1803 1803 except IOError, inst:
1804 1804 if inst.errno == errno.EPIPE:
1805 1805 if u.debugflag:
1806 1806 u.warn("\nbroken pipe\n")
1807 1807 else:
1808 1808 raise
1809 1809 except IOError, inst:
1810 1810 if hasattr(inst, "code"):
1811 1811 u.warn("abort: %s\n" % inst)
1812 1812 elif hasattr(inst, "reason"):
1813 1813 u.warn("abort: error: %s\n" % inst.reason[1])
1814 1814 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
1815 1815 if u.debugflag: u.warn("broken pipe\n")
1816 1816 else:
1817 1817 raise
1818 1818 except OSError, inst:
1819 1819 if hasattr(inst, "filename"):
1820 1820 u.warn("abort: %s: %s\n" % (inst.strerror, inst.filename))
1821 1821 else:
1822 1822 u.warn("abort: %s\n" % inst.strerror)
1823 1823 except util.Abort, inst:
1824 1824 u.warn('abort: ', inst.args[0] % inst.args[1:], '\n')
1825 1825 sys.exit(1)
1826 1826 except TypeError, inst:
1827 1827 # was this an argument error?
1828 1828 tb = traceback.extract_tb(sys.exc_info()[2])
1829 1829 if len(tb) > 2: # no, not an argument error
1830 1830 raise
1831 1831 u.debug(inst, "\n")
1832 1832 u.warn("%s: invalid arguments\n" % cmd)
1833 1833 help_(u, cmd)
1834 1834 except UnknownCommand, inst:
1835 1835 u.warn("hg: unknown command '%s'\n" % inst.args[0])
1836 1836 help_(u, 'shortlist')
1837 1837
1838 1838 sys.exit(-1)
@@ -1,2294 +1,2294
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys, struct, os
9 9 import util
10 10 from revlog import *
11 11 from demandload import *
12 12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 14 demandload(globals(), "bisect errno select stat")
15 15
16 16 class filelog(revlog):
17 17 def __init__(self, opener, path):
18 18 revlog.__init__(self, opener,
19 19 os.path.join("data", self.encodedir(path + ".i")),
20 20 os.path.join("data", self.encodedir(path + ".d")))
21 21
22 22 # This avoids a collision between a file named foo and a dir named
23 23 # foo.i or foo.d
24 24 def encodedir(self, path):
25 25 return (path
26 26 .replace(".hg/", ".hg.hg/")
27 27 .replace(".i/", ".i.hg/")
28 28 .replace(".d/", ".d.hg/"))
29 29
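# e.g. encodedir("foo.i/bar.i") == "foo.i.hg/bar.i", and decodedir()
# reverses the mapping, so on-disk directory names never end in .i or .d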
30 30 def decodedir(self, path):
31 31 return (path
32 32 .replace(".d.hg/", ".d/")
33 33 .replace(".i.hg/", ".i/")
34 34 .replace(".hg.hg/", ".hg/"))
35 35
36 36 def read(self, node):
37 37 t = self.revision(node)
38 38 if not t.startswith('\1\n'):
39 39 return t
40 40 s = t.find('\1\n', 2)
41 41 return t[s+2:]
42 42
43 43 def readmeta(self, node):
44 44 t = self.revision(node)
45 45 if not t.startswith('\1\n'):
46 46 return t
47 47 s = t.find('\1\n', 2)
48 48 mt = t[2:s]
m = {}
49 49 for l in mt.splitlines():
50 50 k, v = l.split(": ", 1)
51 51 m[k] = v
52 52 return m
53 53
54 54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 55 if meta or text.startswith('\1\n'):
56 56 mt = ""
57 57 if meta:
58 58 mt = ["%s: %s\n" % (k, v) for k, v in meta.items()]
59 59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 60 return self.addrevision(text, transaction, link, p1, p2)
61 61
62 62 def annotate(self, node):
63 63
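# returns a list of (changeset linkrev, line) pairs for the file at
# the given node: decorate() tags every line of a revision with that
# revision's linkrev, and pair() copies the parent's tags across
# unchanged bdiff blocks, walking the ancestors in topological order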
64 64 def decorate(text, rev):
65 65 return ([rev] * len(text.splitlines()), text)
66 66
67 67 def pair(parent, child):
68 68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 69 child[0][b1:b2] = parent[0][a1:a2]
70 70 return child
71 71
72 72 # find all ancestors
73 73 needed = {node:1}
74 74 visit = [node]
75 75 while visit:
76 76 n = visit.pop(0)
77 77 for p in self.parents(n):
78 78 if p not in needed:
79 79 needed[p] = 1
80 80 visit.append(p)
81 81 else:
82 82 # count how many times we'll use this
83 83 needed[p] += 1
84 84
85 85 # sort by revision which is a topological order
86 86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 87 visit.sort()
88 88 hist = {}
89 89
90 90 for r,n in visit:
91 91 curr = decorate(self.read(n), self.linkrev(n))
92 92 for p in self.parents(n):
93 93 if p != nullid:
94 94 curr = pair(hist[p], curr)
95 95 # trim the history of unneeded revs
96 96 needed[p] -= 1
97 97 if not needed[p]:
98 98 del hist[p]
99 99 hist[n] = curr
100 100
101 101 return zip(hist[n][0], hist[n][1].splitlines(1))
102 102
103 103 class manifest(revlog):
104 104 def __init__(self, opener):
105 105 self.mapcache = None
106 106 self.listcache = None
107 107 self.addlist = None
108 108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109 109
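# a manifest revision is a text blob with one line per tracked file:
#   "<path>\0<40-hex filenode><'x' if executable>\n"
# read() parses it into a {path: filenode} map and caches a
# {path: is-executable} flag map alongside it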
110 110 def read(self, node):
111 111 if node == nullid: return {} # don't upset local cache
112 112 if self.mapcache and self.mapcache[0] == node:
113 113 return self.mapcache[1]
114 114 text = self.revision(node)
115 115 map = {}
116 116 flag = {}
117 117 self.listcache = (text, text.splitlines(1))
118 118 for l in self.listcache[1]:
119 119 (f, n) = l.split('\0')
120 120 map[f] = bin(n[:40])
121 121 flag[f] = (n[40:-1] == "x")
122 122 self.mapcache = (node, map, flag)
123 123 return map
124 124
125 125 def readflags(self, node):
126 126 if node == nullid: return {} # don't upset local cache
127 127 if not self.mapcache or self.mapcache[0] != node:
128 128 self.read(node)
129 129 return self.mapcache[2]
130 130
131 131 def diff(self, a, b):
132 132 # this is sneaky, as we're not actually using a and b
133 133 if self.listcache and self.addlist and self.listcache[0] == a:
134 134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 135 if mdiff.patch(a, d) != b:
136 136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 137 return mdiff.textdiff(a, b)
138 138 return d
139 139 else:
140 140 return mdiff.textdiff(a, b)
141 141
142 142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 143 changed=None):
144 144 # directly generate the mdiff delta from the data collected during
145 145 # the bisect loop below
146 146 def gendelta(delta):
147 147 i = 0
148 148 result = []
149 149 while i < len(delta):
150 150 start = delta[i][2]
151 151 end = delta[i][3]
152 152 l = delta[i][4]
153 153 if l is None:
154 154 l = ""
155 155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 156 and end >= delta[i+1][2]:
157 157 if delta[i+1][3] > end:
158 158 end = delta[i+1][3]
159 159 if delta[i+1][4]:
160 160 l += delta[i+1][4]
161 161 i += 1
162 162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 163 i += 1
164 164 return result
165 165
166 166 # apply the changes collected during the bisect loop to our addlist
167 167 def addlistdelta(addlist, delta):
168 168 # apply the deltas to the addlist. start from the bottom up
169 169 # so changes to the offsets don't mess things up.
170 170 i = len(delta)
171 171 while i > 0:
172 172 i -= 1
173 173 start = delta[i][0]
174 174 end = delta[i][1]
175 175 if delta[i][4]:
176 176 addlist[start:end] = [delta[i][4]]
177 177 else:
178 178 del addlist[start:end]
179 179 return addlist
180 180
181 181 # calculate the byte offset of the start of each line in the
182 182 # manifest
183 183 def calcoffsets(addlist):
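# e.g. (roughly):
#   >>> calcoffsets(["a\n", "bb\n"])
#   [0, 2, 5]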
184 184 offsets = [0] * (len(addlist) + 1)
185 185 offset = 0
186 186 i = 0
187 187 while i < len(addlist):
188 188 offsets[i] = offset
189 189 offset += len(addlist[i])
190 190 i += 1
191 191 offsets[i] = offset
192 192 return offsets
193 193
194 194 # if we're using the listcache, make sure it is valid and
195 195 # parented by the same node we're diffing against
196 196 if not changed or not self.listcache or not p1 or \
197 197 self.mapcache[0] != p1:
198 198 files = map.keys()
199 199 files.sort()
200 200
201 201 self.addlist = ["%s\000%s%s\n" %
202 202 (f, hex(map[f]), flags[f] and "x" or '')
203 203 for f in files]
204 204 cachedelta = None
205 205 else:
206 206 addlist = self.listcache[1]
207 207
208 208 # find the starting offset for each line in the add list
209 209 offsets = calcoffsets(addlist)
210 210
211 211 # combine the changed lists into one list for sorting
212 212 work = [[x, 0] for x in changed[0]]
213 213 work[len(work):] = [[x, 1] for x in changed[1]]
214 214 work.sort()
215 215
216 216 delta = []
217 217 bs = 0
218 218
219 219 for w in work:
220 220 f = w[0]
221 221 # bs will either be the index of the item or the insert point
222 222 bs = bisect.bisect(addlist, f, bs)
223 223 if bs < len(addlist):
224 224 fn = addlist[bs][:addlist[bs].index('\0')]
225 225 else:
226 226 fn = None
227 227 if w[1] == 0:
228 228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 229 flags[f] and "x" or '')
230 230 else:
231 231 l = None
232 232 start = bs
233 233 if fn != f:
234 234 # item not found, insert a new one
235 235 end = bs
236 236 if w[1] == 1:
237 237 sys.stderr.write("failed to remove %s from manifest\n"
238 238 % f)
239 239 sys.exit(1)
240 240 else:
241 241 # item is found, replace/delete the existing line
242 242 end = bs + 1
243 243 delta.append([start, end, offsets[start], offsets[end], l])
244 244
245 245 self.addlist = addlistdelta(addlist, delta)
246 246 if self.mapcache[0] == self.tip():
247 247 cachedelta = "".join(gendelta(delta))
248 248 else:
249 249 cachedelta = None
250 250
251 251 text = "".join(self.addlist)
252 252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 253 sys.stderr.write("manifest delta failure\n")
254 254 sys.exit(1)
255 255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 256 self.mapcache = (n, map, flags)
257 257 self.listcache = (text, self.addlist)
258 258 self.addlist = None
259 259
260 260 return n
261 261
262 262 class changelog(revlog):
263 263 def __init__(self, opener):
264 264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265 265
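# a changelog entry, as built by add() below, looks like:
#   <40-hex manifest node>\n<user>\n<unixtime> <tz offset>\n
#   <changed file>\n...\n\n<description>
# extract() splits it back into (manifest, user, date, files, desc)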
266 266 def extract(self, text):
267 267 if not text:
268 268 return (nullid, "", "0", [], "")
269 269 last = text.index("\n\n")
270 270 desc = text[last + 2:]
271 271 l = text[:last].splitlines()
272 272 manifest = bin(l[0])
273 273 user = l[1]
274 274 date = l[2]
275 275 if " " not in date:
276 276 date += " 0" # some tools used -d without a timezone
277 277 files = l[3:]
278 278 return (manifest, user, date, files, desc)
279 279
280 280 def read(self, node):
281 281 return self.extract(self.revision(node))
282 282
283 283 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
284 284 user=None, date=None):
285 285 if not date:
286 286 if time.daylight: offset = time.altzone
287 287 else: offset = time.timezone
288 288 date = "%d %d" % (time.time(), offset)
289 289 list.sort()
290 290 l = [hex(manifest), user, date] + list + ["", desc]
291 291 text = "\n".join(l)
292 292 return self.addrevision(text, transaction, self.count(), p1, p2)
293 293
294 294 class dirstate:
295 295 def __init__(self, opener, ui, root):
296 296 self.opener = opener
297 297 self.root = root
298 298 self.dirty = 0
299 299 self.ui = ui
300 300 self.map = None
301 301 self.pl = None
302 302 self.copies = {}
303 303 self.ignorefunc = None
304 304
305 305 def wjoin(self, f):
306 306 return os.path.join(self.root, f)
307 307
308 308 def getcwd(self):
309 309 cwd = os.getcwd()
310 310 if cwd == self.root: return ''
311 311 return cwd[len(self.root) + 1:]
312 312
313 313 def ignore(self, f):
314 314 if not self.ignorefunc:
315 315 bigpat = []
316 316 try:
317 317 l = file(self.wjoin(".hgignore"))
318 318 for pat in l:
319 319 p = pat.rstrip()
320 320 if p:
321 321 try:
322 322 re.compile(p)
323 323 except:
324 324 self.ui.warn("ignoring invalid ignore"
325 325 + " regular expression '%s'\n" % p)
326 326 else:
327 327 bigpat.append(p)
328 328 except IOError: pass
329 329
330 330 if bigpat:
331 331 s = "(?:%s)" % (")|(?:".join(bigpat))
332 332 r = re.compile(s)
333 333 self.ignorefunc = r.search
334 334 else:
335 335 self.ignorefunc = util.never
336 336
337 337 return self.ignorefunc(f)
338 338
339 339 def __del__(self):
340 340 if self.dirty:
341 341 self.write()
342 342
343 343 def __getitem__(self, key):
344 344 try:
345 345 return self.map[key]
346 346 except TypeError:
347 347 self.read()
348 348 return self[key]
349 349
350 350 def __contains__(self, key):
351 351 if not self.map: self.read()
352 352 return key in self.map
353 353
354 354 def parents(self):
355 355 if not self.pl:
356 356 self.read()
357 357 return self.pl
358 358
359 359 def markdirty(self):
360 360 if not self.dirty:
361 361 self.dirty = 1
362 362
363 def setparents(self, p1, p2 = nullid):
363 def setparents(self, p1, p2=nullid):
364 364 self.markdirty()
365 365 self.pl = p1, p2
366 366
367 367 def state(self, key):
368 368 try:
369 369 return self[key][0]
370 370 except KeyError:
371 371 return "?"
372 372
373 373 def read(self):
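# on-disk format: the two 20-byte parent nodes, then one record per
# tracked file: a 17-byte ">cllll" header (state, mode, size, mtime,
# name length) followed by the name, with copies stored as
# "<dest>\0<source>"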
374 374 if self.map is not None: return self.map
375 375
376 376 self.map = {}
377 377 self.pl = [nullid, nullid]
378 378 try:
379 379 st = self.opener("dirstate").read()
380 380 if not st: return
381 381 except: return
382 382
383 383 self.pl = [st[:20], st[20: 40]]
384 384
385 385 pos = 40
386 386 while pos < len(st):
387 387 e = struct.unpack(">cllll", st[pos:pos+17])
388 388 l = e[4]
389 389 pos += 17
390 390 f = st[pos:pos + l]
391 391 if '\0' in f:
392 392 f, c = f.split('\0')
393 393 self.copies[f] = c
394 394 self.map[f] = e[:4]
395 395 pos += l
396 396
397 397 def copy(self, source, dest):
398 398 self.read()
399 399 self.markdirty()
400 400 self.copies[dest] = source
401 401
402 402 def copied(self, file):
403 403 return self.copies.get(file, None)
404 404
405 405 def update(self, files, state, **kw):
406 406 ''' current states:
407 407 n normal
408 408 m needs merging
409 409 r marked for removal
410 410 a marked for addition'''
411 411
412 412 if not files: return
413 413 self.read()
414 414 self.markdirty()
415 415 for f in files:
416 416 if state == "r":
417 417 self.map[f] = ('r', 0, 0, 0)
418 418 else:
419 419 s = os.stat(os.path.join(self.root, f))
420 420 st_size = kw.get('st_size', s.st_size)
421 421 st_mtime = kw.get('st_mtime', s.st_mtime)
422 422 self.map[f] = (state, s.st_mode, st_size, st_mtime)
423 423
424 424 def forget(self, files):
425 425 if not files: return
426 426 self.read()
427 427 self.markdirty()
428 428 for f in files:
429 429 try:
430 430 del self.map[f]
431 431 except KeyError:
432 432 self.ui.warn("not in dirstate: %s!\n" % f)
433 433 pass
434 434
435 435 def clear(self):
436 436 self.map = {}
437 437 self.markdirty()
438 438
439 439 def write(self):
440 440 st = self.opener("dirstate", "w")
441 441 st.write("".join(self.pl))
442 442 for f, e in self.map.items():
443 443 c = self.copied(f)
444 444 if c:
445 445 f = f + "\0" + c
446 446 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
447 447 st.write(e + f)
448 448 self.dirty = 0
449 449
450 450 def filterfiles(self, files):
451 451 ret = {}
452 452 unknown = []
453 453
454 454 for x in files:
455 455 if x == '.':
456 456 return self.map.copy()
457 457 if x not in self.map:
458 458 unknown.append(x)
459 459 else:
460 460 ret[x] = self.map[x]
461 461
462 462 if not unknown:
463 463 return ret
464 464
465 465 b = self.map.keys()
466 466 b.sort()
467 467 blen = len(b)
468 468
469 469 for x in unknown:
470 470 bs = bisect.bisect(b, x)
471 471 if bs != 0 and b[bs-1] == x:
472 472 ret[x] = self.map[x]
473 473 continue
474 474 while bs < blen:
475 475 s = b[bs]
476 476 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
477 477 ret[s] = self.map[s]
478 478 else:
479 479 break
480 480 bs += 1
481 481 return ret
482 482
483 def walk(self, files = None, match = util.always, dc=None):
483 def walk(self, files=None, match=util.always, dc=None):
484 484 self.read()
485 485
486 486 # walk all files by default
487 487 if not files:
488 488 files = [self.root]
489 489 if not dc:
490 490 dc = self.map.copy()
491 491 elif not dc:
492 492 dc = self.filterfiles(files)
493 493
494 494 known = {'.hg': 1}
495 495 def seen(fn):
496 496 if fn in known: return True
497 497 known[fn] = 1
498 498 def traverse():
499 499 for ff in util.unique(files):
500 500 f = os.path.join(self.root, ff)
501 501 try:
502 502 st = os.stat(f)
503 503 except OSError, inst:
504 504 if ff not in dc: self.ui.warn('%s: %s\n' % (
505 505 util.pathto(self.getcwd(), ff),
506 506 inst.strerror))
507 507 continue
508 508 if stat.S_ISDIR(st.st_mode):
509 509 for dir, subdirs, fl in os.walk(f):
510 510 d = dir[len(self.root) + 1:]
511 511 nd = util.normpath(d)
512 512 if nd == '.': nd = ''
513 513 if seen(nd):
514 514 subdirs[:] = []
515 515 continue
516 516 for sd in subdirs:
517 517 ds = os.path.join(nd, sd +'/')
518 518 if self.ignore(ds) or not match(ds):
519 519 subdirs.remove(sd)
520 520 subdirs.sort()
521 521 fl.sort()
522 522 for fn in fl:
523 523 fn = util.pconvert(os.path.join(d, fn))
524 524 yield 'f', fn
525 525 elif stat.S_ISREG(st.st_mode):
526 526 yield 'f', ff
527 527 else:
528 528 kind = 'unknown'
529 529 if stat.S_ISCHR(st.st_mode): kind = 'character device'
530 530 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
531 531 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
532 532 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
533 533 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
534 534 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
535 535 util.pathto(self.getcwd(), ff),
536 536 kind))
537 537
538 538 ks = dc.keys()
539 539 ks.sort()
540 540 for k in ks:
541 541 yield 'm', k
542 542
543 543 # yield only files that match: all in dirstate, others only if
544 544 # not in .hgignore
545 545
546 546 for src, fn in util.unique(traverse()):
547 547 fn = util.normpath(fn)
548 548 if seen(fn): continue
549 549 if fn not in dc and self.ignore(fn):
550 550 continue
551 551 if match(fn):
552 552 yield src, fn
553 553
554 554 def changes(self, files=None, match=util.always):
555 555 self.read()
556 556 if not files:
557 557 dc = self.map.copy()
558 558 else:
559 559 dc = self.filterfiles(files)
560 560 lookup, modified, added, unknown = [], [], [], []
561 561 removed, deleted = [], []
562 562
563 563 for src, fn in self.walk(files, match, dc=dc):
564 564 try:
565 565 s = os.stat(os.path.join(self.root, fn))
566 566 except OSError:
567 567 continue
568 568 if not stat.S_ISREG(s.st_mode):
569 569 continue
570 570 c = dc.get(fn)
571 571 if c:
572 572 del dc[fn]
573 573 if c[0] == 'm':
574 574 modified.append(fn)
575 575 elif c[0] == 'a':
576 576 added.append(fn)
577 577 elif c[0] == 'r':
578 578 unknown.append(fn)
579 579 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
580 580 modified.append(fn)
581 581 elif c[3] != s.st_mtime:
582 582 lookup.append(fn)
583 583 else:
584 584 unknown.append(fn)
585 585
586 586 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
587 587 if c[0] == 'r':
588 588 removed.append(fn)
589 589 else:
590 590 deleted.append(fn)
591 591 return (lookup, modified, added, removed + deleted, unknown)
592 592
593 593 # used to avoid circular references so destructors work
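# an illustrative use (names are examples, not from this file):
#   o = opener("/path/to/repo/.hg")
#   o("dirstate", "w").write(data)
# files are opened in binary mode, and writes copy hardlinked files
# before modifying them so other repos sharing the data are untouched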
594 594 def opener(base):
595 595 p = base
596 596 def o(path, mode="r"):
597 597 if p.startswith("http://"):
598 598 f = os.path.join(p, urllib.quote(path))
599 599 return httprangereader.httprangereader(f)
600 600
601 601 f = os.path.join(p, path)
602 602
603 603 mode += "b" # for that other OS
604 604
605 605 if mode[0] != "r":
606 606 try:
607 607 s = os.stat(f)
608 608 except OSError:
609 609 d = os.path.dirname(f)
610 610 if not os.path.isdir(d):
611 611 os.makedirs(d)
612 612 else:
613 613 if s.st_nlink > 1:
614 614 file(f + ".tmp", "wb").write(file(f, "rb").read())
615 615 util.rename(f+".tmp", f)
616 616
617 617 return file(f, mode)
618 618
619 619 return o
620 620
621 621 class RepoError(Exception): pass
622 622
623 623 class localrepository:
624 624 def __init__(self, ui, path=None, create=0):
625 625 self.remote = 0
626 626 if path and path.startswith("http://"):
627 627 self.remote = 1
628 628 self.path = path
629 629 else:
630 630 if not path:
631 631 p = os.getcwd()
632 632 while not os.path.isdir(os.path.join(p, ".hg")):
633 633 oldp = p
634 634 p = os.path.dirname(p)
635 635 if p == oldp: raise RepoError("no repo found")
636 636 path = p
637 637 self.path = os.path.join(path, ".hg")
638 638
639 639 if not create and not os.path.isdir(self.path):
640 640 raise RepoError("repository %s not found" % self.path)
641 641
642 642 self.root = os.path.abspath(path)
643 643 self.ui = ui
644 644
645 645 if create:
646 646 os.mkdir(self.path)
647 647 os.mkdir(self.join("data"))
648 648
649 649 self.opener = opener(self.path)
650 650 self.wopener = opener(self.root)
651 651 self.manifest = manifest(self.opener)
652 652 self.changelog = changelog(self.opener)
653 653 self.tagscache = None
654 654 self.nodetagscache = None
655 655
656 656 if not self.remote:
657 657 self.dirstate = dirstate(self.opener, ui, self.root)
658 658 try:
659 659 self.ui.readconfig(self.opener("hgrc"))
660 660 except IOError: pass
661 661
662 662 def hook(self, name, **args):
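# run the hook configured under [hooks]; keyword arguments are
# exported as upper-case environment variables, e.g. an illustrative
# hgrc entry (not from this file):
#   [hooks]
#   commit = echo "committed $NODE"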
663 663 s = self.ui.config("hooks", name)
664 664 if s:
665 665 self.ui.note("running hook %s: %s\n" % (name, s))
666 666 old = {}
667 667 for k, v in args.items():
668 668 k = k.upper()
669 669 old[k] = os.environ.get(k, None)
670 670 os.environ[k] = v
671 671
672 672 r = os.system(s)
673 673
674 674 for k, v in old.items():
675 675 if v is not None:
676 676 os.environ[k] = v
677 677 else:
678 678 del os.environ[k]
679 679
680 680 if r:
681 681 self.ui.warn("abort: %s hook failed with status %d!\n" %
682 682 (name, r))
683 683 return False
684 684 return True
685 685
686 686 def tags(self):
687 687 '''return a mapping of tag to node'''
688 688 if not self.tagscache:
689 689 self.tagscache = {}
690 690 def addtag(self, k, n):
691 691 try:
692 692 bin_n = bin(n)
693 693 except TypeError:
694 694 bin_n = ''
695 695 self.tagscache[k.strip()] = bin_n
696 696
697 697 try:
698 698 # read each head of the tags file, ending with the tip
699 699 # and add each tag found to the map, with "newer" ones
700 700 # taking precedence
701 701 fl = self.file(".hgtags")
702 702 h = fl.heads()
703 703 h.reverse()
704 704 for r in h:
705 705 for l in fl.read(r).splitlines():
706 706 if l:
707 707 n, k = l.split(" ", 1)
708 708 addtag(self, k, n)
709 709 except KeyError:
710 710 pass
711 711
712 712 try:
713 713 f = self.opener("localtags")
714 714 for l in f:
715 715 n, k = l.split(" ", 1)
716 716 addtag(self, k, n)
717 717 except IOError:
718 718 pass
719 719
720 720 self.tagscache['tip'] = self.changelog.tip()
721 721
722 722 return self.tagscache
723 723
724 724 def tagslist(self):
725 725 '''return a list of tags ordered by revision'''
726 726 l = []
727 727 for t, n in self.tags().items():
728 728 try:
729 729 r = self.changelog.rev(n)
730 730 except:
731 731 r = -2 # sort to the beginning of the list if unknown
732 732 l.append((r,t,n))
733 733 l.sort()
734 734 return [(t,n) for r,t,n in l]
735 735
736 736 def nodetags(self, node):
737 737 '''return the tags associated with a node'''
738 738 if not self.nodetagscache:
739 739 self.nodetagscache = {}
740 740 for t,n in self.tags().items():
741 741 self.nodetagscache.setdefault(n,[]).append(t)
742 742 return self.nodetagscache.get(node, [])
743 743
744 744 def lookup(self, key):
745 745 try:
746 746 return self.tags()[key]
747 747 except KeyError:
748 748 try:
749 749 return self.changelog.lookup(key)
750 750 except:
751 751 raise RepoError("unknown revision '%s'" % key)
752 752
753 753 def dev(self):
754 754 if self.remote: return -1
755 755 return os.stat(self.path).st_dev
756 756
757 757 def local(self):
758 758 return not self.remote
759 759
760 760 def join(self, f):
761 761 return os.path.join(self.path, f)
762 762
763 763 def wjoin(self, f):
764 764 return os.path.join(self.root, f)
765 765
766 766 def file(self, f):
767 767 if f[0] == '/': f = f[1:]
768 768 return filelog(self.opener, f)
769 769
770 770 def getcwd(self):
771 771 return self.dirstate.getcwd()
772 772
773 773 def wfile(self, f, mode='r'):
774 774 return self.wopener(f, mode)
775 775
776 776 def wread(self, filename):
777 777 return self.wopener(filename, 'r').read()
778 778
779 779 def wwrite(self, filename, data, fd=None):
780 780 if fd:
781 781 return fd.write(data)
782 782 return self.wopener(filename, 'w').write(data)
783 783
784 784 def transaction(self):
785 785 # save dirstate for undo
786 786 try:
787 787 ds = self.opener("dirstate").read()
788 788 except IOError:
789 789 ds = ""
790 790 self.opener("journal.dirstate", "w").write(ds)
791 791
792 792 def after():
793 793 util.rename(self.join("journal"), self.join("undo"))
794 794 util.rename(self.join("journal.dirstate"),
795 795 self.join("undo.dirstate"))
796 796
797 797 return transaction.transaction(self.ui.warn, self.opener,
798 798 self.join("journal"), after)
799 799
800 800 def recover(self):
801 801 lock = self.lock()
802 802 if os.path.exists(self.join("journal")):
803 803 self.ui.status("rolling back interrupted transaction\n")
804 804 return transaction.rollback(self.opener, self.join("journal"))
805 805 else:
806 806 self.ui.warn("no interrupted transaction available\n")
807 807
808 808 def undo(self):
809 809 lock = self.lock()
810 810 if os.path.exists(self.join("undo")):
811 811 self.ui.status("rolling back last transaction\n")
812 812 transaction.rollback(self.opener, self.join("undo"))
813 813 self.dirstate = None
814 814 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
815 815 self.dirstate = dirstate(self.opener, self.ui, self.root)
816 816 else:
817 817 self.ui.warn("no undo information available\n")
818 818
819 def lock(self, wait = 1):
819 def lock(self, wait=1):
820 820 try:
821 821 return lock.lock(self.join("lock"), 0)
822 822 except lock.LockHeld, inst:
823 823 if wait:
824 824 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
825 825 return lock.lock(self.join("lock"), wait)
826 826 raise inst
827 827
828 828 def rawcommit(self, files, text, user, date, p1=None, p2=None):
829 829 orig_parent = self.dirstate.parents()[0] or nullid
830 830 p1 = p1 or self.dirstate.parents()[0] or nullid
831 831 p2 = p2 or self.dirstate.parents()[1] or nullid
832 832 c1 = self.changelog.read(p1)
833 833 c2 = self.changelog.read(p2)
834 834 m1 = self.manifest.read(c1[0])
835 835 mf1 = self.manifest.readflags(c1[0])
836 836 m2 = self.manifest.read(c2[0])
837 837 changed = []
838 838
839 839 if orig_parent == p1:
840 840 update_dirstate = 1
841 841 else:
842 842 update_dirstate = 0
843 843
844 844 tr = self.transaction()
845 845 mm = m1.copy()
846 846 mfm = mf1.copy()
847 847 linkrev = self.changelog.count()
848 848 for f in files:
849 849 try:
850 850 t = self.wread(f)
851 851 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
852 852 r = self.file(f)
853 853 mfm[f] = tm
854 854
855 855 fp1 = m1.get(f, nullid)
856 856 fp2 = m2.get(f, nullid)
857 857
858 858 # is the same revision on two branches of a merge?
859 859 if fp2 == fp1:
860 860 fp2 = nullid
861 861
862 862 if fp2 != nullid:
863 863 # is one parent an ancestor of the other?
864 864 fpa = r.ancestor(fp1, fp2)
865 865 if fpa == fp1:
866 866 fp1, fp2 = fp2, nullid
867 867 elif fpa == fp2:
868 868 fp2 = nullid
869 869
870 870 # is the file unmodified from the parent?
871 871 if t == r.read(fp1):
872 872 # record the proper existing parent in manifest
873 873 # no need to add a revision
874 874 mm[f] = fp1
875 875 continue
876 876
877 877 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
878 878 changed.append(f)
879 879 if update_dirstate:
880 880 self.dirstate.update([f], "n")
881 881 except IOError:
882 882 try:
883 883 del mm[f]
884 884 del mfm[f]
885 885 if update_dirstate:
886 886 self.dirstate.forget([f])
887 887 except:
888 888 # deleted from p2?
889 889 pass
890 890
891 891 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
892 892 user = user or self.ui.username()
893 893 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
894 894 tr.close()
895 895 if update_dirstate:
896 896 self.dirstate.setparents(n, nullid)
897 897
898 898 def commit(self, files=None, text="", user=None, date=None,
899 899 match=util.always, force=False):
900 900 commit = []
901 901 remove = []
902 902 changed = []
903 903
904 904 if files:
905 905 for f in files:
906 906 s = self.dirstate.state(f)
907 907 if s in 'nmai':
908 908 commit.append(f)
909 909 elif s == 'r':
910 910 remove.append(f)
911 911 else:
912 912 self.ui.warn("%s not tracked!\n" % f)
913 913 else:
914 (c, a, d, u) = self.changes(match = match)
914 (c, a, d, u) = self.changes(match=match)
915 915 commit = c + a
916 916 remove = d
917 917
918 918 p1, p2 = self.dirstate.parents()
919 919 c1 = self.changelog.read(p1)
920 920 c2 = self.changelog.read(p2)
921 921 m1 = self.manifest.read(c1[0])
922 922 mf1 = self.manifest.readflags(c1[0])
923 923 m2 = self.manifest.read(c2[0])
924 924
925 925 if not commit and not remove and not force and p2 == nullid:
926 926 self.ui.status("nothing changed\n")
927 927 return None
928 928
929 929 if not self.hook("precommit"):
930 930 return None
931 931
932 932 lock = self.lock()
933 933 tr = self.transaction()
934 934
935 935 # check in files
936 936 new = {}
937 937 linkrev = self.changelog.count()
938 938 commit.sort()
939 939 for f in commit:
940 940 self.ui.note(f + "\n")
941 941 try:
942 942 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
943 943 t = self.wread(f)
944 944 except IOError:
945 945 self.ui.warn("trouble committing %s!\n" % f)
946 946 raise
947 947
948 948 meta = {}
949 949 cp = self.dirstate.copied(f)
950 950 if cp:
951 951 meta["copy"] = cp
952 952 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
953 953 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
954 954
955 955 r = self.file(f)
956 956 fp1 = m1.get(f, nullid)
957 957 fp2 = m2.get(f, nullid)
958 958
959 959 # is the same revision on two branches of a merge?
960 960 if fp2 == fp1:
961 961 fp2 = nullid
962 962
963 963 if fp2 != nullid:
964 964 # is one parent an ancestor of the other?
965 965 fpa = r.ancestor(fp1, fp2)
966 966 if fpa == fp1:
967 967 fp1, fp2 = fp2, nullid
968 968 elif fpa == fp2:
969 969 fp2 = nullid
970 970
971 971 # is the file unmodified from the parent?
972 972 if not meta and t == r.read(fp1):
973 973 # record the proper existing parent in manifest
974 974 # no need to add a revision
975 975 new[f] = fp1
976 976 continue
977 977
978 978 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
979 979 # remember what we've added so that we can later calculate
980 980 # the files to pull from a set of changesets
981 981 changed.append(f)
982 982
983 983 # update manifest
984 984 m1.update(new)
985 985 for f in remove:
986 986 if f in m1:
987 987 del m1[f]
988 988 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
989 989 (new, remove))
990 990
991 991 # add changeset
992 992 new = new.keys()
993 993 new.sort()
994 994
995 995 if not text:
996 996 edittext = ""
997 997 if p2 != nullid:
998 998 edittext += "HG: branch merge\n"
999 999 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
1000 1000 edittext += "".join(["HG: changed %s\n" % f for f in changed])
1001 1001 edittext += "".join(["HG: removed %s\n" % f for f in remove])
1002 1002 if not changed and not remove:
1003 1003 edittext += "HG: no files changed\n"
1004 1004 edittext = self.ui.edit(edittext)
1005 1005 if not edittext.rstrip():
1006 1006 return None
1007 1007 text = edittext
1008 1008
1009 1009 user = user or self.ui.username()
1010 1010 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
1011 1011 tr.close()
1012 1012
1013 1013 self.dirstate.setparents(n)
1014 1014 self.dirstate.update(new, "n")
1015 1015 self.dirstate.forget(remove)
1016 1016
1017 1017 if not self.hook("commit", node=hex(n)):
1018 1018 return None
1019 1019 return n
1020 1020
1021 def walk(self, node = None, files = [], match = util.always):
1021 def walk(self, node=None, files=[], match=util.always):
1022 1022 if node:
1023 1023 for fn in self.manifest.read(self.changelog.read(node)[0]):
1024 1024 if match(fn): yield 'm', fn
1025 1025 else:
1026 1026 for src, fn in self.dirstate.walk(files, match):
1027 1027 yield src, fn
1028 1028
1029 1029 def changes(self, node1=None, node2=None, files=[],
1030 1030 match=util.always):
1031 1031 mf2, u = None, []
1032 1032
1033 1033 def fcmp(fn, mf):
1034 1034 t1 = self.wread(fn)
1035 1035 t2 = self.file(fn).read(mf.get(fn, nullid))
1036 1036 return cmp(t1, t2)
1037 1037
1038 1038 def mfmatches(node):
1039 1039 mf = dict(self.manifest.read(node))
1040 1040 for fn in mf.keys():
1041 1041 if not match(fn):
1042 1042 del mf[fn]
1043 1043 return mf
1044 1044
1045 1045 # are we comparing the working directory?
1046 1046 if not node2:
1047 1047 l, c, a, d, u = self.dirstate.changes(files, match)
1048 1048
1049 1049 # are we comparing working dir against its parent?
1050 1050 if not node1:
1051 1051 if l:
1052 1052 # do a full compare of any files that might have changed
1053 1053 change = self.changelog.read(self.dirstate.parents()[0])
1054 1054 mf2 = mfmatches(change[0])
1055 1055 for f in l:
1056 1056 if fcmp(f, mf2):
1057 1057 c.append(f)
1058 1058
1059 1059 for l in c, a, d, u:
1060 1060 l.sort()
1061 1061
1062 1062 return (c, a, d, u)
1063 1063
1064 1064 # are we comparing working dir against non-tip?
1065 1065 # generate a pseudo-manifest for the working dir
1066 1066 if not node2:
1067 1067 if not mf2:
1068 1068 change = self.changelog.read(self.dirstate.parents()[0])
1069 1069 mf2 = mfmatches(change[0])
1070 1070 for f in a + c + l:
1071 1071 mf2[f] = ""
1072 1072 for f in d:
1073 1073 if f in mf2: del mf2[f]
1074 1074 else:
1075 1075 change = self.changelog.read(node2)
1076 1076 mf2 = mfmatches(change[0])
1077 1077
1078 1078 # flush lists from dirstate before comparing manifests
1079 1079 c, a = [], []
1080 1080
1081 1081 change = self.changelog.read(node1)
1082 1082 mf1 = mfmatches(change[0])
1083 1083
1084 1084 for fn in mf2:
1085 1085 if mf1.has_key(fn):
1086 1086 if mf1[fn] != mf2[fn]:
1087 1087 if mf2[fn] != "" or fcmp(fn, mf1):
1088 1088 c.append(fn)
1089 1089 del mf1[fn]
1090 1090 else:
1091 1091 a.append(fn)
1092 1092
1093 1093 d = mf1.keys()
1094 1094
1095 1095 for l in c, a, d, u:
1096 1096 l.sort()
1097 1097
1098 1098 return (c, a, d, u)
1099 1099
1100 1100 def add(self, list):
1101 1101 for f in list:
1102 1102 p = self.wjoin(f)
1103 1103 if not os.path.exists(p):
1104 1104 self.ui.warn("%s does not exist!\n" % f)
1105 1105 elif not os.path.isfile(p):
1106 1106 self.ui.warn("%s not added: only files supported currently\n" % f)
1107 1107 elif self.dirstate.state(f) in 'an':
1108 1108 self.ui.warn("%s already tracked!\n" % f)
1109 1109 else:
1110 1110 self.dirstate.update([f], "a")
1111 1111
1112 1112 def forget(self, list):
1113 1113 for f in list:
1114 1114 if self.dirstate.state(f) not in 'ai':
1115 1115 self.ui.warn("%s not added!\n" % f)
1116 1116 else:
1117 1117 self.dirstate.forget([f])
1118 1118
1119 1119 def remove(self, list):
1120 1120 for f in list:
1121 1121 p = self.wjoin(f)
1122 1122 if os.path.exists(p):
1123 1123 self.ui.warn("%s still exists!\n" % f)
1124 1124 elif self.dirstate.state(f) == 'a':
1125 1125 self.ui.warn("%s never committed!\n" % f)
1126 1126 self.dirstate.forget([f])
1127 1127 elif f not in self.dirstate:
1128 1128 self.ui.warn("%s not tracked!\n" % f)
1129 1129 else:
1130 1130 self.dirstate.update([f], "r")
1131 1131
1132 1132 def copy(self, source, dest):
1133 1133 p = self.wjoin(dest)
1134 1134 if not os.path.exists(p):
1135 1135 self.ui.warn("%s does not exist!\n" % dest)
1136 1136 elif not os.path.isfile(p):
1137 1137 self.ui.warn("copy failed: %s is not a file\n" % dest)
1138 1138 else:
1139 1139 if self.dirstate.state(dest) == '?':
1140 1140 self.dirstate.update([dest], "a")
1141 1141 self.dirstate.copy(source, dest)
1142 1142
1143 1143 def heads(self):
1144 1144 return self.changelog.heads()
1145 1145
1146 1146 # branchlookup returns a dict giving a list of branches for
1147 1147 # each head. A branch is defined as the tag of a node or
1148 1148 # the branch of the node's parents. If a node has multiple
1149 1149 # branch tags, tags are eliminated if they are visible from other
1150 1150 # branch tags.
1151 1151 #
1152 1152 # So, for this graph: a->b->c->d->e
1153 1153 # \ /
1154 1154 # aa -----/
1155 1155 # a has tag 2.6.12
1156 1156 # d has tag 2.6.13
1157 1157 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1158 1158 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
1159 1159 # from the list.
1160 1160 #
1161 1161 # It is possible that more than one head will have the same branch tag.
1162 1162 # callers need to check the result for multiple heads under the same
1163 1163 # branch tag if that is a problem for them (e.g. checkout of a specific
1164 1164 # branch).
1165 1165 #
1166 1166 # passing in a specific branch will limit the depth of the search
1167 1167 # through the parents. It won't limit the branches returned in the
1168 1168 # result though.
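# Roughly, for the graph above with e as the only head, the result
# would be something like {e: ['2.6.13']}: 2.6.12 is dropped because
# its node is visible from the node tagged 2.6.13.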
1169 1169 def branchlookup(self, heads=None, branch=None):
1170 1170 if not heads:
1171 1171 heads = self.heads()
1172 1172 headt = [ h for h in heads ]
1173 1173 chlog = self.changelog
1174 1174 branches = {}
1175 1175 merges = []
1176 1176 seenmerge = {}
1177 1177
1178 1178 # traverse the tree once for each head, recording in the branches
1179 1179 # dict which tags are visible from this head. The branches
1180 1180 # dict also records which tags are visible from each tag
1181 1181 # while we traverse.
1182 1182 while headt or merges:
1183 1183 if merges:
1184 1184 n, found = merges.pop()
1185 1185 visit = [n]
1186 1186 else:
1187 1187 h = headt.pop()
1188 1188 visit = [h]
1189 1189 found = [h]
1190 1190 seen = {}
1191 1191 while visit:
1192 1192 n = visit.pop()
1193 1193 if n in seen:
1194 1194 continue
1195 1195 pp = chlog.parents(n)
1196 1196 tags = self.nodetags(n)
1197 1197 if tags:
1198 1198 for x in tags:
1199 1199 if x == 'tip':
1200 1200 continue
1201 1201 for f in found:
1202 1202 branches.setdefault(f, {})[n] = 1
1203 1203 branches.setdefault(n, {})[n] = 1
1204 1204 break
1205 1205 if n not in found:
1206 1206 found.append(n)
1207 1207 if branch in tags:
1208 1208 continue
1209 1209 seen[n] = 1
1210 1210 if pp[1] != nullid and n not in seenmerge:
1211 1211 merges.append((pp[1], [x for x in found]))
1212 1212 seenmerge[n] = 1
1213 1213 if pp[0] != nullid:
1214 1214 visit.append(pp[0])
1215 1215 # traverse the branches dict, eliminating branch tags from each
1216 1216 # head that are visible from another branch tag for that head.
1217 1217 out = {}
1218 1218 viscache = {}
1219 1219 for h in heads:
1220 1220 def visible(node):
1221 1221 if node in viscache:
1222 1222 return viscache[node]
1223 1223 ret = {}
1224 1224 visit = [node]
1225 1225 while visit:
1226 1226 x = visit.pop()
1227 1227 if x in viscache:
1228 1228 ret.update(viscache[x])
1229 1229 elif x not in ret:
1230 1230 ret[x] = 1
1231 1231 if x in branches:
1232 1232 visit[len(visit):] = branches[x].keys()
1233 1233 viscache[node] = ret
1234 1234 return ret
1235 1235 if h not in branches:
1236 1236 continue
1237 1237 # O(n^2), but somewhat limited. This only searches the
1238 1238 # tags visible from a specific head, not all the tags in the
1239 1239 # whole repo.
1240 1240 for b in branches[h]:
1241 1241 vis = False
1242 1242 for bb in branches[h].keys():
1243 1243 if b != bb:
1244 1244 if b in visible(bb):
1245 1245 vis = True
1246 1246 break
1247 1247 if not vis:
1248 1248 l = out.setdefault(h, [])
1249 1249 l[len(l):] = self.nodetags(b)
1250 1250 return out
1251 1251
1252 1252 def branches(self, nodes):
1253 1253 if not nodes: nodes = [self.changelog.tip()]
1254 1254 b = []
1255 1255 for n in nodes:
1256 1256 t = n
1257 1257 while n:
1258 1258 p = self.changelog.parents(n)
1259 1259 if p[1] != nullid or p[0] == nullid:
1260 1260 b.append((t, n, p[0], p[1]))
1261 1261 break
1262 1262 n = p[0]
1263 1263 return b
1264 1264
1265 1265 def between(self, pairs):
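# for each (top, bottom) pair, return the nodes found at distances
# 1, 2, 4, 8, ... below top on the way down to bottom; findincoming
# uses this sampling for its binary search over remote branches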
1266 1266 r = []
1267 1267
1268 1268 for top, bottom in pairs:
1269 1269 n, l, i = top, [], 0
1270 1270 f = 1
1271 1271
1272 1272 while n != bottom:
1273 1273 p = self.changelog.parents(n)[0]
1274 1274 if i == f:
1275 1275 l.append(n)
1276 1276 f = f * 2
1277 1277 n = p
1278 1278 i += 1
1279 1279
1280 1280 r.append(l)
1281 1281
1282 1282 return r
1283 1283
1284 1284 def newer(self, nodes):
1285 1285 m = {}
1286 1286 nl = []
1287 1287 pm = {}
1288 1288 cl = self.changelog
1289 1289 t = l = cl.count()
1290 1290
1291 1291 # find the lowest numbered node
1292 1292 for n in nodes:
1293 1293 l = min(l, cl.rev(n))
1294 1294 m[n] = 1
1295 1295
1296 1296 for i in xrange(l, t):
1297 1297 n = cl.node(i)
1298 1298 if n in m: # explicitly listed
1299 1299 pm[n] = 1
1300 1300 nl.append(n)
1301 1301 continue
1302 1302 for p in cl.parents(n):
1303 1303 if p in pm: # parent listed
1304 1304 pm[n] = 1
1305 1305 nl.append(n)
1306 1306 break
1307 1307
1308 1308 return nl
1309 1309
1310 1310 def findincoming(self, remote, base=None, heads=None):
1311 1311 m = self.changelog.nodemap
1312 1312 search = []
1313 1313 fetch = []
1314 1314 seen = {}
1315 1315 seenbranch = {}
1316 1316 if base is None:
1317 1317 base = {}
1318 1318
1319 1319 # assume we're closer to the tip than the root
1320 1320 # and start by examining the heads
1321 1321 self.ui.status("searching for changes\n")
1322 1322
1323 1323 if not heads:
1324 1324 heads = remote.heads()
1325 1325
1326 1326 unknown = []
1327 1327 for h in heads:
1328 1328 if h not in m:
1329 1329 unknown.append(h)
1330 1330 else:
1331 1331 base[h] = 1
1332 1332
1333 1333 if not unknown:
1334 1334 return None
1335 1335
1336 1336 rep = {}
1337 1337 reqcnt = 0
1338 1338
1339 1339 # search through remote branches
1340 1340 # a 'branch' here is a linear segment of history, with four parts:
1341 1341 # head, root, first parent, second parent
1342 1342 # (a branch always has two parents (or none) by definition)
1343 1343 unknown = remote.branches(unknown)
1344 1344 while unknown:
1345 1345 r = []
1346 1346 while unknown:
1347 1347 n = unknown.pop(0)
1348 1348 if n[0] in seen:
1349 1349 continue
1350 1350
1351 1351 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1352 1352 if n[0] == nullid:
1353 1353 break
1354 1354 if n in seenbranch:
1355 1355 self.ui.debug("branch already found\n")
1356 1356 continue
1357 1357 if n[1] and n[1] in m: # do we know the base?
1358 1358 self.ui.debug("found incomplete branch %s:%s\n"
1359 1359 % (short(n[0]), short(n[1])))
1360 1360 search.append(n) # schedule branch range for scanning
1361 1361 seenbranch[n] = 1
1362 1362 else:
1363 1363 if n[1] not in seen and n[1] not in fetch:
1364 1364 if n[2] in m and n[3] in m:
1365 1365 self.ui.debug("found new changeset %s\n" %
1366 1366 short(n[1]))
1367 1367 fetch.append(n[1]) # earliest unknown
1368 1368 base[n[2]] = 1 # latest known
1369 1369 continue
1370 1370
1371 1371 for a in n[2:4]:
1372 1372 if a not in rep:
1373 1373 r.append(a)
1374 1374 rep[a] = 1
1375 1375
1376 1376 seen[n[0]] = 1
1377 1377
1378 1378 if r:
1379 1379 reqcnt += 1
1380 1380 self.ui.debug("request %d: %s\n" %
1381 1381 (reqcnt, " ".join(map(short, r))))
1382 1382 for p in range(0, len(r), 10):
1383 1383 for b in remote.branches(r[p:p+10]):
1384 1384 self.ui.debug("received %s:%s\n" %
1385 1385 (short(b[0]), short(b[1])))
1386 1386 if b[0] not in m and b[0] not in seen:
1387 1387 unknown.append(b)
1388 1388
1389 1389 # do binary search on the branches we found
1390 1390 while search:
1391 1391 n = search.pop(0)
1392 1392 reqcnt += 1
1393 1393 l = remote.between([(n[0], n[1])])[0]
1394 1394 l.append(n[1])
1395 1395 p = n[0]
1396 1396 f = 1
1397 1397 for i in l:
1398 1398 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1399 1399 if i in m:
1400 1400 if f <= 2:
1401 1401 self.ui.debug("found new branch changeset %s\n" %
1402 1402 short(p))
1403 1403 fetch.append(p)
1404 1404 base[i] = 1
1405 1405 else:
1406 1406 self.ui.debug("narrowed branch search to %s:%s\n"
1407 1407 % (short(p), short(i)))
1408 1408 search.append((p, i))
1409 1409 break
1410 1410 p, f = i, f * 2
1411 1411
1412 1412 # sanity check our fetch list
1413 1413 for f in fetch:
1414 1414 if f in m:
1415 1415 raise RepoError("already have changeset " + short(f))
1416 1416
1417 1417 if base.keys() == [nullid]:
1418 1418 self.ui.warn("warning: pulling from an unrelated repository!\n")
1419 1419
1420 1420 self.ui.note("adding new changesets starting at " +
1421 1421 " ".join([short(f) for f in fetch]) + "\n")
1422 1422
1423 1423 self.ui.debug("%d total queries\n" % reqcnt)
1424 1424
1425 1425 return fetch
1426 1426
1427 1427 def findoutgoing(self, remote, base=None, heads=None):
1428 1428 if base is None:
1429 1429 base = {}
1430 1430 self.findincoming(remote, base, heads)
1431 1431
1432 1432 remain = dict.fromkeys(self.changelog.nodemap)
1433 1433
1434 1434 # prune everything remote has from the tree
1435 1435 del remain[nullid]
1436 1436 remove = base.keys()
1437 1437 while remove:
1438 1438 n = remove.pop(0)
1439 1439 if n in remain:
1440 1440 del remain[n]
1441 1441 for p in self.changelog.parents(n):
1442 1442 remove.append(p)
1443 1443
1444 1444 # find every node whose parents have been pruned
1445 1445 subset = []
1446 1446 for n in remain:
1447 1447 p1, p2 = self.changelog.parents(n)
1448 1448 if p1 not in remain and p2 not in remain:
1449 1449 subset.append(n)
1450 1450
1451 1451 # this is the set of all roots we have to push
1452 1452 return subset
1453 1453
1454 1454 def pull(self, remote):
1455 1455 lock = self.lock()
1456 1456
1457 1457 # if we have an empty repo, fetch everything
1458 1458 if self.changelog.tip() == nullid:
1459 1459 self.ui.status("requesting all changes\n")
1460 1460 fetch = [nullid]
1461 1461 else:
1462 1462 fetch = self.findincoming(remote)
1463 1463
1464 1464 if not fetch:
1465 1465 self.ui.status("no changes found\n")
1466 1466 return 1
1467 1467
1468 1468 cg = remote.changegroup(fetch)
1469 1469 return self.addchangegroup(cg)
1470 1470
1471 1471 def push(self, remote, force=False):
1472 1472 lock = remote.lock()
1473 1473
1474 1474 base = {}
1475 1475 heads = remote.heads()
1476 1476 inc = self.findincoming(remote, base, heads)
1477 1477 if not force and inc:
1478 1478 self.ui.warn("abort: unsynced remote changes!\n")
1479 1479 self.ui.status("(did you forget to sync? use push -f to force)\n")
1480 1480 return 1
1481 1481
1482 1482 update = self.findoutgoing(remote, base)
1483 1483 if not update:
1484 1484 self.ui.status("no changes found\n")
1485 1485 return 1
1486 1486 elif not force:
1487 1487 if len(heads) < len(self.changelog.heads()):
1488 1488 self.ui.warn("abort: push creates new remote branches!\n")
1489 1489 self.ui.status("(did you forget to merge?" +
1490 1490 " use push -f to force)\n")
1491 1491 return 1
1492 1492
1493 1493 cg = self.changegroup(update)
1494 1494 return remote.addchangegroup(cg)
1495 1495
1496 1496 def changegroup(self, basenodes):
1497 1497 class genread:
1498 1498 def __init__(self, generator):
1499 1499 self.g = generator
1500 1500 self.buf = ""
1501 1501 def fillbuf(self):
1502 1502 self.buf += "".join(self.g)
1503 1503
1504 1504 def read(self, l):
1505 1505 while l > len(self.buf):
1506 1506 try:
1507 1507 self.buf += self.g.next()
1508 1508 except StopIteration:
1509 1509 break
1510 1510 d, self.buf = self.buf[:l], self.buf[l:]
1511 1511 return d
1512 1512
1513 1513 def gengroup():
1514 1514 nodes = self.newer(basenodes)
1515 1515
1516 1516 # construct the link map
1517 1517 linkmap = {}
1518 1518 for n in nodes:
1519 1519 linkmap[self.changelog.rev(n)] = n
1520 1520
1521 1521 # construct a list of all changed files
1522 1522 changed = {}
1523 1523 for n in nodes:
1524 1524 c = self.changelog.read(n)
1525 1525 for f in c[3]:
1526 1526 changed[f] = 1
1527 1527 changed = changed.keys()
1528 1528 changed.sort()
1529 1529
1530 1530 # the changegroup is changesets + manifests + all file revs
1531 1531 revs = [ self.changelog.rev(n) for n in nodes ]
1532 1532
1533 1533 for y in self.changelog.group(linkmap): yield y
1534 1534 for y in self.manifest.group(linkmap): yield y
1535 1535 for f in changed:
1536 1536 yield struct.pack(">l", len(f) + 4) + f
1537 1537 g = self.file(f).group(linkmap)
1538 1538 for y in g:
1539 1539 yield y
1540 1540
1541 1541 yield struct.pack(">l", 0)
1542 1542
1543 1543 return genread(gengroup())
1544 1544
1545 1545 def addchangegroup(self, source):
1546 1546
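# the changegroup stream is a sequence of chunks: a 4-byte big-endian
# length (which counts the length field itself) followed by that many
# minus four bytes of payload; a length of four or less ends a group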
1547 1547 def getchunk():
1548 1548 d = source.read(4)
1549 1549 if not d: return ""
1550 1550 l = struct.unpack(">l", d)[0]
1551 1551 if l <= 4: return ""
1552 1552 return source.read(l - 4)
1553 1553
1554 1554 def getgroup():
1555 1555 while 1:
1556 1556 c = getchunk()
1557 1557 if not c: break
1558 1558 yield c
1559 1559
1560 1560 def csmap(x):
1561 1561 self.ui.debug("add changeset %s\n" % short(x))
1562 1562 return self.changelog.count()
1563 1563
1564 1564 def revmap(x):
1565 1565 return self.changelog.rev(x)
1566 1566
1567 1567 if not source: return
1568 1568 changesets = files = revisions = 0
1569 1569
1570 1570 tr = self.transaction()
1571 1571
1572 1572 oldheads = len(self.changelog.heads())
1573 1573
1574 1574 # pull off the changeset group
1575 1575 self.ui.status("adding changesets\n")
1576 1576 co = self.changelog.tip()
1577 1577 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1578 1578 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1579 1579
1580 1580 # pull off the manifest group
1581 1581 self.ui.status("adding manifests\n")
1582 1582 mm = self.manifest.tip()
1583 1583 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1584 1584
1585 1585 # process the files
1586 1586 self.ui.status("adding file changes\n")
1587 1587 while 1:
1588 1588 f = getchunk()
1589 1589 if not f: break
1590 1590 self.ui.debug("adding %s revisions\n" % f)
1591 1591 fl = self.file(f)
1592 1592 o = fl.count()
1593 1593 n = fl.addgroup(getgroup(), revmap, tr)
1594 1594 revisions += fl.count() - o
1595 1595 files += 1
1596 1596
1597 1597 newheads = len(self.changelog.heads())
1598 1598 heads = ""
1599 1599 if oldheads and newheads > oldheads:
1600 1600 heads = " (+%d heads)" % (newheads - oldheads)
1601 1601
1602 1602 self.ui.status(("added %d changesets" +
1603 1603 " with %d changes to %d files%s\n")
1604 1604 % (changesets, revisions, files, heads))
1605 1605
1606 1606 tr.close()
1607 1607
1608 1608 if not self.hook("changegroup"):
1609 1609 return 1
1610 1610
1611 1611 return
1612 1612
1613 1613 def update(self, node, allow=False, force=False, choose=None,
1614 1614 moddirstate=True):
1615 1615 pl = self.dirstate.parents()
1616 1616 if not force and pl[1] != nullid:
1617 1617 self.ui.warn("aborting: outstanding uncommitted merges\n")
1618 1618 return 1
1619 1619
1620 1620 p1, p2 = pl[0], node
1621 1621 pa = self.changelog.ancestor(p1, p2)
1622 1622 m1n = self.changelog.read(p1)[0]
1623 1623 m2n = self.changelog.read(p2)[0]
1624 1624 man = self.manifest.ancestor(m1n, m2n)
1625 1625 m1 = self.manifest.read(m1n)
1626 1626 mf1 = self.manifest.readflags(m1n)
1627 1627 m2 = self.manifest.read(m2n)
1628 1628 mf2 = self.manifest.readflags(m2n)
1629 1629 ma = self.manifest.read(man)
1630 1630 mfa = self.manifest.readflags(man)
1631 1631
1632 1632 (c, a, d, u) = self.changes()
1633 1633
1634 1634 # is this a jump, or a merge? i.e. is there a linear path
1635 1635 # from p1 to p2?
1636 1636 linear_path = (pa == p1 or pa == p2)
1637 1637
1638 1638 # resolve the manifest to determine which files
1639 1639 # we care about merging
1640 1640 self.ui.note("resolving manifests\n")
1641 1641 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1642 1642 (force, allow, moddirstate, linear_path))
1643 1643 self.ui.debug(" ancestor %s local %s remote %s\n" %
1644 1644 (short(man), short(m1n), short(m2n)))
1645 1645
1646 1646 merge = {}
1647 1647 get = {}
1648 1648 remove = []
1649 1649
1650 1650 # construct a working dir manifest
1651 1651 mw = m1.copy()
1652 1652 mfw = mf1.copy()
1653 1653 umap = dict.fromkeys(u)
1654 1654
1655 1655 for f in a + c + u:
1656 1656 mw[f] = ""
1657 1657 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1658 1658
1659 1659 for f in d:
1660 1660 if f in mw: del mw[f]
1661 1661
1662 1662 # If we're jumping between revisions (as opposed to merging),
1663 1663 # and if neither the working directory nor the target rev has
1664 1664 # the file, then we need to remove it from the dirstate, to
1665 1665 # prevent the dirstate from listing the file when it is no
1666 1666 # longer in the manifest.
1667 1667 if moddirstate and linear_path and f not in m2:
1668 1668 self.dirstate.forget((f,))
1669 1669
1670 1670 # Compare manifests
1671 1671 for f, n in mw.iteritems():
1672 1672 if choose and not choose(f): continue
1673 1673 if f in m2:
1674 1674 s = 0
1675 1675
1676 1676 # is the wfile new since m1, and match m2?
1677 1677 if f not in m1:
1678 1678 t1 = self.wread(f)
1679 1679 t2 = self.file(f).read(m2[f])
1680 1680 if cmp(t1, t2) == 0:
1681 1681 n = m2[f]
1682 1682 del t1, t2
1683 1683
1684 1684 # are files different?
1685 1685 if n != m2[f]:
1686 1686 a = ma.get(f, nullid)
1687 1687 # are both different from the ancestor?
1688 1688 if n != a and m2[f] != a:
1689 1689 self.ui.debug(" %s versions differ, resolve\n" % f)
1690 1690 # merge executable bits
1691 1691 # "if we changed or they changed, change in merge"
1692 1692 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
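                                  # three-way merge of one bit: start from the
                                  # ancestor's bit a and flip it if either side
                                  # changed it, e.g. a=0, b=1, c=0 (we added +x,
                                  # they did not) gives mode 1; a=1, b=1, c=0
                                  # (they removed +x) gives mode 0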
1693 1693 mode = ((a^b) | (a^c)) ^ a
1694 1694 merge[f] = (m1.get(f, nullid), m2[f], mode)
1695 1695 s = 1
1696 1696 # are we clobbering?
1697 1697 # is remote's version newer?
1698 1698 # or are we going back in time?
1699 1699 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1700 1700 self.ui.debug(" remote %s is newer, get\n" % f)
1701 1701 get[f] = m2[f]
1702 1702 s = 1
1703 1703 elif f in umap:
1704 1704 # this unknown file is the same as the checkout
1705 1705 get[f] = m2[f]
1706 1706
1707 1707 if not s and mfw[f] != mf2[f]:
1708 1708 if force:
1709 1709 self.ui.debug(" updating permissions for %s\n" % f)
1710 1710 util.set_exec(self.wjoin(f), mf2[f])
1711 1711 else:
1712 1712 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1713 1713 mode = ((a^b) | (a^c)) ^ a
1714 1714 if mode != b:
1715 1715 self.ui.debug(" updating permissions for %s\n" % f)
1716 1716 util.set_exec(self.wjoin(f), mode)
1717 1717 del m2[f]
1718 1718 elif f in ma:
1719 1719 if n != ma[f]:
1720 1720 r = "d"
1721 1721 if not force and (linear_path or allow):
1722 1722 r = self.ui.prompt(
1723 1723 (" local changed %s which remote deleted\n" % f) +
1724 1724 "(k)eep or (d)elete?", "[kd]", "k")
1725 1725 if r == "d":
1726 1726 remove.append(f)
1727 1727 else:
1728 1728 self.ui.debug("other deleted %s\n" % f)
1729 1729 remove.append(f) # other deleted it
1730 1730 else:
1731 1731 if n == m1.get(f, nullid): # same as parent
1732 1732 if p2 == pa: # going backwards?
1733 1733 self.ui.debug("remote deleted %s\n" % f)
1734 1734 remove.append(f)
1735 1735 else:
1736 1736 self.ui.debug("local created %s, keeping\n" % f)
1737 1737 else:
1738 1738 self.ui.debug("working dir created %s, keeping\n" % f)
1739 1739
1740 1740 for f, n in m2.iteritems():
1741 1741 if choose and not choose(f): continue
1742 1742 if f[0] == "/": continue
1743 1743 if f in ma and n != ma[f]:
1744 1744 r = "k"
1745 1745 if not force and (linear_path or allow):
1746 1746 r = self.ui.prompt(
1747 1747 ("remote changed %s which local deleted\n" % f) +
1748 1748 "(k)eep or (d)elete?", "[kd]", "k")
1749 1749 if r == "k": get[f] = n
1750 1750 elif f not in ma:
1751 1751 self.ui.debug("remote created %s\n" % f)
1752 1752 get[f] = n
1753 1753 else:
1754 1754 if force or p2 == pa: # going backwards?
1755 1755 self.ui.debug("local deleted %s, recreating\n" % f)
1756 1756 get[f] = n
1757 1757 else:
1758 1758 self.ui.debug("local deleted %s\n" % f)
1759 1759
1760 1760 del mw, m1, m2, ma
1761 1761
1762 1762 if force:
1763 1763 for f in merge:
1764 1764 get[f] = merge[f][1]
1765 1765 merge = {}
1766 1766
1767 1767 if linear_path or force:
1768 1768 # we don't need to do any magic, just jump to the new rev
1769 1769 branch_merge = False
1770 1770 p1, p2 = p2, nullid
1771 1771 else:
1772 1772 if not allow:
1773 1773 self.ui.status("this update spans a branch" +
1774 1774 " affecting the following files:\n")
1775 1775 fl = merge.keys() + get.keys()
1776 1776 fl.sort()
1777 1777 for f in fl:
1778 1778 cf = ""
1779 1779 if f in merge: cf = " (resolve)"
1780 1780 self.ui.status(" %s%s\n" % (f, cf))
1781 1781 self.ui.warn("aborting update spanning branches!\n")
1782 1782 self.ui.status("(use update -m to merge across branches" +
1783 1783 " or -C to lose changes)\n")
1784 1784 return 1
1785 1785 branch_merge = True
1786 1786
1787 1787 if moddirstate:
1788 1788 self.dirstate.setparents(p1, p2)
1789 1789
1790 1790 # get the files we don't need to change
1791 1791 files = get.keys()
1792 1792 files.sort()
1793 1793 for f in files:
1794 1794 if f[0] == "/": continue
1795 1795 self.ui.note("getting %s\n" % f)
1796 1796 t = self.file(f).read(get[f])
1797 1797 try:
1798 1798 self.wwrite(f, t)
1799 1799 except IOError:
1800 1800 os.makedirs(os.path.dirname(self.wjoin(f)))
1801 1801 self.wwrite(f, t)
1802 1802 util.set_exec(self.wjoin(f), mf2[f])
1803 1803 if moddirstate:
1804 1804 if branch_merge:
1805 1805 self.dirstate.update([f], 'n', st_mtime=-1)
1806 1806 else:
1807 1807 self.dirstate.update([f], 'n')
1808 1808
1809 1809 # merge the tricky bits
1810 1810 files = merge.keys()
1811 1811 files.sort()
1812 1812 for f in files:
1813 1813 self.ui.status("merging %s\n" % f)
1814 1814 my, other, flag = merge[f]
1815 1815 self.merge3(f, my, other)
1816 1816 util.set_exec(self.wjoin(f), flag)
1817 1817 if moddirstate:
1818 1818 if branch_merge:
1819 1819 # We've done a branch merge, mark this file as merged
1820 1820 # so that we properly record the merger later
1821 1821 self.dirstate.update([f], 'm')
1822 1822 else:
1823 1823 # We've update-merged a locally modified file, so
1824 1824 # we set the dirstate to emulate a normal checkout
1825 1825 # of that file some time in the past. Thus our
1826 1826 # merge will appear as a normal local file
1827 1827 # modification.
1828 1828 f_len = len(self.file(f).read(other))
1829 1829 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1830 1830
1831 1831 remove.sort()
1832 1832 for f in remove:
1833 1833 self.ui.note("removing %s\n" % f)
1834 1834 try:
1835 1835 os.unlink(self.wjoin(f))
1836 1836 except OSError, inst:
1837 1837 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1838 1838 # try removing directories that might now be empty
1839 1839 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1840 1840 except: pass
1841 1841 if moddirstate:
1842 1842 if branch_merge:
1843 1843 self.dirstate.update(remove, 'r')
1844 1844 else:
1845 1845 self.dirstate.forget(remove)
1846 1846
1847 1847 def merge3(self, fn, my, other):
1848 1848 """perform a 3-way merge in the working directory"""
1849 1849
1850 1850 def temp(prefix, node):
1851 1851 pre = "%s~%s." % (os.path.basename(fn), prefix)
1852 1852 (fd, name) = tempfile.mkstemp("", pre)
1853 1853 f = os.fdopen(fd, "wb")
1854 1854 self.wwrite(fn, fl.read(node), f)
1855 1855 f.close()
1856 1856 return name
1857 1857
1858 1858 fl = self.file(fn)
1859 1859 base = fl.ancestor(my, other)
1860 1860 a = self.wjoin(fn)
1861 1861 b = temp("base", base)
1862 1862 c = temp("other", other)
1863 1863
1864 1864 self.ui.note("resolving %s\n" % fn)
1865 1865 self.ui.debug("file %s: other %s ancestor %s\n" %
1866 1866 (fn, short(other), short(base)))
1867 1867
1868 1868 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1869 1869 or "hgmerge")
1870 1870 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1871 1871 if r:
1872 1872 self.ui.warn("merging %s failed!\n" % fn)
1873 1873
1874 1874 os.unlink(b)
1875 1875 os.unlink(c)
1876 1876
1877 1877 def verify(self):
1878 1878 filelinkrevs = {}
1879 1879 filenodes = {}
1880 1880 changesets = revisions = files = 0
1881 1881 errors = 0
1882 1882
1883 1883 seen = {}
1884 1884 self.ui.status("checking changesets\n")
1885 1885 for i in range(self.changelog.count()):
1886 1886 changesets += 1
1887 1887 n = self.changelog.node(i)
1888 1888 if n in seen:
1889 1889 self.ui.warn("duplicate changeset at revision %d\n" % i)
1890 1890 errors += 1
1891 1891 seen[n] = 1
1892 1892
1893 1893 for p in self.changelog.parents(n):
1894 1894 if p not in self.changelog.nodemap:
1895 1895 self.ui.warn("changeset %s has unknown parent %s\n" %
1896 1896 (short(n), short(p)))
1897 1897 errors += 1
1898 1898 try:
1899 1899 changes = self.changelog.read(n)
1900 1900 except Exception, inst:
1901 1901 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1902 1902                 errors += 1
                          continue
1903 1903
1904 1904 for f in changes[3]:
1905 1905 filelinkrevs.setdefault(f, []).append(i)
1906 1906
1907 1907 seen = {}
1908 1908 self.ui.status("checking manifests\n")
1909 1909 for i in range(self.manifest.count()):
1910 1910 n = self.manifest.node(i)
1911 1911 if n in seen:
1912 1912 self.ui.warn("duplicate manifest at revision %d\n" % i)
1913 1913 errors += 1
1914 1914 seen[n] = 1
1915 1915
1916 1916 for p in self.manifest.parents(n):
1917 1917 if p not in self.manifest.nodemap:
1918 1918 self.ui.warn("manifest %s has unknown parent %s\n" %
1919 1919 (short(n), short(p)))
1920 1920 errors += 1
1921 1921
1922 1922 try:
1923 1923 delta = mdiff.patchtext(self.manifest.delta(n))
1924 1924 except KeyboardInterrupt:
1925 1925                 self.ui.warn("aborted\n")
1926 1926 sys.exit(0)
1927 1927 except Exception, inst:
1928 1928 self.ui.warn("unpacking manifest %s: %s\n"
1929 1929 % (short(n), inst))
1930 1930                 errors += 1
                          continue
1931 1931
1932 1932 ff = [ l.split('\0') for l in delta.splitlines() ]
1933 1933 for f, fn in ff:
1934 1934 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1935 1935
1936 1936 self.ui.status("crosschecking files in changesets and manifests\n")
1937 1937 for f in filenodes:
1938 1938 if f not in filelinkrevs:
1939 1939 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1940 1940 errors += 1
1941 1941
1942 1942 for f in filelinkrevs:
1943 1943 if f not in filenodes:
1944 1944 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1945 1945 errors += 1
1946 1946
1947 1947 self.ui.status("checking files\n")
1948 1948 ff = filenodes.keys()
1949 1949 ff.sort()
1950 1950 for f in ff:
1951 1951 if f == "/dev/null": continue
1952 1952 files += 1
1953 1953 fl = self.file(f)
1954 1954 nodes = { nullid: 1 }
1955 1955 seen = {}
1956 1956 for i in range(fl.count()):
1957 1957 revisions += 1
1958 1958 n = fl.node(i)
1959 1959
1960 1960 if n in seen:
1961 1961 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1962 1962 errors += 1
1963 1963
1964 1964 if n not in filenodes[f]:
1965 1965 self.ui.warn("%s: %d:%s not in manifests\n"
1966 1966 % (f, i, short(n)))
1967 1967 errors += 1
1968 1968 else:
1969 1969 del filenodes[f][n]
1970 1970
1971 1971 flr = fl.linkrev(n)
1972 1972 if flr not in filelinkrevs[f]:
1973 1973 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1974 1974 % (f, short(n), fl.linkrev(n)))
1975 1975 errors += 1
1976 1976 else:
1977 1977 filelinkrevs[f].remove(flr)
1978 1978
1979 1979 # verify contents
1980 1980 try:
1981 1981 t = fl.read(n)
1982 1982 except Exception, inst:
1983 1983 self.ui.warn("unpacking file %s %s: %s\n"
1984 1984 % (f, short(n), inst))
1985 1985 errors += 1
1986 1986
1987 1987 # verify parents
1988 1988 (p1, p2) = fl.parents(n)
1989 1989 if p1 not in nodes:
1990 1990                 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1991 1991 (f, short(n), short(p1)))
1992 1992 errors += 1
1993 1993 if p2 not in nodes:
1994 1994                 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1995 1995                              (f, short(n), short(p2)))
1996 1996 errors += 1
1997 1997 nodes[n] = 1
1998 1998
1999 1999 # cross-check
2000 2000 for node in filenodes[f]:
2001 2001 self.ui.warn("node %s in manifests not in %s\n"
2002 2002 % (hex(node), f))
2003 2003 errors += 1
2004 2004
2005 2005 self.ui.status("%d files, %d changesets, %d total revisions\n" %
2006 2006 (files, changesets, revisions))
2007 2007
2008 2008 if errors:
2009 2009 self.ui.warn("%d integrity errors encountered!\n" % errors)
2010 2010 return 1
2011 2011
2012 2012 class remoterepository:
2013 2013 def local(self):
2014 2014 return False
2015 2015
2016 2016 class httprepository(remoterepository):
2017 2017 def __init__(self, ui, path):
2018 2018 # fix missing / after hostname
2019 2019 s = urlparse.urlsplit(path)
2020 2020 partial = s[2]
2021 2021 if not partial: partial = "/"
2022 2022 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
2023 2023 self.ui = ui
2024 2024 no_list = [ "localhost", "127.0.0.1" ]
2025 2025 host = ui.config("http_proxy", "host")
2026 2026 if host is None:
2027 2027 host = os.environ.get("http_proxy")
2028 2028 if host and host.startswith('http://'):
2029 2029 host = host[7:]
2030 2030 user = ui.config("http_proxy", "user")
2031 2031 passwd = ui.config("http_proxy", "passwd")
2032 2032 no = ui.config("http_proxy", "no")
2033 2033 if no is None:
2034 2034 no = os.environ.get("no_proxy")
2035 2035 if no:
2036 2036 no_list = no_list + no.split(",")
2037 2037
2038 2038 no_proxy = 0
2039 2039 for h in no_list:
2040 2040 if (path.startswith("http://" + h + "/") or
2041 2041 path.startswith("http://" + h + ":") or
2042 2042 path == "http://" + h):
2043 2043 no_proxy = 1
2044 2044
2045 2045 # Note: urllib2 takes proxy values from the environment and those will
2046 2046 # take precedence
2047 2047 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2048 2048 try:
2049 2049 if os.environ.has_key(env):
2050 2050 del os.environ[env]
2051 2051 except OSError:
2052 2052 pass
2053 2053
2054 2054 proxy_handler = urllib2.BaseHandler()
2055 2055 if host and not no_proxy:
2056 2056 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2057 2057
2058 2058 authinfo = None
2059 2059 if user and passwd:
2060 2060 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2061 2061 passmgr.add_password(None, host, user, passwd)
2062 2062 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2063 2063
2064 2064 opener = urllib2.build_opener(proxy_handler, authinfo)
2065 2065 urllib2.install_opener(opener)
2066 2066
2067 2067 def dev(self):
2068 2068 return -1
2069 2069
2070 2070 def do_cmd(self, cmd, **args):
2071 2071 self.ui.debug("sending %s command\n" % cmd)
2072 2072 q = {"cmd": cmd}
2073 2073 q.update(args)
2074 2074 qs = urllib.urlencode(q)
2075 2075 cu = "%s?%s" % (self.url, qs)
2076 2076 resp = urllib2.urlopen(cu)
2077 2077 proto = resp.headers['content-type']
2078 2078
2079 2079 # accept old "text/plain" and "application/hg-changegroup" for now
2080 2080 if not proto.startswith('application/mercurial') and \
2081 2081 not proto.startswith('text/plain') and \
2082 2082 not proto.startswith('application/hg-changegroup'):
2083 2083 raise RepoError("'%s' does not appear to be an hg repository"
2084 2084 % self.url)
2085 2085
2086 2086 if proto.startswith('application/mercurial'):
2087 2087 version = proto[22:]
2088 2088 if float(version) > 0.1:
2089 2089 raise RepoError("'%s' uses newer protocol %s" %
2090 2090 (self.url, version))
2091 2091
2092 2092 return resp
2093 2093
2094 2094 def heads(self):
2095 2095 d = self.do_cmd("heads").read()
2096 2096 try:
2097 2097 return map(bin, d[:-1].split(" "))
2098 2098 except:
2099 2099 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2100 2100 raise
2101 2101
2102 2102 def branches(self, nodes):
2103 2103 n = " ".join(map(hex, nodes))
2104 2104 d = self.do_cmd("branches", nodes=n).read()
2105 2105 try:
2106 2106 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2107 2107 return br
2108 2108 except:
2109 2109 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2110 2110 raise
2111 2111
2112 2112 def between(self, pairs):
2113 2113 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2114 2114 d = self.do_cmd("between", pairs=n).read()
2115 2115 try:
2116 2116 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2117 2117 return p
2118 2118 except:
2119 2119 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2120 2120 raise
2121 2121
2122 2122 def changegroup(self, nodes):
2123 2123 n = " ".join(map(hex, nodes))
2124 2124 f = self.do_cmd("changegroup", roots=n)
2125 2125 bytes = 0
2126 2126
2127 2127 class zread:
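                      # the hgweb server streams the changegroup through
                      # zlib.compressobj() (see the 'changegroup' handler in
                      # hgweb.py below), so inflate it incrementally here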
2128 2128 def __init__(self, f):
2129 2129 self.zd = zlib.decompressobj()
2130 2130 self.f = f
2131 2131 self.buf = ""
2132 2132 def read(self, l):
2133 2133 while l > len(self.buf):
2134 2134 r = self.f.read(4096)
2135 2135 if r:
2136 2136 self.buf += self.zd.decompress(r)
2137 2137 else:
2138 2138 self.buf += self.zd.flush()
2139 2139 break
2140 2140 d, self.buf = self.buf[:l], self.buf[l:]
2141 2141 return d
2142 2142
2143 2143 return zread(f)
2144 2144
2145 2145 class remotelock:
2146 2146 def __init__(self, repo):
2147 2147 self.repo = repo
2148 2148 def release(self):
2149 2149 self.repo.unlock()
2150 2150 self.repo = None
2151 2151 def __del__(self):
2152 2152 if self.repo:
2153 2153 self.release()
2154 2154
2155 2155 class sshrepository(remoterepository):
2156 2156 def __init__(self, ui, path):
2157 2157 self.url = path
2158 2158 self.ui = ui
2159 2159
2160 2160 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2161 2161 if not m:
2162 2162 raise RepoError("couldn't parse destination %s" % path)
2163 2163
2164 2164 self.user = m.group(2)
2165 2165 self.host = m.group(3)
2166 2166 self.port = m.group(5)
2167 2167 self.path = m.group(7)
2168 2168
2169 2169 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2170 2170 args = self.port and ("%s -p %s") % (args, self.port) or args
2171 2171 path = self.path or ""
2172 2172
2173 2173 if not path:
2174 2174 raise RepoError("no remote repository path specified")
2175 2175
2176 2176 sshcmd = self.ui.config("ui", "ssh", "ssh")
2177 2177 remotecmd = self.ui.config("ui", "remotecmd", "hg")
2178 2178 cmd = "%s %s '%s -R %s serve --stdio'"
2179 2179 cmd = cmd % (sshcmd, args, remotecmd, path)
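                  # e.g. ssh user@example.com -p 2222 'hg -R /path/to/repo serve --stdio'
                  # (user, host, port and path above are illustrative values)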
2180 2180
2181 2181 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2182 2182
2183 2183 def readerr(self):
2184 2184 while 1:
2185 2185 r,w,x = select.select([self.pipee], [], [], 0)
2186 2186 if not r: break
2187 2187 l = self.pipee.readline()
2188 2188 if not l: break
2189 2189 self.ui.status("remote: ", l)
2190 2190
2191 2191 def __del__(self):
2192 2192 try:
2193 2193 self.pipeo.close()
2194 2194 self.pipei.close()
2195 2195 for l in self.pipee:
2196 2196 self.ui.status("remote: ", l)
2197 2197 self.pipee.close()
2198 2198 except:
2199 2199 pass
2200 2200
2201 2201 def dev(self):
2202 2202 return -1
2203 2203
2204 2204 def do_cmd(self, cmd, **args):
2205 2205 self.ui.debug("sending %s command\n" % cmd)
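                  # wire format: the command name on a line of its own, then for
                  # each argument "<name> <length>\n" followed by <length> raw
                  # bytes; the reply (read back in call()) is "<length>\n" plus
                  # the payload itself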
2206 2206 self.pipeo.write("%s\n" % cmd)
2207 2207 for k, v in args.items():
2208 2208 self.pipeo.write("%s %d\n" % (k, len(v)))
2209 2209 self.pipeo.write(v)
2210 2210 self.pipeo.flush()
2211 2211
2212 2212 return self.pipei
2213 2213
2214 2214 def call(self, cmd, **args):
2215 2215 r = self.do_cmd(cmd, **args)
2216 2216 l = r.readline()
2217 2217 self.readerr()
2218 2218 try:
2219 2219 l = int(l)
2220 2220 except:
2221 2221 raise RepoError("unexpected response '%s'" % l)
2222 2222 return r.read(l)
2223 2223
2224 2224 def lock(self):
2225 2225 self.call("lock")
2226 2226 return remotelock(self)
2227 2227
2228 2228 def unlock(self):
2229 2229 self.call("unlock")
2230 2230
2231 2231 def heads(self):
2232 2232 d = self.call("heads")
2233 2233 try:
2234 2234 return map(bin, d[:-1].split(" "))
2235 2235 except:
2236 2236 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2237 2237
2238 2238 def branches(self, nodes):
2239 2239 n = " ".join(map(hex, nodes))
2240 2240 d = self.call("branches", nodes=n)
2241 2241 try:
2242 2242 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2243 2243 return br
2244 2244 except:
2245 2245 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2246 2246
2247 2247 def between(self, pairs):
2248 2248 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2249 2249 d = self.call("between", pairs=n)
2250 2250 try:
2251 2251 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2252 2252 return p
2253 2253 except:
2254 2254 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2255 2255
2256 2256 def changegroup(self, nodes):
2257 2257 n = " ".join(map(hex, nodes))
2258 2258 f = self.do_cmd("changegroup", roots=n)
2259 2259 return self.pipei
2260 2260
2261 2261 def addchangegroup(self, cg):
2262 2262 d = self.call("addchangegroup")
2263 2263 if d:
2264 2264             raise RepoError("push refused: %s" % d)
2265 2265
2266 2266 while 1:
2267 2267 d = cg.read(4096)
2268 2268 if not d: break
2269 2269 self.pipeo.write(d)
2270 2270 self.readerr()
2271 2271
2272 2272 self.pipeo.flush()
2273 2273
2274 2274 self.readerr()
2275 2275 l = int(self.pipei.readline())
2276 2276 return self.pipei.read(l) != ""
2277 2277
2278 2278 class httpsrepository(httprepository):
2279 2279 pass
2280 2280
2281 2281 def repository(ui, path=None, create=0):
2282 2282 if path:
2283 2283 if path.startswith("http://"):
2284 2284 return httprepository(ui, path)
2285 2285 if path.startswith("https://"):
2286 2286 return httpsrepository(ui, path)
2287 2287 if path.startswith("hg://"):
2288 2288 return httprepository(ui, path.replace("hg://", "http://"))
2289 2289 if path.startswith("old-http://"):
2290 2290 return localrepository(ui, path.replace("old-http://", "http://"))
2291 2291 if path.startswith("ssh://"):
2292 2292 return sshrepository(ui, path)
2293 2293
2294 2294 return localrepository(ui, path, create)
@@ -1,892 +1,892
1 1 # hgweb.py - web interface to a mercurial repository
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, cgi, time, re, difflib, socket, sys, zlib
10 10 from mercurial.hg import *
11 11 from mercurial.ui import *
12 12
13 13 def templatepath():
14 14 for f in "templates", "../templates":
15 15 p = os.path.join(os.path.dirname(__file__), f)
16 16 if os.path.isdir(p): return p
17 17
18 18 def age(t):
19 19 def plural(t, c):
20 20 if c == 1: return t
21 21 return t + "s"
22 22 def fmt(t, c):
23 23 return "%d %s" % (c, plural(t, c))
24 24
25 25 now = time.time()
26 26 delta = max(1, int(now - t))
27 27
28 28 scales = [["second", 1],
29 29 ["minute", 60],
30 30 ["hour", 3600],
31 31 ["day", 3600 * 24],
32 32 ["week", 3600 * 24 * 7],
33 33 ["month", 3600 * 24 * 30],
34 34 ["year", 3600 * 24 * 365]]
35 35
36 36 scales.reverse()
37 37
38 38 for t, s in scales:
39 39 n = delta / s
40 40 if n >= 2 or s == 1: return fmt(t, n)
41 41
42 42 def nl2br(text):
43 43 return text.replace('\n', '<br/>\n')
44 44
45 45 def obfuscate(text):
46 46 return ''.join([ '&#%d;' % ord(c) for c in text ])
47 47
48 48 def up(p):
49 49 if p[0] != "/": p = "/" + p
50 50 if p[-1] == "/": p = p[:-1]
51 51 up = os.path.dirname(p)
52 52 if up == "/":
53 53 return "/"
54 54 return up + "/"
55 55
56 56 def httphdr(type):
57 57 sys.stdout.write('Content-type: %s\n\n' % type)
58 58
59 59 def write(*things):
60 60 for thing in things:
61 61 if hasattr(thing, "__iter__"):
62 62 for part in thing:
63 63 write(part)
64 64 else:
65 65 sys.stdout.write(str(thing))
66 66
67 67 class templater:
68 def __init__(self, mapfile, filters = {}, defaults = {}):
68 def __init__(self, mapfile, filters={}, defaults={}):
69 69 self.cache = {}
70 70 self.map = {}
71 71 self.base = os.path.dirname(mapfile)
72 72 self.filters = filters
73 73 self.defaults = defaults
74 74
75 75 for l in file(mapfile):
76 76 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
77 77 if m:
78 78 self.cache[m.group(1)] = m.group(2)
79 79 else:
80 80 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
81 81 if m:
82 82 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
83 83 else:
84 84 raise "unknown map entry '%s'" % l
85 85
86 86 def __call__(self, t, **map):
87 87 m = self.defaults.copy()
88 88 m.update(map)
89 89 try:
90 90 tmpl = self.cache[t]
91 91 except KeyError:
92 92 tmpl = self.cache[t] = file(self.map[t]).read()
93 93 return self.template(tmpl, self.filters, **m)
94 94
95 def template(self, tmpl, filters = {}, **map):
95 def template(self, tmpl, filters={}, **map):
96 96 while tmpl:
97 97 m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
98 98 if m:
99 99 yield tmpl[:m.start(0)]
100 100 v = map.get(m.group(1), "")
101 101 v = callable(v) and v(**map) or v
102 102
103 103 format = m.group(2)
104 104 fl = m.group(4)
105 105
106 106 if format:
107 107 q = v.__iter__
108 108 for i in q():
109 109 lm = map.copy()
110 110 lm.update(i)
111 111 yield self(format[1:], **lm)
112 112
113 113 v = ""
114 114
115 115 elif fl:
116 116 for f in fl.split("|")[1:]:
117 117 v = filters[f](v)
118 118
119 119 yield v
120 120 tmpl = tmpl[m.end(0):]
121 121 else:
122 122 yield tmpl
123 123 return
124 124
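The template() generator above substitutes #keyword#, #keyword|filter# and #keyword%format# markers; a stripped-down sketch of the filter-pipe substitution it performs (ignoring the %format nesting, names are illustrative):

    import re

    def expand(tmpl, values, filters):
        # substitute "#keyword|filter#" markers the way template() above does,
        # running the value through each named filter in turn
        def repl(m):
            v = values.get(m.group(1), "")
            for f in m.group(2).split("|")[1:]:
                v = filters[f](v)
            return v
        return re.sub(r"#([a-zA-Z0-9]+)((\|[a-zA-Z0-9]+)*)#", repl, tmpl)

    print(expand("changeset #node|short# by #author|obfuscate#",
                 {"node": "a" * 40, "author": "mpm"},
                 {"short": lambda x: x[:12],
                  "obfuscate": lambda x: "".join(["&#%d;" % ord(c) for c in x])}))
    # -> changeset aaaaaaaaaaaa by &#109;&#112;&#109;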
125 125 def rfc822date(x):
126 126 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(x))
127 127
128 128 common_filters = {
129 129 "escape": cgi.escape,
130 130 "age": age,
131 131 "date": (lambda x: time.asctime(time.gmtime(x))),
132 132 "addbreaks": nl2br,
133 133 "obfuscate": obfuscate,
134 134 "short": (lambda x: x[:12]),
135 135 "firstline": (lambda x: x.splitlines(1)[0]),
136 136 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
137 137 "rfc822date": rfc822date,
138 138 }
139 139
140 140 class hgweb:
141 141 def __init__(self, repo, name=None):
142 142 if type(repo) == type(""):
143 143 self.repo = repository(ui(), repo)
144 144 else:
145 145 self.repo = repo
146 146
147 147 self.mtime = -1
148 148 self.reponame = name or self.repo.ui.config("web", "name",
149 149 self.repo.root)
150 150
151 151 def refresh(self):
152 152 s = os.stat(os.path.join(self.repo.root, ".hg", "00changelog.i"))
153 153 if s.st_mtime != self.mtime:
154 154 self.mtime = s.st_mtime
155 155 self.repo = repository(self.repo.ui, self.repo.root)
156 156 self.maxchanges = self.repo.ui.config("web", "maxchanges", 10)
157 157         self.maxfiles = self.repo.ui.config("web", "maxfiles", 10)
158 158 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
159 159
160 160 def date(self, cs):
161 161 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
162 162
163 163 def listfiles(self, files, mf):
164 164 for f in files[:self.maxfiles]:
165 yield self.t("filenodelink", node = hex(mf[f]), file = f)
165 yield self.t("filenodelink", node=hex(mf[f]), file=f)
166 166 if len(files) > self.maxfiles:
167 167 yield self.t("fileellipses")
168 168
169 169 def listfilediffs(self, files, changeset):
170 170 for f in files[:self.maxfiles]:
171 yield self.t("filedifflink", node = hex(changeset), file = f)
171 yield self.t("filedifflink", node=hex(changeset), file=f)
172 172 if len(files) > self.maxfiles:
173 173 yield self.t("fileellipses")
174 174
175 175 def parents(self, t1, nodes=[], rev=None,**args):
176 176 if not rev: rev = lambda x: ""
177 177 for node in nodes:
178 178 if node != nullid:
179 yield self.t(t1, node = hex(node), rev = rev(node), **args)
179 yield self.t(t1, node=hex(node), rev=rev(node), **args)
180 180
181 181 def showtag(self, t1, node=nullid, **args):
182 182 for t in self.repo.nodetags(node):
183 yield self.t(t1, tag = t, **args)
183 yield self.t(t1, tag=t, **args)
184 184
185 185 def diff(self, node1, node2, files):
186 186 def filterfiles(list, files):
187 187 l = [ x for x in list if x in files ]
188 188
189 189 for f in files:
190 190 if f[-1] != os.sep: f += os.sep
191 191 l += [ x for x in list if x.startswith(f) ]
192 192 return l
193 193
194 194 parity = [0]
195 195 def diffblock(diff, f, fn):
196 196 yield self.t("diffblock",
197 197 lines = prettyprintlines(diff),
198 198 parity = parity[0],
199 199 file = f,
200 200 filenode = hex(fn or nullid))
201 201 parity[0] = 1 - parity[0]
202 202
203 203 def prettyprintlines(diff):
204 204 for l in diff.splitlines(1):
205 205 if l.startswith('+'):
206 yield self.t("difflineplus", line = l)
206 yield self.t("difflineplus", line=l)
207 207 elif l.startswith('-'):
208 yield self.t("difflineminus", line = l)
208 yield self.t("difflineminus", line=l)
209 209 elif l.startswith('@'):
210 yield self.t("difflineat", line = l)
210 yield self.t("difflineat", line=l)
211 211 else:
212 yield self.t("diffline", line = l)
212 yield self.t("diffline", line=l)
213 213
214 214 r = self.repo
215 215 cl = r.changelog
216 216 mf = r.manifest
217 217 change1 = cl.read(node1)
218 218 change2 = cl.read(node2)
219 219 mmap1 = mf.read(change1[0])
220 220 mmap2 = mf.read(change2[0])
221 221 date1 = self.date(change1)
222 222 date2 = self.date(change2)
223 223
224 224 c, a, d, u = r.changes(node1, node2)
225 225 if files:
226 226 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
227 227
228 228 for f in c:
229 229 to = r.file(f).read(mmap1[f])
230 230 tn = r.file(f).read(mmap2[f])
231 231 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
232 232 for f in a:
233 233 to = None
234 234 tn = r.file(f).read(mmap2[f])
235 235 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
236 236 for f in d:
237 237 to = r.file(f).read(mmap1[f])
238 238 tn = None
239 239 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
240 240
241 241 def changelog(self, pos):
242 242 def changenav(**map):
243 def seq(factor = 1):
243 def seq(factor=1):
244 244 yield 1 * factor
245 245 yield 3 * factor
246 246 #yield 5 * factor
247 247 for f in seq(factor * 10):
248 248 yield f
249 249
250 250 l = []
251 251 for f in seq():
252 252 if f < self.maxchanges / 2: continue
253 253 if f > count: break
254 254 r = "%d" % f
255 255 if pos + f < count: l.append(("+" + r, pos + f))
256 256 if pos - f >= 0: l.insert(0, ("-" + r, pos - f))
257 257
258 258 yield {"rev": 0, "label": "(0)"}
259 259
260 260 for label, rev in l:
261 261 yield {"label": label, "rev": rev}
262 262
263 263 yield {"label": "tip", "rev": ""}
264 264
265 265 def changelist(**map):
266 266 parity = (start - end) & 1
267 267 cl = self.repo.changelog
268 268 l = [] # build a list in forward order for efficiency
269 269 for i in range(start, end):
270 270 n = cl.node(i)
271 271 changes = cl.read(n)
272 272 hn = hex(n)
273 273 t = float(changes[2].split(' ')[0])
274 274
275 275 l.insert(0, {
276 276 "parity": parity,
277 277 "author": changes[1],
278 278 "parent": self.parents("changelogparent",
279 279 cl.parents(n), cl.rev),
280 280 "changelogtag": self.showtag("changelogtag",n),
281 281 "manifest": hex(changes[0]),
282 282 "desc": changes[4],
283 283 "date": t,
284 284 "files": self.listfilediffs(changes[3], n),
285 285 "rev": i,
286 286 "node": hn})
287 287 parity = 1 - parity
288 288
289 289 for e in l: yield e
290 290
291 291 cl = self.repo.changelog
292 292 mf = cl.read(cl.tip())[0]
293 293 count = cl.count()
294 294 start = max(0, pos - self.maxchanges + 1)
295 295 end = min(count, start + self.maxchanges)
296 296 pos = end - 1
297 297
298 298 yield self.t('changelog',
299 changenav = changenav,
300 manifest = hex(mf),
301 rev = pos, changesets = count, entries = changelist)
299 changenav=changenav,
300 manifest=hex(mf),
301 rev=pos, changesets=count, entries=changelist)
302 302
303 303 def search(self, query):
304 304
305 305 def changelist(**map):
306 306 cl = self.repo.changelog
307 307 count = 0
308 308 qw = query.lower().split()
309 309
310 310 def revgen():
311 311 for i in range(cl.count() - 1, 0, -100):
312 312 l = []
313 313 for j in range(max(0, i - 100), i):
314 314 n = cl.node(j)
315 315 changes = cl.read(n)
316 316 l.append((n, j, changes))
317 317 l.reverse()
318 318 for e in l:
319 319 yield e
320 320
321 321 for n, i, changes in revgen():
322 322 miss = 0
323 323 for q in qw:
324 324 if not (q in changes[1].lower() or
325 325 q in changes[4].lower() or
326 326 q in " ".join(changes[3][:20]).lower()):
327 327 miss = 1
328 328 break
329 329 if miss: continue
330 330
331 331 count += 1
332 332 hn = hex(n)
333 333 t = float(changes[2].split(' ')[0])
334 334
335 335 yield self.t(
336 336 'searchentry',
337 parity = count & 1,
338 author = changes[1],
339 parent = self.parents("changelogparent",
337 parity=count & 1,
338 author=changes[1],
339 parent=self.parents("changelogparent",
340 340 cl.parents(n), cl.rev),
341 changelogtag = self.showtag("changelogtag",n),
342 manifest = hex(changes[0]),
343 desc = changes[4],
344 date = t,
345 files = self.listfilediffs(changes[3], n),
346 rev = i,
347 node = hn)
341 changelogtag=self.showtag("changelogtag",n),
342 manifest=hex(changes[0]),
343 desc=changes[4],
344 date=t,
345 files=self.listfilediffs(changes[3], n),
346 rev=i,
347 node=hn)
348 348
349 349 if count >= self.maxchanges: break
350 350
351 351 cl = self.repo.changelog
352 352 mf = cl.read(cl.tip())[0]
353 353
354 354 yield self.t('search',
355 query = query,
356 manifest = hex(mf),
357 entries = changelist)
355 query=query,
356 manifest=hex(mf),
357 entries=changelist)
358 358
359 359 def changeset(self, nodeid):
360 360 n = bin(nodeid)
361 361 cl = self.repo.changelog
362 362 changes = cl.read(n)
363 363 p1 = cl.parents(n)[0]
364 364 t = float(changes[2].split(' ')[0])
365 365
366 366 files = []
367 367 mf = self.repo.manifest.read(changes[0])
368 368 for f in changes[3]:
369 369 files.append(self.t("filenodelink",
370 filenode = hex(mf.get(f, nullid)), file = f))
370 filenode = hex(mf.get(f, nullid)), file=f))
371 371
372 372 def diff(**map):
373 373 yield self.diff(p1, n, None)
374 374
375 375 yield self.t('changeset',
376 diff = diff,
377 rev = cl.rev(n),
378 node = nodeid,
379 parent = self.parents("changesetparent",
376 diff=diff,
377 rev=cl.rev(n),
378 node=nodeid,
379 parent=self.parents("changesetparent",
380 380 cl.parents(n), cl.rev),
381 changesettag = self.showtag("changesettag",n),
382 manifest = hex(changes[0]),
383 author = changes[1],
384 desc = changes[4],
385 date = t,
386 files = files)
381 changesettag=self.showtag("changesettag",n),
382 manifest=hex(changes[0]),
383 author=changes[1],
384 desc=changes[4],
385 date=t,
386 files=files)
387 387
388 388 def filelog(self, f, filenode):
389 389 cl = self.repo.changelog
390 390 fl = self.repo.file(f)
391 391 count = fl.count()
392 392
393 393 def entries(**map):
394 394 l = []
395 395 parity = (count - 1) & 1
396 396
397 397 for i in range(count):
398 398
399 399 n = fl.node(i)
400 400 lr = fl.linkrev(n)
401 401 cn = cl.node(lr)
402 402 cs = cl.read(cl.node(lr))
403 403 t = float(cs[2].split(' ')[0])
404 404
405 405 l.insert(0, {"parity": parity,
406 406 "filenode": hex(n),
407 407 "filerev": i,
408 408 "file": f,
409 409 "node": hex(cn),
410 410 "author": cs[1],
411 411 "date": t,
412 412 "parent": self.parents("filelogparent",
413 413 fl.parents(n), fl.rev, file=f),
414 414 "desc": cs[4]})
415 415 parity = 1 - parity
416 416
417 417 for e in l: yield e
418 418
419 419 yield self.t("filelog",
420 file = f,
421 filenode = filenode,
422 entries = entries)
420 file=f,
421 filenode=filenode,
422 entries=entries)
423 423
424 424 def filerevision(self, f, node):
425 425 fl = self.repo.file(f)
426 426 n = bin(node)
427 427 text = fl.read(n)
428 428 changerev = fl.linkrev(n)
429 429 cl = self.repo.changelog
430 430 cn = cl.node(changerev)
431 431 cs = cl.read(cn)
432 432 t = float(cs[2].split(' ')[0])
433 433 mfn = cs[0]
434 434
435 435 def lines():
436 436 for l, t in enumerate(text.splitlines(1)):
437 437 yield {"line": t,
438 438 "linenumber": "% 6d" % (l + 1),
439 439 "parity": l & 1}
440 440
441 yield self.t("filerevision", file = f,
442 filenode = node,
443 path = up(f),
444 text = lines(),
445 rev = changerev,
446 node = hex(cn),
447 manifest = hex(mfn),
448 author = cs[1],
449 date = t,
450 parent = self.parents("filerevparent",
441 yield self.t("filerevision", file=f,
442 filenode=node,
443 path=up(f),
444 text=lines(),
445 rev=changerev,
446 node=hex(cn),
447 manifest=hex(mfn),
448 author=cs[1],
449 date=t,
450 parent=self.parents("filerevparent",
451 451 fl.parents(n), fl.rev, file=f),
452 permissions = self.repo.manifest.readflags(mfn)[f])
452 permissions=self.repo.manifest.readflags(mfn)[f])
453 453
454 454 def fileannotate(self, f, node):
455 455 bcache = {}
456 456 ncache = {}
457 457 fl = self.repo.file(f)
458 458 n = bin(node)
459 459 changerev = fl.linkrev(n)
460 460
461 461 cl = self.repo.changelog
462 462 cn = cl.node(changerev)
463 463 cs = cl.read(cn)
464 464 t = float(cs[2].split(' ')[0])
465 465 mfn = cs[0]
466 466
467 467 def annotate(**map):
468 468 parity = 1
469 469 last = None
470 470 for r, l in fl.annotate(n):
471 471 try:
472 472 cnode = ncache[r]
473 473 except KeyError:
474 474 cnode = ncache[r] = self.repo.changelog.node(r)
475 475
476 476 try:
477 477 name = bcache[r]
478 478 except KeyError:
479 479 cl = self.repo.changelog.read(cnode)
480 480 name = cl[1]
481 481 f = name.find('@')
482 482 if f >= 0:
483 483 name = name[:f]
484 484 f = name.find('<')
485 485 if f >= 0:
486 486 name = name[f+1:]
487 487 bcache[r] = name
488 488
489 489 if last != cnode:
490 490 parity = 1 - parity
491 491 last = cnode
492 492
493 493 yield {"parity": parity,
494 494 "node": hex(cnode),
495 495 "rev": r,
496 496 "author": name,
497 497 "file": f,
498 498 "line": l}
499 499
500 500 yield self.t("fileannotate",
501 501 file = f,
502 502 filenode = node,
503 503 annotate = annotate,
504 504 path = up(f),
505 505 rev = changerev,
506 506 node = hex(cn),
507 507 manifest = hex(mfn),
508 508 author = cs[1],
509 509 date = t,
510 510 parent = self.parents("fileannotateparent",
511 511 fl.parents(n), fl.rev, file=f),
512 512 permissions = self.repo.manifest.readflags(mfn)[f])
513 513
514 514 def manifest(self, mnode, path):
515 515 mf = self.repo.manifest.read(bin(mnode))
516 516 rev = self.repo.manifest.rev(bin(mnode))
517 517 node = self.repo.changelog.node(rev)
518 518         mff = self.repo.manifest.readflags(bin(mnode))
519 519
520 520 files = {}
521 521
522 522 p = path[1:]
523 523 l = len(p)
524 524
525 525 for f,n in mf.items():
526 526 if f[:l] != p:
527 527 continue
528 528 remain = f[l:]
529 529 if "/" in remain:
530 530 short = remain[:remain.find("/") + 1] # bleah
531 531 files[short] = (f, None)
532 532 else:
533 533 short = os.path.basename(remain)
534 534 files[short] = (f, n)
535 535
536 536 def filelist(**map):
537 537 parity = 0
538 538 fl = files.keys()
539 539 fl.sort()
540 540 for f in fl:
541 541 full, fnode = files[f]
542 542 if not fnode:
543 543 continue
544 544
545 545 yield {"file": full,
546 546 "manifest": mnode,
547 547 "filenode": hex(fnode),
548 548 "parity": parity,
549 549 "basename": f,
550 550 "permissions": mff[full]}
551 551 parity = 1 - parity
552 552
553 553 def dirlist(**map):
554 554 parity = 0
555 555 fl = files.keys()
556 556 fl.sort()
557 557 for f in fl:
558 558 full, fnode = files[f]
559 559 if fnode:
560 560 continue
561 561
562 562 yield {"parity": parity,
563 563 "path": os.path.join(path, f),
564 564 "manifest": mnode,
565 565 "basename": f[:-1]}
566 566 parity = 1 - parity
567 567
568 568 yield self.t("manifest",
569 manifest = mnode,
570 rev = rev,
571 node = hex(node),
572 path = path,
573 up = up(path),
574 fentries = filelist,
575 dentries = dirlist)
569 manifest=mnode,
570 rev=rev,
571 node=hex(node),
572 path=path,
573 up=up(path),
574 fentries=filelist,
575 dentries=dirlist)
576 576
577 577 def tags(self):
578 578 cl = self.repo.changelog
579 579 mf = cl.read(cl.tip())[0]
580 580
581 581 i = self.repo.tagslist()
582 582 i.reverse()
583 583
584 584 def entries(**map):
585 585 parity = 0
586 586 for k,n in i:
587 587 yield {"parity": parity,
588 588 "tag": k,
589 589 "node": hex(n)}
590 590 parity = 1 - parity
591 591
592 592 yield self.t("tags",
593 manifest = hex(mf),
594 entries = entries)
593 manifest=hex(mf),
594 entries=entries)
595 595
596 596 def filediff(self, file, changeset):
597 597 n = bin(changeset)
598 598 cl = self.repo.changelog
599 599 p1 = cl.parents(n)[0]
600 600 cs = cl.read(n)
601 601 mf = self.repo.manifest.read(cs[0])
602 602
603 603 def diff(**map):
604 604 yield self.diff(p1, n, file)
605 605
606 606 yield self.t("filediff",
607 file = file,
608 filenode = hex(mf.get(file, nullid)),
609 node = changeset,
610 rev = self.repo.changelog.rev(n),
611 parent = self.parents("filediffparent",
607 file=file,
608 filenode=hex(mf.get(file, nullid)),
609 node=changeset,
610 rev=self.repo.changelog.rev(n),
611 parent=self.parents("filediffparent",
612 612 cl.parents(n), cl.rev),
613 diff = diff)
613 diff=diff)
614 614
615 615 # add tags to things
616 616 # tags -> list of changesets corresponding to tags
617 617 # find tag, changeset, file
618 618
619 619 def run(self):
620 620 def header(**map):
621 621 yield self.t("header", **map)
622 622
623 623 def footer(**map):
624 624 yield self.t("footer", **map)
625 625
626 626 self.refresh()
627 627 args = cgi.parse()
628 628
629 629 t = self.repo.ui.config("web", "templates", templatepath())
630 630 m = os.path.join(t, "map")
631 631 style = self.repo.ui.config("web", "style", "")
632 632 if args.has_key('style'):
633 633 style = args['style'][0]
634 634 if style:
635 635 b = os.path.basename("map-" + style)
636 636 p = os.path.join(t, b)
637 637 if os.path.isfile(p): m = p
638 638
639 639 port = os.environ["SERVER_PORT"]
640 640 port = port != "80" and (":" + port) or ""
641 641 uri = os.environ["REQUEST_URI"]
642 642 if "?" in uri: uri = uri.split("?")[0]
643 643 url = "http://%s%s%s" % (os.environ["SERVER_NAME"], port, uri)
644 644
645 645 self.t = templater(m, common_filters,
646 {"url":url,
647 "repo":self.reponame,
648 "header":header,
649 "footer":footer,
646 {"url": url,
647 "repo": self.reponame,
648 "header": header,
649 "footer": footer,
650 650 })
651 651
652 652 if not args.has_key('cmd'):
653 653 args['cmd'] = [self.t.cache['default'],]
654 654
655 655 if args['cmd'][0] == 'changelog':
656 656 c = self.repo.changelog.count() - 1
657 657 hi = c
658 658 if args.has_key('rev'):
659 659 hi = args['rev'][0]
660 660 try:
661 661 hi = self.repo.changelog.rev(self.repo.lookup(hi))
662 662 except RepoError:
663 663 write(self.search(hi))
664 664 return
665 665
666 666 write(self.changelog(hi))
667 667
668 668 elif args['cmd'][0] == 'changeset':
669 669 write(self.changeset(args['node'][0]))
670 670
671 671 elif args['cmd'][0] == 'manifest':
672 672 write(self.manifest(args['manifest'][0], args['path'][0]))
673 673
674 674 elif args['cmd'][0] == 'tags':
675 675 write(self.tags())
676 676
677 677 elif args['cmd'][0] == 'filediff':
678 678 write(self.filediff(args['file'][0], args['node'][0]))
679 679
680 680 elif args['cmd'][0] == 'file':
681 681 write(self.filerevision(args['file'][0], args['filenode'][0]))
682 682
683 683 elif args['cmd'][0] == 'annotate':
684 684 write(self.fileannotate(args['file'][0], args['filenode'][0]))
685 685
686 686 elif args['cmd'][0] == 'filelog':
687 687 write(self.filelog(args['file'][0], args['filenode'][0]))
688 688
689 689 elif args['cmd'][0] == 'heads':
690 690 httphdr("application/mercurial-0.1")
691 691 h = self.repo.heads()
692 692 sys.stdout.write(" ".join(map(hex, h)) + "\n")
693 693
694 694 elif args['cmd'][0] == 'branches':
695 695 httphdr("application/mercurial-0.1")
696 696 nodes = []
697 697 if args.has_key('nodes'):
698 698 nodes = map(bin, args['nodes'][0].split(" "))
699 699 for b in self.repo.branches(nodes):
700 700 sys.stdout.write(" ".join(map(hex, b)) + "\n")
701 701
702 702 elif args['cmd'][0] == 'between':
703 703 httphdr("application/mercurial-0.1")
704 704 nodes = []
705 705 if args.has_key('pairs'):
706 706 pairs = [ map(bin, p.split("-"))
707 707 for p in args['pairs'][0].split(" ") ]
708 708 for b in self.repo.between(pairs):
709 709 sys.stdout.write(" ".join(map(hex, b)) + "\n")
710 710
711 711 elif args['cmd'][0] == 'changegroup':
712 712 httphdr("application/mercurial-0.1")
713 713 nodes = []
714 714 if not self.allowpull:
715 715 return
716 716
717 717 if args.has_key('roots'):
718 718 nodes = map(bin, args['roots'][0].split(" "))
719 719
720 720 z = zlib.compressobj()
721 721 f = self.repo.changegroup(nodes)
722 722 while 1:
723 723 chunk = f.read(4096)
724 724 if not chunk: break
725 725 sys.stdout.write(z.compress(chunk))
726 726
727 727 sys.stdout.write(z.flush())
728 728
729 729 else:
730 730 write(self.t("error"))
731 731
732 732 def create_server(repo):
733 733
734 734 def openlog(opt, default):
735 735 if opt and opt != '-':
736 736 return open(opt, 'w')
737 737 return default
738 738
739 739 address = repo.ui.config("web", "address", "")
740 740 port = int(repo.ui.config("web", "port", 8000))
741 741 use_ipv6 = repo.ui.configbool("web", "ipv6")
742 742 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
743 743 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
744 744
745 745 import BaseHTTPServer
746 746
747 747 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
748 748 address_family = getattr(socket, 'AF_INET6', None)
749 749
750 750 def __init__(self, *args, **kwargs):
751 751 if self.address_family is None:
752 752 raise RepoError('IPv6 not available on this system')
753 753 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
754 754
755 755 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
756 756 def log_error(self, format, *args):
757 757 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
758 758 self.log_date_time_string(),
759 759 format % args))
760 760
761 761 def log_message(self, format, *args):
762 762 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
763 763 self.log_date_time_string(),
764 764 format % args))
765 765
766 766 def do_POST(self):
767 767 try:
768 768 self.do_hgweb()
769 769 except socket.error, inst:
770 770 if inst.args[0] != 32: raise
771 771
772 772 def do_GET(self):
773 773 self.do_POST()
774 774
775 775 def do_hgweb(self):
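                    # emulate a CGI request: build a minimal CGI environment
                    # from the HTTP request and point stdin/stdout at the
                    # socket so hg.run() can parse and answer the query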
776 776 query = ""
777 777 p = self.path.find("?")
778 778             if p >= 0:
779 779 query = self.path[p + 1:]
780 780 query = query.replace('+', ' ')
781 781
782 782 env = {}
783 783 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
784 784 env['REQUEST_METHOD'] = self.command
785 785 env['SERVER_NAME'] = self.server.server_name
786 786 env['SERVER_PORT'] = str(self.server.server_port)
787 787 env['REQUEST_URI'] = "/"
788 788 if query:
789 789 env['QUERY_STRING'] = query
790 790 host = self.address_string()
791 791 if host != self.client_address[0]:
792 792 env['REMOTE_HOST'] = host
793 793 env['REMOTE_ADDR'] = self.client_address[0]
794 794
795 795 if self.headers.typeheader is None:
796 796 env['CONTENT_TYPE'] = self.headers.type
797 797 else:
798 798 env['CONTENT_TYPE'] = self.headers.typeheader
799 799 length = self.headers.getheader('content-length')
800 800 if length:
801 801 env['CONTENT_LENGTH'] = length
802 802 accept = []
803 803 for line in self.headers.getallmatchingheaders('accept'):
804 804 if line[:1] in "\t\n\r ":
805 805 accept.append(line.strip())
806 806 else:
807 807 accept = accept + line[7:].split(',')
808 808 env['HTTP_ACCEPT'] = ','.join(accept)
809 809
810 810 os.environ.update(env)
811 811
812 812 save = sys.argv, sys.stdin, sys.stdout, sys.stderr
813 813 try:
814 814 sys.stdin = self.rfile
815 815 sys.stdout = self.wfile
816 816 sys.argv = ["hgweb.py"]
817 817 if '=' not in query:
818 818 sys.argv.append(query)
819 819 self.send_response(200, "Script output follows")
820 820 hg.run()
821 821 finally:
822 822 sys.argv, sys.stdin, sys.stdout, sys.stderr = save
823 823
824 824 hg = hgweb(repo)
825 825 if use_ipv6:
826 826 return IPv6HTTPServer((address, port), hgwebhandler)
827 827 else:
828 828 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
829 829
830 def server(path, name, templates, address, port, use_ipv6 = False,
831 accesslog = sys.stdout, errorlog = sys.stderr):
830 def server(path, name, templates, address, port, use_ipv6=False,
831 accesslog=sys.stdout, errorlog=sys.stderr):
832 832 httpd = create_server(path, name, templates, address, port, use_ipv6,
833 833 accesslog, errorlog)
834 834 httpd.serve_forever()
835 835
836 836 # This is a stopgap
837 837 class hgwebdir:
838 838 def __init__(self, config):
839 839 self.cp = ConfigParser.SafeConfigParser()
840 840 self.cp.read(config)
841 841
842 842 def run(self):
843 843 try:
844 844 virtual = os.environ["PATH_INFO"]
845 845 except:
846 846 virtual = ""
847 847
848 848 if virtual[1:]:
849 849 real = self.cp.get("paths", virtual[1:])
850 850 h = hgweb(real)
851 851 h.run()
852 852 return
853 853
854 854 def header(**map):
855 855 yield tmpl("header", **map)
856 856
857 857 def footer(**map):
858 858 yield tmpl("footer", **map)
859 859
860 860 templates = templatepath()
861 861 m = os.path.join(templates, "map")
862 862 tmpl = templater(m, common_filters,
863 863 {"header": header, "footer": footer})
864 864
865 865 def entries(**map):
866 866 parity = 0
867 867 l = self.cp.items("paths")
868 868 l.sort()
869 869 for v,r in l:
870 870 cp2 = ConfigParser.SafeConfigParser()
871 871 cp2.read(os.path.join(r, ".hg", "hgrc"))
872 872
873 873 def get(sec, val, default):
874 874 try:
875 875 return cp2.get(sec, val)
876 876 except:
877 877 return default
878 878
879 879 url = os.environ["REQUEST_URI"] + "/" + v
880 880 url = url.replace("//", "/")
881 881
882 yield dict(author = get("web", "author", "unknown"),
883 name = get("web", "name", v),
884 url = url,
885 parity = parity,
886 shortdesc = get("web", "description", "unknown"),
887 lastupdate = os.stat(os.path.join(r, ".hg",
882 yield dict(author=get("web", "author", "unknown"),
883 name=get("web", "name", v),
884 url=url,
885 parity=parity,
886 shortdesc=get("web", "description", "unknown"),
887 lastupdate=os.stat(os.path.join(r, ".hg",
888 888 "00changelog.d")).st_mtime)
889 889
890 890 parity = 1 - parity
891 891
892 write(tmpl("index", entries = entries))
892 write(tmpl("index", entries=entries))
@@ -1,49 +1,49
1 1 # lock.py - simple locking scheme for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, time
9 9 import util
10 10
11 11 class LockHeld(Exception):
12 12 pass
13 13
14 14 class lock:
15 def __init__(self, file, wait = 1):
15 def __init__(self, file, wait=1):
16 16 self.f = file
17 17 self.held = 0
18 18 self.wait = wait
19 19 self.lock()
20 20
21 21 def __del__(self):
22 22 self.release()
23 23
24 24 def lock(self):
25 25 while 1:
26 26 try:
27 27 self.trylock()
28 28 return 1
29 29 except LockHeld, inst:
30 30 if self.wait:
31 31 time.sleep(1)
32 32 continue
33 33 raise inst
34 34
35 35 def trylock(self):
36 36 pid = os.getpid()
37 37 try:
38 38 util.makelock(str(pid), self.f)
39 39 self.held = 1
40 40 except (OSError, IOError):
41 41 raise LockHeld(util.readlock(self.f))
42 42
43 43 def release(self):
44 44 if self.held:
45 45 self.held = 0
46 46 try:
47 47 os.unlink(self.f)
48 48 except: pass
49 49
@@ -1,551 +1,551
1 1 # revlog.py - storage back-end for mercurial
2 2 #
3 3 # This provides efficient delta storage with O(1) retrieve and append
4 4 # and O(changes) merge between branches
5 5 #
6 6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 7 #
8 8 # This software may be used and distributed according to the terms
9 9 # of the GNU General Public License, incorporated herein by reference.
10 10
11 11 import zlib, struct, sha, binascii, heapq
12 12 from mercurial import mdiff
13 13
14 14 def hex(node): return binascii.hexlify(node)
15 15 def bin(node): return binascii.unhexlify(node)
16 16 def short(node): return hex(node[:6])
17 17
18 18 def compress(text):
19 19 if not text: return text
20 20 if len(text) < 44:
21 21 if text[0] == '\0': return text
22 22 return 'u' + text
23 23 bin = zlib.compress(text)
24 24 if len(bin) > len(text):
25 25 if text[0] == '\0': return text
26 26 return 'u' + text
27 27 return bin
28 28
29 29 def decompress(bin):
30 30 if not bin: return bin
31 31 t = bin[0]
32 32 if t == '\0': return bin
33 33 if t == 'x': return zlib.decompress(bin)
34 34 if t == 'u': return bin[1:]
35 35 raise "unknown compression type %s" % t
36 36
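compress() above stores empty, NUL-prefixed, short or incompressible data verbatim (tagged with 'u' where needed) and otherwise relies on zlib's own 'x' stream header; decompress() dispatches on that first byte. A simplified round-trip sketch (Python 2 strings, helper names are illustrative):

    import zlib

    def store(text):
        # simplified version of compress() above: data that starts with NUL or
        # does not shrink is kept verbatim (tagged 'u'), the rest is zlib data
        if not text or text[0] == '\0':
            return text
        bin = zlib.compress(text)
        if len(bin) > len(text):
            return 'u' + text
        return bin

    def load(data):
        # same dispatch as decompress() above
        if not data or data[0] == '\0':
            return data
        if data[0] == 'x':
            return zlib.decompress(data)
        if data[0] == 'u':
            return data[1:]
        raise ValueError("unknown compression type %r" % data[0])

    sample = "a line of text\n" * 8
    assert load(store(sample)) == sample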
37 37 def hash(text, p1, p2):
38 38 l = [p1, p2]
39 39 l.sort()
40 40 s = sha.new(l[0])
41 41 s.update(l[1])
42 42 s.update(text)
43 43 return s.digest()
44 44
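hash() above makes a revision's nodeid the SHA-1 of its two parent ids, sorted, followed by the revision text, so the same text with the same history always gets the same id. For example, for a root revision (both parents null):

    import sha

    nullid = "\0" * 20
    p1 = p2 = nullid                      # a root revision has two null parents
    s = sha.new(min(p1, p2))
    s.update(max(p1, p2))
    s.update("hello\n")
    print(s.hexdigest())                  # the nodeid this revision would get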
45 45 nullid = "\0" * 20
46 46 indexformat = ">4l20s20s20s"
47 47
48 48 class lazyparser:
49 49 def __init__(self, data, revlog):
50 50 self.data = data
51 51 self.s = struct.calcsize(indexformat)
52 52 self.l = len(data)/self.s
53 53 self.index = [None] * self.l
54 54 self.map = {nullid: -1}
55 55 self.all = 0
56 56 self.revlog = revlog
57 57
58 58 def load(self, pos=None):
59 59 if self.all: return
60 60 if pos is not None:
61 61 block = pos / 1000
62 62 i = block * 1000
63 63 end = min(self.l, i + 1000)
64 64 else:
65 65 self.all = 1
66 66 i = 0
67 67 end = self.l
68 68 self.revlog.index = self.index
69 69 self.revlog.nodemap = self.map
70 70
71 71 while i < end:
72 72 d = self.data[i * self.s: (i + 1) * self.s]
73 73 e = struct.unpack(indexformat, d)
74 74 self.index[i] = e
75 75 self.map[e[6]] = i
76 76 i += 1
77 77
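Each record unpacked by lazyparser.load() above is one fixed-size entry in the indexformat layout: four 32-bit big-endian integers (offset, size, base, linkrev) followed by the 20-byte p1, p2 and nodeid. A small pack/unpack sketch with made-up values:

    import struct

    indexformat = ">4l20s20s20s"
    entry = struct.pack(indexformat, 0, 123, 0, 7,
                        "\0" * 20, "\0" * 20, "\xaa" * 20)
    assert struct.calcsize(indexformat) == len(entry) == 76
    offset, size, base, linkrev, p1, p2, node = struct.unpack(indexformat, entry)
    assert (size, linkrev) == (123, 7)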
78 78 class lazyindex:
79 79 def __init__(self, parser):
80 80 self.p = parser
81 81 def __len__(self):
82 82 return len(self.p.index)
83 83 def load(self, pos):
84 84 self.p.load(pos)
85 85 return self.p.index[pos]
86 86 def __getitem__(self, pos):
87 87 return self.p.index[pos] or self.load(pos)
88 88 def append(self, e):
89 89 self.p.index.append(e)
90 90
91 91 class lazymap:
92 92 def __init__(self, parser):
93 93 self.p = parser
94 94 def load(self, key):
95 95 if self.p.all: return
96 96 n = self.p.data.find(key)
97 97 if n < 0: raise KeyError("node " + hex(key))
98 98 pos = n / self.p.s
99 99 self.p.load(pos)
100 100 def __contains__(self, key):
101 101 self.p.load()
102 102 return key in self.p.map
103 103 def __iter__(self):
104 104 yield nullid
105 105 for i in xrange(self.p.l):
106 106 try:
107 107 yield self.p.index[i][6]
108 108 except:
109 109 self.p.load(i)
110 110 yield self.p.index[i][6]
111 111 def __getitem__(self, key):
112 112 try:
113 113 return self.p.map[key]
114 114 except KeyError:
115 115 try:
116 116 self.load(key)
117 117 return self.p.map[key]
118 118 except KeyError:
119 119 raise KeyError("node " + hex(key))
120 120 def __setitem__(self, key, val):
121 121 self.p.map[key] = val
122 122
123 123 class revlog:
124 124 def __init__(self, opener, indexfile, datafile):
125 125 self.indexfile = indexfile
126 126 self.datafile = datafile
127 127 self.opener = opener
128 128 self.cache = None
129 129
130 130 try:
131 131 i = self.opener(self.indexfile).read()
132 132 except IOError:
133 133 i = ""
134 134
135 135 if len(i) > 10000:
136 136 # big index, let's parse it on demand
137 137 parser = lazyparser(i, self)
138 138 self.index = lazyindex(parser)
139 139 self.nodemap = lazymap(parser)
140 140 else:
141 141 s = struct.calcsize(indexformat)
142 142 l = len(i) / s
143 143 self.index = [None] * l
144 144 m = [None] * l
145 145
146 146 n = 0
147 147 for f in xrange(0, len(i), s):
148 148 # offset, size, base, linkrev, p1, p2, nodeid
149 149 e = struct.unpack(indexformat, i[f:f + s])
150 150 m[n] = (e[6], n)
151 151 self.index[n] = e
152 152 n += 1
153 153
154 154 self.nodemap = dict(m)
155 155 self.nodemap[nullid] = -1
156 156
157 157 def tip(self): return self.node(len(self.index) - 1)
158 158 def count(self): return len(self.index)
159 159 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
160 160 def rev(self, node): return self.nodemap[node]
161 161 def linkrev(self, node): return self.index[self.nodemap[node]][3]
162 162 def parents(self, node):
163 163 if node == nullid: return (nullid, nullid)
164 164 return self.index[self.nodemap[node]][4:6]
165 165
166 166 def start(self, rev): return self.index[rev][0]
167 167 def length(self, rev): return self.index[rev][1]
168 168 def end(self, rev): return self.start(rev) + self.length(rev)
169 169 def base(self, rev): return self.index[rev][2]
170 170
171 171 def heads(self, stop=None):
172 172 p = {}
173 173 h = []
174 174 stoprev = 0
175 175 if stop and stop in self.nodemap:
176 176 stoprev = self.rev(stop)
177 177
178 178 for r in range(self.count() - 1, -1, -1):
179 179 n = self.node(r)
180 180 if n not in p:
181 181 h.append(n)
182 182 if n == stop:
183 183 break
184 184 if r < stoprev:
185 185 break
186 186 for pn in self.parents(n):
187 187 p[pn] = 1
188 188 return h
189 189
190 190 def children(self, node):
191 191 c = []
192 192 p = self.rev(node)
193 193 for r in range(p + 1, self.count()):
194 194 n = self.node(r)
195 195 for pn in self.parents(n):
196 196 if pn == node:
197 197 c.append(n)
198 198 continue
199 199 elif pn == nullid:
200 200 continue
201 201 return c
202 202
203 203 def lookup(self, id):
204 204 try:
205 205 rev = int(id)
206 206 if str(rev) != id: raise ValueError
207 207 if rev < 0: rev = self.count() + rev
208 208 if rev < 0 or rev >= self.count(): raise ValueError
209 209 return self.node(rev)
210 210 except (ValueError, OverflowError):
211 211 c = []
212 212 for n in self.nodemap:
213 213 if hex(n).startswith(id):
214 214 c.append(n)
215 215 if len(c) > 1: raise KeyError("Ambiguous identifier")
216 216 if len(c) < 1: raise KeyError("No match found")
217 217 return c[0]
218 218
219 219 return None
220 220
221 221 def diff(self, a, b):
222 222 return mdiff.textdiff(a, b)
223 223
224 224 def patches(self, t, pl):
225 225 return mdiff.patches(t, pl)
226 226
227 227 def delta(self, node):
228 228 r = self.rev(node)
229 229 b = self.base(r)
230 230 if r == b:
231 231 return self.diff(self.revision(self.node(r - 1)),
232 232 self.revision(node))
233 233 else:
234 234 f = self.opener(self.datafile)
235 235 f.seek(self.start(r))
236 236 data = f.read(self.length(r))
237 237 return decompress(data)
238 238
239 239 def revision(self, node):
240 240 if node == nullid: return ""
241 241 if self.cache and self.cache[0] == node: return self.cache[2]
242 242
243 243 text = None
244 244 rev = self.rev(node)
245 245 start, length, base, link, p1, p2, node = self.index[rev]
246 246 end = start + length
247 247 if base != rev: start = self.start(base)
248 248
249 249 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
250 250 base = self.cache[1]
251 251 start = self.start(base + 1)
252 252 text = self.cache[2]
253 253 last = 0
254 254
255 255 f = self.opener(self.datafile)
256 256 f.seek(start)
257 257 data = f.read(end - start)
258 258
259 259 if text is None:
260 260 last = self.length(base)
261 261 text = decompress(data[:last])
262 262
263 263 bins = []
264 264 for r in xrange(base + 1, rev + 1):
265 265 s = self.length(r)
266 266 bins.append(decompress(data[last:last + s]))
267 267 last = last + s
268 268
269 269 text = mdiff.patches(text, bins)
270 270
271 271 if node != hash(text, p1, p2):
272 272 raise IOError("integrity check failed on %s:%d"
273 273 % (self.datafile, rev))
274 274
275 275 self.cache = (node, rev, text)
276 276 return text
277 277
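A hypothetical usage sketch of the class so far; the opener is an assumption standing in for Mercurial's usual store opener, and the file names are made up. revision() seeks back to the last full snapshot, applies the intervening deltas with mdiff.patches, and checks the result against the stored node id.

    r = revlog(opener, "data/foo.txt.i", "data/foo.txt.d")
    tip = r.tip()
    text = r.revision(tip)    # base snapshot + delta chain, integrity-checked
    r.parents(tip)            # -> (p1, p2) node ids, nullid where a parent is missing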
278 278 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
279 279 if text is None: text = ""
280 280 if p1 is None: p1 = self.tip()
281 281 if p2 is None: p2 = nullid
282 282
283 283 node = hash(text, p1, p2)
284 284
285 285 if node in self.nodemap:
286 286 return node
287 287
288 288 n = self.count()
289 289 t = n - 1
290 290
291 291 if n:
292 292 base = self.base(t)
293 293 start = self.start(base)
294 294 end = self.end(t)
295 295 if not d:
296 296 prev = self.revision(self.tip())
297 297 d = self.diff(prev, text)
298 298 data = compress(d)
299 299 dist = end - start + len(data)
300 300
301 301 # full versions are inserted when the needed deltas
302 302 # become comparable to the uncompressed text
303 303 if not n or dist > len(text) * 2:
304 304 data = compress(text)
305 305 base = n
306 306 else:
307 307 base = self.base(t)
308 308
309 309 offset = 0
310 310 if t >= 0:
311 311 offset = self.end(t)
312 312
313 313 e = (offset, len(data), base, link, p1, p2, node)
314 314
315 315 self.index.append(e)
316 316 self.nodemap[node] = n
317 317 entry = struct.pack(indexformat, *e)
318 318
319 319 transaction.add(self.datafile, e[0])
320 320 self.opener(self.datafile, "a").write(data)
321 321 transaction.add(self.indexfile, n * len(entry))
322 322 self.opener(self.indexfile, "a").write(entry)
323 323
324 324 self.cache = (node, n, text)
325 325 return node
326 326
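A hedged sketch of appending revisions; transaction is the journalling class shown further down, and the ui instance, opener and link revisions are stand-ins for illustration. The 'dist > len(text) * 2' test above is what decides between storing another delta and starting a fresh full snapshot.

    t = transaction(ui.warn, opener, "journal")
    n1 = r.addrevision("hello\n", t, 0)               # first rev: stored in full
    n2 = r.addrevision("hello world\n", t, 1, p1=n1)  # small change: stored as a delta
    t.close()                                         # commit: journal is removed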
327 327 def ancestor(self, a, b):
328 328 # calculate the distance of every node from root
329 329 dist = {nullid: 0}
330 330 for i in xrange(self.count()):
331 331 n = self.node(i)
332 332 p1, p2 = self.parents(n)
333 333 dist[n] = max(dist[p1], dist[p2]) + 1
334 334
335 335 # traverse ancestors in order of decreasing distance from root
336 336 def ancestors(node):
337 337 # we store negative distances because heap returns smallest member
338 338 h = [(-dist[node], node)]
339 339 seen = {}
340 340 earliest = self.count()
341 341 while h:
342 342 d, n = heapq.heappop(h)
343 343 if n not in seen:
344 344 seen[n] = 1
345 345 r = self.rev(n)
346 346 yield (-d, r, n)
347 347 for p in self.parents(n):
348 348 heapq.heappush(h, (-dist[p], p))
349 349
350 350 x = ancestors(a)
351 351 y = ancestors(b)
352 352 lx = x.next()
353 353 ly = y.next()
354 354
355 355 # increment each ancestor list until it is closer to root than
356 356 # the other, or they match
357 357 while 1:
358 358 if lx == ly:
359 359 return lx[2]
360 360 elif lx < ly:
361 361 ly = y.next()
362 362 elif lx > ly:
363 363 lx = x.next()
364 364
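A standalone sketch of the same idea on a toy DAG, to make the heap walk above concrete: each ancestor stream is ordered by decreasing distance from the root, and the two streams are advanced in lockstep until they agree.

    import heapq

    # toy DAG: a is the root, b and c are two branches off a
    parents = {"a": (), "b": ("a",), "c": ("a",)}
    dist = {"a": 0, "b": 1, "c": 1}

    def toy_ancestors(node):
        h, seen = [(-dist[node], node)], {}
        while h:
            d, n = heapq.heappop(h)
            if n not in seen:
                seen[n] = 1
                yield -d, n                   # (distance from root, node)
                for p in parents[n]:
                    heapq.heappush(h, (-dist[p], p))

    list(toy_ancestors("b"))    # -> [(1, 'b'), (0, 'a')]
    list(toy_ancestors("c"))    # -> [(1, 'c'), (0, 'a')]
    # advancing whichever stream is currently larger, the first element the two
    # streams agree on is (0, 'a'): a is the common ancestor of b and c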
365 365 def group(self, linkmap):
366 366 # given a list of changeset revs, return a set of deltas and
367 367 # metadata corresponding to nodes. the first delta is
368 368     # parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
369 369 # have this parent as it has all history before these
370 370 # changesets. parent is parent[0]
371 371
372 372 revs = []
373 373 needed = {}
374 374
375 375 # find file nodes/revs that match changeset revs
376 376 for i in xrange(0, self.count()):
377 377 if self.index[i][3] in linkmap:
378 378 revs.append(i)
379 379 needed[i] = 1
380 380
381 381 # if we don't have any revisions touched by these changesets, bail
382 382 if not revs:
383 383 yield struct.pack(">l", 0)
384 384 return
385 385
386 386 # add the parent of the first rev
387 387 p = self.parents(self.node(revs[0]))[0]
388 388 revs.insert(0, self.rev(p))
389 389
390 390 # for each delta that isn't contiguous in the log, we need to
391 391 # reconstruct the base, reconstruct the result, and then
392 392 # calculate the delta. We also need to do this where we've
393 393 # stored a full version and not a delta
394 394 for i in xrange(0, len(revs) - 1):
395 395 a, b = revs[i], revs[i + 1]
396 396 if a + 1 != b or self.base(b) == b:
397 397 for j in xrange(self.base(a), a + 1):
398 398 needed[j] = 1
399 399 for j in xrange(self.base(b), b + 1):
400 400 needed[j] = 1
401 401
402 402 # calculate spans to retrieve from datafile
403 403 needed = needed.keys()
404 404 needed.sort()
405 405 spans = []
406 406 oo = -1
407 407 ol = 0
408 408 for n in needed:
409 409 if n < 0: continue
410 410 o = self.start(n)
411 411 l = self.length(n)
412 412 if oo + ol == o: # can we merge with the previous?
413 413 nl = spans[-1][2]
414 414 nl.append((n, l))
415 415 ol += l
416 416 spans[-1] = (oo, ol, nl)
417 417 else:
418 418 oo = o
419 419 ol = l
420 420 spans.append((oo, ol, [(n, l)]))
421 421
422 422 # read spans in, divide up chunks
423 423 chunks = {}
424 424 for span in spans:
425 425 # we reopen the file for each span to make http happy for now
426 426 f = self.opener(self.datafile)
427 427 f.seek(span[0])
428 428 data = f.read(span[1])
429 429
430 430 # divide up the span
431 431 pos = 0
432 432 for r, l in span[2]:
433 433 chunks[r] = decompress(data[pos: pos + l])
434 434 pos += l
435 435
436 436 # helper to reconstruct intermediate versions
437 437 def construct(text, base, rev):
438 438 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
439 439 return mdiff.patches(text, bins)
440 440
441 441 # build deltas
442 442 deltas = []
443 443 for d in xrange(0, len(revs) - 1):
444 444 a, b = revs[d], revs[d + 1]
445 445 n = self.node(b)
446 446
447 447 # do we need to construct a new delta?
448 448 if a + 1 != b or self.base(b) == b:
449 449 if a >= 0:
450 450 base = self.base(a)
451 451 ta = chunks[self.base(a)]
452 452 ta = construct(ta, base, a)
453 453 else:
454 454 ta = ""
455 455
456 456 base = self.base(b)
457 457 if a > base:
458 458 base = a
459 459 tb = ta
460 460 else:
461 461 tb = chunks[self.base(b)]
462 462 tb = construct(tb, base, b)
463 463 d = self.diff(ta, tb)
464 464 else:
465 465 d = chunks[b]
466 466
467 467 p = self.parents(n)
468 468 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
469 469 l = struct.pack(">l", len(meta) + len(d) + 4)
470 470 yield l
471 471 yield meta
472 472 yield d
473 473
474 474 yield struct.pack(">l", 0)
475 475
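A hedged sketch of reading the stream that group() produces, to make the framing explicit: every chunk starts with a 4-byte big-endian length that counts itself, followed by 80 bytes of metadata (node, p1, p2, changeset link) and then the delta; a zero length ends the group. This mirrors the consumer side in addgroup() below.

    import struct

    def readchunks(fh):
        # yield (node, p1, p2, cs, delta) until the zero-length terminator
        while 1:
            l = struct.unpack(">l", fh.read(4))[0]
            if l == 0:
                break
            chunk = fh.read(l - 4)            # the length field counts itself
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            yield node, p1, p2, cs, chunk[80:]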
476 def addgroup(self, revs, linkmapper, transaction, unique = 0):
476 def addgroup(self, revs, linkmapper, transaction, unique=0):
477 477 # given a set of deltas, add them to the revision log. the
478 478 # first delta is against its parent, which should be in our
479 479 # log, the rest are against the previous delta.
480 480
481 481 # track the base of the current delta log
482 482 r = self.count()
483 483 t = r - 1
484 484 node = nullid
485 485
486 486 base = prev = -1
487 487 start = end = measure = 0
488 488 if r:
489 489 start = self.start(self.base(t))
490 490 end = self.end(t)
491 491 measure = self.length(self.base(t))
492 492 base = self.base(t)
493 493 prev = self.tip()
494 494
495 495 transaction.add(self.datafile, end)
496 496 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
497 497 dfh = self.opener(self.datafile, "a")
498 498 ifh = self.opener(self.indexfile, "a")
499 499
500 500 # loop through our set of deltas
501 501 chain = None
502 502 for chunk in revs:
503 503 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
504 504 link = linkmapper(cs)
505 505 if node in self.nodemap:
506 506 # this can happen if two branches make the same change
507 507 if unique:
508 508 raise "already have %s" % hex(node[:4])
509 509 chain = node
510 510 continue
511 511 delta = chunk[80:]
512 512
513 513 if not chain:
514 514 # retrieve the parent revision of the delta chain
515 515 chain = p1
516 516 if not chain in self.nodemap:
517 517 raise "unknown base %s" % short(chain[:4])
518 518
519 519 # full versions are inserted when the needed deltas become
520 520 # comparable to the uncompressed text or when the previous
521 521 # version is not the one we have a delta against. We use
522 522 # the size of the previous full rev as a proxy for the
523 523 # current size.
524 524
525 525 if chain == prev:
526 526 cdelta = compress(delta)
527 527
528 528 if chain != prev or (end - start + len(cdelta)) > measure * 2:
529 529 # flush our writes here so we can read it in revision
530 530 dfh.flush()
531 531 ifh.flush()
532 532 text = self.revision(chain)
533 533 text = self.patches(text, [delta])
534 534 chk = self.addrevision(text, transaction, link, p1, p2)
535 535 if chk != node:
536 536 raise "consistency error adding group"
537 537 measure = len(text)
538 538 else:
539 539 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
540 540 self.index.append(e)
541 541 self.nodemap[node] = r
542 542 dfh.write(cdelta)
543 543 ifh.write(struct.pack(indexformat, *e))
544 544
545 545 t, r, chain, prev = r, r + 1, node, node
546 546 start = self.start(self.base(t))
547 547 end = self.end(t)
548 548
549 549 dfh.close()
550 550 ifh.close()
551 551 return node
@@ -1,78 +1,78
1 1 # transaction.py - simple journalling scheme for mercurial
2 2 #
3 3 # This transaction scheme is intended to gracefully handle program
4 4 # errors and interruptions. More serious failures like system crashes
5 5 # can be recovered with an fsck-like tool. As the whole repository is
6 6 # effectively log-structured, this should amount to simply truncating
7 7 # anything that isn't referenced in the changelog.
8 8 #
9 9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
10 10 #
11 11 # This software may be used and distributed according to the terms
12 12 # of the GNU General Public License, incorporated herein by reference.
13 13
14 14 import os
15 15 import util
16 16
17 17 class transaction:
18 def __init__(self, report, opener, journal, after = None):
18 def __init__(self, report, opener, journal, after=None):
19 19 self.journal = None
20 20
21 21 # abort here if the journal already exists
22 22 if os.path.exists(journal):
23 23 raise "journal already exists - run hg recover"
24 24
25 25 self.report = report
26 26 self.opener = opener
27 27 self.after = after
28 28 self.entries = []
29 29 self.map = {}
30 30 self.journal = journal
31 31
32 32 self.file = open(self.journal, "w")
33 33
34 34 def __del__(self):
35 35 if self.journal:
36 36 if self.entries: self.abort()
37 37 self.file.close()
38 38 try: os.unlink(self.journal)
39 39 except: pass
40 40
41 41 def add(self, file, offset):
42 42 if file in self.map: return
43 43 self.entries.append((file, offset))
44 44 self.map[file] = 1
45 45 # add enough data to the journal to do the truncate
46 46 self.file.write("%s\0%d\n" % (file, offset))
47 47 self.file.flush()
48 48
49 49 def close(self):
50 50 self.file.close()
51 51 self.entries = []
52 52 if self.after:
53 53 self.after()
54 54 else:
55 55 os.unlink(self.journal)
56 56 self.journal = None
57 57
58 58 def abort(self):
59 59 if not self.entries: return
60 60
61 61 self.report("transaction abort!\n")
62 62
63 63 for f, o in self.entries:
64 64 try:
65 65 self.opener(f, "a").truncate(o)
66 66 except:
67 67 self.report("failed to truncate %s\n" % f)
68 68
69 69 self.entries = []
70 70
71 71 self.report("rollback completed\n")
72 72
73 73 def rollback(opener, file):
74 74 for l in open(file).readlines():
75 75 f, o = l.split('\0')
76 76 opener(f, "a").truncate(int(o))
77 77 os.unlink(file)
78 78
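A hypothetical usage sketch of the journalling scheme; the opener and the report callable stand in for Mercurial's real ones. Each file is journalled with its pre-write length before being appended to, so abort() - or rollback() after a crash - can truncate everything back to the recorded offsets.

    t = transaction(ui.warn, opener, "journal")
    t.add("data/foo.d", 1024)     # record the length before appending
    # ... append the new revision data to data/foo.d ...
    t.close()                     # success: the journal is removed (or after() runs)

    # after a crash the leftover journal can be replayed:
    # rollback(opener, "journal") truncates data/foo.d back to 1024 bytes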
@@ -1,128 +1,128
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, ConfigParser
9 9 from demandload import *
10 10 demandload(globals(), "re socket sys util")
11 11
12 12 class ui:
13 13 def __init__(self, verbose=False, debug=False, quiet=False,
14 14 interactive=True):
15 15 self.overlay = {}
16 16 self.cdata = ConfigParser.SafeConfigParser()
17 17 self.cdata.read([os.path.normpath(hgrc) for hgrc in
18 18 "/etc/mercurial/hgrc", os.path.expanduser("~/.hgrc")])
19 19
20 20 self.quiet = self.configbool("ui", "quiet")
21 21 self.verbose = self.configbool("ui", "verbose")
22 22 self.debugflag = self.configbool("ui", "debug")
23 23 self.interactive = self.configbool("ui", "interactive", True)
24 24
25 25 self.quiet = (self.quiet or quiet) and not verbose and not debug
26 26 self.verbose = (self.verbose or verbose) or debug
27 27 self.debugflag = (self.debugflag or debug)
28 28 self.interactive = (self.interactive and interactive)
29 29
30 30 def readconfig(self, fp):
31 31 self.cdata.readfp(fp)
32 32
33 33 def setconfig(self, section, name, val):
34 34 self.overlay[(section, name)] = val
35 35
36 36 def config(self, section, name, default=None):
37 37 if self.overlay.has_key((section, name)):
38 38 return self.overlay[(section, name)]
39 39 if self.cdata.has_option(section, name):
40 40 return self.cdata.get(section, name)
41 41 return default
42 42
43 43 def configbool(self, section, name, default=False):
44 44 if self.overlay.has_key((section, name)):
45 45 return self.overlay[(section, name)]
46 46 if self.cdata.has_option(section, name):
47 47 return self.cdata.getboolean(section, name)
48 48 return default
49 49
50 50 def configitems(self, section):
51 51 if self.cdata.has_section(section):
52 52 return self.cdata.items(section)
53 53 return []
54 54
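A small usage sketch of the lookup order implemented above: values set with setconfig() shadow whatever the hgrc files supplied at construction time, which in turn shadow the caller's default.

    u = ui()
    u.setconfig("ui", "username", "Example Hacker <hacker@example.com>")
    u.config("ui", "username")            # -> the overlay value, whatever hgrc says
    u.configbool("ui", "nosuchoption")    # -> False (the default)
    u.config("paths", "default", "")      # -> hgrc value if present, else ''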
55 55 def walkconfig(self):
56 56 seen = {}
57 57 for (section, name), value in self.overlay.iteritems():
58 58 yield section, name, value
59 59 seen[section, name] = 1
60 60 for section in self.cdata.sections():
61 61 for name, value in self.cdata.items(section):
62 62 if (section, name) in seen: continue
63 63 yield section, name, value.replace('\n', '\\n')
64 64 seen[section, name] = 1
65 65
66 66 def username(self):
67 67 return (os.environ.get("HGUSER") or
68 68 self.config("ui", "username") or
69 69 os.environ.get("EMAIL") or
70 70 (os.environ.get("LOGNAME",
71 71 os.environ.get("USERNAME", "unknown"))
72 72 + '@' + socket.getfqdn()))
73 73
74 74 def expandpath(self, loc):
75 75 paths = {}
76 76 for name, path in self.configitems("paths"):
77 77 paths[name] = path
78 78
79 79 return paths.get(loc, loc)
80 80
81 81 def write(self, *args):
82 82 for a in args:
83 83 sys.stdout.write(str(a))
84 84
85 85 def write_err(self, *args):
86 86 sys.stdout.flush()
87 87 for a in args:
88 88 sys.stderr.write(str(a))
89 89
90 90 def readline(self):
91 91 return sys.stdin.readline()[:-1]
92 def prompt(self, msg, pat, default = "y"):
92 def prompt(self, msg, pat, default="y"):
93 93 if not self.interactive: return default
94 94 while 1:
95 95 self.write(msg, " ")
96 96 r = self.readline()
97 97 if re.match(pat, r):
98 98 return r
99 99 else:
100 100 self.write("unrecognized response\n")
101 101 def status(self, *msg):
102 102 if not self.quiet: self.write(*msg)
103 103 def warn(self, *msg):
104 104 self.write_err(*msg)
105 105 def note(self, *msg):
106 106 if self.verbose: self.write(*msg)
107 107 def debug(self, *msg):
108 108 if self.debugflag: self.write(*msg)
109 109 def edit(self, text):
110 110 import tempfile
111 111 (fd, name) = tempfile.mkstemp("hg")
112 112 f = os.fdopen(fd, "w")
113 113 f.write(text)
114 114 f.close()
115 115
116 116 editor = (os.environ.get("HGEDITOR") or
117 117 self.config("ui", "editor") or
118 118 os.environ.get("EDITOR", "vi"))
119 119
120 120 os.environ["HGUSER"] = self.username()
121 util.system("%s %s" % (editor, name), errprefix = "edit failed")
121 util.system("%s %s" % (editor, name), errprefix="edit failed")
122 122
123 123 t = open(name).read()
124 124 t = re.sub("(?m)^HG:.*\n", "", t)
125 125
126 126 os.unlink(name)
127 127
128 128 return t
@@ -1,288 +1,288
1 1 # util.py - utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, errno
9 9 from demandload import *
10 10 demandload(globals(), "re")
11 11
12 12 def binary(s):
13 13 if s and '\0' in s[:4096]:
14 14 return True
15 15 return False
16 16
17 17 def unique(g):
18 18 seen = {}
19 19 for f in g:
20 20 if f not in seen:
21 21 seen[f] = 1
22 22 yield f
23 23
24 24 class Abort(Exception):
25 25 """Raised if a command needs to print an error and exit."""
26 26
27 27 def always(fn): return True
28 28 def never(fn): return False
29 29
30 def globre(pat, head = '^', tail = '$'):
30 def globre(pat, head='^', tail='$'):
31 31 "convert a glob pattern into a regexp"
32 32 i, n = 0, len(pat)
33 33 res = ''
34 34 group = False
35 35 def peek(): return i < n and pat[i]
36 36 while i < n:
37 37 c = pat[i]
38 38 i = i+1
39 39 if c == '*':
40 40 if peek() == '*':
41 41 i += 1
42 42 res += '.*'
43 43 else:
44 44 res += '[^/]*'
45 45 elif c == '?':
46 46 res += '.'
47 47 elif c == '[':
48 48 j = i
49 49 if j < n and pat[j] in '!]':
50 50 j += 1
51 51 while j < n and pat[j] != ']':
52 52 j += 1
53 53 if j >= n:
54 54 res += '\\['
55 55 else:
56 56 stuff = pat[i:j].replace('\\','\\\\')
57 57 i = j + 1
58 58 if stuff[0] == '!':
59 59 stuff = '^' + stuff[1:]
60 60 elif stuff[0] == '^':
61 61 stuff = '\\' + stuff
62 62 res = '%s[%s]' % (res, stuff)
63 63 elif c == '{':
64 64 group = True
65 65 res += '(?:'
66 66 elif c == '}' and group:
67 67 res += ')'
68 68 group = False
69 69 elif c == ',' and group:
70 70 res += '|'
71 71 else:
72 72 res += re.escape(c)
73 73 return head + res + tail
74 74
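A few usage examples of the translation above, shown with the default head='^' and tail='$': '*' stops at slashes, '**' crosses them, and '{a,b}' becomes a non-capturing alternation.

    import re

    globre('*.py')           # -> '^[^/]*\\.py$'
    globre('{foo,bar}.c')    # -> '^(?:foo|bar)\\.c$'

    re.match(globre('*.py'), 'revlog.py')             # matches
    re.match(globre('*.py'), 'mercurial/revlog.py')   # no match: '*' stops at '/'
    re.match(globre('**.py'), 'mercurial/revlog.py')  # matches: '**' crosses '/'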
75 75 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
76 76
77 77 def pathto(n1, n2):
78 78 '''return the relative path from one place to another.
79 79 this returns a path in the form used by the local filesystem, not hg.'''
80 80 if not n1: return localpath(n2)
81 81 a, b = n1.split('/'), n2.split('/')
82 82 a.reverse(), b.reverse()
83 83 while a and b and a[-1] == b[-1]:
84 84 a.pop(), b.pop()
85 85 b.reverse()
86 86 return os.sep.join((['..'] * len(a)) + b)
87 87
88 88 def canonpath(repo, cwd, myname):
89 89 rootsep = repo.root + os.sep
90 90 name = myname
91 91 if not name.startswith(os.sep):
92 92 name = os.path.join(repo.root, cwd, name)
93 93 name = os.path.normpath(name)
94 94 if name.startswith(rootsep):
95 95 return pconvert(name[len(rootsep):])
96 96 elif name == repo.root:
97 97 return ''
98 98 else:
99 99 raise Abort('%s not under repository root' % myname)
100 100
101 def matcher(repo, cwd, names, inc, exc, head = ''):
101 def matcher(repo, cwd, names, inc, exc, head=''):
102 102 def patkind(name):
103 103 for prefix in 're:', 'glob:', 'path:', 'relpath:':
104 104 if name.startswith(prefix): return name.split(':', 1)
105 105 for c in name:
106 106 if c in _globchars: return 'glob', name
107 107 return 'relpath', name
108 108
109 109 def regex(kind, name, tail):
110 110 '''convert a pattern into a regular expression'''
111 111 if kind == 're':
112 112 return name
113 113 elif kind == 'path':
114 114 return '^' + re.escape(name) + '(?:/|$)'
115 115 elif kind == 'relpath':
116 116 return head + re.escape(name) + tail
117 117 return head + globre(name, '', tail)
118 118
119 119 def matchfn(pats, tail):
120 120 """build a matching function from a set of patterns"""
121 121 if pats:
122 122 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
123 123 return re.compile(pat).match
124 124
125 125 def globprefix(pat):
126 126 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
127 127 root = []
128 128 for p in pat.split(os.sep):
129 129 if patkind(p)[0] == 'glob': break
130 130 root.append(p)
131 131 return '/'.join(root)
132 132
133 133 pats = []
134 134 files = []
135 135 roots = []
136 136 for kind, name in map(patkind, names):
137 137 if kind in ('glob', 'relpath'):
138 138 name = canonpath(repo, cwd, name)
139 139 if name == '':
140 140 kind, name = 'glob', '**'
141 141 if kind in ('glob', 'path', 're'):
142 142 pats.append((kind, name))
143 143 if kind == 'glob':
144 144 root = globprefix(name)
145 145 if root: roots.append(root)
146 146 elif kind == 'relpath':
147 147 files.append((kind, name))
148 148 roots.append(name)
149 149
150 150 patmatch = matchfn(pats, '$') or always
151 151 filematch = matchfn(files, '(?:/|$)') or always
152 152 incmatch = always
153 153 if inc:
154 154 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
155 155 excmatch = lambda fn: False
156 156 if exc:
157 157 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
158 158
159 159 return (roots,
160 160 lambda fn: (incmatch(fn) and not excmatch(fn) and
161 161 (fn.endswith('/') or
162 162 (not pats and not files) or
163 163 (pats and patmatch(fn)) or
164 164 (files and filematch(fn)))),
165 165 (inc or exc or (pats and pats != [('glob', '**')])) and True)
166 166
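A hedged usage sketch of the return value (roots, match function, anypats flag); with only 're:' or 'path:' patterns the repository argument is never consulted, so None stands in for a real repo object here.

    roots, match, anypats = matcher(None, '', ['re:.*\\.py$'], None, None)
    roots                          # -> [] (no literal path prefix to walk)
    match('mercurial/revlog.py')   # truthy: the regex matches
    match('README')                # falsy
    anypats                        # -> True: a non-literal pattern was supplied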
167 167 def system(cmd, errprefix=None):
168 168 """execute a shell command that must succeed"""
169 169 rc = os.system(cmd)
170 170 if rc:
171 171 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
172 172 explain_exit(rc)[0])
173 173 if errprefix:
174 174 errmsg = "%s: %s" % (errprefix, errmsg)
175 175 raise Abort(errmsg)
176 176
177 177 def rename(src, dst):
178 178 try:
179 179 os.rename(src, dst)
180 180 except:
181 181 os.unlink(dst)
182 182 os.rename(src, dst)
183 183
184 184 def copytree(src, dst, copyfile):
185 185 """Copy a directory tree, files are copied using 'copyfile'."""
186 186 names = os.listdir(src)
187 187 os.mkdir(dst)
188 188
189 189 for name in names:
190 190 srcname = os.path.join(src, name)
191 191 dstname = os.path.join(dst, name)
192 192 if os.path.isdir(srcname):
193 193 copytree(srcname, dstname, copyfile)
194 194 elif os.path.isfile(srcname):
195 195 copyfile(srcname, dstname)
196 196 else:
197 197 pass
198 198
199 199 def _makelock_file(info, pathname):
200 200 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
201 201 os.write(ld, info)
202 202 os.close(ld)
203 203
204 204 def _readlock_file(pathname):
205 205 return file(pathname).read()
206 206
207 207 # Platform specific variants
208 208 if os.name == 'nt':
209 209 nulldev = 'NUL:'
210 210
211 211 def is_exec(f, last):
212 212 return last
213 213
214 214 def set_exec(f, mode):
215 215 pass
216 216
217 217 def pconvert(path):
218 218 return path.replace("\\", "/")
219 219
220 220 def localpath(path):
221 221 return path.replace('/', '\\')
222 222
223 223 def normpath(path):
224 224 return pconvert(os.path.normpath(path))
225 225
226 226 makelock = _makelock_file
227 227 readlock = _readlock_file
228 228
229 229 def explain_exit(code):
230 230 return "exited with status %d" % code, code
231 231
232 232 else:
233 233 nulldev = '/dev/null'
234 234
235 235 def is_exec(f, last):
236 236 return (os.stat(f).st_mode & 0100 != 0)
237 237
238 238 def set_exec(f, mode):
239 239 s = os.stat(f).st_mode
240 240 if (s & 0100 != 0) == mode:
241 241 return
242 242 if mode:
243 243 # Turn on +x for every +r bit when making a file executable
244 244 # and obey umask.
245 245 umask = os.umask(0)
246 246 os.umask(umask)
247 247 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
248 248 else:
249 249 os.chmod(f, s & 0666)
250 250
251 251 def pconvert(path):
252 252 return path
253 253
254 254 def localpath(path):
255 255 return path
256 256
257 257 normpath = os.path.normpath
258 258
259 259 def makelock(info, pathname):
260 260 try:
261 261 os.symlink(info, pathname)
262 262 except OSError, why:
263 263 if why.errno == errno.EEXIST:
264 264 raise
265 265 else:
266 266 _makelock_file(info, pathname)
267 267
268 268 def readlock(pathname):
269 269 try:
270 270 return os.readlink(pathname)
271 271 except OSError, why:
272 272 if why.errno == errno.EINVAL:
273 273 return _readlock_file(pathname)
274 274 else:
275 275 raise
276 276
277 277 def explain_exit(code):
278 278 """return a 2-tuple (desc, code) describing a process's status"""
279 279 if os.WIFEXITED(code):
280 280 val = os.WEXITSTATUS(code)
281 281 return "exited with status %d" % val, val
282 282 elif os.WIFSIGNALED(code):
283 283 val = os.WTERMSIG(code)
284 284 return "killed by signal %d" % val, val
285 285 elif os.WIFSTOPPED(code):
286 286 val = os.WSTOPSIG(code)
287 287 return "stopped by signal %d" % val, val
288 288 raise ValueError("invalid exit code")