When pulling from a non-hg repository URL (e.g. http://www.kernel.org/hg)... (a short sketch of the new content-type check follows below the changeset header)
Muli Ben-Yehuda - r751:0b245ede default
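The change below guards the HTTP client against servers that answer with an ordinary web page instead of hg protocol data: before parsing a response, the new verify_hg_repo helper checks that the reply carries the 'application/hg-0.1' Content-Type and raises RepoError otherwise. A minimal standalone sketch of the same idea, kept outside the httprepository class (the helper name and example URL are illustrative only, not part of this changeset):

    import urllib2

    def looks_like_hg_repo(url):
        # Illustrative helper (not part of this patch): issue one protocol
        # command and require the hg content type, so a plain HTML page
        # (e.g. a directory index at /hg) is rejected before parsing.
        resp = urllib2.urlopen(url + "?cmd=heads")
        return resp.headers['content-type'] == 'application/hg-0.1'

    # Example: looks_like_hg_repo("http://www.kernel.org/hg") returns False
    # when that URL serves a regular web page rather than an hg repository.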
@@ -1,1935 +1,1948 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys, struct, os
9 9 import util
10 10 from revlog import *
11 11 from demandload import *
12 12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 13 demandload(globals(), "tempfile httprangereader bdiff")
14 14 demandload(globals(), "bisect select")
15 15
16 16 class filelog(revlog):
17 17 def __init__(self, opener, path):
18 18 revlog.__init__(self, opener,
19 19 os.path.join("data", path + ".i"),
20 20 os.path.join("data", path + ".d"))
21 21
22 22 def read(self, node):
23 23 t = self.revision(node)
24 24 if not t.startswith('\1\n'):
25 25 return t
26 26 s = t.find('\1\n', 2)
27 27 return t[s+2:]
28 28
29 29 def readmeta(self, node):
30 30 t = self.revision(node)
31 31 if not t.startswith('\1\n'):
32 32 return t
33 33 s = t.find('\1\n', 2)
34 34 m, mt = {}, t[2:s]
35 35 for l in mt.splitlines():
36 36 k, v = l.split(": ", 1)
37 37 m[k] = v
38 38 return m
39 39
40 40 def add(self, text, meta, transaction, link, p1=None, p2=None):
41 41 if meta or text.startswith('\1\n'):
42 42 mt = ""
43 43 if meta:
44 44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
45 45 text = "\1\n" + "".join(mt) + "\1\n" + text
46 46 return self.addrevision(text, transaction, link, p1, p2)
47 47
48 48 def annotate(self, node):
49 49
50 50 def decorate(text, rev):
51 51 return ([rev] * len(text.splitlines()), text)
52 52
53 53 def pair(parent, child):
54 54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
55 55 child[0][b1:b2] = parent[0][a1:a2]
56 56 return child
57 57
58 58 # find all ancestors
59 59 needed = {node:1}
60 60 visit = [node]
61 61 while visit:
62 62 n = visit.pop(0)
63 63 for p in self.parents(n):
64 64 if p not in needed:
65 65 needed[p] = 1
66 66 visit.append(p)
67 67 else:
68 68 # count how many times we'll use this
69 69 needed[p] += 1
70 70
71 71 # sort by revision which is a topological order
72 72 visit = [ (self.rev(n), n) for n in needed.keys() ]
73 73 visit.sort()
74 74 hist = {}
75 75
76 76 for r,n in visit:
77 77 curr = decorate(self.read(n), self.linkrev(n))
78 78 for p in self.parents(n):
79 79 if p != nullid:
80 80 curr = pair(hist[p], curr)
81 81 # trim the history of unneeded revs
82 82 needed[p] -= 1
83 83 if not needed[p]:
84 84 del hist[p]
85 85 hist[n] = curr
86 86
87 87 return zip(hist[n][0], hist[n][1].splitlines(1))
88 88
89 89 class manifest(revlog):
90 90 def __init__(self, opener):
91 91 self.mapcache = None
92 92 self.listcache = None
93 93 self.addlist = None
94 94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
95 95
96 96 def read(self, node):
97 97 if node == nullid: return {} # don't upset local cache
98 98 if self.mapcache and self.mapcache[0] == node:
99 99 return self.mapcache[1]
100 100 text = self.revision(node)
101 101 map = {}
102 102 flag = {}
103 103 self.listcache = (text, text.splitlines(1))
104 104 for l in self.listcache[1]:
105 105 (f, n) = l.split('\0')
106 106 map[f] = bin(n[:40])
107 107 flag[f] = (n[40:-1] == "x")
108 108 self.mapcache = (node, map, flag)
109 109 return map
110 110
111 111 def readflags(self, node):
112 112 if node == nullid: return {} # don't upset local cache
113 113 if not self.mapcache or self.mapcache[0] != node:
114 114 self.read(node)
115 115 return self.mapcache[2]
116 116
117 117 def diff(self, a, b):
118 118 # this is sneaky, as we're not actually using a and b
119 119 if self.listcache and self.addlist and self.listcache[0] == a:
120 120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
121 121 if mdiff.patch(a, d) != b:
122 122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
123 123 return mdiff.textdiff(a, b)
124 124 return d
125 125 else:
126 126 return mdiff.textdiff(a, b)
127 127
128 128 def add(self, map, flags, transaction, link, p1=None, p2=None,
129 129 changed=None):
130 130 # directly generate the mdiff delta from the data collected during
131 131 # the bisect loop below
132 132 def gendelta(delta):
133 133 i = 0
134 134 result = []
135 135 while i < len(delta):
136 136 start = delta[i][2]
137 137 end = delta[i][3]
138 138 l = delta[i][4]
139 139 if l == None:
140 140 l = ""
141 141 while i < len(delta) - 1 and start <= delta[i+1][2] \
142 142 and end >= delta[i+1][2]:
143 143 if delta[i+1][3] > end:
144 144 end = delta[i+1][3]
145 145 if delta[i+1][4]:
146 146 l += delta[i+1][4]
147 147 i += 1
148 148 result.append(struct.pack(">lll", start, end, len(l)) + l)
149 149 i += 1
150 150 return result
151 151
152 152 # apply the changes collected during the bisect loop to our addlist
153 153 def addlistdelta(addlist, delta):
154 154 # apply the deltas to the addlist. start from the bottom up
155 155 # so changes to the offsets don't mess things up.
156 156 i = len(delta)
157 157 while i > 0:
158 158 i -= 1
159 159 start = delta[i][0]
160 160 end = delta[i][1]
161 161 if delta[i][4]:
162 162 addlist[start:end] = [delta[i][4]]
163 163 else:
164 164 del addlist[start:end]
165 165 return addlist
166 166
167 167 # calculate the byte offset of the start of each line in the
168 168 # manifest
169 169 def calcoffsets(addlist):
170 170 offsets = [0] * (len(addlist) + 1)
171 171 offset = 0
172 172 i = 0
173 173 while i < len(addlist):
174 174 offsets[i] = offset
175 175 offset += len(addlist[i])
176 176 i += 1
177 177 offsets[i] = offset
178 178 return offsets
179 179
180 180 # if we're using the listcache, make sure it is valid and
181 181 # parented by the same node we're diffing against
182 182 if not changed or not self.listcache or not p1 or \
183 183 self.mapcache[0] != p1:
184 184 files = map.keys()
185 185 files.sort()
186 186
187 187 self.addlist = ["%s\000%s%s\n" %
188 188 (f, hex(map[f]), flags[f] and "x" or '')
189 189 for f in files]
190 190 cachedelta = None
191 191 else:
192 192 addlist = self.listcache[1]
193 193
194 194 # find the starting offset for each line in the add list
195 195 offsets = calcoffsets(addlist)
196 196
197 197 # combine the changed lists into one list for sorting
198 198 work = [[x, 0] for x in changed[0]]
199 199 work[len(work):] = [[x, 1] for x in changed[1]]
200 200 work.sort()
201 201
202 202 delta = []
203 203 bs = 0
204 204
205 205 for w in work:
206 206 f = w[0]
207 207 # bs will either be the index of the item or the insert point
208 208 bs = bisect.bisect(addlist, f, bs)
209 209 if bs < len(addlist):
210 210 fn = addlist[bs][:addlist[bs].index('\0')]
211 211 else:
212 212 fn = None
213 213 if w[1] == 0:
214 214 l = "%s\000%s%s\n" % (f, hex(map[f]),
215 215 flags[f] and "x" or '')
216 216 else:
217 217 l = None
218 218 start = bs
219 219 if fn != f:
220 220 # item not found, insert a new one
221 221 end = bs
222 222 if w[1] == 1:
223 223 sys.stderr.write("failed to remove %s from manifest\n"
224 224 % f)
225 225 sys.exit(1)
226 226 else:
227 227 # item is found, replace/delete the existing line
228 228 end = bs + 1
229 229 delta.append([start, end, offsets[start], offsets[end], l])
230 230
231 231 self.addlist = addlistdelta(addlist, delta)
232 232 if self.mapcache[0] == self.tip():
233 233 cachedelta = "".join(gendelta(delta))
234 234 else:
235 235 cachedelta = None
236 236
237 237 text = "".join(self.addlist)
238 238 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
239 239 sys.stderr.write("manifest delta failure\n")
240 240 sys.exit(1)
241 241 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
242 242 self.mapcache = (n, map, flags)
243 243 self.listcache = (text, self.addlist)
244 244 self.addlist = None
245 245
246 246 return n
247 247
248 248 class changelog(revlog):
249 249 def __init__(self, opener):
250 250 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
251 251
252 252 def extract(self, text):
253 253 if not text:
254 254 return (nullid, "", "0", [], "")
255 255 last = text.index("\n\n")
256 256 desc = text[last + 2:]
257 257 l = text[:last].splitlines()
258 258 manifest = bin(l[0])
259 259 user = l[1]
260 260 date = l[2]
261 261 files = l[3:]
262 262 return (manifest, user, date, files, desc)
263 263
264 264 def read(self, node):
265 265 return self.extract(self.revision(node))
266 266
267 267 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
268 268 user=None, date=None):
269 269 date = date or "%d %d" % (time.time(), time.timezone)
270 270 list.sort()
271 271 l = [hex(manifest), user, date] + list + ["", desc]
272 272 text = "\n".join(l)
273 273 return self.addrevision(text, transaction, self.count(), p1, p2)
274 274
275 275 class dirstate:
276 276 def __init__(self, opener, ui, root):
277 277 self.opener = opener
278 278 self.root = root
279 279 self.dirty = 0
280 280 self.ui = ui
281 281 self.map = None
282 282 self.pl = None
283 283 self.copies = {}
284 284 self.ignorefunc = None
285 285
286 286 def wjoin(self, f):
287 287 return os.path.join(self.root, f)
288 288
289 289 def ignore(self, f):
290 290 if not self.ignorefunc:
291 291 bigpat = []
292 292 try:
293 293 l = file(self.wjoin(".hgignore"))
294 294 for pat in l:
295 295 if pat != "\n":
296 296 p = util.pconvert(pat[:-1])
297 297 try:
298 298 r = re.compile(p)
299 299 except:
300 300 self.ui.warn("ignoring invalid ignore"
301 301 + " regular expression '%s'\n" % p)
302 302 else:
303 303 bigpat.append(util.pconvert(pat[:-1]))
304 304 except IOError: pass
305 305
306 306 if bigpat:
307 307 s = "(?:%s)" % (")|(?:".join(bigpat))
308 308 r = re.compile(s)
309 309 self.ignorefunc = r.search
310 310 else:
311 311 self.ignorefunc = util.never
312 312
313 313 return self.ignorefunc(f)
314 314
315 315 def __del__(self):
316 316 if self.dirty:
317 317 self.write()
318 318
319 319 def __getitem__(self, key):
320 320 try:
321 321 return self.map[key]
322 322 except TypeError:
323 323 self.read()
324 324 return self[key]
325 325
326 326 def __contains__(self, key):
327 327 if not self.map: self.read()
328 328 return key in self.map
329 329
330 330 def parents(self):
331 331 if not self.pl:
332 332 self.read()
333 333 return self.pl
334 334
335 335 def markdirty(self):
336 336 if not self.dirty:
337 337 self.dirty = 1
338 338
339 339 def setparents(self, p1, p2 = nullid):
340 340 self.markdirty()
341 341 self.pl = p1, p2
342 342
343 343 def state(self, key):
344 344 try:
345 345 return self[key][0]
346 346 except KeyError:
347 347 return "?"
348 348
349 349 def read(self):
350 350 if self.map is not None: return self.map
351 351
352 352 self.map = {}
353 353 self.pl = [nullid, nullid]
354 354 try:
355 355 st = self.opener("dirstate").read()
356 356 if not st: return
357 357 except: return
358 358
359 359 self.pl = [st[:20], st[20: 40]]
360 360
361 361 pos = 40
362 362 while pos < len(st):
363 363 e = struct.unpack(">cllll", st[pos:pos+17])
364 364 l = e[4]
365 365 pos += 17
366 366 f = st[pos:pos + l]
367 367 if '\0' in f:
368 368 f, c = f.split('\0')
369 369 self.copies[f] = c
370 370 self.map[f] = e[:4]
371 371 pos += l
372 372
373 373 def copy(self, source, dest):
374 374 self.read()
375 375 self.markdirty()
376 376 self.copies[dest] = source
377 377
378 378 def copied(self, file):
379 379 return self.copies.get(file, None)
380 380
381 381 def update(self, files, state):
382 382 ''' current states:
383 383 n normal
384 384 m needs merging
385 385 r marked for removal
386 386 a marked for addition'''
387 387
388 388 if not files: return
389 389 self.read()
390 390 self.markdirty()
391 391 for f in files:
392 392 if state == "r":
393 393 self.map[f] = ('r', 0, 0, 0)
394 394 else:
395 395 s = os.stat(os.path.join(self.root, f))
396 396 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
397 397
398 398 def forget(self, files):
399 399 if not files: return
400 400 self.read()
401 401 self.markdirty()
402 402 for f in files:
403 403 try:
404 404 del self.map[f]
405 405 except KeyError:
406 406 self.ui.warn("not in dirstate: %s!\n" % f)
407 407 pass
408 408
409 409 def clear(self):
410 410 self.map = {}
411 411 self.markdirty()
412 412
413 413 def write(self):
414 414 st = self.opener("dirstate", "w")
415 415 st.write("".join(self.pl))
416 416 for f, e in self.map.items():
417 417 c = self.copied(f)
418 418 if c:
419 419 f = f + "\0" + c
420 420 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
421 421 st.write(e + f)
422 422 self.dirty = 0
423 423
424 424 def walk(self, files = None, match = util.always):
425 425 self.read()
426 426 dc = self.map.copy()
427 427 # walk all files by default
428 428 if not files: files = [self.root]
429 429 def traverse():
430 430 for f in util.unique(files):
431 431 f = os.path.join(self.root, f)
432 432 if os.path.isdir(f):
433 433 for dir, subdirs, fl in os.walk(f):
434 434 d = dir[len(self.root) + 1:]
435 435 if d == '.hg':
436 436 subdirs[:] = []
437 437 continue
438 438 for sd in subdirs:
439 439 ds = os.path.join(d, sd +'/')
440 440 if self.ignore(ds) or not match(ds):
441 441 subdirs.remove(sd)
442 442 for fn in fl:
443 443 fn = util.pconvert(os.path.join(d, fn))
444 444 yield 'f', fn
445 445 else:
446 446 yield 'f', f[len(self.root) + 1:]
447 447
448 448 for k in dc.keys():
449 449 yield 'm', k
450 450
451 451 # yield only files that match: all in dirstate, others only if
452 452 # not in .hgignore
453 453
454 454 for src, fn in util.unique(traverse()):
455 455 if fn in dc:
456 456 del dc[fn]
457 457 elif self.ignore(fn):
458 458 continue
459 459 if match(fn):
460 460 yield src, fn
461 461
462 462 def changes(self, files = None, match = util.always):
463 463 self.read()
464 464 dc = self.map.copy()
465 465 lookup, changed, added, unknown = [], [], [], []
466 466
467 467 for src, fn in self.walk(files, match):
468 468 try: s = os.stat(os.path.join(self.root, fn))
469 469 except: continue
470 470
471 471 if fn in dc:
472 472 c = dc[fn]
473 473 del dc[fn]
474 474
475 475 if c[0] == 'm':
476 476 changed.append(fn)
477 477 elif c[0] == 'a':
478 478 added.append(fn)
479 479 elif c[0] == 'r':
480 480 unknown.append(fn)
481 481 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
482 482 changed.append(fn)
483 483 elif c[1] != s.st_mode or c[3] != s.st_mtime:
484 484 lookup.append(fn)
485 485 else:
486 486 if match(fn): unknown.append(fn)
487 487
488 488 return (lookup, changed, added, filter(match, dc.keys()), unknown)
489 489
490 490 # used to avoid circular references so destructors work
491 491 def opener(base):
492 492 p = base
493 493 def o(path, mode="r"):
494 494 if p.startswith("http://"):
495 495 f = os.path.join(p, urllib.quote(path))
496 496 return httprangereader.httprangereader(f)
497 497
498 498 f = os.path.join(p, path)
499 499
500 500 mode += "b" # for that other OS
501 501
502 502 if mode[0] != "r":
503 503 try:
504 504 s = os.stat(f)
505 505 except OSError:
506 506 d = os.path.dirname(f)
507 507 if not os.path.isdir(d):
508 508 os.makedirs(d)
509 509 else:
510 510 if s.st_nlink > 1:
511 511 file(f + ".tmp", "wb").write(file(f, "rb").read())
512 512 util.rename(f+".tmp", f)
513 513
514 514 return file(f, mode)
515 515
516 516 return o
517 517
518 518 class RepoError(Exception): pass
519 519
520 520 class localrepository:
521 521 def __init__(self, ui, path=None, create=0):
522 522 self.remote = 0
523 523 if path and path.startswith("http://"):
524 524 self.remote = 1
525 525 self.path = path
526 526 else:
527 527 if not path:
528 528 p = os.getcwd()
529 529 while not os.path.isdir(os.path.join(p, ".hg")):
530 530 oldp = p
531 531 p = os.path.dirname(p)
532 532 if p == oldp: raise RepoError("no repo found")
533 533 path = p
534 534 self.path = os.path.join(path, ".hg")
535 535
536 536 if not create and not os.path.isdir(self.path):
537 537 raise RepoError("repository %s not found" % self.path)
538 538
539 539 self.root = path
540 540 self.ui = ui
541 541
542 542 if create:
543 543 os.mkdir(self.path)
544 544 os.mkdir(self.join("data"))
545 545
546 546 self.opener = opener(self.path)
547 547 self.wopener = opener(self.root)
548 548 self.manifest = manifest(self.opener)
549 549 self.changelog = changelog(self.opener)
550 550 self.tagscache = None
551 551 self.nodetagscache = None
552 552
553 553 if not self.remote:
554 554 self.dirstate = dirstate(self.opener, ui, self.root)
555 555 try:
556 556 self.ui.readconfig(self.opener("hgrc"))
557 557 except IOError: pass
558 558
559 559 def hook(self, name, **args):
560 560 s = self.ui.config("hooks", name)
561 561 if s:
562 562 self.ui.note("running hook %s: %s\n" % (name, s))
563 563 old = {}
564 564 for k, v in args.items():
565 565 k = k.upper()
566 566 old[k] = os.environ.get(k, None)
567 567 os.environ[k] = v
568 568
569 569 r = os.system(s)
570 570
571 571 for k, v in old.items():
572 572 if v != None:
573 573 os.environ[k] = v
574 574 else:
575 575 del os.environ[k]
576 576
577 577 if r:
578 578 self.ui.warn("abort: %s hook failed with status %d!\n" %
579 579 (name, r))
580 580 return False
581 581 return True
582 582
583 583 def tags(self):
584 584 '''return a mapping of tag to node'''
585 585 if not self.tagscache:
586 586 self.tagscache = {}
587 587 def addtag(self, k, n):
588 588 try:
589 589 bin_n = bin(n)
590 590 except TypeError:
591 591 bin_n = ''
592 592 self.tagscache[k.strip()] = bin_n
593 593
594 594 try:
595 595 # read each head of the tags file, ending with the tip
596 596 # and add each tag found to the map, with "newer" ones
597 597 # taking precedence
598 598 fl = self.file(".hgtags")
599 599 h = fl.heads()
600 600 h.reverse()
601 601 for r in h:
602 602 for l in fl.revision(r).splitlines():
603 603 if l:
604 604 n, k = l.split(" ", 1)
605 605 addtag(self, k, n)
606 606 except KeyError:
607 607 pass
608 608
609 609 try:
610 610 f = self.opener("localtags")
611 611 for l in f:
612 612 n, k = l.split(" ", 1)
613 613 addtag(self, k, n)
614 614 except IOError:
615 615 pass
616 616
617 617 self.tagscache['tip'] = self.changelog.tip()
618 618
619 619 return self.tagscache
620 620
621 621 def tagslist(self):
622 622 '''return a list of tags ordered by revision'''
623 623 l = []
624 624 for t, n in self.tags().items():
625 625 try:
626 626 r = self.changelog.rev(n)
627 627 except:
628 628 r = -2 # sort to the beginning of the list if unknown
629 629 l.append((r,t,n))
630 630 l.sort()
631 631 return [(t,n) for r,t,n in l]
632 632
633 633 def nodetags(self, node):
634 634 '''return the tags associated with a node'''
635 635 if not self.nodetagscache:
636 636 self.nodetagscache = {}
637 637 for t,n in self.tags().items():
638 638 self.nodetagscache.setdefault(n,[]).append(t)
639 639 return self.nodetagscache.get(node, [])
640 640
641 641 def lookup(self, key):
642 642 try:
643 643 return self.tags()[key]
644 644 except KeyError:
645 645 try:
646 646 return self.changelog.lookup(key)
647 647 except:
648 648 raise RepoError("unknown revision '%s'" % key)
649 649
650 650 def dev(self):
651 651 if self.remote: return -1
652 652 return os.stat(self.path).st_dev
653 653
654 654 def join(self, f):
655 655 return os.path.join(self.path, f)
656 656
657 657 def wjoin(self, f):
658 658 return os.path.join(self.root, f)
659 659
660 660 def file(self, f):
661 661 if f[0] == '/': f = f[1:]
662 662 return filelog(self.opener, f)
663 663
664 664 def getcwd(self):
665 665 cwd = os.getcwd()
666 666 if cwd == self.root: return ''
667 667 return cwd[len(self.root) + 1:]
668 668
669 669 def wfile(self, f, mode='r'):
670 670 return self.wopener(f, mode)
671 671
672 672 def transaction(self):
673 673 # save dirstate for undo
674 674 try:
675 675 ds = self.opener("dirstate").read()
676 676 except IOError:
677 677 ds = ""
678 678 self.opener("undo.dirstate", "w").write(ds)
679 679
680 680 return transaction.transaction(self.ui.warn,
681 681 self.opener, self.join("journal"),
682 682 self.join("undo"))
683 683
684 684 def recover(self):
685 685 lock = self.lock()
686 686 if os.path.exists(self.join("journal")):
687 687 self.ui.status("rolling back interrupted transaction\n")
688 688 return transaction.rollback(self.opener, self.join("journal"))
689 689 else:
690 690 self.ui.warn("no interrupted transaction available\n")
691 691
692 692 def undo(self):
693 693 lock = self.lock()
694 694 if os.path.exists(self.join("undo")):
695 695 self.ui.status("rolling back last transaction\n")
696 696 transaction.rollback(self.opener, self.join("undo"))
697 697 self.dirstate = None
698 698 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
699 699 self.dirstate = dirstate(self.opener, self.ui, self.root)
700 700 else:
701 701 self.ui.warn("no undo information available\n")
702 702
703 703 def lock(self, wait = 1):
704 704 try:
705 705 return lock.lock(self.join("lock"), 0)
706 706 except lock.LockHeld, inst:
707 707 if wait:
708 708 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
709 709 return lock.lock(self.join("lock"), wait)
710 710 raise inst
711 711
712 712 def rawcommit(self, files, text, user, date, p1=None, p2=None):
713 713 orig_parent = self.dirstate.parents()[0] or nullid
714 714 p1 = p1 or self.dirstate.parents()[0] or nullid
715 715 p2 = p2 or self.dirstate.parents()[1] or nullid
716 716 c1 = self.changelog.read(p1)
717 717 c2 = self.changelog.read(p2)
718 718 m1 = self.manifest.read(c1[0])
719 719 mf1 = self.manifest.readflags(c1[0])
720 720 m2 = self.manifest.read(c2[0])
721 721
722 722 if orig_parent == p1:
723 723 update_dirstate = 1
724 724 else:
725 725 update_dirstate = 0
726 726
727 727 tr = self.transaction()
728 728 mm = m1.copy()
729 729 mfm = mf1.copy()
730 730 linkrev = self.changelog.count()
731 731 for f in files:
732 732 try:
733 733 t = self.wfile(f).read()
734 734 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
735 735 r = self.file(f)
736 736 mfm[f] = tm
737 737 mm[f] = r.add(t, {}, tr, linkrev,
738 738 m1.get(f, nullid), m2.get(f, nullid))
739 739 if update_dirstate:
740 740 self.dirstate.update([f], "n")
741 741 except IOError:
742 742 try:
743 743 del mm[f]
744 744 del mfm[f]
745 745 if update_dirstate:
746 746 self.dirstate.forget([f])
747 747 except:
748 748 # deleted from p2?
749 749 pass
750 750
751 751 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
752 752 user = user or self.ui.username()
753 753 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
754 754 tr.close()
755 755 if update_dirstate:
756 756 self.dirstate.setparents(n, nullid)
757 757
758 758 def commit(self, files = None, text = "", user = None, date = None):
759 759 commit = []
760 760 remove = []
761 761 if files:
762 762 for f in files:
763 763 s = self.dirstate.state(f)
764 764 if s in 'nmai':
765 765 commit.append(f)
766 766 elif s == 'r':
767 767 remove.append(f)
768 768 else:
769 769 self.ui.warn("%s not tracked!\n" % f)
770 770 else:
771 771 (c, a, d, u) = self.changes()
772 772 commit = c + a
773 773 remove = d
774 774
775 775 if not commit and not remove:
776 776 self.ui.status("nothing changed\n")
777 777 return
778 778
779 779 if not self.hook("precommit"):
780 780 return 1
781 781
782 782 p1, p2 = self.dirstate.parents()
783 783 c1 = self.changelog.read(p1)
784 784 c2 = self.changelog.read(p2)
785 785 m1 = self.manifest.read(c1[0])
786 786 mf1 = self.manifest.readflags(c1[0])
787 787 m2 = self.manifest.read(c2[0])
788 788 lock = self.lock()
789 789 tr = self.transaction()
790 790
791 791 # check in files
792 792 new = {}
793 793 linkrev = self.changelog.count()
794 794 commit.sort()
795 795 for f in commit:
796 796 self.ui.note(f + "\n")
797 797 try:
798 798 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
799 799 t = self.wfile(f).read()
800 800 except IOError:
801 801 self.ui.warn("trouble committing %s!\n" % f)
802 802 raise
803 803
804 804 meta = {}
805 805 cp = self.dirstate.copied(f)
806 806 if cp:
807 807 meta["copy"] = cp
808 808 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
809 809 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
810 810
811 811 r = self.file(f)
812 812 fp1 = m1.get(f, nullid)
813 813 fp2 = m2.get(f, nullid)
814 814 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
815 815
816 816 # update manifest
817 817 m1.update(new)
818 818 for f in remove:
819 819 if f in m1:
820 820 del m1[f]
821 821 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
822 822 (new, remove))
823 823
824 824 # add changeset
825 825 new = new.keys()
826 826 new.sort()
827 827
828 828 if not text:
829 829 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
830 830 edittext += "".join(["HG: changed %s\n" % f for f in new])
831 831 edittext += "".join(["HG: removed %s\n" % f for f in remove])
832 832 edittext = self.ui.edit(edittext)
833 833 if not edittext.rstrip():
834 834 return 1
835 835 text = edittext
836 836
837 837 user = user or self.ui.username()
838 838 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
839 839
840 840 tr.close()
841 841
842 842 self.dirstate.setparents(n)
843 843 self.dirstate.update(new, "n")
844 844 self.dirstate.forget(remove)
845 845
846 846 if not self.hook("commit", node=hex(n)):
847 847 return 1
848 848
849 849 def walk(self, node = None, files = [], match = util.always):
850 850 if node:
851 851 for fn in self.manifest.read(self.changelog.read(node)[0]):
852 852 yield 'm', fn
853 853 else:
854 854 for src, fn in self.dirstate.walk(files, match):
855 855 yield src, fn
856 856
857 857 def changes(self, node1 = None, node2 = None, files = [],
858 858 match = util.always):
859 859 mf2, u = None, []
860 860
861 861 def fcmp(fn, mf):
862 862 t1 = self.wfile(fn).read()
863 863 t2 = self.file(fn).revision(mf[fn])
864 864 return cmp(t1, t2)
865 865
866 866 def mfmatches(node):
867 867 mf = dict(self.manifest.read(node))
868 868 for fn in mf.keys():
869 869 if not match(fn):
870 870 del mf[fn]
871 871 return mf
872 872
873 873 # are we comparing the working directory?
874 874 if not node2:
875 875 l, c, a, d, u = self.dirstate.changes(files, match)
876 876
877 877 # are we comparing working dir against its parent?
878 878 if not node1:
879 879 if l:
880 880 # do a full compare of any files that might have changed
881 881 change = self.changelog.read(self.dirstate.parents()[0])
882 882 mf2 = mfmatches(change[0])
883 883 for f in l:
884 884 if fcmp(f, mf2):
885 885 c.append(f)
886 886
887 887 for l in c, a, d, u:
888 888 l.sort()
889 889
890 890 return (c, a, d, u)
891 891
892 892 # are we comparing working dir against non-tip?
893 893 # generate a pseudo-manifest for the working dir
894 894 if not node2:
895 895 if not mf2:
896 896 change = self.changelog.read(self.dirstate.parents()[0])
897 897 mf2 = mfmatches(change[0])
898 898 for f in a + c + l:
899 899 mf2[f] = ""
900 900 for f in d:
901 901 if f in mf2: del mf2[f]
902 902 else:
903 903 change = self.changelog.read(node2)
904 904 mf2 = mfmatches(change[0])
905 905
906 906 # flush lists from dirstate before comparing manifests
907 907 c, a = [], []
908 908
909 909 change = self.changelog.read(node1)
910 910 mf1 = mfmatches(change[0])
911 911
912 912 for fn in mf2:
913 913 if mf1.has_key(fn):
914 914 if mf1[fn] != mf2[fn]:
915 915 if mf2[fn] != "" or fcmp(fn, mf1):
916 916 c.append(fn)
917 917 del mf1[fn]
918 918 else:
919 919 a.append(fn)
920 920
921 921 d = mf1.keys()
922 922
923 923 for l in c, a, d, u:
924 924 l.sort()
925 925
926 926 return (c, a, d, u)
927 927
928 928 def add(self, list):
929 929 for f in list:
930 930 p = self.wjoin(f)
931 931 if not os.path.exists(p):
932 932 self.ui.warn("%s does not exist!\n" % f)
933 933 elif not os.path.isfile(p):
934 934 self.ui.warn("%s not added: only files supported currently\n" % f)
935 935 elif self.dirstate.state(f) in 'an':
936 936 self.ui.warn("%s already tracked!\n" % f)
937 937 else:
938 938 self.dirstate.update([f], "a")
939 939
940 940 def forget(self, list):
941 941 for f in list:
942 942 if self.dirstate.state(f) not in 'ai':
943 943 self.ui.warn("%s not added!\n" % f)
944 944 else:
945 945 self.dirstate.forget([f])
946 946
947 947 def remove(self, list):
948 948 for f in list:
949 949 p = self.wjoin(f)
950 950 if os.path.exists(p):
951 951 self.ui.warn("%s still exists!\n" % f)
952 952 elif self.dirstate.state(f) == 'a':
953 953 self.ui.warn("%s never committed!\n" % f)
954 954 self.dirstate.forget([f])
955 955 elif f not in self.dirstate:
956 956 self.ui.warn("%s not tracked!\n" % f)
957 957 else:
958 958 self.dirstate.update([f], "r")
959 959
960 960 def copy(self, source, dest):
961 961 p = self.wjoin(dest)
962 962 if not os.path.exists(p):
963 963 self.ui.warn("%s does not exist!\n" % dest)
964 964 elif not os.path.isfile(p):
965 965 self.ui.warn("copy failed: %s is not a file\n" % dest)
966 966 else:
967 967 if self.dirstate.state(dest) == '?':
968 968 self.dirstate.update([dest], "a")
969 969 self.dirstate.copy(source, dest)
970 970
971 971 def heads(self):
972 972 return self.changelog.heads()
973 973
974 974 def branches(self, nodes):
975 975 if not nodes: nodes = [self.changelog.tip()]
976 976 b = []
977 977 for n in nodes:
978 978 t = n
979 979 while n:
980 980 p = self.changelog.parents(n)
981 981 if p[1] != nullid or p[0] == nullid:
982 982 b.append((t, n, p[0], p[1]))
983 983 break
984 984 n = p[0]
985 985 return b
986 986
987 987 def between(self, pairs):
988 988 r = []
989 989
990 990 for top, bottom in pairs:
991 991 n, l, i = top, [], 0
992 992 f = 1
993 993
994 994 while n != bottom:
995 995 p = self.changelog.parents(n)[0]
996 996 if i == f:
997 997 l.append(n)
998 998 f = f * 2
999 999 n = p
1000 1000 i += 1
1001 1001
1002 1002 r.append(l)
1003 1003
1004 1004 return r
1005 1005
1006 1006 def newer(self, nodes):
1007 1007 m = {}
1008 1008 nl = []
1009 1009 pm = {}
1010 1010 cl = self.changelog
1011 1011 t = l = cl.count()
1012 1012
1013 1013 # find the lowest numbered node
1014 1014 for n in nodes:
1015 1015 l = min(l, cl.rev(n))
1016 1016 m[n] = 1
1017 1017
1018 1018 for i in xrange(l, t):
1019 1019 n = cl.node(i)
1020 1020 if n in m: # explicitly listed
1021 1021 pm[n] = 1
1022 1022 nl.append(n)
1023 1023 continue
1024 1024 for p in cl.parents(n):
1025 1025 if p in pm: # parent listed
1026 1026 pm[n] = 1
1027 1027 nl.append(n)
1028 1028 break
1029 1029
1030 1030 return nl
1031 1031
1032 1032 def findincoming(self, remote, base={}):
1033 1033 m = self.changelog.nodemap
1034 1034 search = []
1035 1035 fetch = []
1036 1036 seen = {}
1037 1037 seenbranch = {}
1038 1038
1039 1039 # assume we're closer to the tip than the root
1040 1040 # and start by examining the heads
1041 1041 self.ui.status("searching for changes\n")
1042 1042 heads = remote.heads()
1043 1043 unknown = []
1044 1044 for h in heads:
1045 1045 if h not in m:
1046 1046 unknown.append(h)
1047 1047 else:
1048 1048 base[h] = 1
1049 1049
1050 1050 if not unknown:
1051 1051 return None
1052 1052
1053 1053 rep = {}
1054 1054 reqcnt = 0
1055 1055
1056 1056 # search through remote branches
1057 1057 # a 'branch' here is a linear segment of history, with four parts:
1058 1058 # head, root, first parent, second parent
1059 1059 # (a branch always has two parents (or none) by definition)
1060 1060 unknown = remote.branches(unknown)
1061 1061 while unknown:
1062 1062 r = []
1063 1063 while unknown:
1064 1064 n = unknown.pop(0)
1065 1065 if n[0] in seen:
1066 1066 continue
1067 1067
1068 1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1069 1069 if n[0] == nullid:
1070 1070 break
1071 1071 if n in seenbranch:
1072 1072 self.ui.debug("branch already found\n")
1073 1073 continue
1074 1074 if n[1] and n[1] in m: # do we know the base?
1075 1075 self.ui.debug("found incomplete branch %s:%s\n"
1076 1076 % (short(n[0]), short(n[1])))
1077 1077 search.append(n) # schedule branch range for scanning
1078 1078 seenbranch[n] = 1
1079 1079 else:
1080 1080 if n[1] not in seen and n[1] not in fetch:
1081 1081 if n[2] in m and n[3] in m:
1082 1082 self.ui.debug("found new changeset %s\n" %
1083 1083 short(n[1]))
1084 1084 fetch.append(n[1]) # earliest unknown
1085 1085 base[n[2]] = 1 # latest known
1086 1086 continue
1087 1087
1088 1088 for a in n[2:4]:
1089 1089 if a not in rep:
1090 1090 r.append(a)
1091 1091 rep[a] = 1
1092 1092
1093 1093 seen[n[0]] = 1
1094 1094
1095 1095 if r:
1096 1096 reqcnt += 1
1097 1097 self.ui.debug("request %d: %s\n" %
1098 1098 (reqcnt, " ".join(map(short, r))))
1099 1099 for p in range(0, len(r), 10):
1100 1100 for b in remote.branches(r[p:p+10]):
1101 1101 self.ui.debug("received %s:%s\n" %
1102 1102 (short(b[0]), short(b[1])))
1103 1103 if b[0] not in m and b[0] not in seen:
1104 1104 unknown.append(b)
1105 1105
1106 1106 # do binary search on the branches we found
1107 1107 while search:
1108 1108 n = search.pop(0)
1109 1109 reqcnt += 1
1110 1110 l = remote.between([(n[0], n[1])])[0]
1111 1111 l.append(n[1])
1112 1112 p = n[0]
1113 1113 f = 1
1114 1114 for i in l:
1115 1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1116 1116 if i in m:
1117 1117 if f <= 2:
1118 1118 self.ui.debug("found new branch changeset %s\n" %
1119 1119 short(p))
1120 1120 fetch.append(p)
1121 1121 base[i] = 1
1122 1122 else:
1123 1123 self.ui.debug("narrowed branch search to %s:%s\n"
1124 1124 % (short(p), short(i)))
1125 1125 search.append((p, i))
1126 1126 break
1127 1127 p, f = i, f * 2
1128 1128
1129 1129 # sanity check our fetch list
1130 1130 for f in fetch:
1131 1131 if f in m:
1132 1132 raise RepoError("already have changeset " + short(f[:4]))
1133 1133
1134 1134 if base.keys() == [nullid]:
1135 1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1136 1136
1137 1137 self.ui.note("adding new changesets starting at " +
1138 1138 " ".join([short(f) for f in fetch]) + "\n")
1139 1139
1140 1140 self.ui.debug("%d total queries\n" % reqcnt)
1141 1141
1142 1142 return fetch
1143 1143
1144 1144 def findoutgoing(self, remote):
1145 1145 base = {}
1146 1146 self.findincoming(remote, base)
1147 1147 remain = dict.fromkeys(self.changelog.nodemap)
1148 1148
1149 1149 # prune everything remote has from the tree
1150 1150 del remain[nullid]
1151 1151 remove = base.keys()
1152 1152 while remove:
1153 1153 n = remove.pop(0)
1154 1154 if n in remain:
1155 1155 del remain[n]
1156 1156 for p in self.changelog.parents(n):
1157 1157 remove.append(p)
1158 1158
1159 1159 # find every node whose parents have been pruned
1160 1160 subset = []
1161 1161 for n in remain:
1162 1162 p1, p2 = self.changelog.parents(n)
1163 1163 if p1 not in remain and p2 not in remain:
1164 1164 subset.append(n)
1165 1165
1166 1166 # this is the set of all roots we have to push
1167 1167 return subset
1168 1168
1169 1169 def pull(self, remote):
1170 1170 lock = self.lock()
1171 1171
1172 1172 # if we have an empty repo, fetch everything
1173 1173 if self.changelog.tip() == nullid:
1174 1174 self.ui.status("requesting all changes\n")
1175 1175 fetch = [nullid]
1176 1176 else:
1177 1177 fetch = self.findincoming(remote)
1178 1178
1179 1179 if not fetch:
1180 1180 self.ui.status("no changes found\n")
1181 1181 return 1
1182 1182
1183 1183 cg = remote.changegroup(fetch)
1184 1184 return self.addchangegroup(cg)
1185 1185
1186 1186 def push(self, remote):
1187 1187 lock = remote.lock()
1188 1188 update = self.findoutgoing(remote)
1189 1189 if not update:
1190 1190 self.ui.status("no changes found\n")
1191 1191 return 1
1192 1192
1193 1193 cg = self.changegroup(update)
1194 1194 return remote.addchangegroup(cg)
1195 1195
1196 1196 def changegroup(self, basenodes):
1197 1197 class genread:
1198 1198 def __init__(self, generator):
1199 1199 self.g = generator
1200 1200 self.buf = ""
1201 1201 def read(self, l):
1202 1202 while l > len(self.buf):
1203 1203 try:
1204 1204 self.buf += self.g.next()
1205 1205 except StopIteration:
1206 1206 break
1207 1207 d, self.buf = self.buf[:l], self.buf[l:]
1208 1208 return d
1209 1209
1210 1210 def gengroup():
1211 1211 nodes = self.newer(basenodes)
1212 1212
1213 1213 # construct the link map
1214 1214 linkmap = {}
1215 1215 for n in nodes:
1216 1216 linkmap[self.changelog.rev(n)] = n
1217 1217
1218 1218 # construct a list of all changed files
1219 1219 changed = {}
1220 1220 for n in nodes:
1221 1221 c = self.changelog.read(n)
1222 1222 for f in c[3]:
1223 1223 changed[f] = 1
1224 1224 changed = changed.keys()
1225 1225 changed.sort()
1226 1226
1227 1227 # the changegroup is changesets + manifests + all file revs
1228 1228 revs = [ self.changelog.rev(n) for n in nodes ]
1229 1229
1230 1230 for y in self.changelog.group(linkmap): yield y
1231 1231 for y in self.manifest.group(linkmap): yield y
1232 1232 for f in changed:
1233 1233 yield struct.pack(">l", len(f) + 4) + f
1234 1234 g = self.file(f).group(linkmap)
1235 1235 for y in g:
1236 1236 yield y
1237 1237
1238 1238 yield struct.pack(">l", 0)
1239 1239
1240 1240 return genread(gengroup())
1241 1241
1242 1242 def addchangegroup(self, source):
1243 1243
1244 1244 def getchunk():
1245 1245 d = source.read(4)
1246 1246 if not d: return ""
1247 1247 l = struct.unpack(">l", d)[0]
1248 1248 if l <= 4: return ""
1249 1249 return source.read(l - 4)
1250 1250
1251 1251 def getgroup():
1252 1252 while 1:
1253 1253 c = getchunk()
1254 1254 if not c: break
1255 1255 yield c
1256 1256
1257 1257 def csmap(x):
1258 1258 self.ui.debug("add changeset %s\n" % short(x))
1259 1259 return self.changelog.count()
1260 1260
1261 1261 def revmap(x):
1262 1262 return self.changelog.rev(x)
1263 1263
1264 1264 if not source: return
1265 1265 changesets = files = revisions = 0
1266 1266
1267 1267 tr = self.transaction()
1268 1268
1269 1269 # pull off the changeset group
1270 1270 self.ui.status("adding changesets\n")
1271 1271 co = self.changelog.tip()
1272 1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1273 1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1274 1274
1275 1275 # pull off the manifest group
1276 1276 self.ui.status("adding manifests\n")
1277 1277 mm = self.manifest.tip()
1278 1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1279 1279
1280 1280 # process the files
1281 1281 self.ui.status("adding file revisions\n")
1282 1282 while 1:
1283 1283 f = getchunk()
1284 1284 if not f: break
1285 1285 self.ui.debug("adding %s revisions\n" % f)
1286 1286 fl = self.file(f)
1287 1287 o = fl.count()
1288 1288 n = fl.addgroup(getgroup(), revmap, tr)
1289 1289 revisions += fl.count() - o
1290 1290 files += 1
1291 1291
1292 1292 self.ui.status(("modified %d files, added %d changesets" +
1293 1293 " and %d new revisions\n")
1294 1294 % (files, changesets, revisions))
1295 1295
1296 1296 tr.close()
1297 1297 return
1298 1298
1299 1299 def update(self, node, allow=False, force=False, choose=None,
1300 1300 moddirstate=True):
1301 1301 pl = self.dirstate.parents()
1302 1302 if not force and pl[1] != nullid:
1303 1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1304 1304 return 1
1305 1305
1306 1306 p1, p2 = pl[0], node
1307 1307 pa = self.changelog.ancestor(p1, p2)
1308 1308 m1n = self.changelog.read(p1)[0]
1309 1309 m2n = self.changelog.read(p2)[0]
1310 1310 man = self.manifest.ancestor(m1n, m2n)
1311 1311 m1 = self.manifest.read(m1n)
1312 1312 mf1 = self.manifest.readflags(m1n)
1313 1313 m2 = self.manifest.read(m2n)
1314 1314 mf2 = self.manifest.readflags(m2n)
1315 1315 ma = self.manifest.read(man)
1316 1316 mfa = self.manifest.readflags(man)
1317 1317
1318 1318 (c, a, d, u) = self.changes()
1319 1319
1320 1320 # is this a jump, or a merge? i.e. is there a linear path
1321 1321 # from p1 to p2?
1322 1322 linear_path = (pa == p1 or pa == p2)
1323 1323
1324 1324 # resolve the manifest to determine which files
1325 1325 # we care about merging
1326 1326 self.ui.note("resolving manifests\n")
1327 1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1328 1328 (force, allow, moddirstate, linear_path))
1329 1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1330 1330 (short(man), short(m1n), short(m2n)))
1331 1331
1332 1332 merge = {}
1333 1333 get = {}
1334 1334 remove = []
1335 1335 mark = {}
1336 1336
1337 1337 # construct a working dir manifest
1338 1338 mw = m1.copy()
1339 1339 mfw = mf1.copy()
1340 1340 umap = dict.fromkeys(u)
1341 1341
1342 1342 for f in a + c + u:
1343 1343 mw[f] = ""
1344 1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1345 1345
1346 1346 for f in d:
1347 1347 if f in mw: del mw[f]
1348 1348
1349 1349 # If we're jumping between revisions (as opposed to merging),
1350 1350 # and if neither the working directory nor the target rev has
1351 1351 # the file, then we need to remove it from the dirstate, to
1352 1352 # prevent the dirstate from listing the file when it is no
1353 1353 # longer in the manifest.
1354 1354 if moddirstate and linear_path and f not in m2:
1355 1355 self.dirstate.forget((f,))
1356 1356
1357 1357 # Compare manifests
1358 1358 for f, n in mw.iteritems():
1359 1359 if choose and not choose(f): continue
1360 1360 if f in m2:
1361 1361 s = 0
1362 1362
1363 1363 # is the wfile new since m1, and match m2?
1364 1364 if f not in m1:
1365 1365 t1 = self.wfile(f).read()
1366 1366 t2 = self.file(f).revision(m2[f])
1367 1367 if cmp(t1, t2) == 0:
1368 1368 mark[f] = 1
1369 1369 n = m2[f]
1370 1370 del t1, t2
1371 1371
1372 1372 # are files different?
1373 1373 if n != m2[f]:
1374 1374 a = ma.get(f, nullid)
1375 1375 # are both different from the ancestor?
1376 1376 if n != a and m2[f] != a:
1377 1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1378 1378 # merge executable bits
1379 1379 # "if we changed or they changed, change in merge"
1380 1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1381 1381 mode = ((a^b) | (a^c)) ^ a
1382 1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1383 1383 s = 1
1384 1384 # are we clobbering?
1385 1385 # is remote's version newer?
1386 1386 # or are we going back in time?
1387 1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1388 1388 self.ui.debug(" remote %s is newer, get\n" % f)
1389 1389 get[f] = m2[f]
1390 1390 s = 1
1391 1391 else:
1392 1392 mark[f] = 1
1393 1393 elif f in umap:
1394 1394 # this unknown file is the same as the checkout
1395 1395 get[f] = m2[f]
1396 1396
1397 1397 if not s and mfw[f] != mf2[f]:
1398 1398 if force:
1399 1399 self.ui.debug(" updating permissions for %s\n" % f)
1400 1400 util.set_exec(self.wjoin(f), mf2[f])
1401 1401 else:
1402 1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1403 1403 mode = ((a^b) | (a^c)) ^ a
1404 1404 if mode != b:
1405 1405 self.ui.debug(" updating permissions for %s\n" % f)
1406 1406 util.set_exec(self.wjoin(f), mode)
1407 1407 mark[f] = 1
1408 1408 del m2[f]
1409 1409 elif f in ma:
1410 1410 if n != ma[f]:
1411 1411 r = "d"
1412 1412 if not force and (linear_path or allow):
1413 1413 r = self.ui.prompt(
1414 1414 (" local changed %s which remote deleted\n" % f) +
1415 1415 "(k)eep or (d)elete?", "[kd]", "k")
1416 1416 if r == "d":
1417 1417 remove.append(f)
1418 1418 else:
1419 1419 self.ui.debug("other deleted %s\n" % f)
1420 1420 remove.append(f) # other deleted it
1421 1421 else:
1422 1422 if n == m1.get(f, nullid): # same as parent
1423 1423 if p2 == pa: # going backwards?
1424 1424 self.ui.debug("remote deleted %s\n" % f)
1425 1425 remove.append(f)
1426 1426 else:
1427 1427 self.ui.debug("local created %s, keeping\n" % f)
1428 1428 else:
1429 1429 self.ui.debug("working dir created %s, keeping\n" % f)
1430 1430
1431 1431 for f, n in m2.iteritems():
1432 1432 if choose and not choose(f): continue
1433 1433 if f[0] == "/": continue
1434 1434 if f in ma and n != ma[f]:
1435 1435 r = "k"
1436 1436 if not force and (linear_path or allow):
1437 1437 r = self.ui.prompt(
1438 1438 ("remote changed %s which local deleted\n" % f) +
1439 1439 "(k)eep or (d)elete?", "[kd]", "k")
1440 1440 if r == "k": get[f] = n
1441 1441 elif f not in ma:
1442 1442 self.ui.debug("remote created %s\n" % f)
1443 1443 get[f] = n
1444 1444 else:
1445 1445 if force or p2 == pa: # going backwards?
1446 1446 self.ui.debug("local deleted %s, recreating\n" % f)
1447 1447 get[f] = n
1448 1448 else:
1449 1449 self.ui.debug("local deleted %s\n" % f)
1450 1450
1451 1451 del mw, m1, m2, ma
1452 1452
1453 1453 if force:
1454 1454 for f in merge:
1455 1455 get[f] = merge[f][1]
1456 1456 merge = {}
1457 1457
1458 1458 if linear_path or force:
1459 1459 # we don't need to do any magic, just jump to the new rev
1460 1460 mode = 'n'
1461 1461 p1, p2 = p2, nullid
1462 1462 else:
1463 1463 if not allow:
1464 1464 self.ui.status("this update spans a branch" +
1465 1465 " affecting the following files:\n")
1466 1466 fl = merge.keys() + get.keys()
1467 1467 fl.sort()
1468 1468 for f in fl:
1469 1469 cf = ""
1470 1470 if f in merge: cf = " (resolve)"
1471 1471 self.ui.status(" %s%s\n" % (f, cf))
1472 1472 self.ui.warn("aborting update spanning branches!\n")
1473 1473 self.ui.status("(use update -m to perform a branch merge)\n")
1474 1474 return 1
1475 1475 # we have to remember what files we needed to get/change
1476 1476 # because any file that's different from either one of its
1477 1477 # parents must be in the changeset
1478 1478 mode = 'm'
1479 1479 if moddirstate:
1480 1480 self.dirstate.update(mark.keys(), "m")
1481 1481
1482 1482 if moddirstate:
1483 1483 self.dirstate.setparents(p1, p2)
1484 1484
1485 1485 # get the files we don't need to change
1486 1486 files = get.keys()
1487 1487 files.sort()
1488 1488 for f in files:
1489 1489 if f[0] == "/": continue
1490 1490 self.ui.note("getting %s\n" % f)
1491 1491 t = self.file(f).read(get[f])
1492 1492 try:
1493 1493 self.wfile(f, "w").write(t)
1494 1494 except IOError:
1495 1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1496 1496 self.wfile(f, "w").write(t)
1497 1497 util.set_exec(self.wjoin(f), mf2[f])
1498 1498 if moddirstate:
1499 1499 self.dirstate.update([f], mode)
1500 1500
1501 1501 # merge the tricky bits
1502 1502 files = merge.keys()
1503 1503 files.sort()
1504 1504 for f in files:
1505 1505 self.ui.status("merging %s\n" % f)
1506 1506 m, o, flag = merge[f]
1507 1507 self.merge3(f, m, o)
1508 1508 util.set_exec(self.wjoin(f), flag)
1509 1509 if moddirstate:
1510 1510 self.dirstate.update([f], 'm')
1511 1511
1512 1512 remove.sort()
1513 1513 for f in remove:
1514 1514 self.ui.note("removing %s\n" % f)
1515 1515 try:
1516 1516 os.unlink(f)
1517 1517 except OSError, inst:
1518 1518 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1519 1519 # try removing directories that might now be empty
1520 1520 try: os.removedirs(os.path.dirname(f))
1521 1521 except: pass
1522 1522 if moddirstate:
1523 1523 if mode == 'n':
1524 1524 self.dirstate.forget(remove)
1525 1525 else:
1526 1526 self.dirstate.update(remove, 'r')
1527 1527
1528 1528 def merge3(self, fn, my, other):
1529 1529 """perform a 3-way merge in the working directory"""
1530 1530
1531 1531 def temp(prefix, node):
1532 1532 pre = "%s~%s." % (os.path.basename(fn), prefix)
1533 1533 (fd, name) = tempfile.mkstemp("", pre)
1534 1534 f = os.fdopen(fd, "wb")
1535 1535 f.write(fl.revision(node))
1536 1536 f.close()
1537 1537 return name
1538 1538
1539 1539 fl = self.file(fn)
1540 1540 base = fl.ancestor(my, other)
1541 1541 a = self.wjoin(fn)
1542 1542 b = temp("base", base)
1543 1543 c = temp("other", other)
1544 1544
1545 1545 self.ui.note("resolving %s\n" % fn)
1546 1546 self.ui.debug("file %s: other %s ancestor %s\n" %
1547 1547 (fn, short(other), short(base)))
1548 1548
1549 1549 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1550 1550 or "hgmerge")
1551 1551 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1552 1552 if r:
1553 1553 self.ui.warn("merging %s failed!\n" % fn)
1554 1554
1555 1555 os.unlink(b)
1556 1556 os.unlink(c)
1557 1557
1558 1558 def verify(self):
1559 1559 filelinkrevs = {}
1560 1560 filenodes = {}
1561 1561 changesets = revisions = files = 0
1562 1562 errors = 0
1563 1563
1564 1564 seen = {}
1565 1565 self.ui.status("checking changesets\n")
1566 1566 for i in range(self.changelog.count()):
1567 1567 changesets += 1
1568 1568 n = self.changelog.node(i)
1569 1569 if n in seen:
1570 1570 self.ui.warn("duplicate changeset at revision %d\n" % i)
1571 1571 errors += 1
1572 1572 seen[n] = 1
1573 1573
1574 1574 for p in self.changelog.parents(n):
1575 1575 if p not in self.changelog.nodemap:
1576 1576 self.ui.warn("changeset %s has unknown parent %s\n" %
1577 1577 (short(n), short(p)))
1578 1578 errors += 1
1579 1579 try:
1580 1580 changes = self.changelog.read(n)
1581 1581 except Exception, inst:
1582 1582 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1583 1583 errors += 1
1584 1584
1585 1585 for f in changes[3]:
1586 1586 filelinkrevs.setdefault(f, []).append(i)
1587 1587
1588 1588 seen = {}
1589 1589 self.ui.status("checking manifests\n")
1590 1590 for i in range(self.manifest.count()):
1591 1591 n = self.manifest.node(i)
1592 1592 if n in seen:
1593 1593 self.ui.warn("duplicate manifest at revision %d\n" % i)
1594 1594 errors += 1
1595 1595 seen[n] = 1
1596 1596
1597 1597 for p in self.manifest.parents(n):
1598 1598 if p not in self.manifest.nodemap:
1599 1599 self.ui.warn("manifest %s has unknown parent %s\n" %
1600 1600 (short(n), short(p)))
1601 1601 errors += 1
1602 1602
1603 1603 try:
1604 1604 delta = mdiff.patchtext(self.manifest.delta(n))
1605 1605 except KeyboardInterrupt:
1606 1606 self.ui.warn("aborted")
1607 1607 sys.exit(0)
1608 1608 except Exception, inst:
1609 1609 self.ui.warn("unpacking manifest %s: %s\n"
1610 1610 % (short(n), inst))
1611 1611 errors += 1
1612 1612
1613 1613 ff = [ l.split('\0') for l in delta.splitlines() ]
1614 1614 for f, fn in ff:
1615 1615 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1616 1616
1617 1617 self.ui.status("crosschecking files in changesets and manifests\n")
1618 1618 for f in filenodes:
1619 1619 if f not in filelinkrevs:
1620 1620 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1621 1621 errors += 1
1622 1622
1623 1623 for f in filelinkrevs:
1624 1624 if f not in filenodes:
1625 1625 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1626 1626 errors += 1
1627 1627
1628 1628 self.ui.status("checking files\n")
1629 1629 ff = filenodes.keys()
1630 1630 ff.sort()
1631 1631 for f in ff:
1632 1632 if f == "/dev/null": continue
1633 1633 files += 1
1634 1634 fl = self.file(f)
1635 1635 nodes = { nullid: 1 }
1636 1636 seen = {}
1637 1637 for i in range(fl.count()):
1638 1638 revisions += 1
1639 1639 n = fl.node(i)
1640 1640
1641 1641 if n in seen:
1642 1642 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1643 1643 errors += 1
1644 1644
1645 1645 if n not in filenodes[f]:
1646 1646 self.ui.warn("%s: %d:%s not in manifests\n"
1647 1647 % (f, i, short(n)))
1648 1648 errors += 1
1649 1649 else:
1650 1650 del filenodes[f][n]
1651 1651
1652 1652 flr = fl.linkrev(n)
1653 1653 if flr not in filelinkrevs[f]:
1654 1654 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1655 1655 % (f, short(n), fl.linkrev(n)))
1656 1656 errors += 1
1657 1657 else:
1658 1658 filelinkrevs[f].remove(flr)
1659 1659
1660 1660 # verify contents
1661 1661 try:
1662 1662 t = fl.read(n)
1663 1663 except Exception, inst:
1664 1664 self.ui.warn("unpacking file %s %s: %s\n"
1665 1665 % (f, short(n), inst))
1666 1666 errors += 1
1667 1667
1668 1668 # verify parents
1669 1669 (p1, p2) = fl.parents(n)
1670 1670 if p1 not in nodes:
1671 1671 self.ui.warn("file %s:%s unknown parent 1 %s" %
1672 1672 (f, short(n), short(p1)))
1673 1673 errors += 1
1674 1674 if p2 not in nodes:
1675 1675 self.ui.warn("file %s:%s unknown parent 2 %s" %
1676 1676 (f, short(n), short(p2)))
1677 1677 errors += 1
1678 1678 nodes[n] = 1
1679 1679
1680 1680 # cross-check
1681 1681 for node in filenodes[f]:
1682 1682 self.ui.warn("node %s in manifests not in %s\n"
1683 1683 % (hex(node), f))
1684 1684 errors += 1
1685 1685
1686 1686 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1687 1687 (files, changesets, revisions))
1688 1688
1689 1689 if errors:
1690 1690 self.ui.warn("%d integrity errors encountered!\n" % errors)
1691 1691 return 1
1692 1692
1693 1693 class httprepository:
1694 1694 def __init__(self, ui, path):
1695 1695 self.url = path
1696 1696 self.ui = ui
1697 1697 no_list = [ "localhost", "127.0.0.1" ]
1698 1698 host = ui.config("http_proxy", "host")
1699 1699 if host is None:
1700 1700 host = os.environ.get("http_proxy")
1701 1701 if host and host.startswith('http://'):
1702 1702 host = host[7:]
1703 1703 user = ui.config("http_proxy", "user")
1704 1704 passwd = ui.config("http_proxy", "passwd")
1705 1705 no = ui.config("http_proxy", "no")
1706 1706 if no is None:
1707 1707 no = os.environ.get("no_proxy")
1708 1708 if no:
1709 1709 no_list = no_list + no.split(",")
1710 1710
1711 1711 no_proxy = 0
1712 1712 for h in no_list:
1713 1713 if (path.startswith("http://" + h + "/") or
1714 1714 path.startswith("http://" + h + ":") or
1715 1715 path == "http://" + h):
1716 1716 no_proxy = 1
1717 1717
1718 1718 # Note: urllib2 takes proxy values from the environment and those will
1719 1719 # take precedence
1720 1720 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1721 1721 if os.environ.has_key(env):
1722 1722 del os.environ[env]
1723 1723
1724 1724 proxy_handler = urllib2.BaseHandler()
1725 1725 if host and not no_proxy:
1726 1726 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1727 1727
1728 1728 authinfo = None
1729 1729 if user and passwd:
1730 1730 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1731 1731 passmgr.add_password(None, host, user, passwd)
1732 1732 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1733 1733
1734 1734 opener = urllib2.build_opener(proxy_handler, authinfo)
1735 1735 urllib2.install_opener(opener)
1736 1736
1737 1737 def dev(self):
1738 1738 return -1
1739 1739
1740 1740 def do_cmd(self, cmd, **args):
1741 1741 self.ui.debug("sending %s command\n" % cmd)
1742 1742 q = {"cmd": cmd}
1743 1743 q.update(args)
1744 1744 qs = urllib.urlencode(q)
1745 1745 cu = "%s?%s" % (self.url, qs)
1746 1746 return urllib2.urlopen(cu)
1747 1747
1748 1748 def heads(self):
1749 1749 d = self.do_cmd("heads").read()
1750 1750 try:
1751 1751 return map(bin, d[:-1].split(" "))
1752 1752 except:
1753 1753 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1754 1754 raise
1755 1755
1756 def verify_hg_repo(self, resp):
1757 if (resp.headers['content-type'] == 'application/hg-0.1'):
1758 pass
1759 else:
1760 msg = """'%s' does not appear to be a valid hg repository -
1761 missing a 'Content-type: application/hg-0.1' HTTP header""" % (self.url,)
1762 raise RepoError(msg)
1763
1756 1764 def branches(self, nodes):
1757 1765 n = " ".join(map(hex, nodes))
1758 d = self.do_cmd("branches", nodes=n).read()
1766 resp = self.do_cmd("branches", nodes=n);
1767 self.verify_hg_repo(resp);
1759 1768 try:
1769 d = resp.read()
1760 1770 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1761 1771 return br
1762 1772 except:
1763 1773 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1764 1774 raise
1765 1775
1766 1776 def between(self, pairs):
1767 1777 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1768 d = self.do_cmd("between", pairs=n).read()
1778 resp = self.do_cmd("between", pairs=n)
1779 self.verify_hg_repo(resp)
1769 1780 try:
1781 d = resp.read()
1770 1782 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1771 1783 return p
1772 1784 except:
1773 1785 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1774 1786 raise
1775 1787
1776 1788 def changegroup(self, nodes):
1777 1789 n = " ".join(map(hex, nodes))
1778 f = self.do_cmd("changegroup", roots=n)
1790 resp = self.do_cmd("changegroup", roots=n)
1791 self.verify_hg_repo(resp)
1779 1792 bytes = 0
1780 1793
1781 1794 class zread:
1782 1795 def __init__(self, f):
1783 1796 self.zd = zlib.decompressobj()
1784 1797 self.f = f
1785 1798 self.buf = ""
1786 1799 def read(self, l):
1787 1800 while l > len(self.buf):
1788 r = f.read(4096)
1801 r = self.f.read(4096)
1789 1802 if r:
1790 1803 self.buf += self.zd.decompress(r)
1791 1804 else:
1792 1805 self.buf += self.zd.flush()
1793 1806 break
1794 1807 d, self.buf = self.buf[:l], self.buf[l:]
1795 1808 return d
1796 1809
1797 return zread(f)
1810 return zread(resp)
1798 1811
1799 1812 class remotelock:
1800 1813 def __init__(self, repo):
1801 1814 self.repo = repo
1802 1815 def release(self):
1803 1816 self.repo.unlock()
1804 1817 self.repo = None
1805 1818 def __del__(self):
1806 1819 if self.repo:
1807 1820 self.release()
1808 1821
1809 1822 class sshrepository:
1810 1823 def __init__(self, ui, path):
1811 1824 self.url = path
1812 1825 self.ui = ui
1813 1826
1814 1827 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1815 1828 if not m:
1816 1829 raise RepoError("couldn't parse destination %s\n" % path)
1817 1830
1818 1831 self.user = m.group(2)
1819 1832 self.host = m.group(3)
1820 1833 self.port = m.group(5)
1821 1834 self.path = m.group(7)
1822 1835
1823 1836 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1824 1837 args = self.port and ("%s -p %s") % (args, self.port) or args
1825 1838 path = self.path or ""
1826 1839
1827 1840 cmd = "ssh %s 'hg -R %s serve --stdio'"
1828 1841 cmd = cmd % (args, path)
1829 1842
1830 1843 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1831 1844
1832 1845 def readerr(self):
1833 1846 while 1:
1834 1847 r,w,x = select.select([self.pipee], [], [], 0)
1835 1848 if not r: break
1836 1849 l = self.pipee.readline()
1837 1850 if not l: break
1838 1851 self.ui.status("remote: ", l)
1839 1852
1840 1853 def __del__(self):
1841 1854 self.pipeo.close()
1842 1855 self.pipei.close()
1843 1856 for l in self.pipee:
1844 1857 self.ui.status("remote: ", l)
1845 1858 self.pipee.close()
1846 1859
1847 1860 def dev(self):
1848 1861 return -1
1849 1862
1850 1863 def do_cmd(self, cmd, **args):
1851 1864 self.ui.debug("sending %s command\n" % cmd)
1852 1865 self.pipeo.write("%s\n" % cmd)
1853 1866 for k, v in args.items():
1854 1867 self.pipeo.write("%s %d\n" % (k, len(v)))
1855 1868 self.pipeo.write(v)
1856 1869 self.pipeo.flush()
1857 1870
1858 1871 return self.pipei
1859 1872
1860 1873 def call(self, cmd, **args):
1861 1874 r = self.do_cmd(cmd, **args)
1862 1875 l = r.readline()
1863 1876 self.readerr()
1864 1877 try:
1865 1878 l = int(l)
1866 1879 except:
1867 1880 raise RepoError("unexpected response '%s'" % l)
1868 1881 return r.read(l)
1869 1882
1870 1883 def lock(self):
1871 1884 self.call("lock")
1872 1885 return remotelock(self)
1873 1886
1874 1887 def unlock(self):
1875 1888 self.call("unlock")
1876 1889
1877 1890 def heads(self):
1878 1891 d = self.call("heads")
1879 1892 try:
1880 1893 return map(bin, d[:-1].split(" "))
1881 1894 except:
1882 1895 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1883 1896
1884 1897 def branches(self, nodes):
1885 1898 n = " ".join(map(hex, nodes))
1886 1899 d = self.call("branches", nodes=n)
1887 1900 try:
1888 1901 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1889 1902 return br
1890 1903 except:
1891 1904 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1892 1905
1893 1906 def between(self, pairs):
1894 1907 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1895 1908 d = self.call("between", pairs=n)
1896 1909 try:
1897 1910 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1898 1911 return p
1899 1912 except:
1900 1913 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1901 1914
1902 1915 def changegroup(self, nodes):
1903 1916 n = " ".join(map(hex, nodes))
1904 1917 f = self.do_cmd("changegroup", roots=n)
1905 1918 return self.pipei
1906 1919
1907 1920 def addchangegroup(self, cg):
1908 1921 d = self.call("addchangegroup")
1909 1922 if d:
1910 1923 raise RepoError("push refused: %s" % d)
1911 1924
1912 1925 while 1:
1913 1926 d = cg.read(4096)
1914 1927 if not d: break
1915 1928 self.pipeo.write(d)
1916 1929 self.readerr()
1917 1930
1918 1931 self.pipeo.flush()
1919 1932
1920 1933 self.readerr()
1921 1934 l = int(self.pipei.readline())
1922 1935 return self.pipei.read(l) != ""
1923 1936
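For reference, a sketch of the framing do_cmd() writes to the ssh pipe: the command name on its own line, then one "key length" header per argument followed by the raw value. The helper below is only illustrative (node strings shortened for readability), not part of the protocol code.

def frame(cmd, **args):
    # mirrors sshrepository.do_cmd(), but returns the bytes instead of writing them
    s = "%s\n" % cmd
    for k, v in args.items():
        s += "%s %d\n" % (k, len(v)) + v
    return s

print repr(frame("between", pairs="aabbcc-ddeeff"))
# -> 'between\npairs 13\naabbcc-ddeeff'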
1924 1937 def repository(ui, path=None, create=0):
1925 1938 if path:
1926 1939 if path.startswith("http://"):
1927 1940 return httprepository(ui, path)
1928 1941 if path.startswith("hg://"):
1929 1942 return httprepository(ui, path.replace("hg://", "http://"))
1930 1943 if path.startswith("old-http://"):
1931 1944 return localrepository(ui, path.replace("old-http://", "http://"))
1932 1945 if path.startswith("ssh://"):
1933 1946 return sshrepository(ui, path)
1934 1947
1935 1948 return localrepository(ui, path, create)
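As a rough illustration of the dispatch in repository() above, the URL scheme alone picks the class; the mock below is self-contained and ignores the ui object and create flag that the real function also takes.

def classify(path):
    # mirrors the prefix checks in repository()
    if path.startswith("http://"):
        return "httprepository"
    if path.startswith("hg://"):
        return "httprepository"      # hg:// is rewritten to http://
    if path.startswith("old-http://"):
        return "localrepository"     # old-http:// is rewritten to http://
    if path.startswith("ssh://"):
        return "sshrepository"
    return "localrepository"

for u in ("http://selenic.com/hg", "ssh://user@host/repo", "/tmp/repo"):
    print u, "->", classify(u)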
@@ -1,783 +1,783 b''
1 1 # hgweb.py - web interface to a mercurial repository
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, cgi, time, re, difflib, sys, zlib
10 10 from mercurial.hg import *
11 11 from mercurial.ui import *
12 12
13 13 def templatepath():
14 14 for f in "templates", "../templates":
15 15 p = os.path.join(os.path.dirname(__file__), f)
16 16 if os.path.isdir(p): return p
17 17
18 18 def age(t):
19 19 def plural(t, c):
20 20 if c == 1: return t
21 21 return t + "s"
22 22 def fmt(t, c):
23 23 return "%d %s" % (c, plural(t, c))
24 24
25 25 now = time.time()
26 26 delta = max(1, int(now - t))
27 27
28 28 scales = [["second", 1],
29 29 ["minute", 60],
30 30 ["hour", 3600],
31 31 ["day", 3600 * 24],
32 32 ["week", 3600 * 24 * 7],
33 33 ["month", 3600 * 24 * 30],
34 34 ["year", 3600 * 24 * 365]]
35 35
36 36 scales.reverse()
37 37
38 38 for t, s in scales:
39 39 n = delta / s
40 40 if n >= 2 or s == 1: return fmt(t, n)
41 41
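For instance, assuming age() as defined above is in scope, it walks the scales from years down and returns the first unit that yields a count of at least two, falling back to seconds:

import time

print age(time.time() - 5400)    # roughly "90 minutes"
print age(time.time() - 45)      # "45 seconds"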
42 42 def nl2br(text):
43 43 return text.replace('\n', '<br/>\n')
44 44
45 45 def obfuscate(text):
46 46 return ''.join([ '&#%d;' % ord(c) for c in text ])
47 47
48 48 def up(p):
49 49 if p[0] != "/": p = "/" + p
50 50 if p[-1] == "/": p = p[:-1]
51 51 up = os.path.dirname(p)
52 52 if up == "/":
53 53 return "/"
54 54 return up + "/"
55 55
56 56 def httphdr(type):
57 57 sys.stdout.write('Content-type: %s\n\n' % type)
58 58
59 59 def write(*things):
60 60 for thing in things:
61 61 if hasattr(thing, "__iter__"):
62 62 for part in thing:
63 63 write(part)
64 64 else:
65 65 sys.stdout.write(str(thing))
66 66
67 67 def template(tmpl, filters = {}, **map):
68 68 while tmpl:
69 69 m = re.search(r"#([a-zA-Z0-9]+)((\|[a-zA-Z0-9]+)*)#", tmpl)
70 70 if m:
71 71 yield tmpl[:m.start(0)]
72 72 v = map.get(m.group(1), "")
73 73 v = callable(v) and v() or v
74 74
75 75 fl = m.group(2)
76 76 if fl:
77 77 for f in fl.split("|")[1:]:
78 78 v = filters[f](v)
79 79
80 80 yield v
81 81 tmpl = tmpl[m.end(0):]
82 82 else:
83 83 yield tmpl
84 84 return
85 85
86 86 class templater:
87 87 def __init__(self, mapfile, filters = {}, defaults = {}):
88 88 self.cache = {}
89 89 self.map = {}
90 90 self.base = os.path.dirname(mapfile)
91 91 self.filters = filters
92 92 self.defaults = defaults
93 93
94 94 for l in file(mapfile):
95 95 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
96 96 if m:
97 97 self.cache[m.group(1)] = m.group(2)
98 98 else:
99 99 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
100 100 if m:
101 101 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
102 102 else:
103 103 raise "unknown map entry '%s'" % l
104 104
105 105 def __call__(self, t, **map):
106 106 m = self.defaults.copy()
107 107 m.update(map)
108 108 try:
109 109 tmpl = self.cache[t]
110 110 except KeyError:
111 111 tmpl = self.cache[t] = file(self.map[t]).read()
112 112 return template(tmpl, self.filters, **m)
113 113
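A hypothetical use of the template() generator and the filter scheme above, assuming template() and the cgi module (already imported at the top of this file) are in scope: #key# is replaced from the keyword map and each |filter suffix is applied left to right.

filters = {"firstline": lambda x: x.splitlines(1)[0],
           "escape": cgi.escape}

html = "".join(template("<p>#desc|firstline|escape#</p>",
                        filters,
                        desc="fix <pull> over http\nsecond line"))
print html    # first line of the description, with <pull> HTML-escaped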
114 114 def rfc822date(x):
115 115 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(x))
116 116
117 117 class hgweb:
118 118 maxchanges = 10
119 119 maxfiles = 10
120 120
121 121 def __init__(self, path, name, templates = ""):
122 122 self.templates = templates or templatepath()
123 123 self.reponame = name
124 124 self.path = path
125 125 self.mtime = -1
126 126 self.viewonly = 0
127 127
128 128 self.filters = {
129 129 "escape": cgi.escape,
130 130 "age": age,
131 131 "date": (lambda x: time.asctime(time.gmtime(x))),
132 132 "addbreaks": nl2br,
133 133 "obfuscate": obfuscate,
134 134 "short": (lambda x: x[:12]),
135 135 "firstline": (lambda x: x.splitlines(1)[0]),
136 136 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
137 137 "rfc822date": rfc822date,
138 138 }
139 139
140 140 def refresh(self):
141 141 s = os.stat(os.path.join(self.path, ".hg", "00changelog.i"))
142 142 if s.st_mtime != self.mtime:
143 143 self.mtime = s.st_mtime
144 144 self.repo = repository(ui(), self.path)
145 145
146 146 def date(self, cs):
147 147 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
148 148
149 149 def listfiles(self, files, mf):
150 150 for f in files[:self.maxfiles]:
151 151 yield self.t("filenodelink", node = hex(mf[f]), file = f)
152 152 if len(files) > self.maxfiles:
153 153 yield self.t("fileellipses")
154 154
155 155 def listfilediffs(self, files, changeset):
156 156 for f in files[:self.maxfiles]:
157 157 yield self.t("filedifflink", node = hex(changeset), file = f)
158 158 if len(files) > self.maxfiles:
159 159 yield self.t("fileellipses")
160 160
161 161 def parents(self, t1, nodes=[], rev=None,**args):
162 162 if not rev: rev = lambda x: ""
163 163 for node in nodes:
164 164 if node != nullid:
165 165 yield self.t(t1, node = hex(node), rev = rev(node), **args)
166 166
167 167 def showtag(self, t1, node=nullid, **args):
168 168 for t in self.repo.nodetags(node):
169 169 yield self.t(t1, tag = t, **args)
170 170
171 171 def diff(self, node1, node2, files):
172 172 def filterfiles(list, files):
173 173 l = [ x for x in list if x in files ]
174 174
175 175 for f in files:
176 176 if f[-1] != os.sep: f += os.sep
177 177 l += [ x for x in list if x.startswith(f) ]
178 178 return l
179 179
180 180 parity = [0]
181 181 def diffblock(diff, f, fn):
182 182 yield self.t("diffblock",
183 183 lines = prettyprintlines(diff),
184 184 parity = parity[0],
185 185 file = f,
186 186 filenode = hex(fn or nullid))
187 187 parity[0] = 1 - parity[0]
188 188
189 189 def prettyprintlines(diff):
190 190 for l in diff.splitlines(1):
191 191 if l.startswith('+'):
192 192 yield self.t("difflineplus", line = l)
193 193 elif l.startswith('-'):
194 194 yield self.t("difflineminus", line = l)
195 195 elif l.startswith('@'):
196 196 yield self.t("difflineat", line = l)
197 197 else:
198 198 yield self.t("diffline", line = l)
199 199
200 200 r = self.repo
201 201 cl = r.changelog
202 202 mf = r.manifest
203 203 change1 = cl.read(node1)
204 204 change2 = cl.read(node2)
205 205 mmap1 = mf.read(change1[0])
206 206 mmap2 = mf.read(change2[0])
207 207 date1 = self.date(change1)
208 208 date2 = self.date(change2)
209 209
210 210 c, a, d, u = r.changes(node1, node2)
211 211 if files:
212 212 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
213 213
214 214 for f in c:
215 215 to = r.file(f).read(mmap1[f])
216 216 tn = r.file(f).read(mmap2[f])
217 217 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
218 218 for f in a:
219 219 to = None
220 220 tn = r.file(f).read(mmap2[f])
221 221 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
222 222 for f in d:
223 223 to = r.file(f).read(mmap1[f])
224 224 tn = None
225 225 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
226 226
227 227 def header(self):
228 228 yield self.t("header")
229 229
230 230 def footer(self):
231 231 yield self.t("footer")
232 232
233 233 def changelog(self, pos):
234 234 def changenav():
235 235 def seq(factor = 1):
236 236 yield 1 * factor
237 237 yield 3 * factor
238 238 #yield 5 * factor
239 239 for f in seq(factor * 10):
240 240 yield f
241 241
242 242 l = []
243 243 for f in seq():
244 244 if f < self.maxchanges / 2: continue
245 245 if f > count: break
246 246 r = "%d" % f
247 247 if pos + f < count: l.append(("+" + r, pos + f))
248 248 if pos - f >= 0: l.insert(0, ("-" + r, pos - f))
249 249
250 250 yield self.t("naventry", rev = 0, label="(0)")
251 251
252 252 for label, rev in l:
253 253 yield self.t("naventry", label = label, rev = rev)
254 254
255 255 yield self.t("naventry", label="tip")
256 256
257 257 def changelist():
258 258 parity = (start - end) & 1
259 259 cl = self.repo.changelog
260 260 l = [] # build a list in forward order for efficiency
261 261 for i in range(start, end):
262 262 n = cl.node(i)
263 263 changes = cl.read(n)
264 264 hn = hex(n)
265 265 t = float(changes[2].split(' ')[0])
266 266
267 267 l.insert(0, self.t(
268 268 'changelogentry',
269 269 parity = parity,
270 270 author = changes[1],
271 271 parent = self.parents("changelogparent",
272 272 cl.parents(n), cl.rev),
273 273 changelogtag = self.showtag("changelogtag",n),
274 274 manifest = hex(changes[0]),
275 275 desc = changes[4],
276 276 date = t,
277 277 files = self.listfilediffs(changes[3], n),
278 278 rev = i,
279 279 node = hn))
280 280 parity = 1 - parity
281 281
282 282 yield l
283 283
284 284 cl = self.repo.changelog
285 285 mf = cl.read(cl.tip())[0]
286 286 count = cl.count()
287 287 start = max(0, pos - self.maxchanges + 1)
288 288 end = min(count, start + self.maxchanges)
289 289 pos = end - 1
290 290
291 291 yield self.t('changelog',
292 292 changenav = changenav,
293 293 manifest = hex(mf),
294 294 rev = pos, changesets = count, entries = changelist)
295 295
296 296 def search(self, query):
297 297
298 298 def changelist():
299 299 cl = self.repo.changelog
300 300 count = 0
301 301 qw = query.lower().split()
302 302
303 303 def revgen():
304 304 for i in range(cl.count() - 1, 0, -100):
305 305 l = []
306 306 for j in range(max(0, i - 100), i):
307 307 n = cl.node(j)
308 308 changes = cl.read(n)
309 309 l.insert(0, (n, j, changes))
310 310 for e in l:
311 311 yield e
312 312
313 313 for n, i, changes in revgen():
314 314 miss = 0
315 315 for q in qw:
316 316 if not (q in changes[1].lower() or
317 317 q in changes[4].lower() or
318 318 q in " ".join(changes[3][:20]).lower()):
319 319 miss = 1
320 320 break
321 321 if miss: continue
322 322
323 323 count += 1
324 324 hn = hex(n)
325 325 t = float(changes[2].split(' ')[0])
326 326
327 327 yield self.t(
328 328 'searchentry',
329 329 parity = count & 1,
330 330 author = changes[1],
331 331 parent = self.parents("changelogparent",
332 332 cl.parents(n), cl.rev),
333 333 changelogtag = self.showtag("changelogtag",n),
334 334 manifest = hex(changes[0]),
335 335 desc = changes[4],
336 336 date = t,
337 337 files = self.listfilediffs(changes[3], n),
338 338 rev = i,
339 339 node = hn)
340 340
341 341 if count >= self.maxchanges: break
342 342
343 343 cl = self.repo.changelog
344 344 mf = cl.read(cl.tip())[0]
345 345
346 346 yield self.t('search',
347 347 query = query,
348 348 manifest = hex(mf),
349 349 entries = changelist)
350 350
351 351 def changeset(self, nodeid):
352 352 n = bin(nodeid)
353 353 cl = self.repo.changelog
354 354 changes = cl.read(n)
355 355 p1 = cl.parents(n)[0]
356 356 t = float(changes[2].split(' ')[0])
357 357
358 358 files = []
359 359 mf = self.repo.manifest.read(changes[0])
360 360 for f in changes[3]:
361 361 files.append(self.t("filenodelink",
362 362 filenode = hex(mf.get(f, nullid)), file = f))
363 363
364 364 def diff():
365 365 yield self.diff(p1, n, None)
366 366
367 367 yield self.t('changeset',
368 368 diff = diff,
369 369 rev = cl.rev(n),
370 370 node = nodeid,
371 371 parent = self.parents("changesetparent",
372 372 cl.parents(n), cl.rev),
373 373 changesettag = self.showtag("changesettag",n),
374 374 manifest = hex(changes[0]),
375 375 author = changes[1],
376 376 desc = changes[4],
377 377 date = t,
378 378 files = files)
379 379
380 380 def filelog(self, f, filenode):
381 381 cl = self.repo.changelog
382 382 fl = self.repo.file(f)
383 383 count = fl.count()
384 384
385 385 def entries():
386 386 l = []
387 387 parity = (count - 1) & 1
388 388
389 389 for i in range(count):
390 390
391 391 n = fl.node(i)
392 392 lr = fl.linkrev(n)
393 393 cn = cl.node(lr)
394 394 cs = cl.read(cl.node(lr))
395 395 t = float(cs[2].split(' ')[0])
396 396
397 397 l.insert(0, self.t("filelogentry",
398 398 parity = parity,
399 399 filenode = hex(n),
400 400 filerev = i,
401 401 file = f,
402 402 node = hex(cn),
403 403 author = cs[1],
404 404 date = t,
405 405 parent = self.parents("filelogparent",
406 406 fl.parents(n), fl.rev, file=f),
407 407 desc = cs[4]))
408 408 parity = 1 - parity
409 409
410 410 yield l
411 411
412 412 yield self.t("filelog",
413 413 file = f,
414 414 filenode = filenode,
415 415 entries = entries)
416 416
417 417 def filerevision(self, f, node):
418 418 fl = self.repo.file(f)
419 419 n = bin(node)
420 420 text = fl.read(n)
421 421 changerev = fl.linkrev(n)
422 422 cl = self.repo.changelog
423 423 cn = cl.node(changerev)
424 424 cs = cl.read(cn)
425 425 t = float(cs[2].split(' ')[0])
426 426 mfn = cs[0]
427 427
428 428 def lines():
429 429 for l, t in enumerate(text.splitlines(1)):
430 430 yield self.t("fileline", line = t,
431 431 linenumber = "% 6d" % (l + 1),
432 432 parity = l & 1)
433 433
434 434 yield self.t("filerevision", file = f,
435 435 filenode = node,
436 436 path = up(f),
437 437 text = lines(),
438 438 rev = changerev,
439 439 node = hex(cn),
440 440 manifest = hex(mfn),
441 441 author = cs[1],
442 442 date = t,
443 443 parent = self.parents("filerevparent",
444 444 fl.parents(n), fl.rev, file=f),
445 445 permissions = self.repo.manifest.readflags(mfn)[f])
446 446
447 447 def fileannotate(self, f, node):
448 448 bcache = {}
449 449 ncache = {}
450 450 fl = self.repo.file(f)
451 451 n = bin(node)
452 452 changerev = fl.linkrev(n)
453 453
454 454 cl = self.repo.changelog
455 455 cn = cl.node(changerev)
456 456 cs = cl.read(cn)
457 457 t = float(cs[2].split(' ')[0])
458 458 mfn = cs[0]
459 459
460 460 def annotate():
461 461 parity = 1
462 462 last = None
463 463 for r, l in fl.annotate(n):
464 464 try:
465 465 cnode = ncache[r]
466 466 except KeyError:
467 467 cnode = ncache[r] = self.repo.changelog.node(r)
468 468
469 469 try:
470 470 name = bcache[r]
471 471 except KeyError:
472 472 cl = self.repo.changelog.read(cnode)
473 473 name = cl[1]
474 474 p = name.find('@')
475 475 if p >= 0:
476 476 name = name[:p]
477 477 p = name.find('<')
478 478 if p >= 0:
479 479 name = name[p+1:]
480 480 bcache[r] = name
481 481
482 482 if last != cnode:
483 483 parity = 1 - parity
484 484 last = cnode
485 485
486 486 yield self.t("annotateline",
487 487 parity = parity,
488 488 node = hex(cnode),
489 489 rev = r,
490 490 author = name,
491 491 file = f,
492 492 line = l)
493 493
494 494 yield self.t("fileannotate",
495 495 file = f,
496 496 filenode = node,
497 497 annotate = annotate,
498 498 path = up(f),
499 499 rev = changerev,
500 500 node = hex(cn),
501 501 manifest = hex(mfn),
502 502 author = cs[1],
503 503 date = t,
504 504 parent = self.parents("fileannotateparent",
505 505 fl.parents(n), fl.rev, file=f),
506 506 permissions = self.repo.manifest.readflags(mfn)[f])
507 507
508 508 def manifest(self, mnode, path):
509 509 mf = self.repo.manifest.read(bin(mnode))
510 510 rev = self.repo.manifest.rev(bin(mnode))
511 511 node = self.repo.changelog.node(rev)
512 512 mff=self.repo.manifest.readflags(bin(mnode))
513 513
514 514 files = {}
515 515
516 516 p = path[1:]
517 517 l = len(p)
518 518
519 519 for f,n in mf.items():
520 520 if f[:l] != p:
521 521 continue
522 522 remain = f[l:]
523 523 if "/" in remain:
524 524 short = remain[:remain.find("/") + 1] # bleah
525 525 files[short] = (f, None)
526 526 else:
527 527 short = os.path.basename(remain)
528 528 files[short] = (f, n)
529 529
530 530 def filelist():
531 531 parity = 0
532 532 fl = files.keys()
533 533 fl.sort()
534 534 for f in fl:
535 535 full, fnode = files[f]
536 536 if fnode:
537 537 yield self.t("manifestfileentry",
538 538 file = full,
539 539 manifest = mnode,
540 540 filenode = hex(fnode),
541 541 parity = parity,
542 542 basename = f,
543 543 permissions = mff[full])
544 544 else:
545 545 yield self.t("manifestdirentry",
546 546 parity = parity,
547 547 path = os.path.join(path, f),
548 548 manifest = mnode, basename = f[:-1])
549 549 parity = 1 - parity
550 550
551 551 yield self.t("manifest",
552 552 manifest = mnode,
553 553 rev = rev,
554 554 node = hex(node),
555 555 path = path,
556 556 up = up(path),
557 557 entries = filelist)
558 558
559 559 def tags(self):
560 560 cl = self.repo.changelog
561 561 mf = cl.read(cl.tip())[0]
562 562
563 563 i = self.repo.tagslist()
564 564 i.reverse()
565 565
566 566 def entries():
567 567 parity = 0
568 568 for k,n in i:
569 569 yield self.t("tagentry",
570 570 parity = parity,
571 571 tag = k,
572 572 node = hex(n))
573 573 parity = 1 - parity
574 574
575 575 yield self.t("tags",
576 576 manifest = hex(mf),
577 577 entries = entries)
578 578
579 579 def filediff(self, file, changeset):
580 580 n = bin(changeset)
581 581 cl = self.repo.changelog
582 582 p1 = cl.parents(n)[0]
583 583 cs = cl.read(n)
584 584 mf = self.repo.manifest.read(cs[0])
585 585
586 586 def diff():
587 587 yield self.diff(p1, n, file)
588 588
589 589 yield self.t("filediff",
590 590 file = file,
591 591 filenode = hex(mf.get(file, nullid)),
592 592 node = changeset,
593 593 rev = self.repo.changelog.rev(n),
594 594 parent = self.parents("filediffparent",
595 595 cl.parents(n), cl.rev),
596 596 diff = diff)
597 597
598 598 # add tags to things
599 599 # tags -> list of changesets corresponding to tags
600 600 # find tag, changeset, file
601 601
602 602 def run(self):
603 603 self.refresh()
604 604 args = cgi.parse()
605 605
606 606 m = os.path.join(self.templates, "map")
607 607 if args.has_key('style'):
608 608 b = os.path.basename("map-" + args['style'][0])
609 609 p = os.path.join(self.templates, b)
610 610 if os.path.isfile(p): m = p
611 611
612 612 port = os.environ["SERVER_PORT"]
613 613 port = port != "80" and (":" + port) or ""
614 614 uri = os.environ["REQUEST_URI"]
615 615 if "?" in uri: uri = uri.split("?")[0]
616 616 url = "http://%s%s%s" % (os.environ["SERVER_NAME"], port, uri)
617 617
618 618 self.t = templater(m, self.filters,
619 619 {"url":url,
620 620 "repo":self.reponame,
621 621 "header":self.header(),
622 622 "footer":self.footer(),
623 623 })
624 624
625 625 if not args.has_key('cmd') or args['cmd'][0] == 'changelog':
626 626 c = self.repo.changelog.count() - 1
627 627 hi = c
628 628 if args.has_key('rev'):
629 629 hi = args['rev'][0]
630 630 try:
631 631 hi = self.repo.changelog.rev(self.repo.lookup(hi))
632 632 except RepoError:
633 633 write(self.search(hi))
634 634 return
635 635
636 636 write(self.changelog(hi))
637 637
638 638 elif args['cmd'][0] == 'changeset':
639 639 write(self.changeset(args['node'][0]))
640 640
641 641 elif args['cmd'][0] == 'manifest':
642 642 write(self.manifest(args['manifest'][0], args['path'][0]))
643 643
644 644 elif args['cmd'][0] == 'tags':
645 645 write(self.tags())
646 646
647 647 elif args['cmd'][0] == 'filediff':
648 648 write(self.filediff(args['file'][0], args['node'][0]))
649 649
650 650 elif args['cmd'][0] == 'file':
651 651 write(self.filerevision(args['file'][0], args['filenode'][0]))
652 652
653 653 elif args['cmd'][0] == 'annotate':
654 654 write(self.fileannotate(args['file'][0], args['filenode'][0]))
655 655
656 656 elif args['cmd'][0] == 'filelog':
657 657 write(self.filelog(args['file'][0], args['filenode'][0]))
658 658
659 659 elif args['cmd'][0] == 'heads':
660 httphdr("text/plain")
660 httphdr("application/mercurial-0.1")
661 661 h = self.repo.heads()
662 662 sys.stdout.write(" ".join(map(hex, h)) + "\n")
663 663
664 664 elif args['cmd'][0] == 'branches':
665 httphdr("text/plain")
665 httphdr("application/mercurial-0.1")
666 666 nodes = []
667 667 if args.has_key('nodes'):
668 668 nodes = map(bin, args['nodes'][0].split(" "))
669 669 for b in self.repo.branches(nodes):
670 670 sys.stdout.write(" ".join(map(hex, b)) + "\n")
671 671
672 672 elif args['cmd'][0] == 'between':
673 httphdr("text/plain")
673 httphdr("application/hg-0.1")
674 674 nodes = []
675 675 if args.has_key('pairs'):
676 676 pairs = [ map(bin, p.split("-"))
677 677 for p in args['pairs'][0].split(" ") ]
678 678 for b in self.repo.between(pairs):
679 679 sys.stdout.write(" ".join(map(hex, b)) + "\n")
680 680
681 681 elif args['cmd'][0] == 'changegroup':
682 httphdr("application/hg-changegroup")
682 httphdr("application/mercurial-0.1")
683 683 nodes = []
684 684 if self.viewonly:
685 685 return
686 686
687 687 if args.has_key('roots'):
688 688 nodes = map(bin, args['roots'][0].split(" "))
689 689
690 690 z = zlib.compressobj()
691 691 f = self.repo.changegroup(nodes)
692 692 while 1:
693 693 chunk = f.read(4096)
694 694 if not chunk: break
695 695 sys.stdout.write(z.compress(chunk))
696 696
697 697 sys.stdout.write(z.flush())
698 698
699 699 else:
700 700 write(self.t("error"))
701 701
702 702 def create_server(path, name, templates, address, port,
703 703 accesslog = sys.stdout, errorlog = sys.stderr):
704 704
705 705 import BaseHTTPServer
706 706
707 707 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
708 708 def log_error(self, format, *args):
709 709 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
710 710 self.log_date_time_string(),
711 711 format % args))
712 712
713 713 def log_message(self, format, *args):
714 714 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
715 715 self.log_date_time_string(),
716 716 format % args))
717 717
718 718 def do_POST(self):
719 719 try:
720 720 self.do_hgweb()
721 721 except socket.error, inst:
722 722 if inst.args[0] != 32: raise
723 723
724 724 def do_GET(self):
725 725 self.do_POST()
726 726
727 727 def do_hgweb(self):
728 728 query = ""
729 729 p = self.path.find("?")
730 730 if p >= 0:
731 731 query = self.path[p + 1:]
732 732 query = query.replace('+', ' ')
733 733
734 734 env = {}
735 735 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
736 736 env['REQUEST_METHOD'] = self.command
737 737 env['SERVER_NAME'] = self.server.server_name
738 738 env['SERVER_PORT'] = str(self.server.server_port)
739 739 env['REQUEST_URI'] = "/"
740 740 if query:
741 741 env['QUERY_STRING'] = query
742 742 host = self.address_string()
743 743 if host != self.client_address[0]:
744 744 env['REMOTE_HOST'] = host
745 745 env['REMOTE_ADDR'] = self.client_address[0]
746 746
747 747 if self.headers.typeheader is None:
748 748 env['CONTENT_TYPE'] = self.headers.type
749 749 else:
750 750 env['CONTENT_TYPE'] = self.headers.typeheader
751 751 length = self.headers.getheader('content-length')
752 752 if length:
753 753 env['CONTENT_LENGTH'] = length
754 754 accept = []
755 755 for line in self.headers.getallmatchingheaders('accept'):
756 756 if line[:1] in "\t\n\r ":
757 757 accept.append(line.strip())
758 758 else:
759 759 accept = accept + line[7:].split(',')
760 760 env['HTTP_ACCEPT'] = ','.join(accept)
761 761
762 762 os.environ.update(env)
763 763
764 764 save = sys.argv, sys.stdin, sys.stdout, sys.stderr
765 765 try:
766 766 sys.stdin = self.rfile
767 767 sys.stdout = self.wfile
768 768 sys.argv = ["hgweb.py"]
769 769 if '=' not in query:
770 770 sys.argv.append(query)
771 771 self.send_response(200, "Script output follows")
772 772 hg.run()
773 773 finally:
774 774 sys.argv, sys.stdin, sys.stdout, sys.stderr = save
775 775
776 776 hg = hgweb(path, name, templates)
777 777 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
778 778
779 779 def server(path, name, templates, address, port,
780 780 accesslog = sys.stdout, errorlog = sys.stderr):
781 781 httpd = create_server(path, name, templates, address, port,
782 782 accesslog, errorlog)
783 783 httpd.serve_forever()
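One possible launcher for the helpers above; the repository path and port are placeholders, the empty templates argument falls back to templatepath(), and the module name depends on where hgweb.py is installed (shown here as a plain import).

import hgweb

# serve /path/to/repo on all interfaces, port 8000, logging to stdout/stderr
hgweb.server("/path/to/repo", "my-repo", "", "", 8000)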