merge with crew-stable
Alexis S. L. Carvalho
r5123:79373ec3 merge default
@@ -0,0 +1,24
1 #!/bin/sh
2 # basic test for hg debugrebuildstate
3
4 hg init repo
5 cd repo
6
7 touch foo bar
8 hg ci -Am 'add foo bar'
9
10 touch baz
11 hg add baz
12 hg rm bar
13
14 echo '% state dump'
15 hg debugstate | cut -b 1-16,35- | sort
16 echo '% status'
17 hg st -A
18
19 hg debugrebuildstate
20 echo '% state dump'
21 hg debugstate | cut -b 1-16,35- | sort
22 echo '% status'
23 hg st -A
24
@@ -0,0 +1,17
1 adding bar
2 adding foo
3 % state dump
4 a 0 -1 baz
5 n 644 0 foo
6 r 0 0 bar
7 % status
8 A baz
9 R bar
10 C foo
11 % state dump
12 n 666 -1 bar
13 n 666 -1 foo
14 % status
15 ! bar
16 ? baz
17 C foo
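
The two hunks above are the new test script and its expected output. `hg debugrebuildstate` discards whatever the dirstate contained and re-marks every file in the first parent's manifest as normal (`n`) with mode 0666 (0777 for executables), size -1 and mtime 0, so the never-committed `baz` falls back to unknown (`?`) and the deleted `bar`, back in the dirstate but gone from disk, shows up as missing (`!`). The `cut -b 1-16,35-` in the script keeps only the state, mode, size and file-name columns and drops the timestamp, which would otherwise make the output non-deterministic. For reference, each dirstate entry is serialized with the `_format = ">cllll"` struct shown in the dirstate.py hunk below; here is a small standalone sketch of that encoding (Python 3 syntax, illustrative values, not part of the change):

    import struct

    _FORMAT = ">cllll"   # state, mode, size, mtime, length of the file name

    def pack_entry(state, mode, size, mtime, name):
        # Mirrors dirstate.write(): a fixed-size header followed by the name bytes.
        return struct.pack(_FORMAT, state, mode, size, mtime, len(name)) + name

    # An entry as produced by rebuild(): ('n', 0o666, -1, 0)
    record = pack_entry(b"n", 0o666, -1, 0, b"foo")
    header_size = struct.calcsize(_FORMAT)
    fields = struct.unpack(_FORMAT, record[:header_size])
    print(fields, record[header_size:])
    # (b'n', 438, -1, 0, 3) b'foo'   (438 == 0o666, the mode the test output shows)
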
@@ -1,534 +1,540
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import *
11 11 from i18n import _
12 12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 13 import cStringIO
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 24 self._dirtypl = False
25 25 self._ui = ui
26 26
27 27 def __getattr__(self, name):
28 28 if name == '_map':
29 29 self._read()
30 30 return self._map
31 31 elif name == '_copymap':
32 32 self._read()
33 33 return self._copymap
34 34 elif name == '_branch':
35 35 try:
36 36 self._branch = (self._opener("branch").read().strip()
37 37 or "default")
38 38 except IOError:
39 39 self._branch = "default"
40 40 return self._branch
41 41 elif name == '_pl':
42 42 self._pl = [nullid, nullid]
43 43 try:
44 44 st = self._opener("dirstate").read(40)
45 45 if len(st) == 40:
46 46 self._pl = st[:20], st[20:40]
47 47 except IOError, err:
48 48 if err.errno != errno.ENOENT: raise
49 49 return self._pl
50 50 elif name == '_dirs':
51 51 self._dirs = {}
52 52 for f in self._map:
53 53 self._incpath(f)
54 54 return self._dirs
55 55 elif name == '_ignore':
56 56 files = [self._join('.hgignore')]
57 57 for name, path in self._ui.configitems("ui"):
58 58 if name == 'ignore' or name.startswith('ignore.'):
59 59 files.append(os.path.expanduser(path))
60 60 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
61 61 return self._ignore
62 62 elif name == '_slash':
63 63 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
64 64 return self._slash
65 65 else:
66 66 raise AttributeError, name
67 67
68 68 def _join(self, f):
69 69 return os.path.join(self._root, f)
70 70
71 71 def getcwd(self):
72 72 cwd = os.getcwd()
73 73 if cwd == self._root: return ''
74 74 # self._root ends with a path separator if self._root is '/' or 'C:\'
75 75 rootsep = self._root
76 76 if not rootsep.endswith(os.sep):
77 77 rootsep += os.sep
78 78 if cwd.startswith(rootsep):
79 79 return cwd[len(rootsep):]
80 80 else:
81 81 # we're outside the repo. return an absolute path.
82 82 return cwd
83 83
84 84 def pathto(self, f, cwd=None):
85 85 if cwd is None:
86 86 cwd = self.getcwd()
87 87 path = util.pathto(self._root, cwd, f)
88 88 if self._slash:
89 89 return path.replace(os.sep, '/')
90 90 return path
91 91
92 92 def __getitem__(self, key):
93 93 ''' current states:
94 94 n normal
95 95 m needs merging
96 96 r marked for removal
97 97 a marked for addition
98 98 ? not tracked'''
99 99 return self._map.get(key, ("?",))[0]
100 100
101 101 def __contains__(self, key):
102 102 return key in self._map
103 103
104 104 def __iter__(self):
105 105 a = self._map.keys()
106 106 a.sort()
107 107 for x in a:
108 108 yield x
109 109
110 110 def parents(self):
111 111 return self._pl
112 112
113 113 def branch(self):
114 114 return self._branch
115 115
116 116 def setparents(self, p1, p2=nullid):
117 117 self._dirty = self._dirtypl = True
118 118 self._pl = p1, p2
119 119
120 120 def setbranch(self, branch):
121 121 self._branch = branch
122 122 self._opener("branch", "w").write(branch + '\n')
123 123
124 124 def _read(self):
125 125 self._map = {}
126 126 self._copymap = {}
127 127 if not self._dirtypl:
128 128 self._pl = [nullid, nullid]
129 129 try:
130 130 st = self._opener("dirstate").read()
131 131 except IOError, err:
132 132 if err.errno != errno.ENOENT: raise
133 133 return
134 134 if not st:
135 135 return
136 136
137 137 if not self._dirtypl:
138 138 self._pl = [st[:20], st[20: 40]]
139 139
140 140 # deref fields so they will be local in loop
141 141 dmap = self._map
142 142 copymap = self._copymap
143 143 unpack = struct.unpack
144 144
145 145 pos = 40
146 146 e_size = struct.calcsize(_format)
147 147
148 148 while pos < len(st):
149 149 newpos = pos + e_size
150 150 e = unpack(_format, st[pos:newpos])
151 151 l = e[4]
152 152 pos = newpos
153 153 newpos = pos + l
154 154 f = st[pos:newpos]
155 155 if '\0' in f:
156 156 f, c = f.split('\0')
157 157 copymap[f] = c
158 158 dmap[f] = e[:4]
159 159 pos = newpos
160 160
161 161 def invalidate(self):
162 162 for a in "_map _copymap _branch _pl _dirs _ignore".split():
163 163 if a in self.__dict__:
164 164 delattr(self, a)
165 165 self._dirty = False
166 166
167 167 def copy(self, source, dest):
168 168 self._dirty = True
169 169 self._copymap[dest] = source
170 170
171 171 def copied(self, file):
172 172 return self._copymap.get(file, None)
173 173
174 174 def copies(self):
175 175 return self._copymap
176 176
177 177 def _incpath(self, path):
178 178 for c in strutil.findall(path, '/'):
179 179 pc = path[:c]
180 180 self._dirs.setdefault(pc, 0)
181 181 self._dirs[pc] += 1
182 182
183 183 def _decpath(self, path):
184 184 for c in strutil.findall(path, '/'):
185 185 pc = path[:c]
186 186 self._dirs.setdefault(pc, 0)
187 187 self._dirs[pc] -= 1
188 188
189 189 def _incpathcheck(self, f):
190 190 if '\r' in f or '\n' in f:
191 191 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
192 192 # shadows
193 193 if f in self._dirs:
194 194 raise util.Abort(_('directory %r already in dirstate') % f)
195 195 for c in strutil.rfindall(f, '/'):
196 196 d = f[:c]
197 197 if d in self._dirs:
198 198 break
199 199 if d in self._map:
200 200 raise util.Abort(_('file %r in dirstate clashes with %r') %
201 201 (d, f))
202 202 self._incpath(f)
203 203
204 204 def normal(self, f):
205 205 'mark a file normal'
206 206 self._dirty = True
207 207 s = os.lstat(self._join(f))
208 208 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
209 209 if self._copymap.has_key(f):
210 210 del self._copymap[f]
211 211
212 212 def normaldirty(self, f):
213 213 'mark a file normal, but possibly dirty'
214 214 self._dirty = True
215 215 s = os.lstat(self._join(f))
216 216 self._map[f] = ('n', s.st_mode, -1, -1)
217 217 if f in self._copymap:
218 218 del self._copymap[f]
219 219
220 220 def add(self, f):
221 221 'mark a file added'
222 222 self._dirty = True
223 223 self._incpathcheck(f)
224 224 self._map[f] = ('a', 0, -1, -1)
225 225 if f in self._copymap:
226 226 del self._copymap[f]
227 227
228 228 def remove(self, f):
229 229 'mark a file removed'
230 230 self._dirty = True
231 231 self._map[f] = ('r', 0, 0, 0)
232 232 self._decpath(f)
233 233 if f in self._copymap:
234 234 del self._copymap[f]
235 235
236 236 def merge(self, f):
237 237 'mark a file merged'
238 238 self._dirty = True
239 239 s = os.lstat(self._join(f))
240 240 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
241 241 if f in self._copymap:
242 242 del self._copymap[f]
243 243
244 244 def forget(self, f):
245 245 'forget a file'
246 246 self._dirty = True
247 247 try:
248 248 del self._map[f]
249 249 self._decpath(f)
250 250 except KeyError:
251 251 self._ui.warn(_("not in dirstate: %s!\n") % f)
252 252
253 def clear(self):
254 self._map = {}
255 self._copymap = {}
256 self._pl = [nullid, nullid]
257 self._dirty = True
258
253 259 def rebuild(self, parent, files):
254 self.invalidate()
260 self.clear()
255 261 for f in files:
256 262 if files.execf(f):
257 263 self._map[f] = ('n', 0777, -1, 0)
258 264 else:
259 265 self._map[f] = ('n', 0666, -1, 0)
260 266 self._pl = (parent, nullid)
261 267 self._dirty = True
262 268
263 269 def write(self):
264 270 if not self._dirty:
265 271 return
266 272 cs = cStringIO.StringIO()
267 273 cs.write("".join(self._pl))
268 274 for f, e in self._map.iteritems():
269 275 c = self.copied(f)
270 276 if c:
271 277 f = f + "\0" + c
272 278 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
273 279 cs.write(e)
274 280 cs.write(f)
275 281 st = self._opener("dirstate", "w", atomictemp=True)
276 282 st.write(cs.getvalue())
277 283 st.rename()
278 284 self._dirty = self._dirtypl = False
279 285
280 286 def _filter(self, files):
281 287 ret = {}
282 288 unknown = []
283 289
284 290 for x in files:
285 291 if x == '.':
286 292 return self._map.copy()
287 293 if x not in self._map:
288 294 unknown.append(x)
289 295 else:
290 296 ret[x] = self._map[x]
291 297
292 298 if not unknown:
293 299 return ret
294 300
295 301 b = self._map.keys()
296 302 b.sort()
297 303 blen = len(b)
298 304
299 305 for x in unknown:
300 306 bs = bisect.bisect(b, "%s%s" % (x, '/'))
301 307 while bs < blen:
302 308 s = b[bs]
303 309 if len(s) > len(x) and s.startswith(x):
304 310 ret[s] = self._map[s]
305 311 else:
306 312 break
307 313 bs += 1
308 314 return ret
309 315
310 316 def _supported(self, f, mode, verbose=False):
311 317 if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
312 318 return True
313 319 if verbose:
314 320 kind = 'unknown'
315 321 if stat.S_ISCHR(mode): kind = _('character device')
316 322 elif stat.S_ISBLK(mode): kind = _('block device')
317 323 elif stat.S_ISFIFO(mode): kind = _('fifo')
318 324 elif stat.S_ISSOCK(mode): kind = _('socket')
319 325 elif stat.S_ISDIR(mode): kind = _('directory')
320 326 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
321 327 % (self.pathto(f), kind))
322 328 return False
323 329
324 330 def walk(self, files=None, match=util.always, badmatch=None):
325 331 # filter out the stat
326 332 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
327 333 yield src, f
328 334
329 335 def statwalk(self, files=None, match=util.always, ignored=False,
330 336 badmatch=None, directories=False):
331 337 '''
332 338 walk recursively through the directory tree, finding all files
333 339 matched by the match function
334 340
335 341 results are yielded in a tuple (src, filename, st), where src
336 342 is one of:
337 343 'f' the file was found in the directory tree
338 344 'd' the file is a directory of the tree
339 345 'm' the file was only in the dirstate and not in the tree
340 346 'b' file was not found and matched badmatch
341 347
342 348 and st is the stat result if the file was found in the directory.
343 349 '''
344 350
345 351 # walk all files by default
346 352 if not files:
347 353 files = ['.']
348 354 dc = self._map.copy()
349 355 else:
350 356 files = util.unique(files)
351 357 dc = self._filter(files)
352 358
353 359 def imatch(file_):
354 360 if file_ not in dc and self._ignore(file_):
355 361 return False
356 362 return match(file_)
357 363
358 364 ignore = self._ignore
359 365 if ignored:
360 366 imatch = match
361 367 ignore = util.never
362 368
363 369 # self._root may end with a path separator when self._root == '/'
364 370 common_prefix_len = len(self._root)
365 371 if not self._root.endswith(os.sep):
366 372 common_prefix_len += 1
367 373
368 374 normpath = util.normpath
369 375 listdir = os.listdir
370 376 lstat = os.lstat
371 377 bisect_left = bisect.bisect_left
372 378 isdir = os.path.isdir
373 379 pconvert = util.pconvert
374 380 join = os.path.join
375 381 s_isdir = stat.S_ISDIR
376 382 supported = self._supported
377 383 _join = self._join
378 384 known = {'.hg': 1}
379 385
380 386 # recursion free walker, faster than os.walk.
381 387 def findfiles(s):
382 388 work = [s]
383 389 wadd = work.append
384 390 found = []
385 391 add = found.append
386 392 if directories:
387 393 add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
388 394 while work:
389 395 top = work.pop()
390 396 names = listdir(top)
391 397 names.sort()
392 398 # nd is the top of the repository dir tree
393 399 nd = normpath(top[common_prefix_len:])
394 400 if nd == '.':
395 401 nd = ''
396 402 else:
397 403 # do not recurse into a repo contained in this
398 404 # one. use bisect to find .hg directory so speed
399 405 # is good on big directory.
400 406 hg = bisect_left(names, '.hg')
401 407 if hg < len(names) and names[hg] == '.hg':
402 408 if isdir(join(top, '.hg')):
403 409 continue
404 410 for f in names:
405 411 np = pconvert(join(nd, f))
406 412 if np in known:
407 413 continue
408 414 known[np] = 1
409 415 p = join(top, f)
410 416 # don't trip over symlinks
411 417 st = lstat(p)
412 418 if s_isdir(st.st_mode):
413 419 if not ignore(np):
414 420 wadd(p)
415 421 if directories:
416 422 add((np, 'd', st))
417 423 if np in dc and match(np):
418 424 add((np, 'm', st))
419 425 elif imatch(np):
420 426 if supported(np, st.st_mode):
421 427 add((np, 'f', st))
422 428 elif np in dc:
423 429 add((np, 'm', st))
424 430 found.sort()
425 431 return found
426 432
427 433 # step one, find all files that match our criteria
428 434 files.sort()
429 435 for ff in files:
430 436 nf = normpath(ff)
431 437 f = _join(ff)
432 438 try:
433 439 st = lstat(f)
434 440 except OSError, inst:
435 441 found = False
436 442 for fn in dc:
437 443 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
438 444 found = True
439 445 break
440 446 if not found:
441 447 if inst.errno != errno.ENOENT or not badmatch:
442 448 self._ui.warn('%s: %s\n' %
443 449 (self.pathto(ff), inst.strerror))
444 450 elif badmatch and badmatch(ff) and imatch(nf):
445 451 yield 'b', ff, None
446 452 continue
447 453 if s_isdir(st.st_mode):
448 454 for f, src, st in findfiles(f):
449 455 yield src, f, st
450 456 else:
451 457 if nf in known:
452 458 continue
453 459 known[nf] = 1
454 460 if match(nf):
455 461 if supported(ff, st.st_mode, verbose=True):
456 462 yield 'f', nf, st
457 463 elif ff in dc:
458 464 yield 'm', nf, st
459 465
460 466 # step two run through anything left in the dc hash and yield
461 467 # if we haven't already seen it
462 468 ks = dc.keys()
463 469 ks.sort()
464 470 for k in ks:
465 471 if k in known:
466 472 continue
467 473 known[k] = 1
468 474 if imatch(k):
469 475 yield 'm', k, None
470 476
471 477 def status(self, files, match, list_ignored, list_clean):
472 478 lookup, modified, added, unknown, ignored = [], [], [], [], []
473 479 removed, deleted, clean = [], [], []
474 480
475 481 _join = self._join
476 482 lstat = os.lstat
477 483 cmap = self._copymap
478 484 dmap = self._map
479 485 ladd = lookup.append
480 486 madd = modified.append
481 487 aadd = added.append
482 488 uadd = unknown.append
483 489 iadd = ignored.append
484 490 radd = removed.append
485 491 dadd = deleted.append
486 492 cadd = clean.append
487 493
488 494 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
489 495 if fn in dmap:
490 496 type_, mode, size, time = dmap[fn]
491 497 else:
492 498 if list_ignored and self._ignore(fn):
493 499 iadd(fn)
494 500 else:
495 501 uadd(fn)
496 502 continue
497 503 if src == 'm':
498 504 nonexistent = True
499 505 if not st:
500 506 try:
501 507 st = lstat(_join(fn))
502 508 except OSError, inst:
503 509 if inst.errno != errno.ENOENT:
504 510 raise
505 511 st = None
506 512 # We need to re-check that it is a valid file
507 513 if st and self._supported(fn, st.st_mode):
508 514 nonexistent = False
509 515 # XXX: what to do with file no longer present in the fs
510 516 # who are not removed in the dirstate ?
511 517 if nonexistent and type_ in "nm":
512 518 dadd(fn)
513 519 continue
514 520 # check the common case first
515 521 if type_ == 'n':
516 522 if not st:
517 523 st = lstat(_join(fn))
518 524 if (size >= 0 and (size != st.st_size
519 525 or (mode ^ st.st_mode) & 0100)
520 526 or fn in self._copymap):
521 527 madd(fn)
522 528 elif time != int(st.st_mtime):
523 529 ladd(fn)
524 530 elif list_clean:
525 531 cadd(fn)
526 532 elif type_ == 'm':
527 533 madd(fn)
528 534 elif type_ == 'a':
529 535 aadd(fn)
530 536 elif type_ == 'r':
531 537 radd(fn)
532 538
533 539 return (lookup, modified, added, removed, deleted, unknown, ignored,
534 540 clean)
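
The substantive change in dirstate.py is the new `clear()` method and the switch in `rebuild()` from `invalidate()` to `clear()`. `invalidate()` only deletes the cached `_map`/`_copymap`/`_pl` attributes, so the first `_map` access inside `rebuild()` would lazily re-read the old dirstate from disk and the rebuilt entries would end up layered on top of stale ones (the added `baz` from the new test would survive). `clear()` instead starts from an empty map, resets the parents and marks the state dirty so that `write()` persists the result. A minimal, self-contained sketch of that lazy-cache pattern (not Mercurial code; the backing payload and file names are illustrative):

    class LazyState:
        """Toy model of the lazy caching used by dirstate above."""

        def __init__(self, backing):
            self._backing = backing          # stands in for the .hg/dirstate file
            self._dirty = False

        def __getattr__(self, name):
            # Only called when normal lookup fails: lazily (re)load the map,
            # the way dirstate.__getattr__ falls back to _read().
            if name == "_map":
                self._map = dict(self._backing)
                return self._map
            raise AttributeError(name)

        def invalidate(self):
            # Drop the cache only; the next _map access reloads the old contents.
            self.__dict__.pop("_map", None)
            self._dirty = False

        def clear(self):
            # Reset the in-memory map and mark it dirty so it gets written out.
            self._map = {}
            self._dirty = True

        def rebuild(self, files):
            self.clear()                     # was invalidate() before this change
            for f in files:
                self._map[f] = ("n", 0o666, -1, 0)


    s = LazyState({"baz": ("a", 0, -1, -1)})   # stale 'added' entry on "disk"
    s.rebuild(["foo", "bar"])
    print(sorted(s._map))                      # ['bar', 'foo'], no stale 'baz'
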
@@ -1,1188 +1,1188
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 10 import tempfile, urllib, bz2
11 11 from mercurial.node import *
12 12 from mercurial.i18n import gettext as _
13 13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 14 from mercurial import revlog, templater
15 15 from common import get_mtime, staticfile, style_map, paritygen
16 16
17 17 def _up(p):
18 18 if p[0] != "/":
19 19 p = "/" + p
20 20 if p[-1] == "/":
21 21 p = p[:-1]
22 22 up = os.path.dirname(p)
23 23 if up == "/":
24 24 return "/"
25 25 return up + "/"
26 26
27 27 def revnavgen(pos, pagelen, limit, nodefunc):
28 28 def seq(factor, limit=None):
29 29 if limit:
30 30 yield limit
31 31 if limit >= 20 and limit <= 40:
32 32 yield 50
33 33 else:
34 34 yield 1 * factor
35 35 yield 3 * factor
36 36 for f in seq(factor * 10):
37 37 yield f
38 38
39 39 def nav(**map):
40 40 l = []
41 41 last = 0
42 42 for f in seq(1, pagelen):
43 43 if f < pagelen or f <= last:
44 44 continue
45 45 if f > limit:
46 46 break
47 47 last = f
48 48 if pos + f < limit:
49 49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 50 if pos - f >= 0:
51 51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52 52
53 53 try:
54 54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55 55
56 56 for label, node in l:
57 57 yield {"label": label, "node": node}
58 58
59 59 yield {"label": "tip", "node": "tip"}
60 60 except hg.RepoError:
61 61 pass
62 62
63 63 return nav
64 64
65 65 class hgweb(object):
66 66 def __init__(self, repo, name=None):
67 67 if isinstance(repo, str):
68 68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 69 else:
70 70 self.repo = repo
71 71
72 72 self.mtime = -1
73 73 self.reponame = name
74 74 self.archives = 'zip', 'gz', 'bz2'
75 75 self.stripecount = 1
76 76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 77 # readable by the user running the CGI script
78 78 self.templatepath = self.config("web", "templates",
79 79 templater.templatepath(),
80 80 untrusted=False)
81 81
82 82 # The CGI scripts are often run by a user different from the repo owner.
83 83 # Trust the settings from the .hg/hgrc files by default.
84 84 def config(self, section, name, default=None, untrusted=True):
85 85 return self.repo.ui.config(section, name, default,
86 86 untrusted=untrusted)
87 87
88 88 def configbool(self, section, name, default=False, untrusted=True):
89 89 return self.repo.ui.configbool(section, name, default,
90 90 untrusted=untrusted)
91 91
92 92 def configlist(self, section, name, default=None, untrusted=True):
93 93 return self.repo.ui.configlist(section, name, default,
94 94 untrusted=untrusted)
95 95
96 96 def refresh(self):
97 97 mtime = get_mtime(self.repo.root)
98 98 if mtime != self.mtime:
99 99 self.mtime = mtime
100 100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 102 self.stripecount = int(self.config("web", "stripes", 1))
103 103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 105 self.allowpull = self.configbool("web", "allowpull", True)
106 106 self.encoding = self.config("web", "encoding", util._encoding)
107 107
108 108 def archivelist(self, nodeid):
109 109 allowed = self.configlist("web", "allow_archive")
110 110 for i, spec in self.archive_specs.iteritems():
111 111 if i in allowed or self.configbool("web", "allow" + i):
112 112 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
113 113
114 114 def listfilediffs(self, files, changeset):
115 115 for f in files[:self.maxfiles]:
116 116 yield self.t("filedifflink", node=hex(changeset), file=f)
117 117 if len(files) > self.maxfiles:
118 118 yield self.t("fileellipses")
119 119
120 120 def siblings(self, siblings=[], hiderev=None, **args):
121 121 siblings = [s for s in siblings if s.node() != nullid]
122 122 if len(siblings) == 1 and siblings[0].rev() == hiderev:
123 123 return
124 124 for s in siblings:
125 125 d = {'node': hex(s.node()), 'rev': s.rev()}
126 126 if hasattr(s, 'path'):
127 127 d['file'] = s.path()
128 128 d.update(args)
129 129 yield d
130 130
131 131 def renamelink(self, fl, node):
132 132 r = fl.renamed(node)
133 133 if r:
134 134 return [dict(file=r[0], node=hex(r[1]))]
135 135 return []
136 136
137 137 def nodetagsdict(self, node):
138 138 return [{"name": i} for i in self.repo.nodetags(node)]
139 139
140 140 def nodebranchdict(self, ctx):
141 141 branches = []
142 142 branch = ctx.branch()
143 143 if self.repo.branchtags()[branch] == ctx.node():
144 144 branches.append({"name": branch})
145 145 return branches
146 146
147 147 def showtag(self, t1, node=nullid, **args):
148 148 for t in self.repo.nodetags(node):
149 149 yield self.t(t1, tag=t, **args)
150 150
151 151 def diff(self, node1, node2, files):
152 152 def filterfiles(filters, files):
153 153 l = [x for x in files if x in filters]
154 154
155 155 for t in filters:
156 156 if t and t[-1] != os.sep:
157 157 t += os.sep
158 158 l += [x for x in files if x.startswith(t)]
159 159 return l
160 160
161 161 parity = paritygen(self.stripecount)
162 162 def diffblock(diff, f, fn):
163 163 yield self.t("diffblock",
164 164 lines=prettyprintlines(diff),
165 165 parity=parity.next(),
166 166 file=f,
167 167 filenode=hex(fn or nullid))
168 168
169 169 def prettyprintlines(diff):
170 170 for l in diff.splitlines(1):
171 171 if l.startswith('+'):
172 172 yield self.t("difflineplus", line=l)
173 173 elif l.startswith('-'):
174 174 yield self.t("difflineminus", line=l)
175 175 elif l.startswith('@'):
176 176 yield self.t("difflineat", line=l)
177 177 else:
178 178 yield self.t("diffline", line=l)
179 179
180 180 r = self.repo
181 181 c1 = r.changectx(node1)
182 182 c2 = r.changectx(node2)
183 183 date1 = util.datestr(c1.date())
184 184 date2 = util.datestr(c2.date())
185 185
186 186 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
187 187 if files:
188 188 modified, added, removed = map(lambda x: filterfiles(files, x),
189 189 (modified, added, removed))
190 190
191 191 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
192 192 for f in modified:
193 193 to = c1.filectx(f).data()
194 194 tn = c2.filectx(f).data()
195 195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 196 opts=diffopts), f, tn)
197 197 for f in added:
198 198 to = None
199 199 tn = c2.filectx(f).data()
200 200 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
201 201 opts=diffopts), f, tn)
202 202 for f in removed:
203 203 to = c1.filectx(f).data()
204 204 tn = None
205 205 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
206 206 opts=diffopts), f, tn)
207 207
208 208 def changelog(self, ctx, shortlog=False):
209 209 def changelist(**map):
210 210 cl = self.repo.changelog
211 211 l = [] # build a list in forward order for efficiency
212 212 for i in xrange(start, end):
213 213 ctx = self.repo.changectx(i)
214 214 n = ctx.node()
215 215
216 216 l.insert(0, {"parity": parity.next(),
217 217 "author": ctx.user(),
218 218 "parent": self.siblings(ctx.parents(), i - 1),
219 219 "child": self.siblings(ctx.children(), i + 1),
220 220 "changelogtag": self.showtag("changelogtag",n),
221 221 "desc": ctx.description(),
222 222 "date": ctx.date(),
223 223 "files": self.listfilediffs(ctx.files(), n),
224 224 "rev": i,
225 225 "node": hex(n),
226 226 "tags": self.nodetagsdict(n),
227 227 "branches": self.nodebranchdict(ctx)})
228 228
229 229 for e in l:
230 230 yield e
231 231
232 232 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
233 233 cl = self.repo.changelog
234 234 count = cl.count()
235 235 pos = ctx.rev()
236 236 start = max(0, pos - maxchanges + 1)
237 237 end = min(count, start + maxchanges)
238 238 pos = end - 1
239 239 parity = paritygen(self.stripecount, offset=start-end)
240 240
241 241 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
242 242
243 243 yield self.t(shortlog and 'shortlog' or 'changelog',
244 244 changenav=changenav,
245 245 node=hex(cl.tip()),
246 246 rev=pos, changesets=count, entries=changelist,
247 247 archives=self.archivelist("tip"))
248 248
249 249 def search(self, query):
250 250
251 251 def changelist(**map):
252 252 cl = self.repo.changelog
253 253 count = 0
254 254 qw = query.lower().split()
255 255
256 256 def revgen():
257 257 for i in xrange(cl.count() - 1, 0, -100):
258 258 l = []
259 259 for j in xrange(max(0, i - 100), i):
260 260 ctx = self.repo.changectx(j)
261 261 l.append(ctx)
262 262 l.reverse()
263 263 for e in l:
264 264 yield e
265 265
266 266 for ctx in revgen():
267 267 miss = 0
268 268 for q in qw:
269 269 if not (q in ctx.user().lower() or
270 270 q in ctx.description().lower() or
271 271 q in " ".join(ctx.files()).lower()):
272 272 miss = 1
273 273 break
274 274 if miss:
275 275 continue
276 276
277 277 count += 1
278 278 n = ctx.node()
279 279
280 280 yield self.t('searchentry',
281 281 parity=parity.next(),
282 282 author=ctx.user(),
283 283 parent=self.siblings(ctx.parents()),
284 284 child=self.siblings(ctx.children()),
285 285 changelogtag=self.showtag("changelogtag",n),
286 286 desc=ctx.description(),
287 287 date=ctx.date(),
288 288 files=self.listfilediffs(ctx.files(), n),
289 289 rev=ctx.rev(),
290 290 node=hex(n),
291 291 tags=self.nodetagsdict(n),
292 292 branches=self.nodebranchdict(ctx))
293 293
294 294 if count >= self.maxchanges:
295 295 break
296 296
297 297 cl = self.repo.changelog
298 298 parity = paritygen(self.stripecount)
299 299
300 300 yield self.t('search',
301 301 query=query,
302 302 node=hex(cl.tip()),
303 303 entries=changelist,
304 304 archives=self.archivelist("tip"))
305 305
306 306 def changeset(self, ctx):
307 307 n = ctx.node()
308 308 parents = ctx.parents()
309 309 p1 = parents[0].node()
310 310
311 311 files = []
312 312 parity = paritygen(self.stripecount)
313 313 for f in ctx.files():
314 314 files.append(self.t("filenodelink",
315 315 node=hex(n), file=f,
316 316 parity=parity.next()))
317 317
318 318 def diff(**map):
319 319 yield self.diff(p1, n, None)
320 320
321 321 yield self.t('changeset',
322 322 diff=diff,
323 323 rev=ctx.rev(),
324 324 node=hex(n),
325 325 parent=self.siblings(parents),
326 326 child=self.siblings(ctx.children()),
327 327 changesettag=self.showtag("changesettag",n),
328 328 author=ctx.user(),
329 329 desc=ctx.description(),
330 330 date=ctx.date(),
331 331 files=files,
332 332 archives=self.archivelist(hex(n)),
333 333 tags=self.nodetagsdict(n),
334 334 branches=self.nodebranchdict(ctx))
335 335
336 336 def filelog(self, fctx):
337 337 f = fctx.path()
338 338 fl = fctx.filelog()
339 339 count = fl.count()
340 340 pagelen = self.maxshortchanges
341 341 pos = fctx.filerev()
342 342 start = max(0, pos - pagelen + 1)
343 343 end = min(count, start + pagelen)
344 344 pos = end - 1
345 345 parity = paritygen(self.stripecount, offset=start-end)
346 346
347 347 def entries(**map):
348 348 l = []
349 349
350 350 for i in xrange(start, end):
351 351 ctx = fctx.filectx(i)
352 352 n = fl.node(i)
353 353
354 354 l.insert(0, {"parity": parity.next(),
355 355 "filerev": i,
356 356 "file": f,
357 357 "node": hex(ctx.node()),
358 358 "author": ctx.user(),
359 359 "date": ctx.date(),
360 360 "rename": self.renamelink(fl, n),
361 361 "parent": self.siblings(fctx.parents()),
362 362 "child": self.siblings(fctx.children()),
363 363 "desc": ctx.description()})
364 364
365 365 for e in l:
366 366 yield e
367 367
368 368 nodefunc = lambda x: fctx.filectx(fileid=x)
369 369 nav = revnavgen(pos, pagelen, count, nodefunc)
370 370 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
371 371 entries=entries)
372 372
373 373 def filerevision(self, fctx):
374 374 f = fctx.path()
375 375 text = fctx.data()
376 376 fl = fctx.filelog()
377 377 n = fctx.filenode()
378 378 parity = paritygen(self.stripecount)
379 379
380 380 mt = mimetypes.guess_type(f)[0]
381 381 rawtext = text
382 382 if util.binary(text):
383 383 mt = mt or 'application/octet-stream'
384 384 text = "(binary:%s)" % mt
385 385 mt = mt or 'text/plain'
386 386
387 387 def lines():
388 388 for l, t in enumerate(text.splitlines(1)):
389 389 yield {"line": t,
390 390 "linenumber": "% 6d" % (l + 1),
391 391 "parity": parity.next()}
392 392
393 393 yield self.t("filerevision",
394 394 file=f,
395 395 path=_up(f),
396 396 text=lines(),
397 397 raw=rawtext,
398 398 mimetype=mt,
399 399 rev=fctx.rev(),
400 400 node=hex(fctx.node()),
401 401 author=fctx.user(),
402 402 date=fctx.date(),
403 403 desc=fctx.description(),
404 404 parent=self.siblings(fctx.parents()),
405 405 child=self.siblings(fctx.children()),
406 406 rename=self.renamelink(fl, n),
407 407 permissions=fctx.manifest().flags(f))
408 408
409 409 def fileannotate(self, fctx):
410 410 f = fctx.path()
411 411 n = fctx.filenode()
412 412 fl = fctx.filelog()
413 413 parity = paritygen(self.stripecount)
414 414
415 415 def annotate(**map):
416 416 last = None
417 417 for f, l in fctx.annotate(follow=True):
418 418 fnode = f.filenode()
419 419 name = self.repo.ui.shortuser(f.user())
420 420
421 421 if last != fnode:
422 422 last = fnode
423 423
424 424 yield {"parity": parity.next(),
425 425 "node": hex(f.node()),
426 426 "rev": f.rev(),
427 427 "author": name,
428 428 "file": f.path(),
429 429 "line": l}
430 430
431 431 yield self.t("fileannotate",
432 432 file=f,
433 433 annotate=annotate,
434 434 path=_up(f),
435 435 rev=fctx.rev(),
436 436 node=hex(fctx.node()),
437 437 author=fctx.user(),
438 438 date=fctx.date(),
439 439 desc=fctx.description(),
440 440 rename=self.renamelink(fl, n),
441 441 parent=self.siblings(fctx.parents()),
442 442 child=self.siblings(fctx.children()),
443 443 permissions=fctx.manifest().flags(f))
444 444
445 445 def manifest(self, ctx, path):
446 446 mf = ctx.manifest()
447 447 node = ctx.node()
448 448
449 449 files = {}
450 450 parity = paritygen(self.stripecount)
451 451
452 452 if path and path[-1] != "/":
453 453 path += "/"
454 454 l = len(path)
455 455 abspath = "/" + path
456 456
457 457 for f, n in mf.items():
458 458 if f[:l] != path:
459 459 continue
460 460 remain = f[l:]
461 461 if "/" in remain:
462 462 short = remain[:remain.index("/") + 1] # bleah
463 463 files[short] = (f, None)
464 464 else:
465 465 short = os.path.basename(remain)
466 466 files[short] = (f, n)
467 467
468 468 def filelist(**map):
469 469 fl = files.keys()
470 470 fl.sort()
471 471 for f in fl:
472 472 full, fnode = files[f]
473 473 if not fnode:
474 474 continue
475 475
476 476 yield {"file": full,
477 477 "parity": parity.next(),
478 478 "basename": f,
479 479 "size": ctx.filectx(full).size(),
480 480 "permissions": mf.flags(full)}
481 481
482 482 def dirlist(**map):
483 483 fl = files.keys()
484 484 fl.sort()
485 485 for f in fl:
486 486 full, fnode = files[f]
487 487 if fnode:
488 488 continue
489 489
490 490 yield {"parity": parity.next(),
491 "path": os.path.join(abspath, f),
491 "path": "%s%s" % (abspath, f),
492 492 "basename": f[:-1]}
493 493
494 494 yield self.t("manifest",
495 495 rev=ctx.rev(),
496 496 node=hex(node),
497 497 path=abspath,
498 498 up=_up(abspath),
499 499 upparity=parity.next(),
500 500 fentries=filelist,
501 501 dentries=dirlist,
502 502 archives=self.archivelist(hex(node)),
503 503 tags=self.nodetagsdict(node),
504 504 branches=self.nodebranchdict(ctx))
505 505
506 506 def tags(self):
507 507 i = self.repo.tagslist()
508 508 i.reverse()
509 509 parity = paritygen(self.stripecount)
510 510
511 511 def entries(notip=False, **map):
512 512 for k, n in i:
513 513 if notip and k == "tip":
514 514 continue
515 515 yield {"parity": parity.next(),
516 516 "tag": k,
517 517 "date": self.repo.changectx(n).date(),
518 518 "node": hex(n)}
519 519
520 520 yield self.t("tags",
521 521 node=hex(self.repo.changelog.tip()),
522 522 entries=lambda **x: entries(False, **x),
523 523 entriesnotip=lambda **x: entries(True, **x))
524 524
525 525 def summary(self):
526 526 i = self.repo.tagslist()
527 527 i.reverse()
528 528
529 529 def tagentries(**map):
530 530 parity = paritygen(self.stripecount)
531 531 count = 0
532 532 for k, n in i:
533 533 if k == "tip": # skip tip
534 534 continue;
535 535
536 536 count += 1
537 537 if count > 10: # limit to 10 tags
538 538 break;
539 539
540 540 yield self.t("tagentry",
541 541 parity=parity.next(),
542 542 tag=k,
543 543 node=hex(n),
544 544 date=self.repo.changectx(n).date())
545 545
546 546
547 547 def branches(**map):
548 548 parity = paritygen(self.stripecount)
549 549
550 550 b = self.repo.branchtags()
551 551 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
552 552 l.sort()
553 553
554 554 for r,n,t in l:
555 555 ctx = self.repo.changectx(n)
556 556
557 557 yield {'parity': parity.next(),
558 558 'branch': t,
559 559 'node': hex(n),
560 560 'date': ctx.date()}
561 561
562 562 def changelist(**map):
563 563 parity = paritygen(self.stripecount, offset=start-end)
564 564 l = [] # build a list in forward order for efficiency
565 565 for i in xrange(start, end):
566 566 ctx = self.repo.changectx(i)
567 567 n = ctx.node()
568 568 hn = hex(n)
569 569
570 570 l.insert(0, self.t(
571 571 'shortlogentry',
572 572 parity=parity.next(),
573 573 author=ctx.user(),
574 574 desc=ctx.description(),
575 575 date=ctx.date(),
576 576 rev=i,
577 577 node=hn,
578 578 tags=self.nodetagsdict(n),
579 579 branches=self.nodebranchdict(ctx)))
580 580
581 581 yield l
582 582
583 583 cl = self.repo.changelog
584 584 count = cl.count()
585 585 start = max(0, count - self.maxchanges)
586 586 end = min(count, start + self.maxchanges)
587 587
588 588 yield self.t("summary",
589 589 desc=self.config("web", "description", "unknown"),
590 590 owner=(self.config("ui", "username") or # preferred
591 591 self.config("web", "contact") or # deprecated
592 592 self.config("web", "author", "unknown")), # also
593 593 lastchange=cl.read(cl.tip())[2],
594 594 tags=tagentries,
595 595 branches=branches,
596 596 shortlog=changelist,
597 597 node=hex(cl.tip()),
598 598 archives=self.archivelist("tip"))
599 599
600 600 def filediff(self, fctx):
601 601 n = fctx.node()
602 602 path = fctx.path()
603 603 parents = fctx.parents()
604 604 p1 = parents and parents[0].node() or nullid
605 605
606 606 def diff(**map):
607 607 yield self.diff(p1, n, [path])
608 608
609 609 yield self.t("filediff",
610 610 file=path,
611 611 node=hex(n),
612 612 rev=fctx.rev(),
613 613 parent=self.siblings(parents),
614 614 child=self.siblings(fctx.children()),
615 615 diff=diff)
616 616
617 617 archive_specs = {
618 618 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
619 619 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
620 620 'zip': ('application/zip', 'zip', '.zip', None),
621 621 }
622 622
623 623 def archive(self, req, key, type_):
624 624 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
625 625 cnode = self.repo.lookup(key)
626 626 arch_version = key
627 627 if cnode == key or key == 'tip':
628 628 arch_version = short(cnode)
629 629 name = "%s-%s" % (reponame, arch_version)
630 630 mimetype, artype, extension, encoding = self.archive_specs[type_]
631 631 headers = [('Content-type', mimetype),
632 632 ('Content-disposition', 'attachment; filename=%s%s' %
633 633 (name, extension))]
634 634 if encoding:
635 635 headers.append(('Content-encoding', encoding))
636 636 req.header(headers)
637 637 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
638 638
639 639 # add tags to things
640 640 # tags -> list of changesets corresponding to tags
641 641 # find tag, changeset, file
642 642
643 643 def cleanpath(self, path):
644 644 path = path.lstrip('/')
645 645 return util.canonpath(self.repo.root, '', path)
646 646
647 647 def run(self):
648 648 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
649 649 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
650 650 import mercurial.hgweb.wsgicgi as wsgicgi
651 651 from request import wsgiapplication
652 652 def make_web_app():
653 653 return self
654 654 wsgicgi.launch(wsgiapplication(make_web_app))
655 655
656 656 def run_wsgi(self, req):
657 657 def header(**map):
658 658 header_file = cStringIO.StringIO(
659 659 ''.join(self.t("header", encoding=self.encoding, **map)))
660 660 msg = mimetools.Message(header_file, 0)
661 661 req.header(msg.items())
662 662 yield header_file.read()
663 663
664 664 def rawfileheader(**map):
665 665 req.header([('Content-type', map['mimetype']),
666 666 ('Content-disposition', 'filename=%s' % map['file']),
667 667 ('Content-length', str(len(map['raw'])))])
668 668 yield ''
669 669
670 670 def footer(**map):
671 671 yield self.t("footer", **map)
672 672
673 673 def motd(**map):
674 674 yield self.config("web", "motd", "")
675 675
676 676 def expand_form(form):
677 677 shortcuts = {
678 678 'cl': [('cmd', ['changelog']), ('rev', None)],
679 679 'sl': [('cmd', ['shortlog']), ('rev', None)],
680 680 'cs': [('cmd', ['changeset']), ('node', None)],
681 681 'f': [('cmd', ['file']), ('filenode', None)],
682 682 'fl': [('cmd', ['filelog']), ('filenode', None)],
683 683 'fd': [('cmd', ['filediff']), ('node', None)],
684 684 'fa': [('cmd', ['annotate']), ('filenode', None)],
685 685 'mf': [('cmd', ['manifest']), ('manifest', None)],
686 686 'ca': [('cmd', ['archive']), ('node', None)],
687 687 'tags': [('cmd', ['tags'])],
688 688 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
689 689 'static': [('cmd', ['static']), ('file', None)]
690 690 }
691 691
692 692 for k in shortcuts.iterkeys():
693 693 if form.has_key(k):
694 694 for name, value in shortcuts[k]:
695 695 if value is None:
696 696 value = form[k]
697 697 form[name] = value
698 698 del form[k]
699 699
700 700 def rewrite_request(req):
701 701 '''translate new web interface to traditional format'''
702 702
703 703 def spliturl(req):
704 704 def firstitem(query):
705 705 return query.split('&', 1)[0].split(';', 1)[0]
706 706
707 707 def normurl(url):
708 708 inner = '/'.join([x for x in url.split('/') if x])
709 709 tl = len(url) > 1 and url.endswith('/') and '/' or ''
710 710
711 711 return '%s%s%s' % (url.startswith('/') and '/' or '',
712 712 inner, tl)
713 713
714 714 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
715 715 pi = normurl(req.env.get('PATH_INFO', ''))
716 716 if pi:
717 717 # strip leading /
718 718 pi = pi[1:]
719 719 if pi:
720 720 root = root[:root.rfind(pi)]
721 721 if req.env.has_key('REPO_NAME'):
722 722 rn = req.env['REPO_NAME'] + '/'
723 723 root += rn
724 724 query = pi[len(rn):]
725 725 else:
726 726 query = pi
727 727 else:
728 728 root += '?'
729 729 query = firstitem(req.env['QUERY_STRING'])
730 730
731 731 return (root, query)
732 732
733 733 req.url, query = spliturl(req)
734 734
735 735 if req.form.has_key('cmd'):
736 736 # old style
737 737 return
738 738
739 739 args = query.split('/', 2)
740 740 if not args or not args[0]:
741 741 return
742 742
743 743 cmd = args.pop(0)
744 744 style = cmd.rfind('-')
745 745 if style != -1:
746 746 req.form['style'] = [cmd[:style]]
747 747 cmd = cmd[style+1:]
748 748 # avoid accepting e.g. style parameter as command
749 749 if hasattr(self, 'do_' + cmd):
750 750 req.form['cmd'] = [cmd]
751 751
752 752 if args and args[0]:
753 753 node = args.pop(0)
754 754 req.form['node'] = [node]
755 755 if args:
756 756 req.form['file'] = args
757 757
758 758 if cmd == 'static':
759 759 req.form['file'] = req.form['node']
760 760 elif cmd == 'archive':
761 761 fn = req.form['node'][0]
762 762 for type_, spec in self.archive_specs.iteritems():
763 763 ext = spec[2]
764 764 if fn.endswith(ext):
765 765 req.form['node'] = [fn[:-len(ext)]]
766 766 req.form['type'] = [type_]
767 767
768 768 def sessionvars(**map):
769 769 fields = []
770 770 if req.form.has_key('style'):
771 771 style = req.form['style'][0]
772 772 if style != self.config('web', 'style', ''):
773 773 fields.append(('style', style))
774 774
775 775 separator = req.url[-1] == '?' and ';' or '?'
776 776 for name, value in fields:
777 777 yield dict(name=name, value=value, separator=separator)
778 778 separator = ';'
779 779
780 780 self.refresh()
781 781
782 782 expand_form(req.form)
783 783 rewrite_request(req)
784 784
785 785 style = self.config("web", "style", "")
786 786 if req.form.has_key('style'):
787 787 style = req.form['style'][0]
788 788 mapfile = style_map(self.templatepath, style)
789 789
790 790 proto = req.env.get('wsgi.url_scheme')
791 791 if proto == 'https':
792 792 proto = 'https'
793 793 default_port = "443"
794 794 else:
795 795 proto = 'http'
796 796 default_port = "80"
797 797
798 798 port = req.env["SERVER_PORT"]
799 799 port = port != default_port and (":" + port) or ""
800 800 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
801 801 staticurl = self.config("web", "staticurl") or req.url + 'static/'
802 802 if not staticurl.endswith('/'):
803 803 staticurl += '/'
804 804
805 805 if not self.reponame:
806 806 self.reponame = (self.config("web", "name")
807 807 or req.env.get('REPO_NAME')
808 808 or req.url.strip('/') or self.repo.root)
809 809
810 810 self.t = templater.templater(mapfile, templater.common_filters,
811 811 defaults={"url": req.url,
812 812 "staticurl": staticurl,
813 813 "urlbase": urlbase,
814 814 "repo": self.reponame,
815 815 "header": header,
816 816 "footer": footer,
817 817 "motd": motd,
818 818 "rawfileheader": rawfileheader,
819 819 "sessionvars": sessionvars
820 820 })
821 821
822 822 try:
823 823 if not req.form.has_key('cmd'):
824 824 req.form['cmd'] = [self.t.cache['default']]
825 825
826 826 cmd = req.form['cmd'][0]
827 827
828 828 method = getattr(self, 'do_' + cmd, None)
829 829 if method:
830 830 try:
831 831 method(req)
832 832 except (hg.RepoError, revlog.RevlogError), inst:
833 833 req.write(self.t("error", error=str(inst)))
834 834 else:
835 835 req.write(self.t("error", error='No such method: ' + cmd))
836 836 finally:
837 837 self.t = None
838 838
839 839 def changectx(self, req):
840 840 if req.form.has_key('node'):
841 841 changeid = req.form['node'][0]
842 842 elif req.form.has_key('manifest'):
843 843 changeid = req.form['manifest'][0]
844 844 else:
845 845 changeid = self.repo.changelog.count() - 1
846 846
847 847 try:
848 848 ctx = self.repo.changectx(changeid)
849 849 except hg.RepoError:
850 850 man = self.repo.manifest
851 851 mn = man.lookup(changeid)
852 852 ctx = self.repo.changectx(man.linkrev(mn))
853 853
854 854 return ctx
855 855
856 856 def filectx(self, req):
857 857 path = self.cleanpath(req.form['file'][0])
858 858 if req.form.has_key('node'):
859 859 changeid = req.form['node'][0]
860 860 else:
861 861 changeid = req.form['filenode'][0]
862 862 try:
863 863 ctx = self.repo.changectx(changeid)
864 864 fctx = ctx.filectx(path)
865 865 except hg.RepoError:
866 866 fctx = self.repo.filectx(path, fileid=changeid)
867 867
868 868 return fctx
869 869
870 870 def do_log(self, req):
871 871 if req.form.has_key('file') and req.form['file'][0]:
872 872 self.do_filelog(req)
873 873 else:
874 874 self.do_changelog(req)
875 875
876 876 def do_rev(self, req):
877 877 self.do_changeset(req)
878 878
879 879 def do_file(self, req):
880 880 path = self.cleanpath(req.form.get('file', [''])[0])
881 881 if path:
882 882 try:
883 883 req.write(self.filerevision(self.filectx(req)))
884 884 return
885 885 except revlog.LookupError:
886 886 pass
887 887
888 888 req.write(self.manifest(self.changectx(req), path))
889 889
890 890 def do_diff(self, req):
891 891 self.do_filediff(req)
892 892
893 893 def do_changelog(self, req, shortlog = False):
894 894 if req.form.has_key('node'):
895 895 ctx = self.changectx(req)
896 896 else:
897 897 if req.form.has_key('rev'):
898 898 hi = req.form['rev'][0]
899 899 else:
900 900 hi = self.repo.changelog.count() - 1
901 901 try:
902 902 ctx = self.repo.changectx(hi)
903 903 except hg.RepoError:
904 904 req.write(self.search(hi)) # XXX redirect to 404 page?
905 905 return
906 906
907 907 req.write(self.changelog(ctx, shortlog = shortlog))
908 908
909 909 def do_shortlog(self, req):
910 910 self.do_changelog(req, shortlog = True)
911 911
912 912 def do_changeset(self, req):
913 913 req.write(self.changeset(self.changectx(req)))
914 914
915 915 def do_manifest(self, req):
916 916 req.write(self.manifest(self.changectx(req),
917 917 self.cleanpath(req.form['path'][0])))
918 918
919 919 def do_tags(self, req):
920 920 req.write(self.tags())
921 921
922 922 def do_summary(self, req):
923 923 req.write(self.summary())
924 924
925 925 def do_filediff(self, req):
926 926 req.write(self.filediff(self.filectx(req)))
927 927
928 928 def do_annotate(self, req):
929 929 req.write(self.fileannotate(self.filectx(req)))
930 930
931 931 def do_filelog(self, req):
932 932 req.write(self.filelog(self.filectx(req)))
933 933
934 934 def do_lookup(self, req):
935 935 try:
936 936 r = hex(self.repo.lookup(req.form['key'][0]))
937 937 success = 1
938 938 except Exception,inst:
939 939 r = str(inst)
940 940 success = 0
941 941 resp = "%s %s\n" % (success, r)
942 942 req.httphdr("application/mercurial-0.1", length=len(resp))
943 943 req.write(resp)
944 944
945 945 def do_heads(self, req):
946 946 resp = " ".join(map(hex, self.repo.heads())) + "\n"
947 947 req.httphdr("application/mercurial-0.1", length=len(resp))
948 948 req.write(resp)
949 949
950 950 def do_branches(self, req):
951 951 nodes = []
952 952 if req.form.has_key('nodes'):
953 953 nodes = map(bin, req.form['nodes'][0].split(" "))
954 954 resp = cStringIO.StringIO()
955 955 for b in self.repo.branches(nodes):
956 956 resp.write(" ".join(map(hex, b)) + "\n")
957 957 resp = resp.getvalue()
958 958 req.httphdr("application/mercurial-0.1", length=len(resp))
959 959 req.write(resp)
960 960
961 961 def do_between(self, req):
962 962 if req.form.has_key('pairs'):
963 963 pairs = [map(bin, p.split("-"))
964 964 for p in req.form['pairs'][0].split(" ")]
965 965 resp = cStringIO.StringIO()
966 966 for b in self.repo.between(pairs):
967 967 resp.write(" ".join(map(hex, b)) + "\n")
968 968 resp = resp.getvalue()
969 969 req.httphdr("application/mercurial-0.1", length=len(resp))
970 970 req.write(resp)
971 971
972 972 def do_changegroup(self, req):
973 973 req.httphdr("application/mercurial-0.1")
974 974 nodes = []
975 975 if not self.allowpull:
976 976 return
977 977
978 978 if req.form.has_key('roots'):
979 979 nodes = map(bin, req.form['roots'][0].split(" "))
980 980
981 981 z = zlib.compressobj()
982 982 f = self.repo.changegroup(nodes, 'serve')
983 983 while 1:
984 984 chunk = f.read(4096)
985 985 if not chunk:
986 986 break
987 987 req.write(z.compress(chunk))
988 988
989 989 req.write(z.flush())
990 990
991 991 def do_changegroupsubset(self, req):
992 992 req.httphdr("application/mercurial-0.1")
993 993 bases = []
994 994 heads = []
995 995 if not self.allowpull:
996 996 return
997 997
998 998 if req.form.has_key('bases'):
999 999 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
1000 1000 if req.form.has_key('heads'):
1001 1001 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
1002 1002
1003 1003 z = zlib.compressobj()
1004 1004 f = self.repo.changegroupsubset(bases, heads, 'serve')
1005 1005 while 1:
1006 1006 chunk = f.read(4096)
1007 1007 if not chunk:
1008 1008 break
1009 1009 req.write(z.compress(chunk))
1010 1010
1011 1011 req.write(z.flush())
1012 1012
1013 1013 def do_archive(self, req):
1014 1014 type_ = req.form['type'][0]
1015 1015 allowed = self.configlist("web", "allow_archive")
1016 1016 if (type_ in self.archives and (type_ in allowed or
1017 1017 self.configbool("web", "allow" + type_, False))):
1018 1018 self.archive(req, req.form['node'][0], type_)
1019 1019 return
1020 1020
1021 1021 req.write(self.t("error"))
1022 1022
1023 1023 def do_static(self, req):
1024 1024 fname = req.form['file'][0]
1025 1025 # a repo owner may set web.static in .hg/hgrc to get any file
1026 1026 # readable by the user running the CGI script
1027 1027 static = self.config("web", "static",
1028 1028 os.path.join(self.templatepath, "static"),
1029 1029 untrusted=False)
1030 1030 req.write(staticfile(static, fname, req)
1031 1031 or self.t("error", error="%r not found" % fname))
1032 1032
1033 1033 def do_capabilities(self, req):
1034 1034 caps = ['lookup', 'changegroupsubset']
1035 1035 if self.configbool('server', 'uncompressed'):
1036 1036 caps.append('stream=%d' % self.repo.changelog.version)
1037 1037 # XXX: make configurable and/or share code with do_unbundle:
1038 1038 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1039 1039 if unbundleversions:
1040 1040 caps.append('unbundle=%s' % ','.join(unbundleversions))
1041 1041 resp = ' '.join(caps)
1042 1042 req.httphdr("application/mercurial-0.1", length=len(resp))
1043 1043 req.write(resp)
1044 1044
1045 1045 def check_perm(self, req, op, default):
1046 1046 '''check permission for operation based on user auth.
1047 1047 return true if op allowed, else false.
1048 1048 default is policy to use if no config given.'''
1049 1049
1050 1050 user = req.env.get('REMOTE_USER')
1051 1051
1052 1052 deny = self.configlist('web', 'deny_' + op)
1053 1053 if deny and (not user or deny == ['*'] or user in deny):
1054 1054 return False
1055 1055
1056 1056 allow = self.configlist('web', 'allow_' + op)
1057 1057 return (allow and (allow == ['*'] or user in allow)) or default
1058 1058
1059 1059 def do_unbundle(self, req):
1060 1060 def bail(response, headers={}):
1061 1061 length = int(req.env['CONTENT_LENGTH'])
1062 1062 for s in util.filechunkiter(req, limit=length):
1063 1063 # drain incoming bundle, else client will not see
1064 1064 # response when run outside cgi script
1065 1065 pass
1066 1066 req.httphdr("application/mercurial-0.1", headers=headers)
1067 1067 req.write('0\n')
1068 1068 req.write(response)
1069 1069
1070 1070 # require ssl by default, auth info cannot be sniffed and
1071 1071 # replayed
1072 1072 ssl_req = self.configbool('web', 'push_ssl', True)
1073 1073 if ssl_req:
1074 1074 if req.env.get('wsgi.url_scheme') != 'https':
1075 1075 bail(_('ssl required\n'))
1076 1076 return
1077 1077 proto = 'https'
1078 1078 else:
1079 1079 proto = 'http'
1080 1080
1081 1081 # do not allow push unless explicitly allowed
1082 1082 if not self.check_perm(req, 'push', False):
1083 1083 bail(_('push not authorized\n'),
1084 1084 headers={'status': '401 Unauthorized'})
1085 1085 return
1086 1086
1087 1087 their_heads = req.form['heads'][0].split(' ')
1088 1088
1089 1089 def check_heads():
1090 1090 heads = map(hex, self.repo.heads())
1091 1091 return their_heads == [hex('force')] or their_heads == heads
1092 1092
1093 1093 # fail early if possible
1094 1094 if not check_heads():
1095 1095 bail(_('unsynced changes\n'))
1096 1096 return
1097 1097
1098 1098 req.httphdr("application/mercurial-0.1")
1099 1099
1100 1100 # do not lock repo until all changegroup data is
1101 1101 # streamed. save to temporary file.
1102 1102
1103 1103 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1104 1104 fp = os.fdopen(fd, 'wb+')
1105 1105 try:
1106 1106 length = int(req.env['CONTENT_LENGTH'])
1107 1107 for s in util.filechunkiter(req, limit=length):
1108 1108 fp.write(s)
1109 1109
1110 1110 try:
1111 1111 lock = self.repo.lock()
1112 1112 try:
1113 1113 if not check_heads():
1114 1114 req.write('0\n')
1115 1115 req.write(_('unsynced changes\n'))
1116 1116 return
1117 1117
1118 1118 fp.seek(0)
1119 1119 header = fp.read(6)
1120 1120 if not header.startswith("HG"):
1121 1121 # old client with uncompressed bundle
1122 1122 def generator(f):
1123 1123 yield header
1124 1124 for chunk in f:
1125 1125 yield chunk
1126 1126 elif not header.startswith("HG10"):
1127 1127 req.write("0\n")
1128 1128 req.write(_("unknown bundle version\n"))
1129 1129 return
1130 1130 elif header == "HG10GZ":
1131 1131 def generator(f):
1132 1132 zd = zlib.decompressobj()
1133 1133 for chunk in f:
1134 1134 yield zd.decompress(chunk)
1135 1135 elif header == "HG10BZ":
1136 1136 def generator(f):
1137 1137 zd = bz2.BZ2Decompressor()
1138 1138 zd.decompress("BZ")
1139 1139 for chunk in f:
1140 1140 yield zd.decompress(chunk)
1141 1141 elif header == "HG10UN":
1142 1142 def generator(f):
1143 1143 for chunk in f:
1144 1144 yield chunk
1145 1145 else:
1146 1146 req.write("0\n")
1147 1147 req.write(_("unknown bundle compression type\n"))
1148 1148 return
1149 1149 gen = generator(util.filechunkiter(fp, 4096))
1150 1150
1151 1151 # send addchangegroup output to client
1152 1152
1153 1153 old_stdout = sys.stdout
1154 1154 sys.stdout = cStringIO.StringIO()
1155 1155
1156 1156 try:
1157 1157 url = 'remote:%s:%s' % (proto,
1158 1158 req.env.get('REMOTE_HOST', ''))
1159 1159 try:
1160 1160 ret = self.repo.addchangegroup(
1161 1161 util.chunkbuffer(gen), 'serve', url)
1162 1162 except util.Abort, inst:
1163 1163 sys.stdout.write("abort: %s\n" % inst)
1164 1164 ret = 0
1165 1165 finally:
1166 1166 val = sys.stdout.getvalue()
1167 1167 sys.stdout = old_stdout
1168 1168 req.write('%d\n' % ret)
1169 1169 req.write(val)
1170 1170 finally:
1171 1171 del lock
1172 1172 except (OSError, IOError), inst:
1173 1173 req.write('0\n')
1174 1174 filename = getattr(inst, 'filename', '')
1175 1175 # Don't send our filesystem layout to the client
1176 1176 if filename.startswith(self.repo.root):
1177 1177 filename = filename[len(self.repo.root)+1:]
1178 1178 else:
1179 1179 filename = ''
1180 1180 error = getattr(inst, 'strerror', 'Unknown error')
1181 1181 req.write('%s: %s\n' % (error, filename))
1182 1182 finally:
1183 1183 fp.close()
1184 1184 os.unlink(tempname)
1185 1185
1186 1186 def do_stream_out(self, req):
1187 1187 req.httphdr("application/mercurial-0.1")
1188 1188 streamclone.stream_out(self.repo, req, untrusted=True)
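
The only change in the hgweb_mod.py hunk above is in manifest(): the directory entry's `path` is now built with plain string formatting rather than `os.path.join`, since `abspath` is a URL path rather than a filesystem path. Much of the surrounding context deals with push handling: `do_unbundle()` requires SSL unless `web.push_ssl` is turned off, then asks `check_perm(req, 'push', False)`, which checks `REMOTE_USER` against the `web.deny_push` and `web.allow_push` lists. A condensed, standalone restatement of that policy check (same logic as `check_perm` above; user names are illustrative):

    def push_allowed(user, deny, allow, default=False):
        # Deny wins: with a non-empty deny list, anonymous users, '*' entries
        # and listed users are all rejected.
        if deny and (not user or deny == ["*"] or user in deny):
            return False
        # Otherwise allow listed users (or everyone via '*'); fall back to the
        # default, which do_unbundle passes as False, so push stays off unless
        # explicitly allowed.
        return bool(allow and (allow == ["*"] or user in allow)) or default

    print(push_allowed("alice", deny=[], allow=["alice", "bob"]))    # True
    print(push_allowed("mallory", deny=[], allow=["alice", "bob"]))  # False
    print(push_allowed(None, deny=["*"], allow=["*"]))               # False
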
@@ -1,258 +1,259
1 1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from mercurial import demandimport; demandimport.enable()
10 10 import os, mimetools, cStringIO
11 11 from mercurial.i18n import gettext as _
12 12 from mercurial import ui, hg, util, templater
13 13 from common import get_mtime, staticfile, style_map, paritygen
14 14 from hgweb_mod import hgweb
15 15
16 16 # This is a stopgap
17 17 class hgwebdir(object):
18 18 def __init__(self, config, parentui=None):
19 19 def cleannames(items):
20 return [(name.strip(os.sep), path) for name, path in items]
20 return [(util.pconvert(name.strip(os.sep)), path)
21 for name, path in items]
21 22
22 23 self.parentui = parentui
23 24 self.motd = None
24 25 self.style = None
25 26 self.stripecount = None
26 27 self.repos_sorted = ('name', False)
27 28 if isinstance(config, (list, tuple)):
28 29 self.repos = cleannames(config)
29 30 self.repos_sorted = ('', False)
30 31 elif isinstance(config, dict):
31 32 self.repos = cleannames(config.items())
32 33 self.repos.sort()
33 34 else:
34 35 if isinstance(config, util.configparser):
35 36 cp = config
36 37 else:
37 38 cp = util.configparser()
38 39 cp.read(config)
39 40 self.repos = []
40 41 if cp.has_section('web'):
41 42 if cp.has_option('web', 'motd'):
42 43 self.motd = cp.get('web', 'motd')
43 44 if cp.has_option('web', 'style'):
44 45 self.style = cp.get('web', 'style')
45 46 if cp.has_option('web', 'stripes'):
46 47 self.stripecount = int(cp.get('web', 'stripes'))
47 48 if cp.has_section('paths'):
48 49 self.repos.extend(cleannames(cp.items('paths')))
49 50 if cp.has_section('collections'):
50 51 for prefix, root in cp.items('collections'):
51 52 for path in util.walkrepos(root):
52 53 repo = os.path.normpath(path)
53 54 name = repo
54 55 if name.startswith(prefix):
55 56 name = name[len(prefix):]
56 57 self.repos.append((name.lstrip(os.sep), repo))
57 58 self.repos.sort()
58 59
59 60 def run(self):
60 61 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
61 62 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
62 63 import mercurial.hgweb.wsgicgi as wsgicgi
63 64 from request import wsgiapplication
64 65 def make_web_app():
65 66 return self
66 67 wsgicgi.launch(wsgiapplication(make_web_app))
67 68
68 69 def run_wsgi(self, req):
69 70 def header(**map):
70 71 header_file = cStringIO.StringIO(
71 72 ''.join(tmpl("header", encoding=util._encoding, **map)))
72 73 msg = mimetools.Message(header_file, 0)
73 74 req.header(msg.items())
74 75 yield header_file.read()
75 76
76 77 def footer(**map):
77 78 yield tmpl("footer", **map)
78 79
79 80 def motd(**map):
80 81 if self.motd is not None:
81 82 yield self.motd
82 83 else:
83 84 yield config('web', 'motd', '')
84 85
85 86 parentui = self.parentui or ui.ui(report_untrusted=False)
86 87
87 88 def config(section, name, default=None, untrusted=True):
88 89 return parentui.config(section, name, default, untrusted)
89 90
90 91 url = req.env['REQUEST_URI'].split('?')[0]
91 92 if not url.endswith('/'):
92 93 url += '/'
93 94 pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
94 95 base = url[:len(url) - len(pathinfo)]
95 96 if not base.endswith('/'):
96 97 base += '/'
97 98
98 99 staticurl = config('web', 'staticurl') or base + 'static/'
99 100 if not staticurl.endswith('/'):
100 101 staticurl += '/'
101 102
102 103 style = self.style
103 104 if style is None:
104 105 style = config('web', 'style', '')
105 106 if req.form.has_key('style'):
106 107 style = req.form['style'][0]
107 108 if self.stripecount is None:
108 109 self.stripecount = int(config('web', 'stripes', 1))
109 110 mapfile = style_map(templater.templatepath(), style)
110 111 tmpl = templater.templater(mapfile, templater.common_filters,
111 112 defaults={"header": header,
112 113 "footer": footer,
113 114 "motd": motd,
114 115 "url": url,
115 116 "staticurl": staticurl})
116 117
117 118 def archivelist(ui, nodeid, url):
118 119 allowed = ui.configlist("web", "allow_archive", untrusted=True)
119 120 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
120 121 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
121 122 untrusted=True):
122 123 yield {"type" : i[0], "extension": i[1],
123 124 "node": nodeid, "url": url}
124 125
125 126 def entries(sortcolumn="", descending=False, subdir="", **map):
126 127 def sessionvars(**map):
127 128 fields = []
128 129 if req.form.has_key('style'):
129 130 style = req.form['style'][0]
130 131 if style != get('web', 'style', ''):
131 132 fields.append(('style', style))
132 133
133 134 separator = url[-1] == '?' and ';' or '?'
134 135 for name, value in fields:
135 136 yield dict(name=name, value=value, separator=separator)
136 137 separator = ';'
137 138
138 139 rows = []
139 140 parity = paritygen(self.stripecount)
140 141 for name, path in self.repos:
141 142 if not name.startswith(subdir):
142 143 continue
143 144 name = name[len(subdir):]
144 145
145 146 u = ui.ui(parentui=parentui)
146 147 try:
147 148 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
148 149 except IOError:
149 150 pass
150 151 def get(section, name, default=None):
151 152 return u.config(section, name, default, untrusted=True)
152 153
153 154 if u.configbool("web", "hidden", untrusted=True):
154 155 continue
155 156
156 157 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
157 158 .replace("//", "/")) + '/'
158 159
159 160 # update time with local timezone
160 161 try:
161 162 d = (get_mtime(path), util.makedate()[1])
162 163 except OSError:
163 164 continue
164 165
165 166 contact = (get("ui", "username") or # preferred
166 167 get("web", "contact") or # deprecated
167 168 get("web", "author", "")) # also
168 169 description = get("web", "description", "")
169 170 name = get("web", "name", name)
170 171 row = dict(contact=contact or "unknown",
171 172 contact_sort=contact.upper() or "unknown",
172 173 name=name,
173 174 name_sort=name,
174 175 url=url,
175 176 description=description or "unknown",
176 177 description_sort=description.upper() or "unknown",
177 178 lastchange=d,
178 179 lastchange_sort=d[1]-d[0],
179 180 sessionvars=sessionvars,
180 181 archives=archivelist(u, "tip", url))
181 182 if (not sortcolumn
182 183 or (sortcolumn, descending) == self.repos_sorted):
183 184 # fast path for unsorted output
184 185 row['parity'] = parity.next()
185 186 yield row
186 187 else:
187 188 rows.append((row["%s_sort" % sortcolumn], row))
188 189 if rows:
189 190 rows.sort()
190 191 if descending:
191 192 rows.reverse()
192 193 for key, row in rows:
193 194 row['parity'] = parity.next()
194 195 yield row
195 196
196 197 def makeindex(req, subdir=""):
197 198 sortable = ["name", "description", "contact", "lastchange"]
198 199 sortcolumn, descending = self.repos_sorted
199 200 if req.form.has_key('sort'):
200 201 sortcolumn = req.form['sort'][0]
201 202 descending = sortcolumn.startswith('-')
202 203 if descending:
203 204 sortcolumn = sortcolumn[1:]
204 205 if sortcolumn not in sortable:
205 206 sortcolumn = ""
206 207
207 208 sort = [("sort_%s" % column,
208 209 "%s%s" % ((not descending and column == sortcolumn)
209 210 and "-" or "", column))
210 211 for column in sortable]
211 212 req.write(tmpl("index", entries=entries, subdir=subdir,
212 213 sortcolumn=sortcolumn, descending=descending,
213 214 **dict(sort)))
214 215
215 216 try:
216 217 virtual = req.env.get("PATH_INFO", "").strip('/')
217 218 if virtual.startswith('static/'):
218 219 static = os.path.join(templater.templatepath(), 'static')
219 220 fname = virtual[7:]
220 221 req.write(staticfile(static, fname, req) or
221 222 tmpl('error', error='%r not found' % fname))
222 223 elif virtual:
223 224 repos = dict(self.repos)
224 225 while virtual:
225 226 real = repos.get(virtual)
226 227 if real:
227 228 req.env['REPO_NAME'] = virtual
228 229 try:
229 230 repo = hg.repository(parentui, real)
230 231 hgweb(repo).run_wsgi(req)
231 232 except IOError, inst:
232 233 req.write(tmpl("error", error=inst.strerror))
233 234 except hg.RepoError, inst:
234 235 req.write(tmpl("error", error=str(inst)))
235 236 return
236 237
237 238 # browse subdirectories
238 239 subdir = virtual + '/'
239 240 if [r for r in repos if r.startswith(subdir)]:
240 241 makeindex(req, subdir)
241 242 return
242 243
243 244 up = virtual.rfind('/')
244 245 if up < 0:
245 246 break
246 247 virtual = virtual[:up]
247 248
248 249 req.write(tmpl("notfound", repo=virtual))
249 250 else:
250 251 if req.form.has_key('static'):
251 252 static = os.path.join(templater.templatepath(), "static")
252 253 fname = req.form['static'][0]
253 254 req.write(staticfile(static, fname, req)
254 255 or tmpl("error", error="%r not found" % fname))
255 256 else:
256 257 makeindex(req)
257 258 finally:
258 259 tmpl = None
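
The functional change in this file is confined to cleannames(): repository names read from the configuration are now run through util.pconvert() after stripping leading and trailing separators, so entries written with Windows path separators come out as the '/'-separated names used in URLs. A rough standalone illustration of the effect; pconvert_like is a stand-in written for this sketch, not Mercurial's util.pconvert itself:

    import os

    def pconvert_like(path):
        # stand-in for util.pconvert: normalize the local separator to '/'
        return path.replace(os.sep, '/')

    def cleannames(items):
        return [(pconvert_like(name.strip(os.sep)), path)
                for name, path in items]

    # on Windows (os.sep == '\\'), a [paths] entry such as
    #   ('projects\\hg', 'C:\\repos\\hg')
    # now yields the URL-style name 'projects/hg' rather than 'projects\\hg'
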
@@ -1,414 +1,454
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from remoterepo import *
11 11 from i18n import _
12 12 import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
13 13 import errno, keepalive, tempfile, socket, changegroup
14 14
15 15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 16 def __init__(self, ui):
17 17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 18 self.ui = ui
19 19
20 20 def find_user_password(self, realm, authuri):
21 21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 22 self, realm, authuri)
23 23 user, passwd = authinfo
24 24 if user and passwd:
25 25 return (user, passwd)
26 26
27 27 if not self.ui.interactive:
28 28 raise util.Abort(_('http authorization required'))
29 29
30 30 self.ui.write(_("http authorization required\n"))
31 31 self.ui.status(_("realm: %s\n") % realm)
32 32 if user:
33 33 self.ui.status(_("user: %s\n") % user)
34 34 else:
35 35 user = self.ui.prompt(_("user:"), default=None)
36 36
37 37 if not passwd:
38 38 passwd = self.ui.getpass()
39 39
40 40 self.add_password(realm, authuri, user, passwd)
41 41 return (user, passwd)
42 42
43 43 def netlocsplit(netloc):
44 44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45 45
46 46 a = netloc.find('@')
47 47 if a == -1:
48 48 user, passwd = None, None
49 49 else:
50 50 userpass, netloc = netloc[:a], netloc[a+1:]
51 51 c = userpass.find(':')
52 52 if c == -1:
53 53 user, passwd = urllib.unquote(userpass), None
54 54 else:
55 55 user = urllib.unquote(userpass[:c])
56 56 passwd = urllib.unquote(userpass[c+1:])
57 57 c = netloc.find(':')
58 58 if c == -1:
59 59 host, port = netloc, None
60 60 else:
61 61 host, port = netloc[:c], netloc[c+1:]
62 62 return host, port, user, passwd
63 63
64 64 def netlocunsplit(host, port, user=None, passwd=None):
65 65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 66 if port:
67 67 hostport = host + ':' + port
68 68 else:
69 69 hostport = host
70 70 if user:
71 71 if passwd:
72 72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 73 else:
74 74 userpass = urllib.quote(user)
75 75 return userpass + '@' + hostport
76 76 return hostport
77 77
78 78 # work around a bug in Python < 2.4.2
79 79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 80 class request(urllib2.Request):
81 81 def add_header(self, key, val):
82 82 if key.lower() == 'proxy-authorization':
83 83 val = val.strip()
84 84 return urllib2.Request.add_header(self, key, val)
85 85
86 86 class httpsendfile(file):
87 87 def __len__(self):
88 88 return os.fstat(self.fileno()).st_size
89 89
90 90 def _gen_sendfile(connection):
91 91 def _sendfile(self, data):
92 92 # send a file
93 93 if isinstance(data, httpsendfile):
94 94 # if auth required, some data sent twice, so rewind here
95 95 data.seek(0)
96 96 for chunk in util.filechunkiter(data):
97 97 connection.send(self, chunk)
98 98 else:
99 99 connection.send(self, data)
100 100 return _sendfile
101 101
102 102 class httpconnection(keepalive.HTTPConnection):
103 103 # must be able to send big bundle as stream.
104 104 send = _gen_sendfile(keepalive.HTTPConnection)
105 105
106 106 class basehttphandler(keepalive.HTTPHandler):
107 107 def http_open(self, req):
108 108 return self.do_open(httpconnection, req)
109 109
110 110 has_https = hasattr(urllib2, 'HTTPSHandler')
111 111 if has_https:
112 112 class httpsconnection(httplib.HTTPSConnection):
113 113 response_class = keepalive.HTTPResponse
114 114 # must be able to send big bundle as stream.
115 115 send = _gen_sendfile(httplib.HTTPSConnection)
116 116
117 117 class httphandler(basehttphandler, urllib2.HTTPSHandler):
118 118 def https_open(self, req):
119 119 return self.do_open(httpsconnection, req)
120 120 else:
121 121 class httphandler(basehttphandler):
122 122 pass
123 123
124 124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 125 # it doesn't know about the auth type requested. This can happen if
126 126 # somebody is using BasicAuth and types a bad password.
127 127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 129 try:
130 130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 131 self, auth_header, host, req, headers)
132 132 except ValueError, inst:
133 133 arg = inst.args[0]
134 134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 135 return
136 136 raise
137 137
138 138 def zgenerator(f):
139 139 zd = zlib.decompressobj()
140 140 try:
141 141 for chunk in util.filechunkiter(f):
142 142 yield zd.decompress(chunk)
143 143 except httplib.HTTPException, inst:
144 144 raise IOError(None, _('connection ended unexpectedly'))
145 145 yield zd.flush()
146 146
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 '0123456789' '_.-/')
150 _safeset = None
151 _hex = None
152 def quotepath(path):
153 '''quote the path part of a URL
154
155 This is similar to urllib.quote, but it also tries to avoid
156 quoting things twice (inspired by wget):
157
158 >>> quotepath('abc def')
159 'abc%20def'
160 >>> quotepath('abc%20def')
161 'abc%20def'
162 >>> quotepath('abc%20 def')
163 'abc%20%20def'
164 >>> quotepath('abc def%20')
165 'abc%20def%20'
166 >>> quotepath('abc def%2')
167 'abc%20def%252'
168 >>> quotepath('abc def%')
169 'abc%20def%25'
170 '''
171 global _safeset, _hex
172 if _safeset is None:
173 _safeset = util.set(_safe)
174 _hex = util.set('abcdefABCDEF0123456789')
175 l = list(path)
176 for i in xrange(len(l)):
177 c = l[i]
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 pass
180 elif c not in _safeset:
181 l[i] = '%%%02X' % ord(c)
182 return ''.join(l)
183
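
quotepath() is added because urllib.quote() would re-encode the '%' of an already-quoted path, turning '%20' into '%2520'; the new function leaves well-formed %XX escapes alone and quotes everything else. A short comparison, assuming mercurial.httprepo is importable (the urllib values are plain standard-library behaviour):

    import urllib
    from mercurial.httprepo import quotepath

    # already-quoted input: urllib.quote double-encodes, quotepath does not
    assert urllib.quote('abc%20def') == 'abc%2520def'
    assert quotepath('abc%20def') == 'abc%20def'

    # unquoted input: both encode the space the same way
    assert quotepath('abc def') == urllib.quote('abc def') == 'abc%20def'
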
147 184 class httprepository(remoterepository):
148 185 def __init__(self, ui, path):
149 186 self.path = path
150 187 self.caps = None
151 188 self.handler = None
152 189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
153 190 if query or frag:
154 191 raise util.Abort(_('unsupported URL component: "%s"') %
155 192 (query or frag))
156 if not urlpath: urlpath = '/'
193 if not urlpath:
194 urlpath = '/'
195 urlpath = quotepath(urlpath)
157 196 host, port, user, passwd = netlocsplit(netloc)
158 197
159 198 # urllib cannot handle URLs with embedded user or passwd
160 199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
161 200 urlpath, '', ''))
162 201 self.ui = ui
202 self.ui.debug(_('using %s\n') % self._url)
163 203
164 204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
165 205 # XXX proxyauthinfo = None
166 206 self.handler = httphandler()
167 207 handlers = [self.handler]
168 208
169 209 if proxyurl:
170 210 # proxy can be proper url or host[:port]
171 211 if not (proxyurl.startswith('http:') or
172 212 proxyurl.startswith('https:')):
173 213 proxyurl = 'http://' + proxyurl + '/'
174 214 snpqf = urlparse.urlsplit(proxyurl)
175 215 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
176 216 hpup = netlocsplit(proxynetloc)
177 217
178 218 proxyhost, proxyport, proxyuser, proxypasswd = hpup
179 219 if not proxyuser:
180 220 proxyuser = ui.config("http_proxy", "user")
181 221 proxypasswd = ui.config("http_proxy", "passwd")
182 222
183 223 # see if we should use a proxy for this url
184 224 no_list = [ "localhost", "127.0.0.1" ]
185 225 no_list.extend([p.lower() for
186 226 p in ui.configlist("http_proxy", "no")])
187 227 no_list.extend([p.strip().lower() for
188 228 p in os.getenv("no_proxy", '').split(',')
189 229 if p.strip()])
190 230 # "http_proxy.always" config is for running tests on localhost
191 231 if (not ui.configbool("http_proxy", "always") and
192 232 host.lower() in no_list):
193 233 ui.debug(_('disabling proxy for %s\n') % host)
194 234 else:
195 235 proxyurl = urlparse.urlunsplit((
196 236 proxyscheme, netlocunsplit(proxyhost, proxyport,
197 237 proxyuser, proxypasswd or ''),
198 238 proxypath, proxyquery, proxyfrag))
199 239 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
200 240 ui.debug(_('proxying through http://%s:%s\n') %
201 241 (proxyhost, proxyport))
202 242
203 243 # urllib2 takes proxy values from the environment and those
204 244 # will take precedence if found, so drop them
205 245 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
206 246 try:
207 247 if os.environ.has_key(env):
208 248 del os.environ[env]
209 249 except OSError:
210 250 pass
211 251
212 252 passmgr = passwordmgr(ui)
213 253 if user:
214 254 ui.debug(_('http auth: user %s, password %s\n') %
215 255 (user, passwd and '*' * len(passwd) or 'not set'))
216 256 passmgr.add_password(None, host, user, passwd or '')
217 257
218 258 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
219 259 httpdigestauthhandler(passmgr)))
220 260 opener = urllib2.build_opener(*handlers)
221 261
222 262 # 1.0 here is the _protocol_ version
223 263 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
224 264 urllib2.install_opener(opener)
225 265
226 266 def __del__(self):
227 267 if self.handler:
228 268 self.handler.close_all()
229 269 self.handler = None
230 270
231 271 def url(self):
232 272 return self.path
233 273
234 274 # look up capabilities only when needed
235 275
236 276 def get_caps(self):
237 277 if self.caps is None:
238 278 try:
239 279 self.caps = self.do_read('capabilities').split()
240 280 except hg.RepoError:
241 281 self.caps = ()
242 282 self.ui.debug(_('capabilities: %s\n') %
243 283 (' '.join(self.caps or ['none'])))
244 284 return self.caps
245 285
246 286 capabilities = property(get_caps)
247 287
248 288 def lock(self):
249 289 raise util.Abort(_('operation not supported over http'))
250 290
251 291 def do_cmd(self, cmd, **args):
252 292 data = args.pop('data', None)
253 293 headers = args.pop('headers', {})
254 294 self.ui.debug(_("sending %s command\n") % cmd)
255 295 q = {"cmd": cmd}
256 296 q.update(args)
257 297 qs = '?%s' % urllib.urlencode(q)
258 298 cu = "%s%s" % (self._url, qs)
259 299 try:
260 300 if data:
261 301 self.ui.debug(_("sending %s bytes\n") %
262 302 headers.get('content-length', 'X'))
263 303 resp = urllib2.urlopen(request(cu, data, headers))
264 304 except urllib2.HTTPError, inst:
265 305 if inst.code == 401:
266 306 raise util.Abort(_('authorization failed'))
267 307 raise
268 308 except httplib.HTTPException, inst:
269 309 self.ui.debug(_('http error while sending %s command\n') % cmd)
270 310 self.ui.print_exc()
271 311 raise IOError(None, inst)
272 312 except IndexError:
273 313 # this only happens with Python 2.3, later versions raise URLError
274 314 raise util.Abort(_('http error, possibly caused by proxy setting'))
275 315 # record the url we got redirected to
276 316 resp_url = resp.geturl()
277 317 if resp_url.endswith(qs):
278 318 resp_url = resp_url[:-len(qs)]
279 319 if self._url != resp_url:
280 320 self.ui.status(_('real URL is %s\n') % resp_url)
281 321 self._url = resp_url
282 322 try:
283 323 proto = resp.getheader('content-type')
284 324 except AttributeError:
285 325 proto = resp.headers['content-type']
286 326
287 327 # accept old "text/plain" and "application/hg-changegroup" for now
288 328 if not (proto.startswith('application/mercurial-') or
289 329 proto.startswith('text/plain') or
290 330 proto.startswith('application/hg-changegroup')):
291 331 self.ui.debug(_("Requested URL: '%s'\n") % cu)
292 332 raise hg.RepoError(_("'%s' does not appear to be an hg repository")
293 333 % self._url)
294 334
295 335 if proto.startswith('application/mercurial-'):
296 336 try:
297 337 version = proto.split('-', 1)[1]
298 338 version_info = tuple([int(n) for n in version.split('.')])
299 339 except ValueError:
300 340 raise hg.RepoError(_("'%s' sent a broken Content-type "
301 341 "header (%s)") % (self._url, proto))
302 342 if version_info > (0, 1):
303 343 raise hg.RepoError(_("'%s' uses newer protocol %s") %
304 344 (self._url, version))
305 345
306 346 return resp
307 347
308 348 def do_read(self, cmd, **args):
309 349 fp = self.do_cmd(cmd, **args)
310 350 try:
311 351 return fp.read()
312 352 finally:
313 353 # if using keepalive, allow connection to be reused
314 354 fp.close()
315 355
316 356 def lookup(self, key):
317 357 d = self.do_cmd("lookup", key = key).read()
318 358 success, data = d[:-1].split(' ', 1)
319 359 if int(success):
320 360 return bin(data)
321 361 raise hg.RepoError(data)
322 362
323 363 def heads(self):
324 364 d = self.do_read("heads")
325 365 try:
326 366 return map(bin, d[:-1].split(" "))
327 367 except:
328 368 raise util.UnexpectedOutput(_("unexpected response:"), d)
329 369
330 370 def branches(self, nodes):
331 371 n = " ".join(map(hex, nodes))
332 372 d = self.do_read("branches", nodes=n)
333 373 try:
334 374 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
335 375 return br
336 376 except:
337 377 raise util.UnexpectedOutput(_("unexpected response:"), d)
338 378
339 379 def between(self, pairs):
340 380 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
341 381 d = self.do_read("between", pairs=n)
342 382 try:
343 383 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
344 384 return p
345 385 except:
346 386 raise util.UnexpectedOutput(_("unexpected response:"), d)
347 387
348 388 def changegroup(self, nodes, kind):
349 389 n = " ".join(map(hex, nodes))
350 390 f = self.do_cmd("changegroup", roots=n)
351 391 return util.chunkbuffer(zgenerator(f))
352 392
353 393 def changegroupsubset(self, bases, heads, source):
354 394 baselst = " ".join([hex(n) for n in bases])
355 395 headlst = " ".join([hex(n) for n in heads])
356 396 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
357 397 return util.chunkbuffer(zgenerator(f))
358 398
359 399 def unbundle(self, cg, heads, source):
360 400 # have to stream bundle to a temp file because we do not have
361 401 # http 1.1 chunked transfer.
362 402
363 403 type = ""
364 404 types = self.capable('unbundle')
365 405 # servers older than d1b16a746db6 will send 'unbundle' as a
366 406 # boolean capability
367 407 try:
368 408 types = types.split(',')
369 409 except AttributeError:
370 410 types = [""]
371 411 if types:
372 412 for x in types:
373 413 if x in changegroup.bundletypes:
374 414 type = x
375 415 break
376 416
377 417 tempname = changegroup.writebundle(cg, None, type)
378 418 fp = httpsendfile(tempname, "rb")
379 419 try:
380 420 try:
381 421 rfp = self.do_cmd(
382 422 'unbundle', data=fp,
383 423 headers={'content-type': 'application/octet-stream'},
384 424 heads=' '.join(map(hex, heads)))
385 425 try:
386 426 ret = int(rfp.readline())
387 427 self.ui.write(rfp.read())
388 428 return ret
389 429 finally:
390 430 rfp.close()
391 431 except socket.error, err:
392 432 if err[0] in (errno.ECONNRESET, errno.EPIPE):
393 433 raise util.Abort(_('push failed: %s') % err[1])
394 434 raise util.Abort(err[1])
395 435 finally:
396 436 fp.close()
397 437 os.unlink(tempname)
398 438
399 439 def stream_out(self):
400 440 return self.do_cmd('stream_out')
401 441
402 442 class httpsrepository(httprepository):
403 443 def __init__(self, ui, path):
404 444 if not has_https:
405 445 raise util.Abort(_('Python support for SSL and HTTPS '
406 446 'is not installed'))
407 447 httprepository.__init__(self, ui, path)
408 448
409 449 def instance(ui, path, create):
410 450 if create:
411 451 raise util.Abort(_('cannot create new http repository'))
412 452 if path.startswith('https:'):
413 453 return httpsrepository(ui, path)
414 454 return httprepository(ui, path)
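
One detail of unbundle() above deserves a note: newer servers advertise the 'unbundle' capability as a comma-separated list of accepted bundle types, while servers older than d1b16a746db6 report it as a bare boolean, hence the split()/AttributeError dance before a type is picked. A condensed sketch of that selection; the candidate names are assumed to mirror changegroup.bundletypes and are not taken from this diff:

    def pick_bundle_type(cap, known=('HG10GZ', 'HG10BZ', 'HG10UN')):
        # newer servers: e.g. 'HG10GZ,HG10BZ,HG10UN'; older servers: True/False
        try:
            advertised = cap.split(',')
        except AttributeError:
            advertised = ['']
        for t in advertised:
            if t in known:
                return t
        return ''  # same default as the type = "" fallback in unbundle()
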
@@ -1,7 +1,9
1 1 import doctest
2 2
3 3 import mercurial.changelog
4 4 # test doctest from changelog
5 5
6 6 doctest.testmod(mercurial.changelog)
7 7
8 import mercurial.httprepo
9 doctest.testmod(mercurial.httprepo)
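
With the doctests added to quotepath(), test-doctest.py now exercises mercurial.httprepo as well. doctest.testmod() prints nothing when every example passes, so the test's expected output stays empty; running it by hand with verbose=True (not part of the test itself) lists each example as it is checked:

    import doctest
    import mercurial.httprepo

    doctest.testmod(mercurial.httprepo, verbose=True)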