merge with crew-stable
Alexis S. L. Carvalho
r5123:79373ec3 merge default
@@ -0,0 +1,24 @@
#!/bin/sh
# basic test for hg debugrebuildstate

hg init repo
cd repo

touch foo bar
hg ci -Am 'add foo bar'

touch baz
hg add baz
hg rm bar

echo '% state dump'
hg debugstate | cut -b 1-16,35- | sort
echo '% status'
hg st -A

hg debugrebuildstate
echo '% state dump'
hg debugstate | cut -b 1-16,35- | sort
echo '% status'
hg st -A

@@ -0,0 +1,17 @@
adding bar
adding foo
% state dump
a 0 -1 baz
n 644 0 foo
r 0 0 bar
% status
A baz
R bar
C foo
% state dump
n 666 -1 bar
n 666 -1 foo
% status
! bar
? baz
C foo
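
The records dumped above map directly onto the on-disk dirstate format handled by dirstate.py in the next hunk: each entry is a fixed struct.pack(">cllll", ...) header (state, mode, size, mtime, name length) followed by the file name, and rebuild() re-adds every manifest file as ('n', 0666 or 0777, -1, 0), which is why the second "% state dump" prints "n 666 -1" for both files. A minimal sketch of the record layout (the pack_entry helper is hypothetical, not part of this changeset):

    import struct

    _format = ">cllll"   # state, mode, size, mtime, name length (big-endian)

    def pack_entry(state, mode, size, mtime, fname, copysource=None):
        # mirrors dirstate.write(): a copy source is appended to the file
        # name after a NUL byte, and the header stores the combined length
        if copysource:
            fname = fname + "\0" + copysource
        return struct.pack(_format, state, mode, size, mtime, len(fname)) + fname

    # an entry as produced by debugrebuildstate: normal, mode 0666, size -1, mtime 0
    print repr(pack_entry('n', 0666, -1, 0, 'foo'))
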
@@ -1,534 +1,540 @@
"""
dirstate.py - working directory tracking for mercurial

Copyright 2005-2007 Matt Mackall <mpm@selenic.com>

This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
"""

from node import *
from i18n import _
import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
import cStringIO

_unknown = ('?', 0, 0, 0)
_format = ">cllll"

class dirstate(object):

    def __init__(self, opener, ui, root):
        self._opener = opener
        self._root = root
        self._dirty = False
        self._dirtypl = False
        self._ui = ui

    def __getattr__(self, name):
        if name == '_map':
            self._read()
            return self._map
        elif name == '_copymap':
            self._read()
            return self._copymap
        elif name == '_branch':
            try:
                self._branch = (self._opener("branch").read().strip()
                                or "default")
            except IOError:
                self._branch = "default"
            return self._branch
        elif name == '_pl':
            self._pl = [nullid, nullid]
            try:
                st = self._opener("dirstate").read(40)
                if len(st) == 40:
                    self._pl = st[:20], st[20:40]
            except IOError, err:
                if err.errno != errno.ENOENT: raise
            return self._pl
        elif name == '_dirs':
            self._dirs = {}
            for f in self._map:
                self._incpath(f)
            return self._dirs
        elif name == '_ignore':
            files = [self._join('.hgignore')]
            for name, path in self._ui.configitems("ui"):
                if name == 'ignore' or name.startswith('ignore.'):
                    files.append(os.path.expanduser(path))
            self._ignore = ignore.ignore(self._root, files, self._ui.warn)
            return self._ignore
        elif name == '_slash':
            self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
            return self._slash
        else:
            raise AttributeError, name

    def _join(self, f):
        return os.path.join(self._root, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self._root: return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not rootsep.endswith(os.sep):
            rootsep += os.sep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return path.replace(os.sep, '/')
        return path

    def __getitem__(self, key):
        ''' current states:
        n normal
        m needs merging
        r marked for removal
        a marked for addition
        ? not tracked'''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        a = self._map.keys()
        a.sort()
        for x in a:
            yield x

    def parents(self):
        return self._pl

    def branch(self):
        return self._branch

    def setparents(self, p1, p2=nullid):
        self._dirty = self._dirtypl = True
        self._pl = p1, p2

    def setbranch(self, branch):
        self._branch = branch
        self._opener("branch", "w").write(branch + '\n')

    def _read(self):
        self._map = {}
        self._copymap = {}
        if not self._dirtypl:
            self._pl = [nullid, nullid]
        try:
            st = self._opener("dirstate").read()
        except IOError, err:
            if err.errno != errno.ENOENT: raise
            return
        if not st:
            return

        if not self._dirtypl:
            self._pl = [st[:20], st[20: 40]]

        # deref fields so they will be local in loop
        dmap = self._map
        copymap = self._copymap
        unpack = struct.unpack

        pos = 40
        e_size = struct.calcsize(_format)

        while pos < len(st):
            newpos = pos + e_size
            e = unpack(_format, st[pos:newpos])
            l = e[4]
            pos = newpos
            newpos = pos + l
            f = st[pos:newpos]
            if '\0' in f:
                f, c = f.split('\0')
                copymap[f] = c
            dmap[f] = e[:4]
            pos = newpos

    def invalidate(self):
        for a in "_map _copymap _branch _pl _dirs _ignore".split():
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False

    def copy(self, source, dest):
        self._dirty = True
        self._copymap[dest] = source

    def copied(self, file):
        return self._copymap.get(file, None)

    def copies(self):
        return self._copymap

    def _incpath(self, path):
        for c in strutil.findall(path, '/'):
            pc = path[:c]
            self._dirs.setdefault(pc, 0)
            self._dirs[pc] += 1

    def _decpath(self, path):
        for c in strutil.findall(path, '/'):
            pc = path[:c]
            self._dirs.setdefault(pc, 0)
            self._dirs[pc] -= 1

    def _incpathcheck(self, f):
        if '\r' in f or '\n' in f:
            raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
        # shadows
        if f in self._dirs:
            raise util.Abort(_('directory %r already in dirstate') % f)
        for c in strutil.rfindall(f, '/'):
            d = f[:c]
            if d in self._dirs:
                break
            if d in self._map:
                raise util.Abort(_('file %r in dirstate clashes with %r') %
                                 (d, f))
        self._incpath(f)

    def normal(self, f):
        'mark a file normal'
        self._dirty = True
        s = os.lstat(self._join(f))
        self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
        if self._copymap.has_key(f):
            del self._copymap[f]

    def normaldirty(self, f):
        'mark a file normal, but possibly dirty'
        self._dirty = True
        s = os.lstat(self._join(f))
        self._map[f] = ('n', s.st_mode, -1, -1)
        if f in self._copymap:
            del self._copymap[f]

    def add(self, f):
        'mark a file added'
        self._dirty = True
        self._incpathcheck(f)
        self._map[f] = ('a', 0, -1, -1)
        if f in self._copymap:
            del self._copymap[f]

    def remove(self, f):
        'mark a file removed'
        self._dirty = True
        self._map[f] = ('r', 0, 0, 0)
        self._decpath(f)
        if f in self._copymap:
            del self._copymap[f]

    def merge(self, f):
        'mark a file merged'
        self._dirty = True
        s = os.lstat(self._join(f))
        self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
        if f in self._copymap:
            del self._copymap[f]

    def forget(self, f):
        'forget a file'
        self._dirty = True
        try:
            del self._map[f]
            self._decpath(f)
        except KeyError:
            self._ui.warn(_("not in dirstate: %s!\n") % f)

+   def clear(self):
+       self._map = {}
+       self._copymap = {}
+       self._pl = [nullid, nullid]
+       self._dirty = True
+
    def rebuild(self, parent, files):
-       self.invalidate()
+       self.clear()
        for f in files:
            if files.execf(f):
                self._map[f] = ('n', 0777, -1, 0)
            else:
                self._map[f] = ('n', 0666, -1, 0)
        self._pl = (parent, nullid)
        self._dirty = True

    def write(self):
        if not self._dirty:
            return
        cs = cStringIO.StringIO()
        cs.write("".join(self._pl))
        for f, e in self._map.iteritems():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
            cs.write(e)
            cs.write(f)
        st = self._opener("dirstate", "w", atomictemp=True)
        st.write(cs.getvalue())
        st.rename()
        self._dirty = self._dirtypl = False

    def _filter(self, files):
        ret = {}
        unknown = []

        for x in files:
            if x == '.':
                return self._map.copy()
            if x not in self._map:
                unknown.append(x)
            else:
                ret[x] = self._map[x]

        if not unknown:
            return ret

        b = self._map.keys()
        b.sort()
        blen = len(b)

        for x in unknown:
            bs = bisect.bisect(b, "%s%s" % (x, '/'))
            while bs < blen:
                s = b[bs]
                if len(s) > len(x) and s.startswith(x):
                    ret[s] = self._map[s]
                else:
                    break
                bs += 1
        return ret

    def _supported(self, f, mode, verbose=False):
        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
            return True
        if verbose:
            kind = 'unknown'
            if stat.S_ISCHR(mode): kind = _('character device')
            elif stat.S_ISBLK(mode): kind = _('block device')
            elif stat.S_ISFIFO(mode): kind = _('fifo')
            elif stat.S_ISSOCK(mode): kind = _('socket')
            elif stat.S_ISDIR(mode): kind = _('directory')
            self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                          % (self.pathto(f), kind))
        return False

    def walk(self, files=None, match=util.always, badmatch=None):
        # filter out the stat
        for src, f, st in self.statwalk(files, match, badmatch=badmatch):
            yield src, f

    def statwalk(self, files=None, match=util.always, ignored=False,
                 badmatch=None, directories=False):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function

        results are yielded in a tuple (src, filename, st), where src
        is one of:
        'f' the file was found in the directory tree
        'd' the file is a directory of the tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

        and st is the stat result if the file was found in the directory.
        '''

        # walk all files by default
        if not files:
            files = ['.']
            dc = self._map.copy()
        else:
            files = util.unique(files)
            dc = self._filter(files)

        def imatch(file_):
            if file_ not in dc and self._ignore(file_):
                return False
            return match(file_)

        ignore = self._ignore
        if ignored:
            imatch = match
            ignore = util.never

        # self._root may end with a path separator when self._root == '/'
        common_prefix_len = len(self._root)
        if not self._root.endswith(os.sep):
            common_prefix_len += 1

        normpath = util.normpath
        listdir = os.listdir
        lstat = os.lstat
        bisect_left = bisect.bisect_left
        isdir = os.path.isdir
        pconvert = util.pconvert
        join = os.path.join
        s_isdir = stat.S_ISDIR
        supported = self._supported
        _join = self._join
        known = {'.hg': 1}

        # recursion free walker, faster than os.walk.
        def findfiles(s):
            work = [s]
            wadd = work.append
            found = []
            add = found.append
            if directories:
                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
            while work:
                top = work.pop()
                names = listdir(top)
                names.sort()
                # nd is the top of the repository dir tree
                nd = normpath(top[common_prefix_len:])
                if nd == '.':
                    nd = ''
                else:
                    # do not recurse into a repo contained in this
                    # one. use bisect to find .hg directory so speed
                    # is good on big directory.
                    hg = bisect_left(names, '.hg')
                    if hg < len(names) and names[hg] == '.hg':
                        if isdir(join(top, '.hg')):
                            continue
                for f in names:
                    np = pconvert(join(nd, f))
                    if np in known:
                        continue
                    known[np] = 1
                    p = join(top, f)
                    # don't trip over symlinks
                    st = lstat(p)
                    if s_isdir(st.st_mode):
                        if not ignore(np):
                            wadd(p)
                        if directories:
                            add((np, 'd', st))
                        if np in dc and match(np):
                            add((np, 'm', st))
                    elif imatch(np):
                        if supported(np, st.st_mode):
                            add((np, 'f', st))
                        elif np in dc:
                            add((np, 'm', st))
            found.sort()
            return found

        # step one, find all files that match our criteria
        files.sort()
        for ff in files:
            nf = normpath(ff)
            f = _join(ff)
            try:
                st = lstat(f)
            except OSError, inst:
                found = False
                for fn in dc:
                    if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
                        found = True
                        break
                if not found:
                    if inst.errno != errno.ENOENT or not badmatch:
                        self._ui.warn('%s: %s\n' %
                                      (self.pathto(ff), inst.strerror))
                    elif badmatch and badmatch(ff) and imatch(nf):
                        yield 'b', ff, None
                continue
            if s_isdir(st.st_mode):
                for f, src, st in findfiles(f):
                    yield src, f, st
            else:
                if nf in known:
                    continue
                known[nf] = 1
                if match(nf):
                    if supported(ff, st.st_mode, verbose=True):
                        yield 'f', nf, st
                elif ff in dc:
                    yield 'm', nf, st

        # step two run through anything left in the dc hash and yield
        # if we haven't already seen it
        ks = dc.keys()
        ks.sort()
        for k in ks:
            if k in known:
                continue
            known[k] = 1
            if imatch(k):
                yield 'm', k, None

    def status(self, files, match, list_ignored, list_clean):
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        _join = self._join
        lstat = os.lstat
        cmap = self._copymap
        dmap = self._map
        ladd = lookup.append
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append

        for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
            if fn in dmap:
                type_, mode, size, time = dmap[fn]
            else:
                if list_ignored and self._ignore(fn):
                    iadd(fn)
                else:
                    uadd(fn)
                continue
            if src == 'm':
                nonexistent = True
                if not st:
                    try:
                        st = lstat(_join(fn))
                    except OSError, inst:
                        if inst.errno != errno.ENOENT:
                            raise
                        st = None
                    # We need to re-check that it is a valid file
                    if st and self._supported(fn, st.st_mode):
                        nonexistent = False
                # XXX: what to do with file no longer present in the fs
                # who are not removed in the dirstate ?
                if nonexistent and type_ in "nm":
                    dadd(fn)
                    continue
            # check the common case first
            if type_ == 'n':
                if not st:
                    st = lstat(_join(fn))
                if (size >= 0 and (size != st.st_size
                                   or (mode ^ st.st_mode) & 0100)
                    or fn in self._copymap):
                    madd(fn)
                elif time != int(st.st_mtime):
                    ladd(fn)
                elif list_clean:
                    cadd(fn)
            elif type_ == 'm':
                madd(fn)
            elif type_ == 'a':
                aadd(fn)
            elif type_ == 'r':
                radd(fn)

        return (lookup, modified, added, removed, deleted, unknown, ignored,
                clean)
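
The net effect of the dirstate.py hunk above is a new clear() method, with rebuild() now resetting the in-memory maps via clear() instead of invalidate(), so a rebuild no longer depends on re-reading the dirstate file it is about to overwrite. A rough sketch of how a debugrebuildstate-style command would drive it (repo/ctx accessors follow their use elsewhere in this changeset; the wrapper itself is hypothetical, not the actual command implementation):

    def rebuildstate(repo, rev="tip"):
        # resolve the revision to rebuild against and grab its manifest;
        # dirstate.rebuild() calls files.execf(f) on it to pick 0777 vs 0666
        ctx = repo.changectx(rev)
        files = ctx.manifest()
        # clear() empties _map/_copymap and resets the parents; rebuild()
        # then re-adds every manifest file with size -1 and mtime 0, so the
        # next status run marks them for lookup instead of trusting cached
        # stat data, and finally records the new parent
        repo.dirstate.rebuild(ctx.node(), files)
        repo.dirstate.write()
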
@@ -1,1188 +1,1188
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 import tempfile, urllib, bz2
10 import tempfile, urllib, bz2
11 from mercurial.node import *
11 from mercurial.node import *
12 from mercurial.i18n import gettext as _
12 from mercurial.i18n import gettext as _
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 from mercurial import revlog, templater
14 from mercurial import revlog, templater
15 from common import get_mtime, staticfile, style_map, paritygen
15 from common import get_mtime, staticfile, style_map, paritygen
16
16
17 def _up(p):
17 def _up(p):
18 if p[0] != "/":
18 if p[0] != "/":
19 p = "/" + p
19 p = "/" + p
20 if p[-1] == "/":
20 if p[-1] == "/":
21 p = p[:-1]
21 p = p[:-1]
22 up = os.path.dirname(p)
22 up = os.path.dirname(p)
23 if up == "/":
23 if up == "/":
24 return "/"
24 return "/"
25 return up + "/"
25 return up + "/"
26
26
27 def revnavgen(pos, pagelen, limit, nodefunc):
27 def revnavgen(pos, pagelen, limit, nodefunc):
28 def seq(factor, limit=None):
28 def seq(factor, limit=None):
29 if limit:
29 if limit:
30 yield limit
30 yield limit
31 if limit >= 20 and limit <= 40:
31 if limit >= 20 and limit <= 40:
32 yield 50
32 yield 50
33 else:
33 else:
34 yield 1 * factor
34 yield 1 * factor
35 yield 3 * factor
35 yield 3 * factor
36 for f in seq(factor * 10):
36 for f in seq(factor * 10):
37 yield f
37 yield f
38
38
39 def nav(**map):
39 def nav(**map):
40 l = []
40 l = []
41 last = 0
41 last = 0
42 for f in seq(1, pagelen):
42 for f in seq(1, pagelen):
43 if f < pagelen or f <= last:
43 if f < pagelen or f <= last:
44 continue
44 continue
45 if f > limit:
45 if f > limit:
46 break
46 break
47 last = f
47 last = f
48 if pos + f < limit:
48 if pos + f < limit:
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 if pos - f >= 0:
50 if pos - f >= 0:
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52
52
53 try:
53 try:
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55
55
56 for label, node in l:
56 for label, node in l:
57 yield {"label": label, "node": node}
57 yield {"label": label, "node": node}
58
58
59 yield {"label": "tip", "node": "tip"}
59 yield {"label": "tip", "node": "tip"}
60 except hg.RepoError:
60 except hg.RepoError:
61 pass
61 pass
62
62
63 return nav
63 return nav
64
64
65 class hgweb(object):
65 class hgweb(object):
66 def __init__(self, repo, name=None):
66 def __init__(self, repo, name=None):
67 if isinstance(repo, str):
67 if isinstance(repo, str):
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 else:
69 else:
70 self.repo = repo
70 self.repo = repo
71
71
72 self.mtime = -1
72 self.mtime = -1
73 self.reponame = name
73 self.reponame = name
74 self.archives = 'zip', 'gz', 'bz2'
74 self.archives = 'zip', 'gz', 'bz2'
75 self.stripecount = 1
75 self.stripecount = 1
76 # a repo owner may set web.templates in .hg/hgrc to get any file
76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 # readable by the user running the CGI script
77 # readable by the user running the CGI script
78 self.templatepath = self.config("web", "templates",
78 self.templatepath = self.config("web", "templates",
79 templater.templatepath(),
79 templater.templatepath(),
80 untrusted=False)
80 untrusted=False)
81
81
82 # The CGI scripts are often run by a user different from the repo owner.
82 # The CGI scripts are often run by a user different from the repo owner.
83 # Trust the settings from the .hg/hgrc files by default.
83 # Trust the settings from the .hg/hgrc files by default.
84 def config(self, section, name, default=None, untrusted=True):
84 def config(self, section, name, default=None, untrusted=True):
85 return self.repo.ui.config(section, name, default,
85 return self.repo.ui.config(section, name, default,
86 untrusted=untrusted)
86 untrusted=untrusted)
87
87
88 def configbool(self, section, name, default=False, untrusted=True):
88 def configbool(self, section, name, default=False, untrusted=True):
89 return self.repo.ui.configbool(section, name, default,
89 return self.repo.ui.configbool(section, name, default,
90 untrusted=untrusted)
90 untrusted=untrusted)
91
91
92 def configlist(self, section, name, default=None, untrusted=True):
92 def configlist(self, section, name, default=None, untrusted=True):
93 return self.repo.ui.configlist(section, name, default,
93 return self.repo.ui.configlist(section, name, default,
94 untrusted=untrusted)
94 untrusted=untrusted)
95
95
96 def refresh(self):
96 def refresh(self):
97 mtime = get_mtime(self.repo.root)
97 mtime = get_mtime(self.repo.root)
98 if mtime != self.mtime:
98 if mtime != self.mtime:
99 self.mtime = mtime
99 self.mtime = mtime
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 self.stripecount = int(self.config("web", "stripes", 1))
102 self.stripecount = int(self.config("web", "stripes", 1))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 self.allowpull = self.configbool("web", "allowpull", True)
105 self.allowpull = self.configbool("web", "allowpull", True)
106 self.encoding = self.config("web", "encoding", util._encoding)
106 self.encoding = self.config("web", "encoding", util._encoding)
107
107
108 def archivelist(self, nodeid):
108 def archivelist(self, nodeid):
109 allowed = self.configlist("web", "allow_archive")
109 allowed = self.configlist("web", "allow_archive")
110 for i, spec in self.archive_specs.iteritems():
110 for i, spec in self.archive_specs.iteritems():
111 if i in allowed or self.configbool("web", "allow" + i):
111 if i in allowed or self.configbool("web", "allow" + i):
112 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
112 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
113
113
114 def listfilediffs(self, files, changeset):
114 def listfilediffs(self, files, changeset):
115 for f in files[:self.maxfiles]:
115 for f in files[:self.maxfiles]:
116 yield self.t("filedifflink", node=hex(changeset), file=f)
116 yield self.t("filedifflink", node=hex(changeset), file=f)
117 if len(files) > self.maxfiles:
117 if len(files) > self.maxfiles:
118 yield self.t("fileellipses")
118 yield self.t("fileellipses")
119
119
120 def siblings(self, siblings=[], hiderev=None, **args):
120 def siblings(self, siblings=[], hiderev=None, **args):
121 siblings = [s for s in siblings if s.node() != nullid]
121 siblings = [s for s in siblings if s.node() != nullid]
122 if len(siblings) == 1 and siblings[0].rev() == hiderev:
122 if len(siblings) == 1 and siblings[0].rev() == hiderev:
123 return
123 return
124 for s in siblings:
124 for s in siblings:
125 d = {'node': hex(s.node()), 'rev': s.rev()}
125 d = {'node': hex(s.node()), 'rev': s.rev()}
126 if hasattr(s, 'path'):
126 if hasattr(s, 'path'):
127 d['file'] = s.path()
127 d['file'] = s.path()
128 d.update(args)
128 d.update(args)
129 yield d
129 yield d
130
130
131 def renamelink(self, fl, node):
131 def renamelink(self, fl, node):
132 r = fl.renamed(node)
132 r = fl.renamed(node)
133 if r:
133 if r:
134 return [dict(file=r[0], node=hex(r[1]))]
134 return [dict(file=r[0], node=hex(r[1]))]
135 return []
135 return []
136
136
137 def nodetagsdict(self, node):
137 def nodetagsdict(self, node):
138 return [{"name": i} for i in self.repo.nodetags(node)]
138 return [{"name": i} for i in self.repo.nodetags(node)]
139
139
140 def nodebranchdict(self, ctx):
140 def nodebranchdict(self, ctx):
141 branches = []
141 branches = []
142 branch = ctx.branch()
142 branch = ctx.branch()
143 if self.repo.branchtags()[branch] == ctx.node():
143 if self.repo.branchtags()[branch] == ctx.node():
144 branches.append({"name": branch})
144 branches.append({"name": branch})
145 return branches
145 return branches
146
146
147 def showtag(self, t1, node=nullid, **args):
147 def showtag(self, t1, node=nullid, **args):
148 for t in self.repo.nodetags(node):
148 for t in self.repo.nodetags(node):
149 yield self.t(t1, tag=t, **args)
149 yield self.t(t1, tag=t, **args)
150
150
151 def diff(self, node1, node2, files):
151 def diff(self, node1, node2, files):
152 def filterfiles(filters, files):
152 def filterfiles(filters, files):
153 l = [x for x in files if x in filters]
153 l = [x for x in files if x in filters]
154
154
155 for t in filters:
155 for t in filters:
156 if t and t[-1] != os.sep:
156 if t and t[-1] != os.sep:
157 t += os.sep
157 t += os.sep
158 l += [x for x in files if x.startswith(t)]
158 l += [x for x in files if x.startswith(t)]
159 return l
159 return l
160
160
161 parity = paritygen(self.stripecount)
161 parity = paritygen(self.stripecount)
162 def diffblock(diff, f, fn):
162 def diffblock(diff, f, fn):
163 yield self.t("diffblock",
163 yield self.t("diffblock",
164 lines=prettyprintlines(diff),
164 lines=prettyprintlines(diff),
165 parity=parity.next(),
165 parity=parity.next(),
166 file=f,
166 file=f,
167 filenode=hex(fn or nullid))
167 filenode=hex(fn or nullid))
168
168
169 def prettyprintlines(diff):
169 def prettyprintlines(diff):
170 for l in diff.splitlines(1):
170 for l in diff.splitlines(1):
171 if l.startswith('+'):
171 if l.startswith('+'):
172 yield self.t("difflineplus", line=l)
172 yield self.t("difflineplus", line=l)
173 elif l.startswith('-'):
173 elif l.startswith('-'):
174 yield self.t("difflineminus", line=l)
174 yield self.t("difflineminus", line=l)
175 elif l.startswith('@'):
175 elif l.startswith('@'):
176 yield self.t("difflineat", line=l)
176 yield self.t("difflineat", line=l)
177 else:
177 else:
178 yield self.t("diffline", line=l)
178 yield self.t("diffline", line=l)
179
179
180 r = self.repo
180 r = self.repo
181 c1 = r.changectx(node1)
181 c1 = r.changectx(node1)
182 c2 = r.changectx(node2)
182 c2 = r.changectx(node2)
183 date1 = util.datestr(c1.date())
183 date1 = util.datestr(c1.date())
184 date2 = util.datestr(c2.date())
184 date2 = util.datestr(c2.date())
185
185
186 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
186 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
187 if files:
187 if files:
188 modified, added, removed = map(lambda x: filterfiles(files, x),
188 modified, added, removed = map(lambda x: filterfiles(files, x),
189 (modified, added, removed))
189 (modified, added, removed))
190
190
191 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
191 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
192 for f in modified:
192 for f in modified:
193 to = c1.filectx(f).data()
193 to = c1.filectx(f).data()
194 tn = c2.filectx(f).data()
194 tn = c2.filectx(f).data()
195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 opts=diffopts), f, tn)
196 opts=diffopts), f, tn)
197 for f in added:
197 for f in added:
198 to = None
198 to = None
199 tn = c2.filectx(f).data()
199 tn = c2.filectx(f).data()
200 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
200 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
201 opts=diffopts), f, tn)
201 opts=diffopts), f, tn)
202 for f in removed:
202 for f in removed:
203 to = c1.filectx(f).data()
203 to = c1.filectx(f).data()
204 tn = None
204 tn = None
205 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
205 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
206 opts=diffopts), f, tn)
206 opts=diffopts), f, tn)
207
207
208 def changelog(self, ctx, shortlog=False):
208 def changelog(self, ctx, shortlog=False):
209 def changelist(**map):
209 def changelist(**map):
210 cl = self.repo.changelog
210 cl = self.repo.changelog
211 l = [] # build a list in forward order for efficiency
211 l = [] # build a list in forward order for efficiency
212 for i in xrange(start, end):
212 for i in xrange(start, end):
213 ctx = self.repo.changectx(i)
213 ctx = self.repo.changectx(i)
214 n = ctx.node()
214 n = ctx.node()
215
215
216 l.insert(0, {"parity": parity.next(),
216 l.insert(0, {"parity": parity.next(),
217 "author": ctx.user(),
217 "author": ctx.user(),
218 "parent": self.siblings(ctx.parents(), i - 1),
218 "parent": self.siblings(ctx.parents(), i - 1),
219 "child": self.siblings(ctx.children(), i + 1),
219 "child": self.siblings(ctx.children(), i + 1),
220 "changelogtag": self.showtag("changelogtag",n),
220 "changelogtag": self.showtag("changelogtag",n),
221 "desc": ctx.description(),
221 "desc": ctx.description(),
222 "date": ctx.date(),
222 "date": ctx.date(),
223 "files": self.listfilediffs(ctx.files(), n),
223 "files": self.listfilediffs(ctx.files(), n),
224 "rev": i,
224 "rev": i,
225 "node": hex(n),
225 "node": hex(n),
226 "tags": self.nodetagsdict(n),
226 "tags": self.nodetagsdict(n),
227 "branches": self.nodebranchdict(ctx)})
227 "branches": self.nodebranchdict(ctx)})
228
228
229 for e in l:
229 for e in l:
230 yield e
230 yield e
231
231
232 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
232 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
233 cl = self.repo.changelog
233 cl = self.repo.changelog
234 count = cl.count()
234 count = cl.count()
235 pos = ctx.rev()
235 pos = ctx.rev()
236 start = max(0, pos - maxchanges + 1)
236 start = max(0, pos - maxchanges + 1)
237 end = min(count, start + maxchanges)
237 end = min(count, start + maxchanges)
238 pos = end - 1
238 pos = end - 1
239 parity = paritygen(self.stripecount, offset=start-end)
239 parity = paritygen(self.stripecount, offset=start-end)
240
240
241 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
241 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
242
242
243 yield self.t(shortlog and 'shortlog' or 'changelog',
243 yield self.t(shortlog and 'shortlog' or 'changelog',
244 changenav=changenav,
244 changenav=changenav,
245 node=hex(cl.tip()),
245 node=hex(cl.tip()),
246 rev=pos, changesets=count, entries=changelist,
246 rev=pos, changesets=count, entries=changelist,
247 archives=self.archivelist("tip"))
247 archives=self.archivelist("tip"))
248
248
249 def search(self, query):
249 def search(self, query):
250
250
251 def changelist(**map):
251 def changelist(**map):
252 cl = self.repo.changelog
252 cl = self.repo.changelog
253 count = 0
253 count = 0
254 qw = query.lower().split()
254 qw = query.lower().split()
255
255
256 def revgen():
256 def revgen():
257 for i in xrange(cl.count() - 1, 0, -100):
257 for i in xrange(cl.count() - 1, 0, -100):
258 l = []
258 l = []
259 for j in xrange(max(0, i - 100), i):
259 for j in xrange(max(0, i - 100), i):
260 ctx = self.repo.changectx(j)
260 ctx = self.repo.changectx(j)
261 l.append(ctx)
261 l.append(ctx)
262 l.reverse()
262 l.reverse()
263 for e in l:
263 for e in l:
264 yield e
264 yield e
265
265
266 for ctx in revgen():
266 for ctx in revgen():
267 miss = 0
267 miss = 0
268 for q in qw:
268 for q in qw:
269 if not (q in ctx.user().lower() or
269 if not (q in ctx.user().lower() or
270 q in ctx.description().lower() or
270 q in ctx.description().lower() or
271 q in " ".join(ctx.files()).lower()):
271 q in " ".join(ctx.files()).lower()):
272 miss = 1
272 miss = 1
273 break
273 break
274 if miss:
274 if miss:
275 continue
275 continue
276
276
277 count += 1
277 count += 1
278 n = ctx.node()
278 n = ctx.node()
279
279
280 yield self.t('searchentry',
280 yield self.t('searchentry',
281 parity=parity.next(),
281 parity=parity.next(),
282 author=ctx.user(),
282 author=ctx.user(),
283 parent=self.siblings(ctx.parents()),
283 parent=self.siblings(ctx.parents()),
284 child=self.siblings(ctx.children()),
284 child=self.siblings(ctx.children()),
285 changelogtag=self.showtag("changelogtag",n),
285 changelogtag=self.showtag("changelogtag",n),
286 desc=ctx.description(),
286 desc=ctx.description(),
287 date=ctx.date(),
287 date=ctx.date(),
288 files=self.listfilediffs(ctx.files(), n),
288 files=self.listfilediffs(ctx.files(), n),
289 rev=ctx.rev(),
289 rev=ctx.rev(),
290 node=hex(n),
290 node=hex(n),
291 tags=self.nodetagsdict(n),
291 tags=self.nodetagsdict(n),
292 branches=self.nodebranchdict(ctx))
292 branches=self.nodebranchdict(ctx))
293
293
294 if count >= self.maxchanges:
294 if count >= self.maxchanges:
295 break
295 break
296
296
297 cl = self.repo.changelog
297 cl = self.repo.changelog
298 parity = paritygen(self.stripecount)
298 parity = paritygen(self.stripecount)
299
299
300 yield self.t('search',
300 yield self.t('search',
301 query=query,
301 query=query,
302 node=hex(cl.tip()),
302 node=hex(cl.tip()),
303 entries=changelist,
303 entries=changelist,
304 archives=self.archivelist("tip"))
304 archives=self.archivelist("tip"))
305
305
306 def changeset(self, ctx):
306 def changeset(self, ctx):
307 n = ctx.node()
307 n = ctx.node()
308 parents = ctx.parents()
308 parents = ctx.parents()
309 p1 = parents[0].node()
309 p1 = parents[0].node()
310
310
311 files = []
311 files = []
312 parity = paritygen(self.stripecount)
312 parity = paritygen(self.stripecount)
313 for f in ctx.files():
313 for f in ctx.files():
314 files.append(self.t("filenodelink",
314 files.append(self.t("filenodelink",
315 node=hex(n), file=f,
315 node=hex(n), file=f,
316 parity=parity.next()))
316 parity=parity.next()))
317
317
318 def diff(**map):
318 def diff(**map):
319 yield self.diff(p1, n, None)
319 yield self.diff(p1, n, None)
320
320
321 yield self.t('changeset',
321 yield self.t('changeset',
322 diff=diff,
322 diff=diff,
323 rev=ctx.rev(),
323 rev=ctx.rev(),
324 node=hex(n),
324 node=hex(n),
325 parent=self.siblings(parents),
325 parent=self.siblings(parents),
326 child=self.siblings(ctx.children()),
326 child=self.siblings(ctx.children()),
327 changesettag=self.showtag("changesettag",n),
327 changesettag=self.showtag("changesettag",n),
328 author=ctx.user(),
328 author=ctx.user(),
329 desc=ctx.description(),
329 desc=ctx.description(),
330 date=ctx.date(),
330 date=ctx.date(),
331 files=files,
331 files=files,
332 archives=self.archivelist(hex(n)),
332 archives=self.archivelist(hex(n)),
333 tags=self.nodetagsdict(n),
333 tags=self.nodetagsdict(n),
334 branches=self.nodebranchdict(ctx))
334 branches=self.nodebranchdict(ctx))
335
335
336 def filelog(self, fctx):
336 def filelog(self, fctx):
337 f = fctx.path()
337 f = fctx.path()
338 fl = fctx.filelog()
338 fl = fctx.filelog()
339 count = fl.count()
339 count = fl.count()
340 pagelen = self.maxshortchanges
340 pagelen = self.maxshortchanges
341 pos = fctx.filerev()
341 pos = fctx.filerev()
342 start = max(0, pos - pagelen + 1)
342 start = max(0, pos - pagelen + 1)
343 end = min(count, start + pagelen)
343 end = min(count, start + pagelen)
344 pos = end - 1
344 pos = end - 1
345 parity = paritygen(self.stripecount, offset=start-end)
345 parity = paritygen(self.stripecount, offset=start-end)
346
346
347 def entries(**map):
347 def entries(**map):
348 l = []
348 l = []
349
349
350 for i in xrange(start, end):
350 for i in xrange(start, end):
351 ctx = fctx.filectx(i)
351 ctx = fctx.filectx(i)
352 n = fl.node(i)
352 n = fl.node(i)
353
353
354 l.insert(0, {"parity": parity.next(),
354 l.insert(0, {"parity": parity.next(),
355 "filerev": i,
355 "filerev": i,
356 "file": f,
356 "file": f,
357 "node": hex(ctx.node()),
357 "node": hex(ctx.node()),
358 "author": ctx.user(),
358 "author": ctx.user(),
359 "date": ctx.date(),
359 "date": ctx.date(),
360 "rename": self.renamelink(fl, n),
360 "rename": self.renamelink(fl, n),
361 "parent": self.siblings(fctx.parents()),
361 "parent": self.siblings(fctx.parents()),
362 "child": self.siblings(fctx.children()),
362 "child": self.siblings(fctx.children()),
363 "desc": ctx.description()})
363 "desc": ctx.description()})
364
364
365 for e in l:
365 for e in l:
366 yield e
366 yield e
367
367
368 nodefunc = lambda x: fctx.filectx(fileid=x)
368 nodefunc = lambda x: fctx.filectx(fileid=x)
369 nav = revnavgen(pos, pagelen, count, nodefunc)
369 nav = revnavgen(pos, pagelen, count, nodefunc)
370 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
370 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
371 entries=entries)
371 entries=entries)
372
372
373 def filerevision(self, fctx):
373 def filerevision(self, fctx):
374 f = fctx.path()
374 f = fctx.path()
375 text = fctx.data()
375 text = fctx.data()
376 fl = fctx.filelog()
376 fl = fctx.filelog()
377 n = fctx.filenode()
377 n = fctx.filenode()
378 parity = paritygen(self.stripecount)
378 parity = paritygen(self.stripecount)
379
379
380 mt = mimetypes.guess_type(f)[0]
380 mt = mimetypes.guess_type(f)[0]
381 rawtext = text
381 rawtext = text
382 if util.binary(text):
382 if util.binary(text):
383 mt = mt or 'application/octet-stream'
383 mt = mt or 'application/octet-stream'
384 text = "(binary:%s)" % mt
384 text = "(binary:%s)" % mt
385 mt = mt or 'text/plain'
385 mt = mt or 'text/plain'
386
386
387 def lines():
387 def lines():
388 for l, t in enumerate(text.splitlines(1)):
388 for l, t in enumerate(text.splitlines(1)):
389 yield {"line": t,
389 yield {"line": t,
390 "linenumber": "% 6d" % (l + 1),
390 "linenumber": "% 6d" % (l + 1),
391 "parity": parity.next()}
391 "parity": parity.next()}
392
392
393 yield self.t("filerevision",
393 yield self.t("filerevision",
394 file=f,
394 file=f,
395 path=_up(f),
395 path=_up(f),
396 text=lines(),
396 text=lines(),
397 raw=rawtext,
397 raw=rawtext,
398 mimetype=mt,
398 mimetype=mt,
399 rev=fctx.rev(),
399 rev=fctx.rev(),
400 node=hex(fctx.node()),
400 node=hex(fctx.node()),
401 author=fctx.user(),
401 author=fctx.user(),
402 date=fctx.date(),
402 date=fctx.date(),
403 desc=fctx.description(),
403 desc=fctx.description(),
404 parent=self.siblings(fctx.parents()),
404 parent=self.siblings(fctx.parents()),
405 child=self.siblings(fctx.children()),
405 child=self.siblings(fctx.children()),
406 rename=self.renamelink(fl, n),
406 rename=self.renamelink(fl, n),
407 permissions=fctx.manifest().flags(f))
407 permissions=fctx.manifest().flags(f))
408
408
409 def fileannotate(self, fctx):
409 def fileannotate(self, fctx):
410 f = fctx.path()
410 f = fctx.path()
411 n = fctx.filenode()
411 n = fctx.filenode()
412 fl = fctx.filelog()
412 fl = fctx.filelog()
413 parity = paritygen(self.stripecount)
413 parity = paritygen(self.stripecount)
414
414
415 def annotate(**map):
415 def annotate(**map):
416 last = None
416 last = None
417 for f, l in fctx.annotate(follow=True):
417 for f, l in fctx.annotate(follow=True):
418 fnode = f.filenode()
418 fnode = f.filenode()
419 name = self.repo.ui.shortuser(f.user())
419 name = self.repo.ui.shortuser(f.user())
420
420
421 if last != fnode:
421 if last != fnode:
422 last = fnode
422 last = fnode
423
423
424 yield {"parity": parity.next(),
424 yield {"parity": parity.next(),
425 "node": hex(f.node()),
425 "node": hex(f.node()),
426 "rev": f.rev(),
426 "rev": f.rev(),
427 "author": name,
427 "author": name,
428 "file": f.path(),
428 "file": f.path(),
429 "line": l}
429 "line": l}
430
430
431 yield self.t("fileannotate",
431 yield self.t("fileannotate",
432 file=f,
432 file=f,
433 annotate=annotate,
433 annotate=annotate,
434 path=_up(f),
434 path=_up(f),
435 rev=fctx.rev(),
435 rev=fctx.rev(),
436 node=hex(fctx.node()),
436 node=hex(fctx.node()),
437 author=fctx.user(),
437 author=fctx.user(),
438 date=fctx.date(),
438 date=fctx.date(),
439 desc=fctx.description(),
439 desc=fctx.description(),
440 rename=self.renamelink(fl, n),
440 rename=self.renamelink(fl, n),
441 parent=self.siblings(fctx.parents()),
441 parent=self.siblings(fctx.parents()),
442 child=self.siblings(fctx.children()),
442 child=self.siblings(fctx.children()),
443 permissions=fctx.manifest().flags(f))
443 permissions=fctx.manifest().flags(f))
444
444
445 def manifest(self, ctx, path):
445 def manifest(self, ctx, path):
446 mf = ctx.manifest()
446 mf = ctx.manifest()
447 node = ctx.node()
447 node = ctx.node()
448
448
449 files = {}
449 files = {}
450 parity = paritygen(self.stripecount)
450 parity = paritygen(self.stripecount)
451
451
452 if path and path[-1] != "/":
452 if path and path[-1] != "/":
453 path += "/"
453 path += "/"
454 l = len(path)
454 l = len(path)
455 abspath = "/" + path
455 abspath = "/" + path
456
456
457 for f, n in mf.items():
457 for f, n in mf.items():
458 if f[:l] != path:
458 if f[:l] != path:
459 continue
459 continue
460 remain = f[l:]
460 remain = f[l:]
461 if "/" in remain:
461 if "/" in remain:
462 short = remain[:remain.index("/") + 1] # bleah
462 short = remain[:remain.index("/") + 1] # bleah
463 files[short] = (f, None)
463 files[short] = (f, None)
464 else:
464 else:
465 short = os.path.basename(remain)
465 short = os.path.basename(remain)
466 files[short] = (f, n)
466 files[short] = (f, n)
467
467
468 def filelist(**map):
468 def filelist(**map):
469 fl = files.keys()
469 fl = files.keys()
470 fl.sort()
470 fl.sort()
471 for f in fl:
471 for f in fl:
472 full, fnode = files[f]
472 full, fnode = files[f]
473 if not fnode:
473 if not fnode:
474 continue
474 continue
475
475
476 yield {"file": full,
476 yield {"file": full,
477 "parity": parity.next(),
477 "parity": parity.next(),
478 "basename": f,
478 "basename": f,
479 "size": ctx.filectx(full).size(),
479 "size": ctx.filectx(full).size(),
480 "permissions": mf.flags(full)}
480 "permissions": mf.flags(full)}
481
481
482 def dirlist(**map):
482 def dirlist(**map):
483 fl = files.keys()
483 fl = files.keys()
484 fl.sort()
484 fl.sort()
485 for f in fl:
485 for f in fl:
486 full, fnode = files[f]
486 full, fnode = files[f]
487 if fnode:
487 if fnode:
488 continue
488 continue
489
489
490 yield {"parity": parity.next(),
490 yield {"parity": parity.next(),
491 "path": os.path.join(abspath, f),
491 "path": "%s%s" % (abspath, f),
492 "basename": f[:-1]}
492 "basename": f[:-1]}
493
493
494 yield self.t("manifest",
494 yield self.t("manifest",
495 rev=ctx.rev(),
495 rev=ctx.rev(),
496 node=hex(node),
496 node=hex(node),
497 path=abspath,
497 path=abspath,
498 up=_up(abspath),
498 up=_up(abspath),
499 upparity=parity.next(),
499 upparity=parity.next(),
500 fentries=filelist,
500 fentries=filelist,
501 dentries=dirlist,
501 dentries=dirlist,
502 archives=self.archivelist(hex(node)),
502 archives=self.archivelist(hex(node)),
503 tags=self.nodetagsdict(node),
503 tags=self.nodetagsdict(node),
504 branches=self.nodebranchdict(ctx))
504 branches=self.nodebranchdict(ctx))
505
505
506 def tags(self):
506 def tags(self):
507 i = self.repo.tagslist()
507 i = self.repo.tagslist()
508 i.reverse()
508 i.reverse()
509 parity = paritygen(self.stripecount)
509 parity = paritygen(self.stripecount)
510
510
511 def entries(notip=False, **map):
511 def entries(notip=False, **map):
512 for k, n in i:
512 for k, n in i:
513 if notip and k == "tip":
513 if notip and k == "tip":
514 continue
514 continue
515 yield {"parity": parity.next(),
515 yield {"parity": parity.next(),
516 "tag": k,
516 "tag": k,
517 "date": self.repo.changectx(n).date(),
517 "date": self.repo.changectx(n).date(),
518 "node": hex(n)}
518 "node": hex(n)}
519
519
520 yield self.t("tags",
520 yield self.t("tags",
521 node=hex(self.repo.changelog.tip()),
521 node=hex(self.repo.changelog.tip()),
522 entries=lambda **x: entries(False, **x),
522 entries=lambda **x: entries(False, **x),
523 entriesnotip=lambda **x: entries(True, **x))
523 entriesnotip=lambda **x: entries(True, **x))
524
524
525 def summary(self):
525 def summary(self):
526 i = self.repo.tagslist()
526 i = self.repo.tagslist()
527 i.reverse()
527 i.reverse()
528
528
529 def tagentries(**map):
529 def tagentries(**map):
530 parity = paritygen(self.stripecount)
530 parity = paritygen(self.stripecount)
531 count = 0
531 count = 0
532 for k, n in i:
532 for k, n in i:
533 if k == "tip": # skip tip
533 if k == "tip": # skip tip
534 continue
534 continue
535
535
536 count += 1
536 count += 1
537 if count > 10: # limit to 10 tags
537 if count > 10: # limit to 10 tags
538 break
538 break
539
539
540 yield self.t("tagentry",
540 yield self.t("tagentry",
541 parity=parity.next(),
541 parity=parity.next(),
542 tag=k,
542 tag=k,
543 node=hex(n),
543 node=hex(n),
544 date=self.repo.changectx(n).date())
544 date=self.repo.changectx(n).date())
545
545
546
546
547 def branches(**map):
547 def branches(**map):
548 parity = paritygen(self.stripecount)
548 parity = paritygen(self.stripecount)
549
549
550 b = self.repo.branchtags()
550 b = self.repo.branchtags()
551 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
551 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
552 l.sort()
552 l.sort()
553
553
554 for r,n,t in l:
554 for r,n,t in l:
555 ctx = self.repo.changectx(n)
555 ctx = self.repo.changectx(n)
556
556
557 yield {'parity': parity.next(),
557 yield {'parity': parity.next(),
558 'branch': t,
558 'branch': t,
559 'node': hex(n),
559 'node': hex(n),
560 'date': ctx.date()}
560 'date': ctx.date()}
561
561
562 def changelist(**map):
562 def changelist(**map):
563 parity = paritygen(self.stripecount, offset=start-end)
563 parity = paritygen(self.stripecount, offset=start-end)
564 l = [] # build a list in forward order for efficiency
564 l = [] # build a list in forward order for efficiency
565 for i in xrange(start, end):
565 for i in xrange(start, end):
566 ctx = self.repo.changectx(i)
566 ctx = self.repo.changectx(i)
567 n = ctx.node()
567 n = ctx.node()
568 hn = hex(n)
568 hn = hex(n)
569
569
570 l.insert(0, self.t(
570 l.insert(0, self.t(
571 'shortlogentry',
571 'shortlogentry',
572 parity=parity.next(),
572 parity=parity.next(),
573 author=ctx.user(),
573 author=ctx.user(),
574 desc=ctx.description(),
574 desc=ctx.description(),
575 date=ctx.date(),
575 date=ctx.date(),
576 rev=i,
576 rev=i,
577 node=hn,
577 node=hn,
578 tags=self.nodetagsdict(n),
578 tags=self.nodetagsdict(n),
579 branches=self.nodebranchdict(ctx)))
579 branches=self.nodebranchdict(ctx)))
580
580
581 yield l
581 yield l
582
582
583 cl = self.repo.changelog
583 cl = self.repo.changelog
584 count = cl.count()
584 count = cl.count()
585 start = max(0, count - self.maxchanges)
585 start = max(0, count - self.maxchanges)
586 end = min(count, start + self.maxchanges)
586 end = min(count, start + self.maxchanges)
587
587
588 yield self.t("summary",
588 yield self.t("summary",
589 desc=self.config("web", "description", "unknown"),
589 desc=self.config("web", "description", "unknown"),
590 owner=(self.config("ui", "username") or # preferred
590 owner=(self.config("ui", "username") or # preferred
591 self.config("web", "contact") or # deprecated
591 self.config("web", "contact") or # deprecated
592 self.config("web", "author", "unknown")), # also
592 self.config("web", "author", "unknown")), # also
593 lastchange=cl.read(cl.tip())[2],
593 lastchange=cl.read(cl.tip())[2],
594 tags=tagentries,
594 tags=tagentries,
595 branches=branches,
595 branches=branches,
596 shortlog=changelist,
596 shortlog=changelist,
597 node=hex(cl.tip()),
597 node=hex(cl.tip()),
598 archives=self.archivelist("tip"))
598 archives=self.archivelist("tip"))
599
599
600 def filediff(self, fctx):
600 def filediff(self, fctx):
601 n = fctx.node()
601 n = fctx.node()
602 path = fctx.path()
602 path = fctx.path()
603 parents = fctx.parents()
603 parents = fctx.parents()
604 p1 = parents and parents[0].node() or nullid
604 p1 = parents and parents[0].node() or nullid
605
605
606 def diff(**map):
606 def diff(**map):
607 yield self.diff(p1, n, [path])
607 yield self.diff(p1, n, [path])
608
608
609 yield self.t("filediff",
609 yield self.t("filediff",
610 file=path,
610 file=path,
611 node=hex(n),
611 node=hex(n),
612 rev=fctx.rev(),
612 rev=fctx.rev(),
613 parent=self.siblings(parents),
613 parent=self.siblings(parents),
614 child=self.siblings(fctx.children()),
614 child=self.siblings(fctx.children()),
615 diff=diff)
615 diff=diff)
616
616
617 archive_specs = {
617 archive_specs = {
618 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
618 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
619 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
619 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
620 'zip': ('application/zip', 'zip', '.zip', None),
620 'zip': ('application/zip', 'zip', '.zip', None),
621 }
621 }
622
622
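# Each archive_specs value is (mimetype, archival.archive type, file
# extension, content encoding). The encoding slot is None for all three
# formats here; archive() below only adds a Content-encoding header when it
# is set. For example, the 'gz' entry makes archive() respond with roughly:
#
#   Content-type: application/x-tar
#   Content-disposition: attachment; filename=<reponame>-<version>.tar.gz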
623 def archive(self, req, key, type_):
623 def archive(self, req, key, type_):
624 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
624 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
625 cnode = self.repo.lookup(key)
625 cnode = self.repo.lookup(key)
626 arch_version = key
626 arch_version = key
627 if cnode == key or key == 'tip':
627 if cnode == key or key == 'tip':
628 arch_version = short(cnode)
628 arch_version = short(cnode)
629 name = "%s-%s" % (reponame, arch_version)
629 name = "%s-%s" % (reponame, arch_version)
630 mimetype, artype, extension, encoding = self.archive_specs[type_]
630 mimetype, artype, extension, encoding = self.archive_specs[type_]
631 headers = [('Content-type', mimetype),
631 headers = [('Content-type', mimetype),
632 ('Content-disposition', 'attachment; filename=%s%s' %
632 ('Content-disposition', 'attachment; filename=%s%s' %
633 (name, extension))]
633 (name, extension))]
634 if encoding:
634 if encoding:
635 headers.append(('Content-encoding', encoding))
635 headers.append(('Content-encoding', encoding))
636 req.header(headers)
636 req.header(headers)
637 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
637 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
638
638
639 # add tags to things
639 # add tags to things
640 # tags -> list of changesets corresponding to tags
640 # tags -> list of changesets corresponding to tags
641 # find tag, changeset, file
641 # find tag, changeset, file
642
642
643 def cleanpath(self, path):
643 def cleanpath(self, path):
644 path = path.lstrip('/')
644 path = path.lstrip('/')
645 return util.canonpath(self.repo.root, '', path)
645 return util.canonpath(self.repo.root, '', path)
646
646
647 def run(self):
647 def run(self):
648 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
648 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
649 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
649 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
650 import mercurial.hgweb.wsgicgi as wsgicgi
650 import mercurial.hgweb.wsgicgi as wsgicgi
651 from request import wsgiapplication
651 from request import wsgiapplication
652 def make_web_app():
652 def make_web_app():
653 return self
653 return self
654 wsgicgi.launch(wsgiapplication(make_web_app))
654 wsgicgi.launch(wsgiapplication(make_web_app))
655
655
656 def run_wsgi(self, req):
656 def run_wsgi(self, req):
657 def header(**map):
657 def header(**map):
658 header_file = cStringIO.StringIO(
658 header_file = cStringIO.StringIO(
659 ''.join(self.t("header", encoding=self.encoding, **map)))
659 ''.join(self.t("header", encoding=self.encoding, **map)))
660 msg = mimetools.Message(header_file, 0)
660 msg = mimetools.Message(header_file, 0)
661 req.header(msg.items())
661 req.header(msg.items())
662 yield header_file.read()
662 yield header_file.read()
663
663
664 def rawfileheader(**map):
664 def rawfileheader(**map):
665 req.header([('Content-type', map['mimetype']),
665 req.header([('Content-type', map['mimetype']),
666 ('Content-disposition', 'filename=%s' % map['file']),
666 ('Content-disposition', 'filename=%s' % map['file']),
667 ('Content-length', str(len(map['raw'])))])
667 ('Content-length', str(len(map['raw'])))])
668 yield ''
668 yield ''
669
669
670 def footer(**map):
670 def footer(**map):
671 yield self.t("footer", **map)
671 yield self.t("footer", **map)
672
672
673 def motd(**map):
673 def motd(**map):
674 yield self.config("web", "motd", "")
674 yield self.config("web", "motd", "")
675
675
676 def expand_form(form):
676 def expand_form(form):
677 shortcuts = {
677 shortcuts = {
678 'cl': [('cmd', ['changelog']), ('rev', None)],
678 'cl': [('cmd', ['changelog']), ('rev', None)],
679 'sl': [('cmd', ['shortlog']), ('rev', None)],
679 'sl': [('cmd', ['shortlog']), ('rev', None)],
680 'cs': [('cmd', ['changeset']), ('node', None)],
680 'cs': [('cmd', ['changeset']), ('node', None)],
681 'f': [('cmd', ['file']), ('filenode', None)],
681 'f': [('cmd', ['file']), ('filenode', None)],
682 'fl': [('cmd', ['filelog']), ('filenode', None)],
682 'fl': [('cmd', ['filelog']), ('filenode', None)],
683 'fd': [('cmd', ['filediff']), ('node', None)],
683 'fd': [('cmd', ['filediff']), ('node', None)],
684 'fa': [('cmd', ['annotate']), ('filenode', None)],
684 'fa': [('cmd', ['annotate']), ('filenode', None)],
685 'mf': [('cmd', ['manifest']), ('manifest', None)],
685 'mf': [('cmd', ['manifest']), ('manifest', None)],
686 'ca': [('cmd', ['archive']), ('node', None)],
686 'ca': [('cmd', ['archive']), ('node', None)],
687 'tags': [('cmd', ['tags'])],
687 'tags': [('cmd', ['tags'])],
688 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
688 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
689 'static': [('cmd', ['static']), ('file', None)]
689 'static': [('cmd', ['static']), ('file', None)]
690 }
690 }
691
691
692 for k in shortcuts.iterkeys():
692 for k in shortcuts.iterkeys():
693 if form.has_key(k):
693 if form.has_key(k):
694 for name, value in shortcuts[k]:
694 for name, value in shortcuts[k]:
695 if value is None:
695 if value is None:
696 value = form[k]
696 value = form[k]
697 form[name] = value
697 form[name] = value
698 del form[k]
698 del form[k]
699
699
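# Rough illustration of expand_form with hypothetical form contents: a None
# value in the shortcut table means "reuse the query value", a literal list
# means "use this fixed value".
#
#   form = {'cs': ['0123abcd']}
#   expand_form(form)
#   # -> {'cmd': ['changeset'], 'node': ['0123abcd']}
#
#   form = {'tip': ['']}
#   expand_form(form)
#   # -> {'cmd': ['changeset'], 'node': ['tip']}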
700 def rewrite_request(req):
700 def rewrite_request(req):
701 '''translate new web interface to traditional format'''
701 '''translate new web interface to traditional format'''
702
702
703 def spliturl(req):
703 def spliturl(req):
704 def firstitem(query):
704 def firstitem(query):
705 return query.split('&', 1)[0].split(';', 1)[0]
705 return query.split('&', 1)[0].split(';', 1)[0]
706
706
707 def normurl(url):
707 def normurl(url):
708 inner = '/'.join([x for x in url.split('/') if x])
708 inner = '/'.join([x for x in url.split('/') if x])
709 tl = len(url) > 1 and url.endswith('/') and '/' or ''
709 tl = len(url) > 1 and url.endswith('/') and '/' or ''
710
710
711 return '%s%s%s' % (url.startswith('/') and '/' or '',
711 return '%s%s%s' % (url.startswith('/') and '/' or '',
712 inner, tl)
712 inner, tl)
713
713
714 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
714 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
715 pi = normurl(req.env.get('PATH_INFO', ''))
715 pi = normurl(req.env.get('PATH_INFO', ''))
716 if pi:
716 if pi:
717 # strip leading /
717 # strip leading /
718 pi = pi[1:]
718 pi = pi[1:]
719 if pi:
719 if pi:
720 root = root[:root.rfind(pi)]
720 root = root[:root.rfind(pi)]
721 if req.env.has_key('REPO_NAME'):
721 if req.env.has_key('REPO_NAME'):
722 rn = req.env['REPO_NAME'] + '/'
722 rn = req.env['REPO_NAME'] + '/'
723 root += rn
723 root += rn
724 query = pi[len(rn):]
724 query = pi[len(rn):]
725 else:
725 else:
726 query = pi
726 query = pi
727 else:
727 else:
728 root += '?'
728 root += '?'
729 query = firstitem(req.env['QUERY_STRING'])
729 query = firstitem(req.env['QUERY_STRING'])
730
730
731 return (root, query)
731 return (root, query)
732
732
733 req.url, query = spliturl(req)
733 req.url, query = spliturl(req)
734
734
735 if req.form.has_key('cmd'):
735 if req.form.has_key('cmd'):
736 # old style
736 # old style
737 return
737 return
738
738
739 args = query.split('/', 2)
739 args = query.split('/', 2)
740 if not args or not args[0]:
740 if not args or not args[0]:
741 return
741 return
742
742
743 cmd = args.pop(0)
743 cmd = args.pop(0)
744 style = cmd.rfind('-')
744 style = cmd.rfind('-')
745 if style != -1:
745 if style != -1:
746 req.form['style'] = [cmd[:style]]
746 req.form['style'] = [cmd[:style]]
747 cmd = cmd[style+1:]
747 cmd = cmd[style+1:]
748 # avoid accepting e.g. style parameter as command
748 # avoid accepting e.g. style parameter as command
749 if hasattr(self, 'do_' + cmd):
749 if hasattr(self, 'do_' + cmd):
750 req.form['cmd'] = [cmd]
750 req.form['cmd'] = [cmd]
751
751
752 if args and args[0]:
752 if args and args[0]:
753 node = args.pop(0)
753 node = args.pop(0)
754 req.form['node'] = [node]
754 req.form['node'] = [node]
755 if args:
755 if args:
756 req.form['file'] = args
756 req.form['file'] = args
757
757
758 if cmd == 'static':
758 if cmd == 'static':
759 req.form['file'] = req.form['node']
759 req.form['file'] = req.form['node']
760 elif cmd == 'archive':
760 elif cmd == 'archive':
761 fn = req.form['node'][0]
761 fn = req.form['node'][0]
762 for type_, spec in self.archive_specs.iteritems():
762 for type_, spec in self.archive_specs.iteritems():
763 ext = spec[2]
763 ext = spec[2]
764 if fn.endswith(ext):
764 if fn.endswith(ext):
765 req.form['node'] = [fn[:-len(ext)]]
765 req.form['node'] = [fn[:-len(ext)]]
766 req.form['type'] = [type_]
766 req.form['type'] = [type_]
767
767
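# Rough illustration of rewrite_request for path-style URLs (hypothetical
# requests): PATH_INFO "file/tip/setup.py" becomes
#
#   req.form == {'cmd': ['file'], 'node': ['tip'], 'file': ['setup.py']}
#
# and "coal-shortlog/5" becomes
#
#   req.form == {'style': ['coal'], 'cmd': ['shortlog'], 'node': ['5']}
#
# For 'archive', the extension is folded back into a type, e.g.
# "archive/tip.tar.gz" -> node=['tip'], type=['gz'].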
768 def sessionvars(**map):
768 def sessionvars(**map):
769 fields = []
769 fields = []
770 if req.form.has_key('style'):
770 if req.form.has_key('style'):
771 style = req.form['style'][0]
771 style = req.form['style'][0]
772 if style != self.config('web', 'style', ''):
772 if style != self.config('web', 'style', ''):
773 fields.append(('style', style))
773 fields.append(('style', style))
774
774
775 separator = req.url[-1] == '?' and ';' or '?'
775 separator = req.url[-1] == '?' and ';' or '?'
776 for name, value in fields:
776 for name, value in fields:
777 yield dict(name=name, value=value, separator=separator)
777 yield dict(name=name, value=value, separator=separator)
778 separator = ';'
778 separator = ';'
779
779
780 self.refresh()
780 self.refresh()
781
781
782 expand_form(req.form)
782 expand_form(req.form)
783 rewrite_request(req)
783 rewrite_request(req)
784
784
785 style = self.config("web", "style", "")
785 style = self.config("web", "style", "")
786 if req.form.has_key('style'):
786 if req.form.has_key('style'):
787 style = req.form['style'][0]
787 style = req.form['style'][0]
788 mapfile = style_map(self.templatepath, style)
788 mapfile = style_map(self.templatepath, style)
789
789
790 proto = req.env.get('wsgi.url_scheme')
790 proto = req.env.get('wsgi.url_scheme')
791 if proto == 'https':
791 if proto == 'https':
792 proto = 'https'
792 proto = 'https'
793 default_port = "443"
793 default_port = "443"
794 else:
794 else:
795 proto = 'http'
795 proto = 'http'
796 default_port = "80"
796 default_port = "80"
797
797
798 port = req.env["SERVER_PORT"]
798 port = req.env["SERVER_PORT"]
799 port = port != default_port and (":" + port) or ""
799 port = port != default_port and (":" + port) or ""
800 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
800 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
801 staticurl = self.config("web", "staticurl") or req.url + 'static/'
801 staticurl = self.config("web", "staticurl") or req.url + 'static/'
802 if not staticurl.endswith('/'):
802 if not staticurl.endswith('/'):
803 staticurl += '/'
803 staticurl += '/'
804
804
805 if not self.reponame:
805 if not self.reponame:
806 self.reponame = (self.config("web", "name")
806 self.reponame = (self.config("web", "name")
807 or req.env.get('REPO_NAME')
807 or req.env.get('REPO_NAME')
808 or req.url.strip('/') or self.repo.root)
808 or req.url.strip('/') or self.repo.root)
809
809
810 self.t = templater.templater(mapfile, templater.common_filters,
810 self.t = templater.templater(mapfile, templater.common_filters,
811 defaults={"url": req.url,
811 defaults={"url": req.url,
812 "staticurl": staticurl,
812 "staticurl": staticurl,
813 "urlbase": urlbase,
813 "urlbase": urlbase,
814 "repo": self.reponame,
814 "repo": self.reponame,
815 "header": header,
815 "header": header,
816 "footer": footer,
816 "footer": footer,
817 "motd": motd,
817 "motd": motd,
818 "rawfileheader": rawfileheader,
818 "rawfileheader": rawfileheader,
819 "sessionvars": sessionvars
819 "sessionvars": sessionvars
820 })
820 })
821
821
822 try:
822 try:
823 if not req.form.has_key('cmd'):
823 if not req.form.has_key('cmd'):
824 req.form['cmd'] = [self.t.cache['default']]
824 req.form['cmd'] = [self.t.cache['default']]
825
825
826 cmd = req.form['cmd'][0]
826 cmd = req.form['cmd'][0]
827
827
828 method = getattr(self, 'do_' + cmd, None)
828 method = getattr(self, 'do_' + cmd, None)
829 if method:
829 if method:
830 try:
830 try:
831 method(req)
831 method(req)
832 except (hg.RepoError, revlog.RevlogError), inst:
832 except (hg.RepoError, revlog.RevlogError), inst:
833 req.write(self.t("error", error=str(inst)))
833 req.write(self.t("error", error=str(inst)))
834 else:
834 else:
835 req.write(self.t("error", error='No such method: ' + cmd))
835 req.write(self.t("error", error='No such method: ' + cmd))
836 finally:
836 finally:
837 self.t = None
837 self.t = None
838
838
839 def changectx(self, req):
839 def changectx(self, req):
840 if req.form.has_key('node'):
840 if req.form.has_key('node'):
841 changeid = req.form['node'][0]
841 changeid = req.form['node'][0]
842 elif req.form.has_key('manifest'):
842 elif req.form.has_key('manifest'):
843 changeid = req.form['manifest'][0]
843 changeid = req.form['manifest'][0]
844 else:
844 else:
845 changeid = self.repo.changelog.count() - 1
845 changeid = self.repo.changelog.count() - 1
846
846
847 try:
847 try:
848 ctx = self.repo.changectx(changeid)
848 ctx = self.repo.changectx(changeid)
849 except hg.RepoError:
849 except hg.RepoError:
850 man = self.repo.manifest
850 man = self.repo.manifest
851 mn = man.lookup(changeid)
851 mn = man.lookup(changeid)
852 ctx = self.repo.changectx(man.linkrev(mn))
852 ctx = self.repo.changectx(man.linkrev(mn))
853
853
854 return ctx
854 return ctx
855
855
856 def filectx(self, req):
856 def filectx(self, req):
857 path = self.cleanpath(req.form['file'][0])
857 path = self.cleanpath(req.form['file'][0])
858 if req.form.has_key('node'):
858 if req.form.has_key('node'):
859 changeid = req.form['node'][0]
859 changeid = req.form['node'][0]
860 else:
860 else:
861 changeid = req.form['filenode'][0]
861 changeid = req.form['filenode'][0]
862 try:
862 try:
863 ctx = self.repo.changectx(changeid)
863 ctx = self.repo.changectx(changeid)
864 fctx = ctx.filectx(path)
864 fctx = ctx.filectx(path)
865 except hg.RepoError:
865 except hg.RepoError:
866 fctx = self.repo.filectx(path, fileid=changeid)
866 fctx = self.repo.filectx(path, fileid=changeid)
867
867
868 return fctx
868 return fctx
869
869
870 def do_log(self, req):
870 def do_log(self, req):
871 if req.form.has_key('file') and req.form['file'][0]:
871 if req.form.has_key('file') and req.form['file'][0]:
872 self.do_filelog(req)
872 self.do_filelog(req)
873 else:
873 else:
874 self.do_changelog(req)
874 self.do_changelog(req)
875
875
876 def do_rev(self, req):
876 def do_rev(self, req):
877 self.do_changeset(req)
877 self.do_changeset(req)
878
878
879 def do_file(self, req):
879 def do_file(self, req):
880 path = self.cleanpath(req.form.get('file', [''])[0])
880 path = self.cleanpath(req.form.get('file', [''])[0])
881 if path:
881 if path:
882 try:
882 try:
883 req.write(self.filerevision(self.filectx(req)))
883 req.write(self.filerevision(self.filectx(req)))
884 return
884 return
885 except revlog.LookupError:
885 except revlog.LookupError:
886 pass
886 pass
887
887
888 req.write(self.manifest(self.changectx(req), path))
888 req.write(self.manifest(self.changectx(req), path))
889
889
890 def do_diff(self, req):
890 def do_diff(self, req):
891 self.do_filediff(req)
891 self.do_filediff(req)
892
892
893 def do_changelog(self, req, shortlog = False):
893 def do_changelog(self, req, shortlog = False):
894 if req.form.has_key('node'):
894 if req.form.has_key('node'):
895 ctx = self.changectx(req)
895 ctx = self.changectx(req)
896 else:
896 else:
897 if req.form.has_key('rev'):
897 if req.form.has_key('rev'):
898 hi = req.form['rev'][0]
898 hi = req.form['rev'][0]
899 else:
899 else:
900 hi = self.repo.changelog.count() - 1
900 hi = self.repo.changelog.count() - 1
901 try:
901 try:
902 ctx = self.repo.changectx(hi)
902 ctx = self.repo.changectx(hi)
903 except hg.RepoError:
903 except hg.RepoError:
904 req.write(self.search(hi)) # XXX redirect to 404 page?
904 req.write(self.search(hi)) # XXX redirect to 404 page?
905 return
905 return
906
906
907 req.write(self.changelog(ctx, shortlog = shortlog))
907 req.write(self.changelog(ctx, shortlog = shortlog))
908
908
909 def do_shortlog(self, req):
909 def do_shortlog(self, req):
910 self.do_changelog(req, shortlog = True)
910 self.do_changelog(req, shortlog = True)
911
911
912 def do_changeset(self, req):
912 def do_changeset(self, req):
913 req.write(self.changeset(self.changectx(req)))
913 req.write(self.changeset(self.changectx(req)))
914
914
915 def do_manifest(self, req):
915 def do_manifest(self, req):
916 req.write(self.manifest(self.changectx(req),
916 req.write(self.manifest(self.changectx(req),
917 self.cleanpath(req.form['path'][0])))
917 self.cleanpath(req.form['path'][0])))
918
918
919 def do_tags(self, req):
919 def do_tags(self, req):
920 req.write(self.tags())
920 req.write(self.tags())
921
921
922 def do_summary(self, req):
922 def do_summary(self, req):
923 req.write(self.summary())
923 req.write(self.summary())
924
924
925 def do_filediff(self, req):
925 def do_filediff(self, req):
926 req.write(self.filediff(self.filectx(req)))
926 req.write(self.filediff(self.filectx(req)))
927
927
928 def do_annotate(self, req):
928 def do_annotate(self, req):
929 req.write(self.fileannotate(self.filectx(req)))
929 req.write(self.fileannotate(self.filectx(req)))
930
930
931 def do_filelog(self, req):
931 def do_filelog(self, req):
932 req.write(self.filelog(self.filectx(req)))
932 req.write(self.filelog(self.filectx(req)))
933
933
934 def do_lookup(self, req):
934 def do_lookup(self, req):
935 try:
935 try:
936 r = hex(self.repo.lookup(req.form['key'][0]))
936 r = hex(self.repo.lookup(req.form['key'][0]))
937 success = 1
937 success = 1
938 except Exception,inst:
938 except Exception,inst:
939 r = str(inst)
939 r = str(inst)
940 success = 0
940 success = 0
941 resp = "%s %s\n" % (success, r)
941 resp = "%s %s\n" % (success, r)
942 req.httphdr("application/mercurial-0.1", length=len(resp))
942 req.httphdr("application/mercurial-0.1", length=len(resp))
943 req.write(resp)
943 req.write(resp)
944
944
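# do_lookup answers with a single line: "1 <hex node>\n" on success or
# "0 <error message>\n" on failure. A client could parse it roughly as
# (sketch only, not part of this module):
#
#   success, data = resp.split(' ', 1)
#   if success == '1':
#       node = data.strip()   # 40-char hex changeset id
#   else:
#       print "lookup failed:", data.strip()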
945 def do_heads(self, req):
945 def do_heads(self, req):
946 resp = " ".join(map(hex, self.repo.heads())) + "\n"
946 resp = " ".join(map(hex, self.repo.heads())) + "\n"
947 req.httphdr("application/mercurial-0.1", length=len(resp))
947 req.httphdr("application/mercurial-0.1", length=len(resp))
948 req.write(resp)
948 req.write(resp)
949
949
950 def do_branches(self, req):
950 def do_branches(self, req):
951 nodes = []
951 nodes = []
952 if req.form.has_key('nodes'):
952 if req.form.has_key('nodes'):
953 nodes = map(bin, req.form['nodes'][0].split(" "))
953 nodes = map(bin, req.form['nodes'][0].split(" "))
954 resp = cStringIO.StringIO()
954 resp = cStringIO.StringIO()
955 for b in self.repo.branches(nodes):
955 for b in self.repo.branches(nodes):
956 resp.write(" ".join(map(hex, b)) + "\n")
956 resp.write(" ".join(map(hex, b)) + "\n")
957 resp = resp.getvalue()
957 resp = resp.getvalue()
958 req.httphdr("application/mercurial-0.1", length=len(resp))
958 req.httphdr("application/mercurial-0.1", length=len(resp))
959 req.write(resp)
959 req.write(resp)
960
960
961 def do_between(self, req):
961 def do_between(self, req):
962 if req.form.has_key('pairs'):
962 if req.form.has_key('pairs'):
963 pairs = [map(bin, p.split("-"))
963 pairs = [map(bin, p.split("-"))
964 for p in req.form['pairs'][0].split(" ")]
964 for p in req.form['pairs'][0].split(" ")]
965 resp = cStringIO.StringIO()
965 resp = cStringIO.StringIO()
966 for b in self.repo.between(pairs):
966 for b in self.repo.between(pairs):
967 resp.write(" ".join(map(hex, b)) + "\n")
967 resp.write(" ".join(map(hex, b)) + "\n")
968 resp = resp.getvalue()
968 resp = resp.getvalue()
969 req.httphdr("application/mercurial-0.1", length=len(resp))
969 req.httphdr("application/mercurial-0.1", length=len(resp))
970 req.write(resp)
970 req.write(resp)
971
971
972 def do_changegroup(self, req):
972 def do_changegroup(self, req):
973 req.httphdr("application/mercurial-0.1")
973 req.httphdr("application/mercurial-0.1")
974 nodes = []
974 nodes = []
975 if not self.allowpull:
975 if not self.allowpull:
976 return
976 return
977
977
978 if req.form.has_key('roots'):
978 if req.form.has_key('roots'):
979 nodes = map(bin, req.form['roots'][0].split(" "))
979 nodes = map(bin, req.form['roots'][0].split(" "))
980
980
981 z = zlib.compressobj()
981 z = zlib.compressobj()
982 f = self.repo.changegroup(nodes, 'serve')
982 f = self.repo.changegroup(nodes, 'serve')
983 while 1:
983 while 1:
984 chunk = f.read(4096)
984 chunk = f.read(4096)
985 if not chunk:
985 if not chunk:
986 break
986 break
987 req.write(z.compress(chunk))
987 req.write(z.compress(chunk))
988
988
989 req.write(z.flush())
989 req.write(z.flush())
990
990
991 def do_changegroupsubset(self, req):
991 def do_changegroupsubset(self, req):
992 req.httphdr("application/mercurial-0.1")
992 req.httphdr("application/mercurial-0.1")
993 bases = []
993 bases = []
994 heads = []
994 heads = []
995 if not self.allowpull:
995 if not self.allowpull:
996 return
996 return
997
997
998 if req.form.has_key('bases'):
998 if req.form.has_key('bases'):
999 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
999 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
1000 if req.form.has_key('heads'):
1000 if req.form.has_key('heads'):
1001 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
1001 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
1002
1002
1003 z = zlib.compressobj()
1003 z = zlib.compressobj()
1004 f = self.repo.changegroupsubset(bases, heads, 'serve')
1004 f = self.repo.changegroupsubset(bases, heads, 'serve')
1005 while 1:
1005 while 1:
1006 chunk = f.read(4096)
1006 chunk = f.read(4096)
1007 if not chunk:
1007 if not chunk:
1008 break
1008 break
1009 req.write(z.compress(chunk))
1009 req.write(z.compress(chunk))
1010
1010
1011 req.write(z.flush())
1011 req.write(z.flush())
1012
1012
1013 def do_archive(self, req):
1013 def do_archive(self, req):
1014 type_ = req.form['type'][0]
1014 type_ = req.form['type'][0]
1015 allowed = self.configlist("web", "allow_archive")
1015 allowed = self.configlist("web", "allow_archive")
1016 if (type_ in self.archives and (type_ in allowed or
1016 if (type_ in self.archives and (type_ in allowed or
1017 self.configbool("web", "allow" + type_, False))):
1017 self.configbool("web", "allow" + type_, False))):
1018 self.archive(req, req.form['node'][0], type_)
1018 self.archive(req, req.form['node'][0], type_)
1019 return
1019 return
1020
1020
1021 req.write(self.t("error"))
1021 req.write(self.t("error"))
1022
1022
1023 def do_static(self, req):
1023 def do_static(self, req):
1024 fname = req.form['file'][0]
1024 fname = req.form['file'][0]
1025 # a repo owner may set web.static in .hg/hgrc to get any file
1025 # a repo owner may set web.static in .hg/hgrc to get any file
1026 # readable by the user running the CGI script
1026 # readable by the user running the CGI script
1027 static = self.config("web", "static",
1027 static = self.config("web", "static",
1028 os.path.join(self.templatepath, "static"),
1028 os.path.join(self.templatepath, "static"),
1029 untrusted=False)
1029 untrusted=False)
1030 req.write(staticfile(static, fname, req)
1030 req.write(staticfile(static, fname, req)
1031 or self.t("error", error="%r not found" % fname))
1031 or self.t("error", error="%r not found" % fname))
1032
1032
1033 def do_capabilities(self, req):
1033 def do_capabilities(self, req):
1034 caps = ['lookup', 'changegroupsubset']
1034 caps = ['lookup', 'changegroupsubset']
1035 if self.configbool('server', 'uncompressed'):
1035 if self.configbool('server', 'uncompressed'):
1036 caps.append('stream=%d' % self.repo.changelog.version)
1036 caps.append('stream=%d' % self.repo.changelog.version)
1037 # XXX: make configurable and/or share code with do_unbundle:
1037 # XXX: make configurable and/or share code with do_unbundle:
1038 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1038 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1039 if unbundleversions:
1039 if unbundleversions:
1040 caps.append('unbundle=%s' % ','.join(unbundleversions))
1040 caps.append('unbundle=%s' % ','.join(unbundleversions))
1041 resp = ' '.join(caps)
1041 resp = ' '.join(caps)
1042 req.httphdr("application/mercurial-0.1", length=len(resp))
1042 req.httphdr("application/mercurial-0.1", length=len(resp))
1043 req.write(resp)
1043 req.write(resp)
1044
1044
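# With server.uncompressed enabled, a typical capabilities response is a
# single space-separated line (token values illustrative; the stream number
# echoes self.repo.changelog.version):
#
#   lookup changegroupsubset stream=1 unbundle=HG10GZ,HG10BZ,HG10UN
#
# Clients probe these tokens before attempting streaming clone or push.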
1045 def check_perm(self, req, op, default):
1045 def check_perm(self, req, op, default):
1046 '''check permission for operation based on user auth.
1046 '''check permission for operation based on user auth.
1047 return true if op allowed, else false.
1047 return true if op allowed, else false.
1048 default is policy to use if no config given.'''
1048 default is policy to use if no config given.'''
1049
1049
1050 user = req.env.get('REMOTE_USER')
1050 user = req.env.get('REMOTE_USER')
1051
1051
1052 deny = self.configlist('web', 'deny_' + op)
1052 deny = self.configlist('web', 'deny_' + op)
1053 if deny and (not user or deny == ['*'] or user in deny):
1053 if deny and (not user or deny == ['*'] or user in deny):
1054 return False
1054 return False
1055
1055
1056 allow = self.configlist('web', 'allow_' + op)
1056 allow = self.configlist('web', 'allow_' + op)
1057 return (allow and (allow == ['*'] or user in allow)) or default
1057 return (allow and (allow == ['*'] or user in allow)) or default
1058
1058
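# check_perm is driven by [web] allow_<op>/deny_<op> lists; the only caller
# in this module is do_unbundle, which checks 'push' with default=False.
# An illustrative .hg/hgrc (names are made up):
#
#   [web]
#   push_ssl = false
#   allow_push = alice, bob    # or "*" to allow pushes from anyone
#   deny_push = mallory        # deny is checked first and wins over allow
#
# With no allow_push at all, the default (False) applies and pushes are
# rejected.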
1059 def do_unbundle(self, req):
1059 def do_unbundle(self, req):
1060 def bail(response, headers={}):
1060 def bail(response, headers={}):
1061 length = int(req.env['CONTENT_LENGTH'])
1061 length = int(req.env['CONTENT_LENGTH'])
1062 for s in util.filechunkiter(req, limit=length):
1062 for s in util.filechunkiter(req, limit=length):
1063 # drain the incoming bundle, else the client will not see the
1063 # drain the incoming bundle, else the client will not see the
1064 # response when run outside a cgi script
1064 # response when run outside a cgi script
1065 pass
1065 pass
1066 req.httphdr("application/mercurial-0.1", headers=headers)
1066 req.httphdr("application/mercurial-0.1", headers=headers)
1067 req.write('0\n')
1067 req.write('0\n')
1068 req.write(response)
1068 req.write(response)
1069
1069
1070 # require ssl by default, so that auth info cannot be sniffed
1070 # require ssl by default, so that auth info cannot be sniffed
1071 # or replayed
1071 # or replayed
1072 ssl_req = self.configbool('web', 'push_ssl', True)
1072 ssl_req = self.configbool('web', 'push_ssl', True)
1073 if ssl_req:
1073 if ssl_req:
1074 if req.env.get('wsgi.url_scheme') != 'https':
1074 if req.env.get('wsgi.url_scheme') != 'https':
1075 bail(_('ssl required\n'))
1075 bail(_('ssl required\n'))
1076 return
1076 return
1077 proto = 'https'
1077 proto = 'https'
1078 else:
1078 else:
1079 proto = 'http'
1079 proto = 'http'
1080
1080
1081 # do not allow push unless explicitly allowed
1081 # do not allow push unless explicitly allowed
1082 if not self.check_perm(req, 'push', False):
1082 if not self.check_perm(req, 'push', False):
1083 bail(_('push not authorized\n'),
1083 bail(_('push not authorized\n'),
1084 headers={'status': '401 Unauthorized'})
1084 headers={'status': '401 Unauthorized'})
1085 return
1085 return
1086
1086
1087 their_heads = req.form['heads'][0].split(' ')
1087 their_heads = req.form['heads'][0].split(' ')
1088
1088
1089 def check_heads():
1089 def check_heads():
1090 heads = map(hex, self.repo.heads())
1090 heads = map(hex, self.repo.heads())
1091 return their_heads == [hex('force')] or their_heads == heads
1091 return their_heads == [hex('force')] or their_heads == heads
1092
1092
1093 # fail early if possible
1093 # fail early if possible
1094 if not check_heads():
1094 if not check_heads():
1095 bail(_('unsynced changes\n'))
1095 bail(_('unsynced changes\n'))
1096 return
1096 return
1097
1097
1098 req.httphdr("application/mercurial-0.1")
1098 req.httphdr("application/mercurial-0.1")
1099
1099
1100 # do not lock repo until all changegroup data is
1100 # do not lock repo until all changegroup data is
1101 # streamed. save to temporary file.
1101 # streamed. save to temporary file.
1102
1102
1103 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1103 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1104 fp = os.fdopen(fd, 'wb+')
1104 fp = os.fdopen(fd, 'wb+')
1105 try:
1105 try:
1106 length = int(req.env['CONTENT_LENGTH'])
1106 length = int(req.env['CONTENT_LENGTH'])
1107 for s in util.filechunkiter(req, limit=length):
1107 for s in util.filechunkiter(req, limit=length):
1108 fp.write(s)
1108 fp.write(s)
1109
1109
1110 try:
1110 try:
1111 lock = self.repo.lock()
1111 lock = self.repo.lock()
1112 try:
1112 try:
1113 if not check_heads():
1113 if not check_heads():
1114 req.write('0\n')
1114 req.write('0\n')
1115 req.write(_('unsynced changes\n'))
1115 req.write(_('unsynced changes\n'))
1116 return
1116 return
1117
1117
1118 fp.seek(0)
1118 fp.seek(0)
1119 header = fp.read(6)
1119 header = fp.read(6)
1120 if not header.startswith("HG"):
1120 if not header.startswith("HG"):
1121 # old client with uncompressed bundle
1121 # old client with uncompressed bundle
1122 def generator(f):
1122 def generator(f):
1123 yield header
1123 yield header
1124 for chunk in f:
1124 for chunk in f:
1125 yield chunk
1125 yield chunk
1126 elif not header.startswith("HG10"):
1126 elif not header.startswith("HG10"):
1127 req.write("0\n")
1127 req.write("0\n")
1128 req.write(_("unknown bundle version\n"))
1128 req.write(_("unknown bundle version\n"))
1129 return
1129 return
1130 elif header == "HG10GZ":
1130 elif header == "HG10GZ":
1131 def generator(f):
1131 def generator(f):
1132 zd = zlib.decompressobj()
1132 zd = zlib.decompressobj()
1133 for chunk in f:
1133 for chunk in f:
1134 yield zd.decompress(chunk)
1134 yield zd.decompress(chunk)
1135 elif header == "HG10BZ":
1135 elif header == "HG10BZ":
1136 def generator(f):
1136 def generator(f):
1137 zd = bz2.BZ2Decompressor()
1137 zd = bz2.BZ2Decompressor()
1138 zd.decompress("BZ")
1138 zd.decompress("BZ")
1139 for chunk in f:
1139 for chunk in f:
1140 yield zd.decompress(chunk)
1140 yield zd.decompress(chunk)
1141 elif header == "HG10UN":
1141 elif header == "HG10UN":
1142 def generator(f):
1142 def generator(f):
1143 for chunk in f:
1143 for chunk in f:
1144 yield chunk
1144 yield chunk
1145 else:
1145 else:
1146 req.write("0\n")
1146 req.write("0\n")
1147 req.write(_("unknown bundle compression type\n"))
1147 req.write(_("unknown bundle compression type\n"))
1148 return
1148 return
1149 gen = generator(util.filechunkiter(fp, 4096))
1149 gen = generator(util.filechunkiter(fp, 4096))
1150
1150
1151 # send addchangegroup output to client
1151 # send addchangegroup output to client
1152
1152
1153 old_stdout = sys.stdout
1153 old_stdout = sys.stdout
1154 sys.stdout = cStringIO.StringIO()
1154 sys.stdout = cStringIO.StringIO()
1155
1155
1156 try:
1156 try:
1157 url = 'remote:%s:%s' % (proto,
1157 url = 'remote:%s:%s' % (proto,
1158 req.env.get('REMOTE_HOST', ''))
1158 req.env.get('REMOTE_HOST', ''))
1159 try:
1159 try:
1160 ret = self.repo.addchangegroup(
1160 ret = self.repo.addchangegroup(
1161 util.chunkbuffer(gen), 'serve', url)
1161 util.chunkbuffer(gen), 'serve', url)
1162 except util.Abort, inst:
1162 except util.Abort, inst:
1163 sys.stdout.write("abort: %s\n" % inst)
1163 sys.stdout.write("abort: %s\n" % inst)
1164 ret = 0
1164 ret = 0
1165 finally:
1165 finally:
1166 val = sys.stdout.getvalue()
1166 val = sys.stdout.getvalue()
1167 sys.stdout = old_stdout
1167 sys.stdout = old_stdout
1168 req.write('%d\n' % ret)
1168 req.write('%d\n' % ret)
1169 req.write(val)
1169 req.write(val)
1170 finally:
1170 finally:
1171 del lock
1171 del lock
1172 except (OSError, IOError), inst:
1172 except (OSError, IOError), inst:
1173 req.write('0\n')
1173 req.write('0\n')
1174 filename = getattr(inst, 'filename', '')
1174 filename = getattr(inst, 'filename', '')
1175 # Don't send our filesystem layout to the client
1175 # Don't send our filesystem layout to the client
1176 if filename.startswith(self.repo.root):
1176 if filename.startswith(self.repo.root):
1177 filename = filename[len(self.repo.root)+1:]
1177 filename = filename[len(self.repo.root)+1:]
1178 else:
1178 else:
1179 filename = ''
1179 filename = ''
1180 error = getattr(inst, 'strerror', 'Unknown error')
1180 error = getattr(inst, 'strerror', 'Unknown error')
1181 req.write('%s: %s\n' % (error, filename))
1181 req.write('%s: %s\n' % (error, filename))
1182 finally:
1182 finally:
1183 fp.close()
1183 fp.close()
1184 os.unlink(tempname)
1184 os.unlink(tempname)
1185
1185
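# Bundle payloads start with a 6-byte header that selects the generator
# above: "HG10GZ" (zlib), "HG10BZ" (bz2; the decompressor is primed with
# "BZ" because that magic is folded into the header) or "HG10UN"
# (uncompressed). An "HG" header with any other version is rejected as an
# unknown bundle version, "HG10" with any other suffix as an unknown
# compression type, and a header not starting with "HG" at all is treated
# as a raw changegroup from an old client.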
1186 def do_stream_out(self, req):
1186 def do_stream_out(self, req):
1187 req.httphdr("application/mercurial-0.1")
1187 req.httphdr("application/mercurial-0.1")
1188 streamclone.stream_out(self.repo, req, untrusted=True)
1188 streamclone.stream_out(self.repo, req, untrusted=True)
@@ -1,258 +1,259
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial import demandimport; demandimport.enable()
9 from mercurial import demandimport; demandimport.enable()
10 import os, mimetools, cStringIO
10 import os, mimetools, cStringIO
11 from mercurial.i18n import gettext as _
11 from mercurial.i18n import gettext as _
12 from mercurial import ui, hg, util, templater
12 from mercurial import ui, hg, util, templater
13 from common import get_mtime, staticfile, style_map, paritygen
13 from common import get_mtime, staticfile, style_map, paritygen
14 from hgweb_mod import hgweb
14 from hgweb_mod import hgweb
15
15
16 # This is a stopgap
16 # This is a stopgap
17 class hgwebdir(object):
17 class hgwebdir(object):
18 def __init__(self, config, parentui=None):
18 def __init__(self, config, parentui=None):
19 def cleannames(items):
19 def cleannames(items):
20 return [(name.strip(os.sep), path) for name, path in items]
20 return [(util.pconvert(name.strip(os.sep)), path)
21 for name, path in items]
21
22
22 self.parentui = parentui
23 self.parentui = parentui
23 self.motd = None
24 self.motd = None
24 self.style = None
25 self.style = None
25 self.stripecount = None
26 self.stripecount = None
26 self.repos_sorted = ('name', False)
27 self.repos_sorted = ('name', False)
27 if isinstance(config, (list, tuple)):
28 if isinstance(config, (list, tuple)):
28 self.repos = cleannames(config)
29 self.repos = cleannames(config)
29 self.repos_sorted = ('', False)
30 self.repos_sorted = ('', False)
30 elif isinstance(config, dict):
31 elif isinstance(config, dict):
31 self.repos = cleannames(config.items())
32 self.repos = cleannames(config.items())
32 self.repos.sort()
33 self.repos.sort()
33 else:
34 else:
34 if isinstance(config, util.configparser):
35 if isinstance(config, util.configparser):
35 cp = config
36 cp = config
36 else:
37 else:
37 cp = util.configparser()
38 cp = util.configparser()
38 cp.read(config)
39 cp.read(config)
39 self.repos = []
40 self.repos = []
40 if cp.has_section('web'):
41 if cp.has_section('web'):
41 if cp.has_option('web', 'motd'):
42 if cp.has_option('web', 'motd'):
42 self.motd = cp.get('web', 'motd')
43 self.motd = cp.get('web', 'motd')
43 if cp.has_option('web', 'style'):
44 if cp.has_option('web', 'style'):
44 self.style = cp.get('web', 'style')
45 self.style = cp.get('web', 'style')
45 if cp.has_option('web', 'stripes'):
46 if cp.has_option('web', 'stripes'):
46 self.stripecount = int(cp.get('web', 'stripes'))
47 self.stripecount = int(cp.get('web', 'stripes'))
47 if cp.has_section('paths'):
48 if cp.has_section('paths'):
48 self.repos.extend(cleannames(cp.items('paths')))
49 self.repos.extend(cleannames(cp.items('paths')))
49 if cp.has_section('collections'):
50 if cp.has_section('collections'):
50 for prefix, root in cp.items('collections'):
51 for prefix, root in cp.items('collections'):
51 for path in util.walkrepos(root):
52 for path in util.walkrepos(root):
52 repo = os.path.normpath(path)
53 repo = os.path.normpath(path)
53 name = repo
54 name = repo
54 if name.startswith(prefix):
55 if name.startswith(prefix):
55 name = name[len(prefix):]
56 name = name[len(prefix):]
56 self.repos.append((name.lstrip(os.sep), repo))
57 self.repos.append((name.lstrip(os.sep), repo))
57 self.repos.sort()
58 self.repos.sort()
58
59
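# A minimal hgweb.config accepted by the constructor above (paths are
# illustrative):
#
#   [web]
#   motd = shared repositories
#   style = gitweb
#   stripes = 2
#
#   [paths]
#   hg = /srv/repos/hg
#
#   [collections]
#   /srv/repos = /srv/repos
#
# [paths] lists repositories one by one; [collections] walks the given root
# with util.walkrepos() and strips the prefix from each discovered name.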
59 def run(self):
60 def run(self):
60 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
61 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
61 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
62 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
62 import mercurial.hgweb.wsgicgi as wsgicgi
63 import mercurial.hgweb.wsgicgi as wsgicgi
63 from request import wsgiapplication
64 from request import wsgiapplication
64 def make_web_app():
65 def make_web_app():
65 return self
66 return self
66 wsgicgi.launch(wsgiapplication(make_web_app))
67 wsgicgi.launch(wsgiapplication(make_web_app))
67
68
68 def run_wsgi(self, req):
69 def run_wsgi(self, req):
69 def header(**map):
70 def header(**map):
70 header_file = cStringIO.StringIO(
71 header_file = cStringIO.StringIO(
71 ''.join(tmpl("header", encoding=util._encoding, **map)))
72 ''.join(tmpl("header", encoding=util._encoding, **map)))
72 msg = mimetools.Message(header_file, 0)
73 msg = mimetools.Message(header_file, 0)
73 req.header(msg.items())
74 req.header(msg.items())
74 yield header_file.read()
75 yield header_file.read()
75
76
76 def footer(**map):
77 def footer(**map):
77 yield tmpl("footer", **map)
78 yield tmpl("footer", **map)
78
79
79 def motd(**map):
80 def motd(**map):
80 if self.motd is not None:
81 if self.motd is not None:
81 yield self.motd
82 yield self.motd
82 else:
83 else:
83 yield config('web', 'motd', '')
84 yield config('web', 'motd', '')
84
85
85 parentui = self.parentui or ui.ui(report_untrusted=False)
86 parentui = self.parentui or ui.ui(report_untrusted=False)
86
87
87 def config(section, name, default=None, untrusted=True):
88 def config(section, name, default=None, untrusted=True):
88 return parentui.config(section, name, default, untrusted)
89 return parentui.config(section, name, default, untrusted)
89
90
90 url = req.env['REQUEST_URI'].split('?')[0]
91 url = req.env['REQUEST_URI'].split('?')[0]
91 if not url.endswith('/'):
92 if not url.endswith('/'):
92 url += '/'
93 url += '/'
93 pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
94 pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
94 base = url[:len(url) - len(pathinfo)]
95 base = url[:len(url) - len(pathinfo)]
95 if not base.endswith('/'):
96 if not base.endswith('/'):
96 base += '/'
97 base += '/'
97
98
98 staticurl = config('web', 'staticurl') or base + 'static/'
99 staticurl = config('web', 'staticurl') or base + 'static/'
99 if not staticurl.endswith('/'):
100 if not staticurl.endswith('/'):
100 staticurl += '/'
101 staticurl += '/'
101
102
102 style = self.style
103 style = self.style
103 if style is None:
104 if style is None:
104 style = config('web', 'style', '')
105 style = config('web', 'style', '')
105 if req.form.has_key('style'):
106 if req.form.has_key('style'):
106 style = req.form['style'][0]
107 style = req.form['style'][0]
107 if self.stripecount is None:
108 if self.stripecount is None:
108 self.stripecount = int(config('web', 'stripes', 1))
109 self.stripecount = int(config('web', 'stripes', 1))
109 mapfile = style_map(templater.templatepath(), style)
110 mapfile = style_map(templater.templatepath(), style)
110 tmpl = templater.templater(mapfile, templater.common_filters,
111 tmpl = templater.templater(mapfile, templater.common_filters,
111 defaults={"header": header,
112 defaults={"header": header,
112 "footer": footer,
113 "footer": footer,
113 "motd": motd,
114 "motd": motd,
114 "url": url,
115 "url": url,
115 "staticurl": staticurl})
116 "staticurl": staticurl})
116
117
117 def archivelist(ui, nodeid, url):
118 def archivelist(ui, nodeid, url):
118 allowed = ui.configlist("web", "allow_archive", untrusted=True)
119 allowed = ui.configlist("web", "allow_archive", untrusted=True)
119 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
120 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
120 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
121 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
121 untrusted=True):
122 untrusted=True):
122 yield {"type" : i[0], "extension": i[1],
123 yield {"type" : i[0], "extension": i[1],
123 "node": nodeid, "url": url}
124 "node": nodeid, "url": url}
124
125
125 def entries(sortcolumn="", descending=False, subdir="", **map):
126 def entries(sortcolumn="", descending=False, subdir="", **map):
126 def sessionvars(**map):
127 def sessionvars(**map):
127 fields = []
128 fields = []
128 if req.form.has_key('style'):
129 if req.form.has_key('style'):
129 style = req.form['style'][0]
130 style = req.form['style'][0]
130 if style != get('web', 'style', ''):
131 if style != get('web', 'style', ''):
131 fields.append(('style', style))
132 fields.append(('style', style))
132
133
133 separator = url[-1] == '?' and ';' or '?'
134 separator = url[-1] == '?' and ';' or '?'
134 for name, value in fields:
135 for name, value in fields:
135 yield dict(name=name, value=value, separator=separator)
136 yield dict(name=name, value=value, separator=separator)
136 separator = ';'
137 separator = ';'
137
138
138 rows = []
139 rows = []
139 parity = paritygen(self.stripecount)
140 parity = paritygen(self.stripecount)
140 for name, path in self.repos:
141 for name, path in self.repos:
141 if not name.startswith(subdir):
142 if not name.startswith(subdir):
142 continue
143 continue
143 name = name[len(subdir):]
144 name = name[len(subdir):]
144
145
145 u = ui.ui(parentui=parentui)
146 u = ui.ui(parentui=parentui)
146 try:
147 try:
147 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
148 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
148 except IOError:
149 except IOError:
149 pass
150 pass
150 def get(section, name, default=None):
151 def get(section, name, default=None):
151 return u.config(section, name, default, untrusted=True)
152 return u.config(section, name, default, untrusted=True)
152
153
153 if u.configbool("web", "hidden", untrusted=True):
154 if u.configbool("web", "hidden", untrusted=True):
154 continue
155 continue
155
156
156 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
157 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
157 .replace("//", "/")) + '/'
158 .replace("//", "/")) + '/'
158
159
159 # update time with local timezone
160 # update time with local timezone
160 try:
161 try:
161 d = (get_mtime(path), util.makedate()[1])
162 d = (get_mtime(path), util.makedate()[1])
162 except OSError:
163 except OSError:
163 continue
164 continue
164
165
165 contact = (get("ui", "username") or # preferred
166 contact = (get("ui", "username") or # preferred
166 get("web", "contact") or # deprecated
167 get("web", "contact") or # deprecated
167 get("web", "author", "")) # also
168 get("web", "author", "")) # also
168 description = get("web", "description", "")
169 description = get("web", "description", "")
169 name = get("web", "name", name)
170 name = get("web", "name", name)
170 row = dict(contact=contact or "unknown",
171 row = dict(contact=contact or "unknown",
171 contact_sort=contact.upper() or "unknown",
172 contact_sort=contact.upper() or "unknown",
172 name=name,
173 name=name,
173 name_sort=name,
174 name_sort=name,
174 url=url,
175 url=url,
175 description=description or "unknown",
176 description=description or "unknown",
176 description_sort=description.upper() or "unknown",
177 description_sort=description.upper() or "unknown",
177 lastchange=d,
178 lastchange=d,
178 lastchange_sort=d[1]-d[0],
179 lastchange_sort=d[1]-d[0],
179 sessionvars=sessionvars,
180 sessionvars=sessionvars,
180 archives=archivelist(u, "tip", url))
181 archives=archivelist(u, "tip", url))
181 if (not sortcolumn
182 if (not sortcolumn
182 or (sortcolumn, descending) == self.repos_sorted):
183 or (sortcolumn, descending) == self.repos_sorted):
183 # fast path for unsorted output
184 # fast path for unsorted output
184 row['parity'] = parity.next()
185 row['parity'] = parity.next()
185 yield row
186 yield row
186 else:
187 else:
187 rows.append((row["%s_sort" % sortcolumn], row))
188 rows.append((row["%s_sort" % sortcolumn], row))
188 if rows:
189 if rows:
189 rows.sort()
190 rows.sort()
190 if descending:
191 if descending:
191 rows.reverse()
192 rows.reverse()
192 for key, row in rows:
193 for key, row in rows:
193 row['parity'] = parity.next()
194 row['parity'] = parity.next()
194 yield row
195 yield row
195
196
196 def makeindex(req, subdir=""):
197 def makeindex(req, subdir=""):
197 sortable = ["name", "description", "contact", "lastchange"]
198 sortable = ["name", "description", "contact", "lastchange"]
198 sortcolumn, descending = self.repos_sorted
199 sortcolumn, descending = self.repos_sorted
199 if req.form.has_key('sort'):
200 if req.form.has_key('sort'):
200 sortcolumn = req.form['sort'][0]
201 sortcolumn = req.form['sort'][0]
201 descending = sortcolumn.startswith('-')
202 descending = sortcolumn.startswith('-')
202 if descending:
203 if descending:
203 sortcolumn = sortcolumn[1:]
204 sortcolumn = sortcolumn[1:]
204 if sortcolumn not in sortable:
205 if sortcolumn not in sortable:
205 sortcolumn = ""
206 sortcolumn = ""
206
207
207 sort = [("sort_%s" % column,
208 sort = [("sort_%s" % column,
208 "%s%s" % ((not descending and column == sortcolumn)
209 "%s%s" % ((not descending and column == sortcolumn)
209 and "-" or "", column))
210 and "-" or "", column))
210 for column in sortable]
211 for column in sortable]
211 req.write(tmpl("index", entries=entries, subdir=subdir,
212 req.write(tmpl("index", entries=entries, subdir=subdir,
212 sortcolumn=sortcolumn, descending=descending,
213 sortcolumn=sortcolumn, descending=descending,
213 **dict(sort)))
214 **dict(sort)))
214
215
215 try:
216 try:
216 virtual = req.env.get("PATH_INFO", "").strip('/')
217 virtual = req.env.get("PATH_INFO", "").strip('/')
217 if virtual.startswith('static/'):
218 if virtual.startswith('static/'):
218 static = os.path.join(templater.templatepath(), 'static')
219 static = os.path.join(templater.templatepath(), 'static')
219 fname = virtual[7:]
220 fname = virtual[7:]
220 req.write(staticfile(static, fname, req) or
221 req.write(staticfile(static, fname, req) or
221 tmpl('error', error='%r not found' % fname))
222 tmpl('error', error='%r not found' % fname))
222 elif virtual:
223 elif virtual:
223 repos = dict(self.repos)
224 repos = dict(self.repos)
224 while virtual:
225 while virtual:
225 real = repos.get(virtual)
226 real = repos.get(virtual)
226 if real:
227 if real:
227 req.env['REPO_NAME'] = virtual
228 req.env['REPO_NAME'] = virtual
228 try:
229 try:
229 repo = hg.repository(parentui, real)
230 repo = hg.repository(parentui, real)
230 hgweb(repo).run_wsgi(req)
231 hgweb(repo).run_wsgi(req)
231 except IOError, inst:
232 except IOError, inst:
232 req.write(tmpl("error", error=inst.strerror))
233 req.write(tmpl("error", error=inst.strerror))
233 except hg.RepoError, inst:
234 except hg.RepoError, inst:
234 req.write(tmpl("error", error=str(inst)))
235 req.write(tmpl("error", error=str(inst)))
235 return
236 return
236
237
237 # browse subdirectories
238 # browse subdirectories
238 subdir = virtual + '/'
239 subdir = virtual + '/'
239 if [r for r in repos if r.startswith(subdir)]:
240 if [r for r in repos if r.startswith(subdir)]:
240 makeindex(req, subdir)
241 makeindex(req, subdir)
241 return
242 return
242
243
243 up = virtual.rfind('/')
244 up = virtual.rfind('/')
244 if up < 0:
245 if up < 0:
245 break
246 break
246 virtual = virtual[:up]
247 virtual = virtual[:up]
247
248
248 req.write(tmpl("notfound", repo=virtual))
249 req.write(tmpl("notfound", repo=virtual))
249 else:
250 else:
250 if req.form.has_key('static'):
251 if req.form.has_key('static'):
251 static = os.path.join(templater.templatepath(), "static")
252 static = os.path.join(templater.templatepath(), "static")
252 fname = req.form['static'][0]
253 fname = req.form['static'][0]
253 req.write(staticfile(static, fname, req)
254 req.write(staticfile(static, fname, req)
254 or tmpl("error", error="%r not found" % fname))
255 or tmpl("error", error="%r not found" % fname))
255 else:
256 else:
256 makeindex(req)
257 makeindex(req)
257 finally:
258 finally:
258 tmpl = None
259 tmpl = None
@@ -1,414 +1,454
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from remoterepo import *
10 from remoterepo import *
11 from i18n import _
11 from i18n import _
12 import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
12 import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
13 import errno, keepalive, tempfile, socket, changegroup
13 import errno, keepalive, tempfile, socket, changegroup
14
14
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 def __init__(self, ui):
16 def __init__(self, ui):
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 self.ui = ui
18 self.ui = ui
19
19
20 def find_user_password(self, realm, authuri):
20 def find_user_password(self, realm, authuri):
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 self, realm, authuri)
22 self, realm, authuri)
23 user, passwd = authinfo
23 user, passwd = authinfo
24 if user and passwd:
24 if user and passwd:
25 return (user, passwd)
25 return (user, passwd)
26
26
27 if not self.ui.interactive:
27 if not self.ui.interactive:
28 raise util.Abort(_('http authorization required'))
28 raise util.Abort(_('http authorization required'))
29
29
30 self.ui.write(_("http authorization required\n"))
30 self.ui.write(_("http authorization required\n"))
31 self.ui.status(_("realm: %s\n") % realm)
31 self.ui.status(_("realm: %s\n") % realm)
32 if user:
32 if user:
33 self.ui.status(_("user: %s\n") % user)
33 self.ui.status(_("user: %s\n") % user)
34 else:
34 else:
35 user = self.ui.prompt(_("user:"), default=None)
35 user = self.ui.prompt(_("user:"), default=None)
36
36
37 if not passwd:
37 if not passwd:
38 passwd = self.ui.getpass()
38 passwd = self.ui.getpass()
39
39
40 self.add_password(realm, authuri, user, passwd)
40 self.add_password(realm, authuri, user, passwd)
41 return (user, passwd)
41 return (user, passwd)
42
42
43 def netlocsplit(netloc):
43 def netlocsplit(netloc):
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45
45
46 a = netloc.find('@')
46 a = netloc.find('@')
47 if a == -1:
47 if a == -1:
48 user, passwd = None, None
48 user, passwd = None, None
49 else:
49 else:
50 userpass, netloc = netloc[:a], netloc[a+1:]
50 userpass, netloc = netloc[:a], netloc[a+1:]
51 c = userpass.find(':')
51 c = userpass.find(':')
52 if c == -1:
52 if c == -1:
53 user, passwd = urllib.unquote(userpass), None
53 user, passwd = urllib.unquote(userpass), None
54 else:
54 else:
55 user = urllib.unquote(userpass[:c])
55 user = urllib.unquote(userpass[:c])
56 passwd = urllib.unquote(userpass[c+1:])
56 passwd = urllib.unquote(userpass[c+1:])
57 c = netloc.find(':')
57 c = netloc.find(':')
58 if c == -1:
58 if c == -1:
59 host, port = netloc, None
59 host, port = netloc, None
60 else:
60 else:
61 host, port = netloc[:c], netloc[c+1:]
61 host, port = netloc[:c], netloc[c+1:]
62 return host, port, user, passwd
62 return host, port, user, passwd
63
63
64 def netlocunsplit(host, port, user=None, passwd=None):
64 def netlocunsplit(host, port, user=None, passwd=None):
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 if port:
66 if port:
67 hostport = host + ':' + port
67 hostport = host + ':' + port
68 else:
68 else:
69 hostport = host
69 hostport = host
70 if user:
70 if user:
71 if passwd:
71 if passwd:
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 else:
73 else:
74 userpass = urllib.quote(user)
74 userpass = urllib.quote(user)
75 return userpass + '@' + hostport
75 return userpass + '@' + hostport
76 return hostport
76 return hostport
77
77
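Aside (not part of the diff): a minimal sketch of how netlocsplit and netlocunsplit round-trip a netloc, assuming both helpers are importable from mercurial.httprepo; the host, user, and password values are placeholders.

from mercurial.httprepo import netlocsplit, netlocunsplit

host, port, user, passwd = netlocsplit('bob:secret@example.com:8080')
# host='example.com', port='8080', user='bob', passwd='secret'
assert netlocunsplit(host, port, user, passwd) == 'bob:secret@example.com:8080'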
78 # work around a bug in Python < 2.4.2
78 # work around a bug in Python < 2.4.2
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 class request(urllib2.Request):
80 class request(urllib2.Request):
81 def add_header(self, key, val):
81 def add_header(self, key, val):
82 if key.lower() == 'proxy-authorization':
82 if key.lower() == 'proxy-authorization':
83 val = val.strip()
83 val = val.strip()
84 return urllib2.Request.add_header(self, key, val)
84 return urllib2.Request.add_header(self, key, val)
85
85
86 class httpsendfile(file):
86 class httpsendfile(file):
87 def __len__(self):
87 def __len__(self):
88 return os.fstat(self.fileno()).st_size
88 return os.fstat(self.fileno()).st_size
89
89
90 def _gen_sendfile(connection):
90 def _gen_sendfile(connection):
91 def _sendfile(self, data):
91 def _sendfile(self, data):
92 # send a file
92 # send a file
93 if isinstance(data, httpsendfile):
93 if isinstance(data, httpsendfile):
94 # if auth required, some data sent twice, so rewind here
94 # if auth required, some data sent twice, so rewind here
95 data.seek(0)
95 data.seek(0)
96 for chunk in util.filechunkiter(data):
96 for chunk in util.filechunkiter(data):
97 connection.send(self, chunk)
97 connection.send(self, chunk)
98 else:
98 else:
99 connection.send(self, data)
99 connection.send(self, data)
100 return _sendfile
100 return _sendfile
101
101
102 class httpconnection(keepalive.HTTPConnection):
102 class httpconnection(keepalive.HTTPConnection):
103 # must be able to send big bundle as stream.
103 # must be able to send big bundle as stream.
104 send = _gen_sendfile(keepalive.HTTPConnection)
104 send = _gen_sendfile(keepalive.HTTPConnection)
105
105
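Aside (not part of the diff): a sketch of why httpsendfile defines __len__ -- in Python 2, urllib2 derives a Content-length header from len(data), while the patched send() above streams the file chunk by chunk instead of reading it whole. The bundle path below is a placeholder.

bundle = httpsendfile('/tmp/some-bundle.hg', 'rb')  # hypothetical temp bundle path
length = len(bundle)  # same value as os.fstat(bundle.fileno()).st_size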
106 class basehttphandler(keepalive.HTTPHandler):
106 class basehttphandler(keepalive.HTTPHandler):
107 def http_open(self, req):
107 def http_open(self, req):
108 return self.do_open(httpconnection, req)
108 return self.do_open(httpconnection, req)
109
109
110 has_https = hasattr(urllib2, 'HTTPSHandler')
110 has_https = hasattr(urllib2, 'HTTPSHandler')
111 if has_https:
111 if has_https:
112 class httpsconnection(httplib.HTTPSConnection):
112 class httpsconnection(httplib.HTTPSConnection):
113 response_class = keepalive.HTTPResponse
113 response_class = keepalive.HTTPResponse
114 # must be able to send big bundle as stream.
114 # must be able to send big bundle as stream.
115 send = _gen_sendfile(httplib.HTTPSConnection)
115 send = _gen_sendfile(httplib.HTTPSConnection)
116
116
117 class httphandler(basehttphandler, urllib2.HTTPSHandler):
117 class httphandler(basehttphandler, urllib2.HTTPSHandler):
118 def https_open(self, req):
118 def https_open(self, req):
119 return self.do_open(httpsconnection, req)
119 return self.do_open(httpsconnection, req)
120 else:
120 else:
121 class httphandler(basehttphandler):
121 class httphandler(basehttphandler):
122 pass
122 pass
123
123
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 # it doesn't know about the auth type requested. This can happen if
125 # it doesn't know about the auth type requested. This can happen if
126 # somebody is using BasicAuth and types a bad password.
126 # somebody is using BasicAuth and types a bad password.
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 try:
129 try:
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 self, auth_header, host, req, headers)
131 self, auth_header, host, req, headers)
132 except ValueError, inst:
132 except ValueError, inst:
133 arg = inst.args[0]
133 arg = inst.args[0]
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 return
135 return
136 raise
136 raise
137
137
138 def zgenerator(f):
138 def zgenerator(f):
139 zd = zlib.decompressobj()
139 zd = zlib.decompressobj()
140 try:
140 try:
141 for chunk in util.filechunkiter(f):
141 for chunk in util.filechunkiter(f):
142 yield zd.decompress(chunk)
142 yield zd.decompress(chunk)
143 except httplib.HTTPException, inst:
143 except httplib.HTTPException, inst:
144 raise IOError(None, _('connection ended unexpectedly'))
144 raise IOError(None, _('connection ended unexpectedly'))
145 yield zd.flush()
145 yield zd.flush()
146
146
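Aside (not part of the diff): a minimal sketch of zgenerator decompressing a response body lazily, assuming the function is importable from mercurial.httprepo; the payload string is made up.

import zlib, cStringIO
from mercurial.httprepo import zgenerator

body = cStringIO.StringIO(zlib.compress('some changegroup data'))
print ''.join(zgenerator(body))  # -> 'some changegroup data'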
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 '0123456789' '_.-/')
150 _safeset = None
151 _hex = None
152 def quotepath(path):
153 '''quote the path part of a URL
154
155 This is similar to urllib.quote, but it also tries to avoid
156 quoting things twice (inspired by wget):
157
158 >>> quotepath('abc def')
159 'abc%20def'
160 >>> quotepath('abc%20def')
161 'abc%20def'
162 >>> quotepath('abc%20 def')
163 'abc%20%20def'
164 >>> quotepath('abc def%20')
165 'abc%20def%20'
166 >>> quotepath('abc def%2')
167 'abc%20def%252'
168 >>> quotepath('abc def%')
169 'abc%20def%25'
170 '''
171 global _safeset, _hex
172 if _safeset is None:
173 _safeset = util.set(_safe)
174 _hex = util.set('abcdefABCDEF0123456789')
175 l = list(path)
176 for i in xrange(len(l)):
177 c = l[i]
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 pass
180 elif c not in _safeset:
181 l[i] = '%%%02X' % ord(c)
182 return ''.join(l)
183
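Aside (not part of the diff): the doctests above are what the new test-doctest hunk at the end of this changeset exercises. As a further illustration, this is roughly how __init__ below applies quotepath to the path component of a repository URL (the URL is a placeholder).

import urlparse
scheme, netloc, urlpath, query, frag = urlparse.urlsplit('http://hg.example.com/repo name')
print quotepath(urlpath)  # -> '/repo%20name'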
147 class httprepository(remoterepository):
184 class httprepository(remoterepository):
148 def __init__(self, ui, path):
185 def __init__(self, ui, path):
149 self.path = path
186 self.path = path
150 self.caps = None
187 self.caps = None
151 self.handler = None
188 self.handler = None
152 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
153 if query or frag:
190 if query or frag:
154 raise util.Abort(_('unsupported URL component: "%s"') %
191 raise util.Abort(_('unsupported URL component: "%s"') %
155 (query or frag))
192 (query or frag))
156 if not urlpath: urlpath = '/'
193 if not urlpath:
194 urlpath = '/'
195 urlpath = quotepath(urlpath)
157 host, port, user, passwd = netlocsplit(netloc)
196 host, port, user, passwd = netlocsplit(netloc)
158
197
159 # urllib cannot handle URLs with embedded user or passwd
198 # urllib cannot handle URLs with embedded user or passwd
160 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
161 urlpath, '', ''))
200 urlpath, '', ''))
162 self.ui = ui
201 self.ui = ui
202 self.ui.debug(_('using %s\n') % self._url)
163
203
164 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
165 # XXX proxyauthinfo = None
205 # XXX proxyauthinfo = None
166 self.handler = httphandler()
206 self.handler = httphandler()
167 handlers = [self.handler]
207 handlers = [self.handler]
168
208
169 if proxyurl:
209 if proxyurl:
170 # proxy can be proper url or host[:port]
210 # proxy can be proper url or host[:port]
171 if not (proxyurl.startswith('http:') or
211 if not (proxyurl.startswith('http:') or
172 proxyurl.startswith('https:')):
212 proxyurl.startswith('https:')):
173 proxyurl = 'http://' + proxyurl + '/'
213 proxyurl = 'http://' + proxyurl + '/'
174 snpqf = urlparse.urlsplit(proxyurl)
214 snpqf = urlparse.urlsplit(proxyurl)
175 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
215 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
176 hpup = netlocsplit(proxynetloc)
216 hpup = netlocsplit(proxynetloc)
177
217
178 proxyhost, proxyport, proxyuser, proxypasswd = hpup
218 proxyhost, proxyport, proxyuser, proxypasswd = hpup
179 if not proxyuser:
219 if not proxyuser:
180 proxyuser = ui.config("http_proxy", "user")
220 proxyuser = ui.config("http_proxy", "user")
181 proxypasswd = ui.config("http_proxy", "passwd")
221 proxypasswd = ui.config("http_proxy", "passwd")
182
222
183 # see if we should use a proxy for this url
223 # see if we should use a proxy for this url
184 no_list = [ "localhost", "127.0.0.1" ]
224 no_list = [ "localhost", "127.0.0.1" ]
185 no_list.extend([p.lower() for
225 no_list.extend([p.lower() for
186 p in ui.configlist("http_proxy", "no")])
226 p in ui.configlist("http_proxy", "no")])
187 no_list.extend([p.strip().lower() for
227 no_list.extend([p.strip().lower() for
188 p in os.getenv("no_proxy", '').split(',')
228 p in os.getenv("no_proxy", '').split(',')
189 if p.strip()])
229 if p.strip()])
190 # "http_proxy.always" config is for running tests on localhost
230 # "http_proxy.always" config is for running tests on localhost
191 if (not ui.configbool("http_proxy", "always") and
231 if (not ui.configbool("http_proxy", "always") and
192 host.lower() in no_list):
232 host.lower() in no_list):
193 ui.debug(_('disabling proxy for %s\n') % host)
233 ui.debug(_('disabling proxy for %s\n') % host)
194 else:
234 else:
195 proxyurl = urlparse.urlunsplit((
235 proxyurl = urlparse.urlunsplit((
196 proxyscheme, netlocunsplit(proxyhost, proxyport,
236 proxyscheme, netlocunsplit(proxyhost, proxyport,
197 proxyuser, proxypasswd or ''),
237 proxyuser, proxypasswd or ''),
198 proxypath, proxyquery, proxyfrag))
238 proxypath, proxyquery, proxyfrag))
199 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
239 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
200 ui.debug(_('proxying through http://%s:%s\n') %
240 ui.debug(_('proxying through http://%s:%s\n') %
201 (proxyhost, proxyport))
241 (proxyhost, proxyport))
202
242
203 # urllib2 takes proxy values from the environment and those
243 # urllib2 takes proxy values from the environment and those
204 # will take precedence if found, so drop them
244 # will take precedence if found, so drop them
205 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
245 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
206 try:
246 try:
207 if os.environ.has_key(env):
247 if os.environ.has_key(env):
208 del os.environ[env]
248 del os.environ[env]
209 except OSError:
249 except OSError:
210 pass
250 pass
211
251
212 passmgr = passwordmgr(ui)
252 passmgr = passwordmgr(ui)
213 if user:
253 if user:
214 ui.debug(_('http auth: user %s, password %s\n') %
254 ui.debug(_('http auth: user %s, password %s\n') %
215 (user, passwd and '*' * len(passwd) or 'not set'))
255 (user, passwd and '*' * len(passwd) or 'not set'))
216 passmgr.add_password(None, host, user, passwd or '')
256 passmgr.add_password(None, host, user, passwd or '')
217
257
218 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
258 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
219 httpdigestauthhandler(passmgr)))
259 httpdigestauthhandler(passmgr)))
220 opener = urllib2.build_opener(*handlers)
260 opener = urllib2.build_opener(*handlers)
221
261
222 # 1.0 here is the _protocol_ version
262 # 1.0 here is the _protocol_ version
223 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
263 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
224 urllib2.install_opener(opener)
264 urllib2.install_opener(opener)
225
265
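Aside (not part of the diff): an assumed-equivalent sketch, in plain urllib2 calls, of the opener that __init__ ends up building when a proxy and credentials are configured (the real code uses the keepalive-based httphandler and the httpdigestauthhandler wrapper above); host names and credentials are placeholders.

import urllib2
passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
passmgr.add_password(None, 'hg.example.com', 'alice', 'secret')
opener = urllib2.build_opener(
    urllib2.ProxyHandler({'http': 'http://proxy.example.com:3128/'}),
    urllib2.HTTPBasicAuthHandler(passmgr),
    urllib2.HTTPDigestAuthHandler(passmgr))
opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]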
226 def __del__(self):
266 def __del__(self):
227 if self.handler:
267 if self.handler:
228 self.handler.close_all()
268 self.handler.close_all()
229 self.handler = None
269 self.handler = None
230
270
231 def url(self):
271 def url(self):
232 return self.path
272 return self.path
233
273
234 # look up capabilities only when needed
274 # look up capabilities only when needed
235
275
236 def get_caps(self):
276 def get_caps(self):
237 if self.caps is None:
277 if self.caps is None:
238 try:
278 try:
239 self.caps = self.do_read('capabilities').split()
279 self.caps = self.do_read('capabilities').split()
240 except hg.RepoError:
280 except hg.RepoError:
241 self.caps = ()
281 self.caps = ()
242 self.ui.debug(_('capabilities: %s\n') %
282 self.ui.debug(_('capabilities: %s\n') %
243 (' '.join(self.caps or ['none'])))
283 (' '.join(self.caps or ['none'])))
244 return self.caps
284 return self.caps
245
285
246 capabilities = property(get_caps)
286 capabilities = property(get_caps)
247
287
248 def lock(self):
288 def lock(self):
249 raise util.Abort(_('operation not supported over http'))
289 raise util.Abort(_('operation not supported over http'))
250
290
251 def do_cmd(self, cmd, **args):
291 def do_cmd(self, cmd, **args):
252 data = args.pop('data', None)
292 data = args.pop('data', None)
253 headers = args.pop('headers', {})
293 headers = args.pop('headers', {})
254 self.ui.debug(_("sending %s command\n") % cmd)
294 self.ui.debug(_("sending %s command\n") % cmd)
255 q = {"cmd": cmd}
295 q = {"cmd": cmd}
256 q.update(args)
296 q.update(args)
257 qs = '?%s' % urllib.urlencode(q)
297 qs = '?%s' % urllib.urlencode(q)
258 cu = "%s%s" % (self._url, qs)
298 cu = "%s%s" % (self._url, qs)
259 try:
299 try:
260 if data:
300 if data:
261 self.ui.debug(_("sending %s bytes\n") %
301 self.ui.debug(_("sending %s bytes\n") %
262 headers.get('content-length', 'X'))
302 headers.get('content-length', 'X'))
263 resp = urllib2.urlopen(request(cu, data, headers))
303 resp = urllib2.urlopen(request(cu, data, headers))
264 except urllib2.HTTPError, inst:
304 except urllib2.HTTPError, inst:
265 if inst.code == 401:
305 if inst.code == 401:
266 raise util.Abort(_('authorization failed'))
306 raise util.Abort(_('authorization failed'))
267 raise
307 raise
268 except httplib.HTTPException, inst:
308 except httplib.HTTPException, inst:
269 self.ui.debug(_('http error while sending %s command\n') % cmd)
309 self.ui.debug(_('http error while sending %s command\n') % cmd)
270 self.ui.print_exc()
310 self.ui.print_exc()
271 raise IOError(None, inst)
311 raise IOError(None, inst)
272 except IndexError:
312 except IndexError:
273 # this only happens with Python 2.3, later versions raise URLError
313 # this only happens with Python 2.3, later versions raise URLError
274 raise util.Abort(_('http error, possibly caused by proxy setting'))
314 raise util.Abort(_('http error, possibly caused by proxy setting'))
275 # record the url we got redirected to
315 # record the url we got redirected to
276 resp_url = resp.geturl()
316 resp_url = resp.geturl()
277 if resp_url.endswith(qs):
317 if resp_url.endswith(qs):
278 resp_url = resp_url[:-len(qs)]
318 resp_url = resp_url[:-len(qs)]
279 if self._url != resp_url:
319 if self._url != resp_url:
280 self.ui.status(_('real URL is %s\n') % resp_url)
320 self.ui.status(_('real URL is %s\n') % resp_url)
281 self._url = resp_url
321 self._url = resp_url
282 try:
322 try:
283 proto = resp.getheader('content-type')
323 proto = resp.getheader('content-type')
284 except AttributeError:
324 except AttributeError:
285 proto = resp.headers['content-type']
325 proto = resp.headers['content-type']
286
326
287 # accept old "text/plain" and "application/hg-changegroup" for now
327 # accept old "text/plain" and "application/hg-changegroup" for now
288 if not (proto.startswith('application/mercurial-') or
328 if not (proto.startswith('application/mercurial-') or
289 proto.startswith('text/plain') or
329 proto.startswith('text/plain') or
290 proto.startswith('application/hg-changegroup')):
330 proto.startswith('application/hg-changegroup')):
291 self.ui.debug(_("Requested URL: '%s'\n") % cu)
331 self.ui.debug(_("Requested URL: '%s'\n") % cu)
292 raise hg.RepoError(_("'%s' does not appear to be an hg repository")
332 raise hg.RepoError(_("'%s' does not appear to be an hg repository")
293 % self._url)
333 % self._url)
294
334
295 if proto.startswith('application/mercurial-'):
335 if proto.startswith('application/mercurial-'):
296 try:
336 try:
297 version = proto.split('-', 1)[1]
337 version = proto.split('-', 1)[1]
298 version_info = tuple([int(n) for n in version.split('.')])
338 version_info = tuple([int(n) for n in version.split('.')])
299 except ValueError:
339 except ValueError:
300 raise hg.RepoError(_("'%s' sent a broken Content-type "
340 raise hg.RepoError(_("'%s' sent a broken Content-type "
301 "header (%s)") % (self._url, proto))
341 "header (%s)") % (self._url, proto))
302 if version_info > (0, 1):
342 if version_info > (0, 1):
303 raise hg.RepoError(_("'%s' uses newer protocol %s") %
343 raise hg.RepoError(_("'%s' uses newer protocol %s") %
304 (self._url, version))
344 (self._url, version))
305
345
306 return resp
346 return resp
307
347
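Aside (not part of the diff): a worked example of the Content-type version check in do_cmd, using an illustrative header value.

proto = 'application/mercurial-0.1'
version = proto.split('-', 1)[1]                             # '0.1'
version_info = tuple([int(n) for n in version.split('.')])   # (0, 1)
assert not version_info > (0, 1)  # accepted; e.g. 0.2 would raise RepoError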
308 def do_read(self, cmd, **args):
348 def do_read(self, cmd, **args):
309 fp = self.do_cmd(cmd, **args)
349 fp = self.do_cmd(cmd, **args)
310 try:
350 try:
311 return fp.read()
351 return fp.read()
312 finally:
352 finally:
313 # if using keepalive, allow connection to be reused
353 # if using keepalive, allow connection to be reused
314 fp.close()
354 fp.close()
315
355
316 def lookup(self, key):
356 def lookup(self, key):
317 d = self.do_cmd("lookup", key = key).read()
357 d = self.do_cmd("lookup", key = key).read()
318 success, data = d[:-1].split(' ', 1)
358 success, data = d[:-1].split(' ', 1)
319 if int(success):
359 if int(success):
320 return bin(data)
360 return bin(data)
321 raise hg.RepoError(data)
361 raise hg.RepoError(data)
322
362
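Aside (not part of the diff): the wire format lookup() assumes is a status flag, a space, the payload, then a newline; the node below is a made-up example.

d = '1 0123456789abcdef0123456789abcdef01234567\n'
success, data = d[:-1].split(' ', 1)
# int(success) == 1, so bin(data) -- the 20-byte node -- is returned;
# a leading '0' would mean the payload is an error message instead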
323 def heads(self):
363 def heads(self):
324 d = self.do_read("heads")
364 d = self.do_read("heads")
325 try:
365 try:
326 return map(bin, d[:-1].split(" "))
366 return map(bin, d[:-1].split(" "))
327 except:
367 except:
328 raise util.UnexpectedOutput(_("unexpected response:"), d)
368 raise util.UnexpectedOutput(_("unexpected response:"), d)
329
369
330 def branches(self, nodes):
370 def branches(self, nodes):
331 n = " ".join(map(hex, nodes))
371 n = " ".join(map(hex, nodes))
332 d = self.do_read("branches", nodes=n)
372 d = self.do_read("branches", nodes=n)
333 try:
373 try:
334 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
374 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
335 return br
375 return br
336 except:
376 except:
337 raise util.UnexpectedOutput(_("unexpected response:"), d)
377 raise util.UnexpectedOutput(_("unexpected response:"), d)
338
378
339 def between(self, pairs):
379 def between(self, pairs):
340 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
380 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
341 d = self.do_read("between", pairs=n)
381 d = self.do_read("between", pairs=n)
342 try:
382 try:
343 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
383 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
344 return p
384 return p
345 except:
385 except:
346 raise util.UnexpectedOutput(_("unexpected response:"), d)
386 raise util.UnexpectedOutput(_("unexpected response:"), d)
347
387
348 def changegroup(self, nodes, kind):
388 def changegroup(self, nodes, kind):
349 n = " ".join(map(hex, nodes))
389 n = " ".join(map(hex, nodes))
350 f = self.do_cmd("changegroup", roots=n)
390 f = self.do_cmd("changegroup", roots=n)
351 return util.chunkbuffer(zgenerator(f))
391 return util.chunkbuffer(zgenerator(f))
352
392
353 def changegroupsubset(self, bases, heads, source):
393 def changegroupsubset(self, bases, heads, source):
354 baselst = " ".join([hex(n) for n in bases])
394 baselst = " ".join([hex(n) for n in bases])
355 headlst = " ".join([hex(n) for n in heads])
395 headlst = " ".join([hex(n) for n in heads])
356 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
396 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
357 return util.chunkbuffer(zgenerator(f))
397 return util.chunkbuffer(zgenerator(f))
358
398
359 def unbundle(self, cg, heads, source):
399 def unbundle(self, cg, heads, source):
360 # have to stream bundle to a temp file because we do not have
400 # have to stream bundle to a temp file because we do not have
361 # http 1.1 chunked transfer.
401 # http 1.1 chunked transfer.
362
402
363 type = ""
403 type = ""
364 types = self.capable('unbundle')
404 types = self.capable('unbundle')
365 # servers older than d1b16a746db6 will send 'unbundle' as a
405 # servers older than d1b16a746db6 will send 'unbundle' as a
366 # boolean capability
406 # boolean capability
367 try:
407 try:
368 types = types.split(',')
408 types = types.split(',')
369 except AttributeError:
409 except AttributeError:
370 types = [""]
410 types = [""]
371 if types:
411 if types:
372 for x in types:
412 for x in types:
373 if x in changegroup.bundletypes:
413 if x in changegroup.bundletypes:
374 type = x
414 type = x
375 break
415 break
376
416
377 tempname = changegroup.writebundle(cg, None, type)
417 tempname = changegroup.writebundle(cg, None, type)
378 fp = httpsendfile(tempname, "rb")
418 fp = httpsendfile(tempname, "rb")
379 try:
419 try:
380 try:
420 try:
381 rfp = self.do_cmd(
421 rfp = self.do_cmd(
382 'unbundle', data=fp,
422 'unbundle', data=fp,
383 headers={'content-type': 'application/octet-stream'},
423 headers={'content-type': 'application/octet-stream'},
384 heads=' '.join(map(hex, heads)))
424 heads=' '.join(map(hex, heads)))
385 try:
425 try:
386 ret = int(rfp.readline())
426 ret = int(rfp.readline())
387 self.ui.write(rfp.read())
427 self.ui.write(rfp.read())
388 return ret
428 return ret
389 finally:
429 finally:
390 rfp.close()
430 rfp.close()
391 except socket.error, err:
431 except socket.error, err:
392 if err[0] in (errno.ECONNRESET, errno.EPIPE):
432 if err[0] in (errno.ECONNRESET, errno.EPIPE):
393 raise util.Abort(_('push failed: %s') % err[1])
433 raise util.Abort(_('push failed: %s') % err[1])
394 raise util.Abort(err[1])
434 raise util.Abort(err[1])
395 finally:
435 finally:
396 fp.close()
436 fp.close()
397 os.unlink(tempname)
437 os.unlink(tempname)
398
438
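Aside (not part of the diff): a worked example of the capability parsing in unbundle(); both the older boolean form and the newer comma-separated form are illustrative values.

for caps in (True, 'HG10GZ,HG10BZ,HG10UN'):
    try:
        types = caps.split(',')
    except AttributeError:
        types = [""]
    # -> [""] for old servers, ['HG10GZ', 'HG10BZ', 'HG10UN'] for newer ones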
399 def stream_out(self):
439 def stream_out(self):
400 return self.do_cmd('stream_out')
440 return self.do_cmd('stream_out')
401
441
402 class httpsrepository(httprepository):
442 class httpsrepository(httprepository):
403 def __init__(self, ui, path):
443 def __init__(self, ui, path):
404 if not has_https:
444 if not has_https:
405 raise util.Abort(_('Python support for SSL and HTTPS '
445 raise util.Abort(_('Python support for SSL and HTTPS '
406 'is not installed'))
446 'is not installed'))
407 httprepository.__init__(self, ui, path)
447 httprepository.__init__(self, ui, path)
408
448
409 def instance(ui, path, create):
449 def instance(ui, path, create):
410 if create:
450 if create:
411 raise util.Abort(_('cannot create new http repository'))
451 raise util.Abort(_('cannot create new http repository'))
412 if path.startswith('https:'):
452 if path.startswith('https:'):
413 return httpsrepository(ui, path)
453 return httpsrepository(ui, path)
414 return httprepository(ui, path)
454 return httprepository(ui, path)
@@ -1,7 +1,9
1 import doctest
1 import doctest
2
2
3 import mercurial.changelog
3 import mercurial.changelog
4 # test doctest from changelog
4 # test doctest from changelog
5
5
6 doctest.testmod(mercurial.changelog)
6 doctest.testmod(mercurial.changelog)
7
7
8 import mercurial.httprepo
9 doctest.testmod(mercurial.httprepo)