When pulling from a non hg repository URL (e.g. http://www.kernel.org/hg)...
Muli Ben-Yehuda
r751:0b245ede default
@@ -1,1935 +1,1948 b''
# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff")
demandload(globals(), "bisect select")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", path + ".i"),
                        os.path.join("data", path + ".d"))

    def read(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {} # parsed metadata key/value pairs
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

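# Quick reference (illustrative note, derived from the filelog code above):
# when a revision carries metadata, the stored text is framed as
#   "\1\n" + one "key: value\n" line per metadata entry + "\1\n" + file data
# read() strips that frame and readmeta() parses it into a dict.  The only
# keys written by this module are "copy" and "copyrev", set by commit()
# further below for files recorded as copies in the dirstate.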
class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        # directly generate the mdiff delta from the data collected during
        # the bisect loop below
        def gendelta(delta):
            i = 0
            result = []
            while i < len(delta):
                start = delta[i][2]
                end = delta[i][3]
                l = delta[i][4]
                if l == None:
                    l = ""
                while i < len(delta) - 1 and start <= delta[i+1][2] \
                          and end >= delta[i+1][2]:
                    if delta[i+1][3] > end:
                        end = delta[i+1][3]
                    if delta[i+1][4]:
                        l += delta[i+1][4]
                    i += 1
                result.append(struct.pack(">lll", start, end, len(l)) + l)
                i += 1
            return result

        # apply the changes collected during the bisect loop to our addlist
        def addlistdelta(addlist, delta):
            # apply the deltas to the addlist. start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(delta)
            while i > 0:
                i -= 1
                start = delta[i][0]
                end = delta[i][1]
                if delta[i][4]:
                    addlist[start:end] = [delta[i][4]]
                else:
                    del addlist[start:end]
            return addlist

        # calculate the byte offset of the start of each line in the
        # manifest
        def calcoffsets(addlist):
            offsets = [0] * (len(addlist) + 1)
            offset = 0
            i = 0
            while i < len(addlist):
                offsets[i] = offset
                offset += len(addlist[i])
                i += 1
            offsets[i] = offset
            return offsets

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            self.addlist = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                            for f in files]
            cachedelta = None
        else:
            addlist = self.listcache[1]

            # find the starting offset for each line in the add list
            offsets = calcoffsets(addlist)

            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            bs = 0

            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                bs = bisect.bisect(addlist, f, bs)
                if bs < len(addlist):
                    fn = addlist[bs][:addlist[bs].index('\0')]
                else:
                    fn = None
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = None
                start = bs
                if fn != f:
                    # item not found, insert a new one
                    end = bs
                    if w[1] == 1:
                        sys.stderr.write("failed to remove %s from manifest\n"
                                         % f)
                        sys.exit(1)
                else:
                    # item is found, replace/delete the existing line
                    end = bs + 1
                delta.append([start, end, offsets[start], offsets[end], l])

            self.addlist = addlistdelta(addlist, delta)
            if self.mapcache[0] == self.tip():
                cachedelta = "".join(gendelta(delta))
            else:
                cachedelta = None

        text = "".join(self.addlist)
        if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
            sys.stderr.write("manifest delta failure\n")
            sys.exit(1)
        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

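# Quick reference (illustrative note, derived from the manifest code above):
# a manifest revision is one line per tracked file, sorted by name:
#   "<filename>\0<40-char hex file node>[x]\n"
# where a trailing "x" marks the file as executable.  read() returns the
# name -> binary node map, readflags() the name -> exec-bit map; add()
# rebuilds the text (or patches the cached line list when a (new, removed)
# "changed" hint is supplied) and verifies the generated delta before storing.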
class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        date = date or "%d %d" % (time.time(), time.timezone)
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

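# Quick reference (illustrative note, derived from the changelog code above):
# a changelog entry is plain text of the form
#   <hex manifest node>\n<user>\n<date>\n<changed file>\n...\n\n<description>
# with the date stored as "<seconds since epoch> <timezone offset>"
# (time.time() and time.timezone by default).  extract() splits on the first
# blank line and returns (manifest, user, date, files, description).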
class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        self.map = None
        self.pl = None
        self.copies = {}
        self.ignorefunc = None

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def ignore(self, f):
        if not self.ignorefunc:
            bigpat = []
            try:
                l = file(self.wjoin(".hgignore"))
                for pat in l:
                    if pat != "\n":
                        p = util.pconvert(pat[:-1])
                        try:
                            r = re.compile(p)
                        except:
                            self.ui.warn("ignoring invalid ignore"
                                         + " regular expression '%s'\n" % p)
                        else:
                            bigpat.append(util.pconvert(pat[:-1]))
            except IOError: pass

            if bigpat:
                s = "(?:%s)" % (")|(?:".join(bigpat))
                r = re.compile(s)
                self.ignorefunc = r.search
            else:
                self.ignorefunc = util.never

        return self.ignorefunc(f)

    def __del__(self):
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def parents(self):
        if not self.pl:
            self.read()
        return self.pl

    def markdirty(self):
        if not self.dirty:
            self.dirty = 1

    def setparents(self, p1, p2 = nullid):
        self.markdirty()
        self.pl = p1, p2

    def state(self, key):
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        if self.map is not None: return self.map

        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if not st: return
        except: return

        self.pl = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
            self.map[f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.read()
        self.markdirty()
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                s = os.stat(os.path.join(self.root, f))
                self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)

    def forget(self, files):
        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)
                pass

    def clear(self):
        self.map = {}
        self.markdirty()

    def write(self):
        st = self.opener("dirstate", "w")
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def walk(self, files = None, match = util.always):
        self.read()
        dc = self.map.copy()
        # walk all files by default
        if not files: files = [self.root]
        def traverse():
            for f in util.unique(files):
                f = os.path.join(self.root, f)
                if os.path.isdir(f):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        if d == '.hg':
                            subdirs[:] = []
                            continue
                        for sd in subdirs:
                            ds = os.path.join(d, sd +'/')
                            if self.ignore(ds) or not match(ds):
                                subdirs.remove(sd)
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield 'f', fn
                else:
                    yield 'f', f[len(self.root) + 1:]

            for k in dc.keys():
                yield 'm', k

        # yield only files that match: all in dirstate, others only if
        # not in .hgignore

        for src, fn in util.unique(traverse()):
            if fn in dc:
                del dc[fn]
            elif self.ignore(fn):
                continue
            if match(fn):
                yield src, fn

    def changes(self, files = None, match = util.always):
        self.read()
        dc = self.map.copy()
        lookup, changed, added, unknown = [], [], [], []

        for src, fn in self.walk(files, match):
            try: s = os.stat(os.path.join(self.root, fn))
            except: continue

            if fn in dc:
                c = dc[fn]
                del dc[fn]

                if c[0] == 'm':
                    changed.append(fn)
                elif c[0] == 'a':
                    added.append(fn)
                elif c[0] == 'r':
                    unknown.append(fn)
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                    changed.append(fn)
                elif c[1] != s.st_mode or c[3] != s.st_mtime:
                    lookup.append(fn)
            else:
                if match(fn): unknown.append(fn)

        return (lookup, changed, added, filter(match, dc.keys()), unknown)

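# Quick reference (illustrative note, derived from the dirstate code above):
# the .hg/dirstate file starts with the two 20-byte parent nodes, followed by
# one record per tracked file:
#   struct ">cllll" -> (state, mode, size, mtime, name length), 17 bytes,
#   then the file name; for a copy the name field is "<dest>\0<source>".
# States are those listed in update(): n(ormal), m(erge), r(emove), a(dd).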
# used to avoid circular references so destructors work
def opener(base):
    p = base
    def o(path, mode="r"):
        if p.startswith("http://"):
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

class RepoError(Exception): pass

class localrepository:
    def __init__(self, ui, path=None, create=0):
        self.remote = 0
        if path and path.startswith("http://"):
            self.remote = 1
            self.path = path
        else:
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = path
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            try:
                self.ui.readconfig(self.opener("hgrc"))
            except IOError: pass

    def hook(self, name, **args):
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            r = os.system(s)

            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True

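    # Quick reference (illustrative note, derived from hook() above): hooks
    # come from the [hooks] section of hgrc and run through os.system(), with
    # every keyword argument exported as an uppercased environment variable;
    # for example the "commit" hook sees NODE set to the new changeset hash,
    # while "precommit" gets no extra variables.  A non-zero exit status makes
    # the caller abort.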
    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.revision(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                pass

            try:
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r,t,n))
        l.sort()
        return [(t,n) for r,t,n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t,n in self.tags().items():
                self.nodetagscache.setdefault(n,[]).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                raise RepoError("unknown revision '%s'" % key)

    def dev(self):
        if self.remote: return -1
        return os.stat(self.path).st_dev

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/': f = f[1:]
        return filelog(self.opener, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self.root: return ''
        return cwd[len(self.root) + 1:]

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def transaction(self):
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("undo.dirstate", "w").write(ds)

        return transaction.transaction(self.ui.warn,
                                       self.opener, self.join("journal"),
                                       self.join("undo"))

    def recover(self):
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

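    # Quick reference (illustrative note, derived from the methods above):
    # transaction() snapshots the current dirstate to .hg/undo.dirstate and
    # opens a transaction journaled in .hg/journal, passing .hg/undo as the
    # place the journal is preserved once the transaction closes.  recover()
    # replays .hg/journal after an interrupted transaction, while undo()
    # rolls back .hg/undo and restores the saved undo.dirstate.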
    def lock(self, wait = 1):
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm
                mm[f] = r.add(t, {}, tr, linkrev,
                              m1.get(f, nullid), m2.get(f, nullid))
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

    def commit(self, files = None, text = "", user = None, date = None):
        commit = []
        remove = []
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes()
            commit = c + a
            remove = d

        if not commit and not remove:
            self.ui.status("nothing changed\n")
            return

        if not self.hook("precommit"):
            return 1

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)
            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in new])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return 1
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)

        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return 1

    def walk(self, node = None, files = [], match = util.always):
        if node:
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                yield 'm', fn
        else:
            for src, fn in self.dirstate.walk(files, match):
                yield src, fn

    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        mf2, u = None, []

        def fcmp(fn, mf):
            t1 = self.wfile(fn).read()
            t2 = self.file(fn).revision(mf[fn])
            return cmp(t1, t2)

        def mfmatches(node):
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)

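    # Quick reference (illustrative note, derived from changes() above): the
    # return value is the tuple (changed, added, deleted, unknown), each list
    # sorted.  When comparing the working directory to its parent, the
    # "lookup" entries from dirstate.changes() (files whose stat data alone
    # cannot decide) are resolved with a full content comparison via fcmp().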
    def add(self, list):
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn("%s does not exist!\n" % f)
            elif not os.path.isfile(p):
                self.ui.warn("%s not added: only files supported currently\n" % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not os.path.exists(dest):
            self.ui.warn("%s does not exist!\n" % dest)
        elif not os.path.isfile(dest):
            self.ui.warn("copy failed: %s is not a file\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        return self.changelog.heads()

    def branches(self, nodes):
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

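    # Quick reference (illustrative note, derived from between() above): for
    # each (top, bottom) pair the walk follows first parents from top towards
    # bottom and records the nodes at exponentially growing distances
    # (1, 2, 4, 8, ...).  findincoming() below feeds these samples back into
    # remote.between() to narrow down the first changeset it is missing.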
1006 def newer(self, nodes):
1006 def newer(self, nodes):
1007 m = {}
1007 m = {}
1008 nl = []
1008 nl = []
1009 pm = {}
1009 pm = {}
1010 cl = self.changelog
1010 cl = self.changelog
1011 t = l = cl.count()
1011 t = l = cl.count()
1012
1012
1013 # find the lowest numbered node
1013 # find the lowest numbered node
1014 for n in nodes:
1014 for n in nodes:
1015 l = min(l, cl.rev(n))
1015 l = min(l, cl.rev(n))
1016 m[n] = 1
1016 m[n] = 1
1017
1017
1018 for i in xrange(l, t):
1018 for i in xrange(l, t):
1019 n = cl.node(i)
1019 n = cl.node(i)
1020 if n in m: # explicitly listed
1020 if n in m: # explicitly listed
1021 pm[n] = 1
1021 pm[n] = 1
1022 nl.append(n)
1022 nl.append(n)
1023 continue
1023 continue
1024 for p in cl.parents(n):
1024 for p in cl.parents(n):
1025 if p in pm: # parent listed
1025 if p in pm: # parent listed
1026 pm[n] = 1
1026 pm[n] = 1
1027 nl.append(n)
1027 nl.append(n)
1028 break
1028 break
1029
1029
1030 return nl
1030 return nl
1031
1031
1032 def findincoming(self, remote, base={}):
1032 def findincoming(self, remote, base={}):
1033 m = self.changelog.nodemap
1033 m = self.changelog.nodemap
1034 search = []
1034 search = []
1035 fetch = []
1035 fetch = []
1036 seen = {}
1036 seen = {}
1037 seenbranch = {}
1037 seenbranch = {}
1038
1038
1039 # assume we're closer to the tip than the root
1039 # assume we're closer to the tip than the root
1040 # and start by examining the heads
1040 # and start by examining the heads
1041 self.ui.status("searching for changes\n")
1041 self.ui.status("searching for changes\n")
1042 heads = remote.heads()
1042 heads = remote.heads()
1043 unknown = []
1043 unknown = []
1044 for h in heads:
1044 for h in heads:
1045 if h not in m:
1045 if h not in m:
1046 unknown.append(h)
1046 unknown.append(h)
1047 else:
1047 else:
1048 base[h] = 1
1048 base[h] = 1
1049
1049
1050 if not unknown:
1050 if not unknown:
1051 return None
1051 return None
1052
1052
1053 rep = {}
1053 rep = {}
1054 reqcnt = 0
1054 reqcnt = 0
1055
1055
1056 # search through remote branches
1056 # search through remote branches
1057 # a 'branch' here is a linear segment of history, with four parts:
1057 # a 'branch' here is a linear segment of history, with four parts:
1058 # head, root, first parent, second parent
1058 # head, root, first parent, second parent
1059 # (a branch always has two parents (or none) by definition)
1059 # (a branch always has two parents (or none) by definition)
1060 unknown = remote.branches(unknown)
1060 unknown = remote.branches(unknown)
1061 while unknown:
1061 while unknown:
1062 r = []
1062 r = []
1063 while unknown:
1063 while unknown:
1064 n = unknown.pop(0)
1064 n = unknown.pop(0)
1065 if n[0] in seen:
1065 if n[0] in seen:
1066 continue
1066 continue
1067
1067
1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1069 if n[0] == nullid:
1069 if n[0] == nullid:
1070 break
1070 break
1071 if n in seenbranch:
1071 if n in seenbranch:
1072 self.ui.debug("branch already found\n")
1072 self.ui.debug("branch already found\n")
1073 continue
1073 continue
1074 if n[1] and n[1] in m: # do we know the base?
1074 if n[1] and n[1] in m: # do we know the base?
1075 self.ui.debug("found incomplete branch %s:%s\n"
1075 self.ui.debug("found incomplete branch %s:%s\n"
1076 % (short(n[0]), short(n[1])))
1076 % (short(n[0]), short(n[1])))
1077 search.append(n) # schedule branch range for scanning
1077 search.append(n) # schedule branch range for scanning
1078 seenbranch[n] = 1
1078 seenbranch[n] = 1
1079 else:
1079 else:
1080 if n[1] not in seen and n[1] not in fetch:
1080 if n[1] not in seen and n[1] not in fetch:
1081 if n[2] in m and n[3] in m:
1081 if n[2] in m and n[3] in m:
1082 self.ui.debug("found new changeset %s\n" %
1082 self.ui.debug("found new changeset %s\n" %
1083 short(n[1]))
1083 short(n[1]))
1084 fetch.append(n[1]) # earliest unknown
1084 fetch.append(n[1]) # earliest unknown
1085 base[n[2]] = 1 # latest known
1085 base[n[2]] = 1 # latest known
1086 continue
1086 continue
1087
1087
1088 for a in n[2:4]:
1088 for a in n[2:4]:
1089 if a not in rep:
1089 if a not in rep:
1090 r.append(a)
1090 r.append(a)
1091 rep[a] = 1
1091 rep[a] = 1
1092
1092
1093 seen[n[0]] = 1
1093 seen[n[0]] = 1
1094
1094
1095 if r:
1095 if r:
1096 reqcnt += 1
1096 reqcnt += 1
1097 self.ui.debug("request %d: %s\n" %
1097 self.ui.debug("request %d: %s\n" %
1098 (reqcnt, " ".join(map(short, r))))
1098 (reqcnt, " ".join(map(short, r))))
1099 for p in range(0, len(r), 10):
1099 for p in range(0, len(r), 10):
1100 for b in remote.branches(r[p:p+10]):
1100 for b in remote.branches(r[p:p+10]):
1101 self.ui.debug("received %s:%s\n" %
1101 self.ui.debug("received %s:%s\n" %
1102 (short(b[0]), short(b[1])))
1102 (short(b[0]), short(b[1])))
1103 if b[0] not in m and b[0] not in seen:
1103 if b[0] not in m and b[0] not in seen:
1104 unknown.append(b)
1104 unknown.append(b)
1105
1105
1106 # do binary search on the branches we found
1106 # do binary search on the branches we found
1107 while search:
1107 while search:
1108 n = search.pop(0)
1108 n = search.pop(0)
1109 reqcnt += 1
1109 reqcnt += 1
1110 l = remote.between([(n[0], n[1])])[0]
1110 l = remote.between([(n[0], n[1])])[0]
1111 l.append(n[1])
1111 l.append(n[1])
1112 p = n[0]
1112 p = n[0]
1113 f = 1
1113 f = 1
1114 for i in l:
1114 for i in l:
1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1116 if i in m:
1116 if i in m:
1117 if f <= 2:
1117 if f <= 2:
1118 self.ui.debug("found new branch changeset %s\n" %
1118 self.ui.debug("found new branch changeset %s\n" %
1119 short(p))
1119 short(p))
1120 fetch.append(p)
1120 fetch.append(p)
1121 base[i] = 1
1121 base[i] = 1
1122 else:
1122 else:
1123 self.ui.debug("narrowed branch search to %s:%s\n"
1123 self.ui.debug("narrowed branch search to %s:%s\n"
1124 % (short(p), short(i)))
1124 % (short(p), short(i)))
1125 search.append((p, i))
1125 search.append((p, i))
1126 break
1126 break
1127 p, f = i, f * 2
1127 p, f = i, f * 2
1128
1128
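# editorial note on the loop above: remote.between([(head, root)]) samples the
# segment at exponentially growing distances from the head (hence f doubling
# as 1, 2, 4, ...). The first sample already known locally brackets the
# known/unknown boundary inside (p, i); that narrower pair is pushed back onto
# 'search' until the gap is small enough (f <= 2) to name the earliest unknown
# changeset directly.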
1129 # sanity check our fetch list
1129 # sanity check our fetch list
1130 for f in fetch:
1130 for f in fetch:
1131 if f in m:
1131 if f in m:
1132 raise RepoError("already have changeset " + short(f))
1132 raise RepoError("already have changeset " + short(f))
1133
1133
1134 if base.keys() == [nullid]:
1134 if base.keys() == [nullid]:
1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1136
1136
1137 self.ui.note("adding new changesets starting at " +
1137 self.ui.note("adding new changesets starting at " +
1138 " ".join([short(f) for f in fetch]) + "\n")
1138 " ".join([short(f) for f in fetch]) + "\n")
1139
1139
1140 self.ui.debug("%d total queries\n" % reqcnt)
1140 self.ui.debug("%d total queries\n" % reqcnt)
1141
1141
1142 return fetch
1142 return fetch
1143
1143
1144 def findoutgoing(self, remote):
1144 def findoutgoing(self, remote):
1145 base = {}
1145 base = {}
1146 self.findincoming(remote, base)
1146 self.findincoming(remote, base)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1148
1148
1149 # prune everything remote has from the tree
1149 # prune everything remote has from the tree
1150 del remain[nullid]
1150 del remain[nullid]
1151 remove = base.keys()
1151 remove = base.keys()
1152 while remove:
1152 while remove:
1153 n = remove.pop(0)
1153 n = remove.pop(0)
1154 if n in remain:
1154 if n in remain:
1155 del remain[n]
1155 del remain[n]
1156 for p in self.changelog.parents(n):
1156 for p in self.changelog.parents(n):
1157 remove.append(p)
1157 remove.append(p)
1158
1158
1159 # find every node whose parents have been pruned
1159 # find every node whose parents have been pruned
1160 subset = []
1160 subset = []
1161 for n in remain:
1161 for n in remain:
1162 p1, p2 = self.changelog.parents(n)
1162 p1, p2 = self.changelog.parents(n)
1163 if p1 not in remain and p2 not in remain:
1163 if p1 not in remain and p2 not in remain:
1164 subset.append(n)
1164 subset.append(n)
1165
1165
1166 # this is the set of all roots we have to push
1166 # this is the set of all roots we have to push
1167 return subset
1167 return subset
1168
1168
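# editorial worked example: given local history A -> B -> C -> D where the
# remote already has A and B, findincoming() leaves base = {B}; the pruning
# loop then drops B and A from 'remain', leaving {C, D}, and only C has both
# parents pruned, so subset = [C] -- the single root push() needs to send.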
1169 def pull(self, remote):
1169 def pull(self, remote):
1170 lock = self.lock()
1170 lock = self.lock()
1171
1171
1172 # if we have an empty repo, fetch everything
1172 # if we have an empty repo, fetch everything
1173 if self.changelog.tip() == nullid:
1173 if self.changelog.tip() == nullid:
1174 self.ui.status("requesting all changes\n")
1174 self.ui.status("requesting all changes\n")
1175 fetch = [nullid]
1175 fetch = [nullid]
1176 else:
1176 else:
1177 fetch = self.findincoming(remote)
1177 fetch = self.findincoming(remote)
1178
1178
1179 if not fetch:
1179 if not fetch:
1180 self.ui.status("no changes found\n")
1180 self.ui.status("no changes found\n")
1181 return 1
1181 return 1
1182
1182
1183 cg = remote.changegroup(fetch)
1183 cg = remote.changegroup(fetch)
1184 return self.addchangegroup(cg)
1184 return self.addchangegroup(cg)
1185
1185
1186 def push(self, remote):
1186 def push(self, remote):
1187 lock = remote.lock()
1187 lock = remote.lock()
1188 update = self.findoutgoing(remote)
1188 update = self.findoutgoing(remote)
1189 if not update:
1189 if not update:
1190 self.ui.status("no changes found\n")
1190 self.ui.status("no changes found\n")
1191 return 1
1191 return 1
1192
1192
1193 cg = self.changegroup(update)
1193 cg = self.changegroup(update)
1194 return remote.addchangegroup(cg)
1194 return remote.addchangegroup(cg)
1195
1195
1196 def changegroup(self, basenodes):
1196 def changegroup(self, basenodes):
1197 class genread:
1197 class genread:
1198 def __init__(self, generator):
1198 def __init__(self, generator):
1199 self.g = generator
1199 self.g = generator
1200 self.buf = ""
1200 self.buf = ""
1201 def read(self, l):
1201 def read(self, l):
1202 while l > len(self.buf):
1202 while l > len(self.buf):
1203 try:
1203 try:
1204 self.buf += self.g.next()
1204 self.buf += self.g.next()
1205 except StopIteration:
1205 except StopIteration:
1206 break
1206 break
1207 d, self.buf = self.buf[:l], self.buf[l:]
1207 d, self.buf = self.buf[:l], self.buf[l:]
1208 return d
1208 return d
1209
1209
1210 def gengroup():
1210 def gengroup():
1211 nodes = self.newer(basenodes)
1211 nodes = self.newer(basenodes)
1212
1212
1213 # construct the link map
1213 # construct the link map
1214 linkmap = {}
1214 linkmap = {}
1215 for n in nodes:
1215 for n in nodes:
1216 linkmap[self.changelog.rev(n)] = n
1216 linkmap[self.changelog.rev(n)] = n
1217
1217
1218 # construct a list of all changed files
1218 # construct a list of all changed files
1219 changed = {}
1219 changed = {}
1220 for n in nodes:
1220 for n in nodes:
1221 c = self.changelog.read(n)
1221 c = self.changelog.read(n)
1222 for f in c[3]:
1222 for f in c[3]:
1223 changed[f] = 1
1223 changed[f] = 1
1224 changed = changed.keys()
1224 changed = changed.keys()
1225 changed.sort()
1225 changed.sort()
1226
1226
1227 # the changegroup is changesets + manifests + all file revs
1227 # the changegroup is changesets + manifests + all file revs
1228 revs = [ self.changelog.rev(n) for n in nodes ]
1228 revs = [ self.changelog.rev(n) for n in nodes ]
1229
1229
1230 for y in self.changelog.group(linkmap): yield y
1230 for y in self.changelog.group(linkmap): yield y
1231 for y in self.manifest.group(linkmap): yield y
1231 for y in self.manifest.group(linkmap): yield y
1232 for f in changed:
1232 for f in changed:
1233 yield struct.pack(">l", len(f) + 4) + f
1233 yield struct.pack(">l", len(f) + 4) + f
1234 g = self.file(f).group(linkmap)
1234 g = self.file(f).group(linkmap)
1235 for y in g:
1235 for y in g:
1236 yield y
1236 yield y
1237
1237
1238 yield struct.pack(">l", 0)
1238 yield struct.pack(">l", 0)
1239
1239
1240 return genread(gengroup())
1240 return genread(gengroup())
1241
1241
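# editorial sketch (assumed helper names, not part of this file): the stream
# built above is a sequence of chunks, each prefixed with a 4-byte big-endian
# length that counts the prefix itself, and a zero/short length terminates a
# group -- mirroring getchunk() in addchangegroup() below:
#
#   import struct
#
#   def frame(payload):
#       # length field includes its own 4 bytes
#       return struct.pack(">l", len(payload) + 4) + payload
#
#   def unframe(read):
#       d = read(4)
#       if not d: return ""
#       l = struct.unpack(">l", d)[0]
#       if l <= 4: return ""          # end-of-group marker
#       return read(l - 4)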
1242 def addchangegroup(self, source):
1242 def addchangegroup(self, source):
1243
1243
1244 def getchunk():
1244 def getchunk():
1245 d = source.read(4)
1245 d = source.read(4)
1246 if not d: return ""
1246 if not d: return ""
1247 l = struct.unpack(">l", d)[0]
1247 l = struct.unpack(">l", d)[0]
1248 if l <= 4: return ""
1248 if l <= 4: return ""
1249 return source.read(l - 4)
1249 return source.read(l - 4)
1250
1250
1251 def getgroup():
1251 def getgroup():
1252 while 1:
1252 while 1:
1253 c = getchunk()
1253 c = getchunk()
1254 if not c: break
1254 if not c: break
1255 yield c
1255 yield c
1256
1256
1257 def csmap(x):
1257 def csmap(x):
1258 self.ui.debug("add changeset %s\n" % short(x))
1258 self.ui.debug("add changeset %s\n" % short(x))
1259 return self.changelog.count()
1259 return self.changelog.count()
1260
1260
1261 def revmap(x):
1261 def revmap(x):
1262 return self.changelog.rev(x)
1262 return self.changelog.rev(x)
1263
1263
1264 if not source: return
1264 if not source: return
1265 changesets = files = revisions = 0
1265 changesets = files = revisions = 0
1266
1266
1267 tr = self.transaction()
1267 tr = self.transaction()
1268
1268
1269 # pull off the changeset group
1269 # pull off the changeset group
1270 self.ui.status("adding changesets\n")
1270 self.ui.status("adding changesets\n")
1271 co = self.changelog.tip()
1271 co = self.changelog.tip()
1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1274
1274
1275 # pull off the manifest group
1275 # pull off the manifest group
1276 self.ui.status("adding manifests\n")
1276 self.ui.status("adding manifests\n")
1277 mm = self.manifest.tip()
1277 mm = self.manifest.tip()
1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1279
1279
1280 # process the files
1280 # process the files
1281 self.ui.status("adding file revisions\n")
1281 self.ui.status("adding file revisions\n")
1282 while 1:
1282 while 1:
1283 f = getchunk()
1283 f = getchunk()
1284 if not f: break
1284 if not f: break
1285 self.ui.debug("adding %s revisions\n" % f)
1285 self.ui.debug("adding %s revisions\n" % f)
1286 fl = self.file(f)
1286 fl = self.file(f)
1287 o = fl.count()
1287 o = fl.count()
1288 n = fl.addgroup(getgroup(), revmap, tr)
1288 n = fl.addgroup(getgroup(), revmap, tr)
1289 revisions += fl.count() - o
1289 revisions += fl.count() - o
1290 files += 1
1290 files += 1
1291
1291
1292 self.ui.status(("modified %d files, added %d changesets" +
1292 self.ui.status(("modified %d files, added %d changesets" +
1293 " and %d new revisions\n")
1293 " and %d new revisions\n")
1294 % (files, changesets, revisions))
1294 % (files, changesets, revisions))
1295
1295
1296 tr.close()
1296 tr.close()
1297 return
1297 return
1298
1298
1299 def update(self, node, allow=False, force=False, choose=None,
1299 def update(self, node, allow=False, force=False, choose=None,
1300 moddirstate=True):
1300 moddirstate=True):
1301 pl = self.dirstate.parents()
1301 pl = self.dirstate.parents()
1302 if not force and pl[1] != nullid:
1302 if not force and pl[1] != nullid:
1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1304 return 1
1304 return 1
1305
1305
1306 p1, p2 = pl[0], node
1306 p1, p2 = pl[0], node
1307 pa = self.changelog.ancestor(p1, p2)
1307 pa = self.changelog.ancestor(p1, p2)
1308 m1n = self.changelog.read(p1)[0]
1308 m1n = self.changelog.read(p1)[0]
1309 m2n = self.changelog.read(p2)[0]
1309 m2n = self.changelog.read(p2)[0]
1310 man = self.manifest.ancestor(m1n, m2n)
1310 man = self.manifest.ancestor(m1n, m2n)
1311 m1 = self.manifest.read(m1n)
1311 m1 = self.manifest.read(m1n)
1312 mf1 = self.manifest.readflags(m1n)
1312 mf1 = self.manifest.readflags(m1n)
1313 m2 = self.manifest.read(m2n)
1313 m2 = self.manifest.read(m2n)
1314 mf2 = self.manifest.readflags(m2n)
1314 mf2 = self.manifest.readflags(m2n)
1315 ma = self.manifest.read(man)
1315 ma = self.manifest.read(man)
1316 mfa = self.manifest.readflags(man)
1316 mfa = self.manifest.readflags(man)
1317
1317
1318 (c, a, d, u) = self.changes()
1318 (c, a, d, u) = self.changes()
1319
1319
1320 # is this a jump, or a merge? i.e. is there a linear path
1320 # is this a jump, or a merge? i.e. is there a linear path
1321 # from p1 to p2?
1321 # from p1 to p2?
1322 linear_path = (pa == p1 or pa == p2)
1322 linear_path = (pa == p1 or pa == p2)
1323
1323
1324 # resolve the manifest to determine which files
1324 # resolve the manifest to determine which files
1325 # we care about merging
1325 # we care about merging
1326 self.ui.note("resolving manifests\n")
1326 self.ui.note("resolving manifests\n")
1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1328 (force, allow, moddirstate, linear_path))
1328 (force, allow, moddirstate, linear_path))
1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1330 (short(man), short(m1n), short(m2n)))
1330 (short(man), short(m1n), short(m2n)))
1331
1331
1332 merge = {}
1332 merge = {}
1333 get = {}
1333 get = {}
1334 remove = []
1334 remove = []
1335 mark = {}
1335 mark = {}
1336
1336
1337 # construct a working dir manifest
1337 # construct a working dir manifest
1338 mw = m1.copy()
1338 mw = m1.copy()
1339 mfw = mf1.copy()
1339 mfw = mf1.copy()
1340 umap = dict.fromkeys(u)
1340 umap = dict.fromkeys(u)
1341
1341
1342 for f in a + c + u:
1342 for f in a + c + u:
1343 mw[f] = ""
1343 mw[f] = ""
1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1345
1345
1346 for f in d:
1346 for f in d:
1347 if f in mw: del mw[f]
1347 if f in mw: del mw[f]
1348
1348
1349 # If we're jumping between revisions (as opposed to merging),
1349 # If we're jumping between revisions (as opposed to merging),
1350 # and if neither the working directory nor the target rev has
1350 # and if neither the working directory nor the target rev has
1351 # the file, then we need to remove it from the dirstate, to
1351 # the file, then we need to remove it from the dirstate, to
1352 # prevent the dirstate from listing the file when it is no
1352 # prevent the dirstate from listing the file when it is no
1353 # longer in the manifest.
1353 # longer in the manifest.
1354 if moddirstate and linear_path and f not in m2:
1354 if moddirstate and linear_path and f not in m2:
1355 self.dirstate.forget((f,))
1355 self.dirstate.forget((f,))
1356
1356
1357 # Compare manifests
1357 # Compare manifests
1358 for f, n in mw.iteritems():
1358 for f, n in mw.iteritems():
1359 if choose and not choose(f): continue
1359 if choose and not choose(f): continue
1360 if f in m2:
1360 if f in m2:
1361 s = 0
1361 s = 0
1362
1362
1363 # is the wfile new since m1, and match m2?
1363 # is the wfile new since m1, and match m2?
1364 if f not in m1:
1364 if f not in m1:
1365 t1 = self.wfile(f).read()
1365 t1 = self.wfile(f).read()
1366 t2 = self.file(f).revision(m2[f])
1366 t2 = self.file(f).revision(m2[f])
1367 if cmp(t1, t2) == 0:
1367 if cmp(t1, t2) == 0:
1368 mark[f] = 1
1368 mark[f] = 1
1369 n = m2[f]
1369 n = m2[f]
1370 del t1, t2
1370 del t1, t2
1371
1371
1372 # are files different?
1372 # are files different?
1373 if n != m2[f]:
1373 if n != m2[f]:
1374 a = ma.get(f, nullid)
1374 a = ma.get(f, nullid)
1375 # are both different from the ancestor?
1375 # are both different from the ancestor?
1376 if n != a and m2[f] != a:
1376 if n != a and m2[f] != a:
1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1378 # merge executable bits
1378 # merge executable bits
1379 # "if we changed or they changed, change in merge"
1379 # "if we changed or they changed, change in merge"
1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1381 mode = ((a^b) | (a^c)) ^ a
1381 mode = ((a^b) | (a^c)) ^ a
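# editorial worked example for the bit-merge above: with ancestor bit a=0,
# our bit b=1 (we turned exec on) and their bit c=0 (they left it alone),
# ((0^1)|(0^0))^0 = 1, so our change wins; if both sides still equal the
# ancestor, ((a^a)|(a^a))^a = a and nothing changes.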
1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1383 s = 1
1383 s = 1
1384 # are we clobbering?
1384 # are we clobbering?
1385 # is remote's version newer?
1385 # is remote's version newer?
1386 # or are we going back in time?
1386 # or are we going back in time?
1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1388 self.ui.debug(" remote %s is newer, get\n" % f)
1388 self.ui.debug(" remote %s is newer, get\n" % f)
1389 get[f] = m2[f]
1389 get[f] = m2[f]
1390 s = 1
1390 s = 1
1391 else:
1391 else:
1392 mark[f] = 1
1392 mark[f] = 1
1393 elif f in umap:
1393 elif f in umap:
1394 # this unknown file is the same as the checkout
1394 # this unknown file is the same as the checkout
1395 get[f] = m2[f]
1395 get[f] = m2[f]
1396
1396
1397 if not s and mfw[f] != mf2[f]:
1397 if not s and mfw[f] != mf2[f]:
1398 if force:
1398 if force:
1399 self.ui.debug(" updating permissions for %s\n" % f)
1399 self.ui.debug(" updating permissions for %s\n" % f)
1400 util.set_exec(self.wjoin(f), mf2[f])
1400 util.set_exec(self.wjoin(f), mf2[f])
1401 else:
1401 else:
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1403 mode = ((a^b) | (a^c)) ^ a
1403 mode = ((a^b) | (a^c)) ^ a
1404 if mode != b:
1404 if mode != b:
1405 self.ui.debug(" updating permissions for %s\n" % f)
1405 self.ui.debug(" updating permissions for %s\n" % f)
1406 util.set_exec(self.wjoin(f), mode)
1406 util.set_exec(self.wjoin(f), mode)
1407 mark[f] = 1
1407 mark[f] = 1
1408 del m2[f]
1408 del m2[f]
1409 elif f in ma:
1409 elif f in ma:
1410 if n != ma[f]:
1410 if n != ma[f]:
1411 r = "d"
1411 r = "d"
1412 if not force and (linear_path or allow):
1412 if not force and (linear_path or allow):
1413 r = self.ui.prompt(
1413 r = self.ui.prompt(
1414 (" local changed %s which remote deleted\n" % f) +
1414 (" local changed %s which remote deleted\n" % f) +
1415 "(k)eep or (d)elete?", "[kd]", "k")
1415 "(k)eep or (d)elete?", "[kd]", "k")
1416 if r == "d":
1416 if r == "d":
1417 remove.append(f)
1417 remove.append(f)
1418 else:
1418 else:
1419 self.ui.debug("other deleted %s\n" % f)
1419 self.ui.debug("other deleted %s\n" % f)
1420 remove.append(f) # other deleted it
1420 remove.append(f) # other deleted it
1421 else:
1421 else:
1422 if n == m1.get(f, nullid): # same as parent
1422 if n == m1.get(f, nullid): # same as parent
1423 if p2 == pa: # going backwards?
1423 if p2 == pa: # going backwards?
1424 self.ui.debug("remote deleted %s\n" % f)
1424 self.ui.debug("remote deleted %s\n" % f)
1425 remove.append(f)
1425 remove.append(f)
1426 else:
1426 else:
1427 self.ui.debug("local created %s, keeping\n" % f)
1427 self.ui.debug("local created %s, keeping\n" % f)
1428 else:
1428 else:
1429 self.ui.debug("working dir created %s, keeping\n" % f)
1429 self.ui.debug("working dir created %s, keeping\n" % f)
1430
1430
1431 for f, n in m2.iteritems():
1431 for f, n in m2.iteritems():
1432 if choose and not choose(f): continue
1432 if choose and not choose(f): continue
1433 if f[0] == "/": continue
1433 if f[0] == "/": continue
1434 if f in ma and n != ma[f]:
1434 if f in ma and n != ma[f]:
1435 r = "k"
1435 r = "k"
1436 if not force and (linear_path or allow):
1436 if not force and (linear_path or allow):
1437 r = self.ui.prompt(
1437 r = self.ui.prompt(
1438 ("remote changed %s which local deleted\n" % f) +
1438 ("remote changed %s which local deleted\n" % f) +
1439 "(k)eep or (d)elete?", "[kd]", "k")
1439 "(k)eep or (d)elete?", "[kd]", "k")
1440 if r == "k": get[f] = n
1440 if r == "k": get[f] = n
1441 elif f not in ma:
1441 elif f not in ma:
1442 self.ui.debug("remote created %s\n" % f)
1442 self.ui.debug("remote created %s\n" % f)
1443 get[f] = n
1443 get[f] = n
1444 else:
1444 else:
1445 if force or p2 == pa: # going backwards?
1445 if force or p2 == pa: # going backwards?
1446 self.ui.debug("local deleted %s, recreating\n" % f)
1446 self.ui.debug("local deleted %s, recreating\n" % f)
1447 get[f] = n
1447 get[f] = n
1448 else:
1448 else:
1449 self.ui.debug("local deleted %s\n" % f)
1449 self.ui.debug("local deleted %s\n" % f)
1450
1450
1451 del mw, m1, m2, ma
1451 del mw, m1, m2, ma
1452
1452
1453 if force:
1453 if force:
1454 for f in merge:
1454 for f in merge:
1455 get[f] = merge[f][1]
1455 get[f] = merge[f][1]
1456 merge = {}
1456 merge = {}
1457
1457
1458 if linear_path or force:
1458 if linear_path or force:
1459 # we don't need to do any magic, just jump to the new rev
1459 # we don't need to do any magic, just jump to the new rev
1460 mode = 'n'
1460 mode = 'n'
1461 p1, p2 = p2, nullid
1461 p1, p2 = p2, nullid
1462 else:
1462 else:
1463 if not allow:
1463 if not allow:
1464 self.ui.status("this update spans a branch" +
1464 self.ui.status("this update spans a branch" +
1465 " affecting the following files:\n")
1465 " affecting the following files:\n")
1466 fl = merge.keys() + get.keys()
1466 fl = merge.keys() + get.keys()
1467 fl.sort()
1467 fl.sort()
1468 for f in fl:
1468 for f in fl:
1469 cf = ""
1469 cf = ""
1470 if f in merge: cf = " (resolve)"
1470 if f in merge: cf = " (resolve)"
1471 self.ui.status(" %s%s\n" % (f, cf))
1471 self.ui.status(" %s%s\n" % (f, cf))
1472 self.ui.warn("aborting update spanning branches!\n")
1472 self.ui.warn("aborting update spanning branches!\n")
1473 self.ui.status("(use update -m to perform a branch merge)\n")
1473 self.ui.status("(use update -m to perform a branch merge)\n")
1474 return 1
1474 return 1
1475 # we have to remember what files we needed to get/change
1475 # we have to remember what files we needed to get/change
1476 # because any file that's different from either one of its
1476 # because any file that's different from either one of its
1477 # parents must be in the changeset
1477 # parents must be in the changeset
1478 mode = 'm'
1478 mode = 'm'
1479 if moddirstate:
1479 if moddirstate:
1480 self.dirstate.update(mark.keys(), "m")
1480 self.dirstate.update(mark.keys(), "m")
1481
1481
1482 if moddirstate:
1482 if moddirstate:
1483 self.dirstate.setparents(p1, p2)
1483 self.dirstate.setparents(p1, p2)
1484
1484
1485 # get the files we don't need to change
1485 # get the files we don't need to change
1486 files = get.keys()
1486 files = get.keys()
1487 files.sort()
1487 files.sort()
1488 for f in files:
1488 for f in files:
1489 if f[0] == "/": continue
1489 if f[0] == "/": continue
1490 self.ui.note("getting %s\n" % f)
1490 self.ui.note("getting %s\n" % f)
1491 t = self.file(f).read(get[f])
1491 t = self.file(f).read(get[f])
1492 try:
1492 try:
1493 self.wfile(f, "w").write(t)
1493 self.wfile(f, "w").write(t)
1494 except IOError:
1494 except IOError:
1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1496 self.wfile(f, "w").write(t)
1496 self.wfile(f, "w").write(t)
1497 util.set_exec(self.wjoin(f), mf2[f])
1497 util.set_exec(self.wjoin(f), mf2[f])
1498 if moddirstate:
1498 if moddirstate:
1499 self.dirstate.update([f], mode)
1499 self.dirstate.update([f], mode)
1500
1500
1501 # merge the tricky bits
1501 # merge the tricky bits
1502 files = merge.keys()
1502 files = merge.keys()
1503 files.sort()
1503 files.sort()
1504 for f in files:
1504 for f in files:
1505 self.ui.status("merging %s\n" % f)
1505 self.ui.status("merging %s\n" % f)
1506 m, o, flag = merge[f]
1506 m, o, flag = merge[f]
1507 self.merge3(f, m, o)
1507 self.merge3(f, m, o)
1508 util.set_exec(self.wjoin(f), flag)
1508 util.set_exec(self.wjoin(f), flag)
1509 if moddirstate:
1509 if moddirstate:
1510 self.dirstate.update([f], 'm')
1510 self.dirstate.update([f], 'm')
1511
1511
1512 remove.sort()
1512 remove.sort()
1513 for f in remove:
1513 for f in remove:
1514 self.ui.note("removing %s\n" % f)
1514 self.ui.note("removing %s\n" % f)
1515 try:
1515 try:
1516 os.unlink(self.wjoin(f))
1516 os.unlink(self.wjoin(f))
1517 except OSError, inst:
1517 except OSError, inst:
1518 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1518 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1519 # try removing directories that might now be empty
1519 # try removing directories that might now be empty
1520 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1520 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1521 except: pass
1521 except: pass
1522 if moddirstate:
1522 if moddirstate:
1523 if mode == 'n':
1523 if mode == 'n':
1524 self.dirstate.forget(remove)
1524 self.dirstate.forget(remove)
1525 else:
1525 else:
1526 self.dirstate.update(remove, 'r')
1526 self.dirstate.update(remove, 'r')
1527
1527
1528 def merge3(self, fn, my, other):
1528 def merge3(self, fn, my, other):
1529 """perform a 3-way merge in the working directory"""
1529 """perform a 3-way merge in the working directory"""
1530
1530
1531 def temp(prefix, node):
1531 def temp(prefix, node):
1532 pre = "%s~%s." % (os.path.basename(fn), prefix)
1532 pre = "%s~%s." % (os.path.basename(fn), prefix)
1533 (fd, name) = tempfile.mkstemp("", pre)
1533 (fd, name) = tempfile.mkstemp("", pre)
1534 f = os.fdopen(fd, "wb")
1534 f = os.fdopen(fd, "wb")
1535 f.write(fl.revision(node))
1535 f.write(fl.revision(node))
1536 f.close()
1536 f.close()
1537 return name
1537 return name
1538
1538
1539 fl = self.file(fn)
1539 fl = self.file(fn)
1540 base = fl.ancestor(my, other)
1540 base = fl.ancestor(my, other)
1541 a = self.wjoin(fn)
1541 a = self.wjoin(fn)
1542 b = temp("base", base)
1542 b = temp("base", base)
1543 c = temp("other", other)
1543 c = temp("other", other)
1544
1544
1545 self.ui.note("resolving %s\n" % fn)
1545 self.ui.note("resolving %s\n" % fn)
1546 self.ui.debug("file %s: other %s ancestor %s\n" %
1546 self.ui.debug("file %s: other %s ancestor %s\n" %
1547 (fn, short(other), short(base)))
1547 (fn, short(other), short(base)))
1548
1548
1549 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1549 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1550 or "hgmerge")
1550 or "hgmerge")
1551 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1551 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1552 if r:
1552 if r:
1553 self.ui.warn("merging %s failed!\n" % fn)
1553 self.ui.warn("merging %s failed!\n" % fn)
1554
1554
1555 os.unlink(b)
1555 os.unlink(b)
1556 os.unlink(c)
1556 os.unlink(c)
1557
1557
1558 def verify(self):
1558 def verify(self):
1559 filelinkrevs = {}
1559 filelinkrevs = {}
1560 filenodes = {}
1560 filenodes = {}
1561 changesets = revisions = files = 0
1561 changesets = revisions = files = 0
1562 errors = 0
1562 errors = 0
1563
1563
1564 seen = {}
1564 seen = {}
1565 self.ui.status("checking changesets\n")
1565 self.ui.status("checking changesets\n")
1566 for i in range(self.changelog.count()):
1566 for i in range(self.changelog.count()):
1567 changesets += 1
1567 changesets += 1
1568 n = self.changelog.node(i)
1568 n = self.changelog.node(i)
1569 if n in seen:
1569 if n in seen:
1570 self.ui.warn("duplicate changeset at revision %d\n" % i)
1570 self.ui.warn("duplicate changeset at revision %d\n" % i)
1571 errors += 1
1571 errors += 1
1572 seen[n] = 1
1572 seen[n] = 1
1573
1573
1574 for p in self.changelog.parents(n):
1574 for p in self.changelog.parents(n):
1575 if p not in self.changelog.nodemap:
1575 if p not in self.changelog.nodemap:
1576 self.ui.warn("changeset %s has unknown parent %s\n" %
1576 self.ui.warn("changeset %s has unknown parent %s\n" %
1577 (short(n), short(p)))
1577 (short(n), short(p)))
1578 errors += 1
1578 errors += 1
1579 try:
1579 try:
1580 changes = self.changelog.read(n)
1580 changes = self.changelog.read(n)
1581 except Exception, inst:
1581 except Exception, inst:
1582 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1582 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1583 errors += 1
1583 errors += 1
1584
1584
1585 for f in changes[3]:
1585 for f in changes[3]:
1586 filelinkrevs.setdefault(f, []).append(i)
1586 filelinkrevs.setdefault(f, []).append(i)
1587
1587
1588 seen = {}
1588 seen = {}
1589 self.ui.status("checking manifests\n")
1589 self.ui.status("checking manifests\n")
1590 for i in range(self.manifest.count()):
1590 for i in range(self.manifest.count()):
1591 n = self.manifest.node(i)
1591 n = self.manifest.node(i)
1592 if n in seen:
1592 if n in seen:
1593 self.ui.warn("duplicate manifest at revision %d\n" % i)
1593 self.ui.warn("duplicate manifest at revision %d\n" % i)
1594 errors += 1
1594 errors += 1
1595 seen[n] = 1
1595 seen[n] = 1
1596
1596
1597 for p in self.manifest.parents(n):
1597 for p in self.manifest.parents(n):
1598 if p not in self.manifest.nodemap:
1598 if p not in self.manifest.nodemap:
1599 self.ui.warn("manifest %s has unknown parent %s\n" %
1599 self.ui.warn("manifest %s has unknown parent %s\n" %
1600 (short(n), short(p)))
1600 (short(n), short(p)))
1601 errors += 1
1601 errors += 1
1602
1602
1603 try:
1603 try:
1604 delta = mdiff.patchtext(self.manifest.delta(n))
1604 delta = mdiff.patchtext(self.manifest.delta(n))
1605 except KeyboardInterrupt:
1605 except KeyboardInterrupt:
1606 self.ui.warn("aborted\n")
1606 self.ui.warn("aborted\n")
1607 sys.exit(0)
1607 sys.exit(0)
1608 except Exception, inst:
1608 except Exception, inst:
1609 self.ui.warn("unpacking manifest %s: %s\n"
1609 self.ui.warn("unpacking manifest %s: %s\n"
1610 % (short(n), inst))
1610 % (short(n), inst))
1611 errors += 1
1611 errors += 1
1612
1612
1613 ff = [ l.split('\0') for l in delta.splitlines() ]
1613 ff = [ l.split('\0') for l in delta.splitlines() ]
1614 for f, fn in ff:
1614 for f, fn in ff:
1615 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1615 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1616
1616
1617 self.ui.status("crosschecking files in changesets and manifests\n")
1617 self.ui.status("crosschecking files in changesets and manifests\n")
1618 for f in filenodes:
1618 for f in filenodes:
1619 if f not in filelinkrevs:
1619 if f not in filelinkrevs:
1620 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1620 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1621 errors += 1
1621 errors += 1
1622
1622
1623 for f in filelinkrevs:
1623 for f in filelinkrevs:
1624 if f not in filenodes:
1624 if f not in filenodes:
1625 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1625 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1626 errors += 1
1626 errors += 1
1627
1627
1628 self.ui.status("checking files\n")
1628 self.ui.status("checking files\n")
1629 ff = filenodes.keys()
1629 ff = filenodes.keys()
1630 ff.sort()
1630 ff.sort()
1631 for f in ff:
1631 for f in ff:
1632 if f == "/dev/null": continue
1632 if f == "/dev/null": continue
1633 files += 1
1633 files += 1
1634 fl = self.file(f)
1634 fl = self.file(f)
1635 nodes = { nullid: 1 }
1635 nodes = { nullid: 1 }
1636 seen = {}
1636 seen = {}
1637 for i in range(fl.count()):
1637 for i in range(fl.count()):
1638 revisions += 1
1638 revisions += 1
1639 n = fl.node(i)
1639 n = fl.node(i)
1640
1640
1641 if n in seen:
1641 if n in seen:
1642 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1642 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1643 errors += 1
1643 errors += 1
1644
1644
1645 if n not in filenodes[f]:
1645 if n not in filenodes[f]:
1646 self.ui.warn("%s: %d:%s not in manifests\n"
1646 self.ui.warn("%s: %d:%s not in manifests\n"
1647 % (f, i, short(n)))
1647 % (f, i, short(n)))
1648 errors += 1
1648 errors += 1
1649 else:
1649 else:
1650 del filenodes[f][n]
1650 del filenodes[f][n]
1651
1651
1652 flr = fl.linkrev(n)
1652 flr = fl.linkrev(n)
1653 if flr not in filelinkrevs[f]:
1653 if flr not in filelinkrevs[f]:
1654 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1654 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1655 % (f, short(n), fl.linkrev(n)))
1655 % (f, short(n), fl.linkrev(n)))
1656 errors += 1
1656 errors += 1
1657 else:
1657 else:
1658 filelinkrevs[f].remove(flr)
1658 filelinkrevs[f].remove(flr)
1659
1659
1660 # verify contents
1660 # verify contents
1661 try:
1661 try:
1662 t = fl.read(n)
1662 t = fl.read(n)
1663 except Exception, inst:
1663 except Exception, inst:
1664 self.ui.warn("unpacking file %s %s: %s\n"
1664 self.ui.warn("unpacking file %s %s: %s\n"
1665 % (f, short(n), inst))
1665 % (f, short(n), inst))
1666 errors += 1
1666 errors += 1
1667
1667
1668 # verify parents
1668 # verify parents
1669 (p1, p2) = fl.parents(n)
1669 (p1, p2) = fl.parents(n)
1670 if p1 not in nodes:
1670 if p1 not in nodes:
1671 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1671 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1672 (f, short(n), short(p1)))
1672 (f, short(n), short(p1)))
1673 errors += 1
1673 errors += 1
1674 if p2 not in nodes:
1674 if p2 not in nodes:
1675 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1675 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1676 (f, short(n), short(p2)))
1676 (f, short(n), short(p2)))
1677 errors += 1
1677 errors += 1
1678 nodes[n] = 1
1678 nodes[n] = 1
1679
1679
1680 # cross-check
1680 # cross-check
1681 for node in filenodes[f]:
1681 for node in filenodes[f]:
1682 self.ui.warn("node %s in manifests not in %s\n"
1682 self.ui.warn("node %s in manifests not in %s\n"
1683 % (hex(node), f))
1683 % (hex(node), f))
1684 errors += 1
1684 errors += 1
1685
1685
1686 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1686 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1687 (files, changesets, revisions))
1687 (files, changesets, revisions))
1688
1688
1689 if errors:
1689 if errors:
1690 self.ui.warn("%d integrity errors encountered!\n" % errors)
1690 self.ui.warn("%d integrity errors encountered!\n" % errors)
1691 return 1
1691 return 1
1692
1692
1693 class httprepository:
1693 class httprepository:
1694 def __init__(self, ui, path):
1694 def __init__(self, ui, path):
1695 self.url = path
1695 self.url = path
1696 self.ui = ui
1696 self.ui = ui
1697 no_list = [ "localhost", "127.0.0.1" ]
1697 no_list = [ "localhost", "127.0.0.1" ]
1698 host = ui.config("http_proxy", "host")
1698 host = ui.config("http_proxy", "host")
1699 if host is None:
1699 if host is None:
1700 host = os.environ.get("http_proxy")
1700 host = os.environ.get("http_proxy")
1701 if host and host.startswith('http://'):
1701 if host and host.startswith('http://'):
1702 host = host[7:]
1702 host = host[7:]
1703 user = ui.config("http_proxy", "user")
1703 user = ui.config("http_proxy", "user")
1704 passwd = ui.config("http_proxy", "passwd")
1704 passwd = ui.config("http_proxy", "passwd")
1705 no = ui.config("http_proxy", "no")
1705 no = ui.config("http_proxy", "no")
1706 if no is None:
1706 if no is None:
1707 no = os.environ.get("no_proxy")
1707 no = os.environ.get("no_proxy")
1708 if no:
1708 if no:
1709 no_list = no_list + no.split(",")
1709 no_list = no_list + no.split(",")
1710
1710
1711 no_proxy = 0
1711 no_proxy = 0
1712 for h in no_list:
1712 for h in no_list:
1713 if (path.startswith("http://" + h + "/") or
1713 if (path.startswith("http://" + h + "/") or
1714 path.startswith("http://" + h + ":") or
1714 path.startswith("http://" + h + ":") or
1715 path == "http://" + h):
1715 path == "http://" + h):
1716 no_proxy = 1
1716 no_proxy = 1
1717
1717
1718 # Note: urllib2 takes proxy values from the environment and those will
1718 # Note: urllib2 takes proxy values from the environment and those will
1719 # take precedence
1719 # take precedence
1720 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1720 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1721 if os.environ.has_key(env):
1721 if os.environ.has_key(env):
1722 del os.environ[env]
1722 del os.environ[env]
1723
1723
1724 proxy_handler = urllib2.BaseHandler()
1724 proxy_handler = urllib2.BaseHandler()
1725 if host and not no_proxy:
1725 if host and not no_proxy:
1726 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1726 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1727
1727
1728 authinfo = None
1728 authinfo = None
1729 if user and passwd:
1729 if user and passwd:
1730 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1730 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1731 passmgr.add_password(None, host, user, passwd)
1731 passmgr.add_password(None, host, user, passwd)
1732 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1732 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1733
1733
1734 opener = urllib2.build_opener(proxy_handler, authinfo)
1734 opener = urllib2.build_opener(proxy_handler, authinfo)
1735 urllib2.install_opener(opener)
1735 urllib2.install_opener(opener)
1736
1736
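# editorial sketch of the hgrc section consulted above (values are made up):
#
#   [http_proxy]
#   host = proxy.example.com:3128
#   user = alice
#   passwd = secret
#   no = intranet.example.com,build.example.com
#
# the http_proxy / no_proxy environment variables serve as fallbacks, and
# targets on localhost / 127.0.0.1 always bypass the proxy.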
1737 def dev(self):
1737 def dev(self):
1738 return -1
1738 return -1
1739
1739
1740 def do_cmd(self, cmd, **args):
1740 def do_cmd(self, cmd, **args):
1741 self.ui.debug("sending %s command\n" % cmd)
1741 self.ui.debug("sending %s command\n" % cmd)
1742 q = {"cmd": cmd}
1742 q = {"cmd": cmd}
1743 q.update(args)
1743 q.update(args)
1744 qs = urllib.urlencode(q)
1744 qs = urllib.urlencode(q)
1745 cu = "%s?%s" % (self.url, qs)
1745 cu = "%s?%s" % (self.url, qs)
1746 return urllib2.urlopen(cu)
1746 return urllib2.urlopen(cu)
1747
1747
1748 def heads(self):
1748 def heads(self):
1749 d = self.do_cmd("heads").read()
1749 d = self.do_cmd("heads").read()
1750 try:
1750 try:
1751 return map(bin, d[:-1].split(" "))
1751 return map(bin, d[:-1].split(" "))
1752 except:
1752 except:
1753 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1753 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1754 raise
1754 raise
1755
1755
1756 def verify_hg_repo(self, resp):
1757 if resp.headers.get('content-type') == 'application/hg-0.1':
1758 pass
1759 else:
1760 msg = """'%s' does not appear to be a valid hg repository -
1761 missing a 'Content-type: application/hg-0.1' HTTP header""" % (self.url,)
1762 raise RepoError(msg)
1763
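# editorial note: this guard keeps a URL that is not served by hgweb (say, a
# plain HTML directory index) from being fed to the binary decoders below,
# where it previously surfaced only as the generic "unexpected response"
# warning; anything not tagged Content-type: application/hg-0.1 now fails
# early with a RepoError naming the URL.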
1756 def branches(self, nodes):
1764 def branches(self, nodes):
1757 n = " ".join(map(hex, nodes))
1765 n = " ".join(map(hex, nodes))
1758 d = self.do_cmd("branches", nodes=n).read()
1766 resp = self.do_cmd("branches", nodes=n)
1767 self.verify_hg_repo(resp)
1759 try:
1768 try:
1769 d = resp.read()
1760 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1770 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1761 return br
1771 return br
1762 except:
1772 except:
1763 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1773 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1764 raise
1774 raise
1765
1775
1766 def between(self, pairs):
1776 def between(self, pairs):
1767 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1777 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1768 d = self.do_cmd("between", pairs=n).read()
1778 resp = self.do_cmd("between", pairs=n)
1779 self.verify_hg_repo(resp)
1769 try:
1780 try:
1781 d = resp.read()
1770 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1782 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1771 return p
1783 return p
1772 except:
1784 except:
1773 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1785 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1774 raise
1786 raise
1775
1787
1776 def changegroup(self, nodes):
1788 def changegroup(self, nodes):
1777 n = " ".join(map(hex, nodes))
1789 n = " ".join(map(hex, nodes))
1778 f = self.do_cmd("changegroup", roots=n)
1790 resp = self.do_cmd("changegroup", roots=n)
1791 self.verify_hg_repo(resp)
1779 bytes = 0
1792 bytes = 0
1780
1793
1781 class zread:
1794 class zread:
1782 def __init__(self, f):
1795 def __init__(self, f):
1783 self.zd = zlib.decompressobj()
1796 self.zd = zlib.decompressobj()
1784 self.f = f
1797 self.f = f
1785 self.buf = ""
1798 self.buf = ""
1786 def read(self, l):
1799 def read(self, l):
1787 while l > len(self.buf):
1800 while l > len(self.buf):
1788 r = f.read(4096)
1801 r = self.f.read(4096)
1789 if r:
1802 if r:
1790 self.buf += self.zd.decompress(r)
1803 self.buf += self.zd.decompress(r)
1791 else:
1804 else:
1792 self.buf += self.zd.flush()
1805 self.buf += self.zd.flush()
1793 break
1806 break
1794 d, self.buf = self.buf[:l], self.buf[l:]
1807 d, self.buf = self.buf[:l], self.buf[l:]
1795 return d
1808 return d
1796
1809
1797 return zread(f)
1810 return zread(resp)
1798
1811
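# editorial note: the changegroup body is zlib-compressed on the wire, and the
# zread wrapper above inflates it incrementally so the caller can stream
# chunks without holding the whole response in memory; a rough,
# buffer-everything equivalent (hypothetical, memory permitting) would be
#   StringIO.StringIO(zlib.decompress(resp.read()))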
1799 class remotelock:
1812 class remotelock:
1800 def __init__(self, repo):
1813 def __init__(self, repo):
1801 self.repo = repo
1814 self.repo = repo
1802 def release(self):
1815 def release(self):
1803 self.repo.unlock()
1816 self.repo.unlock()
1804 self.repo = None
1817 self.repo = None
1805 def __del__(self):
1818 def __del__(self):
1806 if self.repo:
1819 if self.repo:
1807 self.release()
1820 self.release()
1808
1821
1809 class sshrepository:
1822 class sshrepository:
1810 def __init__(self, ui, path):
1823 def __init__(self, ui, path):
1811 self.url = path
1824 self.url = path
1812 self.ui = ui
1825 self.ui = ui
1813
1826
1814 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1827 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1815 if not m:
1828 if not m:
1816 raise RepoError("couldn't parse destination %s\n" % path)
1829 raise RepoError("couldn't parse destination %s\n" % path)
1817
1830
1818 self.user = m.group(2)
1831 self.user = m.group(2)
1819 self.host = m.group(3)
1832 self.host = m.group(3)
1820 self.port = m.group(5)
1833 self.port = m.group(5)
1821 self.path = m.group(7)
1834 self.path = m.group(7)
1822
1835
1823 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1836 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1824 args = self.port and ("%s -p %s") % (args, self.port) or args
1837 args = self.port and ("%s -p %s") % (args, self.port) or args
1825 path = self.path or ""
1838 path = self.path or ""
1826
1839
1827 cmd = "ssh %s 'hg -R %s serve --stdio'"
1840 cmd = "ssh %s 'hg -R %s serve --stdio'"
1828 cmd = cmd % (args, path)
1841 cmd = cmd % (args, path)
1829
1842
1830 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1843 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1831
1844
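# editorial example (hypothetical URL): for
#   ssh://bob@hg.example.com:2222/projects/widget
# the regex yields user='bob', host='hg.example.com', port='2222',
# path='projects/widget', and the spawned command becomes
#   ssh bob@hg.example.com -p 2222 'hg -R projects/widget serve --stdio'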
1832 def readerr(self):
1845 def readerr(self):
1833 while 1:
1846 while 1:
1834 r,w,x = select.select([self.pipee], [], [], 0)
1847 r,w,x = select.select([self.pipee], [], [], 0)
1835 if not r: break
1848 if not r: break
1836 l = self.pipee.readline()
1849 l = self.pipee.readline()
1837 if not l: break
1850 if not l: break
1838 self.ui.status("remote: ", l)
1851 self.ui.status("remote: ", l)
1839
1852
1840 def __del__(self):
1853 def __del__(self):
1841 self.pipeo.close()
1854 self.pipeo.close()
1842 self.pipei.close()
1855 self.pipei.close()
1843 for l in self.pipee:
1856 for l in self.pipee:
1844 self.ui.status("remote: ", l)
1857 self.ui.status("remote: ", l)
1845 self.pipee.close()
1858 self.pipee.close()
1846
1859
1847 def dev(self):
1860 def dev(self):
1848 return -1
1861 return -1
1849
1862
1850 def do_cmd(self, cmd, **args):
1863 def do_cmd(self, cmd, **args):
1851 self.ui.debug("sending %s command\n" % cmd)
1864 self.ui.debug("sending %s command\n" % cmd)
1852 self.pipeo.write("%s\n" % cmd)
1865 self.pipeo.write("%s\n" % cmd)
1853 for k, v in args.items():
1866 for k, v in args.items():
1854 self.pipeo.write("%s %d\n" % (k, len(v)))
1867 self.pipeo.write("%s %d\n" % (k, len(v)))
1855 self.pipeo.write(v)
1868 self.pipeo.write(v)
1856 self.pipeo.flush()
1869 self.pipeo.flush()
1857
1870
1858 return self.pipei
1871 return self.pipei
1859
1872
1860 def call(self, cmd, **args):
1873 def call(self, cmd, **args):
1861 r = self.do_cmd(cmd, **args)
1874 r = self.do_cmd(cmd, **args)
1862 l = r.readline()
1875 l = r.readline()
1863 self.readerr()
1876 self.readerr()
1864 try:
1877 try:
1865 l = int(l)
1878 l = int(l)
1866 except:
1879 except:
1867 raise RepoError("unexpected response '%s'" % l)
1880 raise RepoError("unexpected response '%s'" % l)
1868 return r.read(l)
1881 return r.read(l)
1869
1882
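# editorial sketch of the stdio protocol driven above (lengths are made up):
# each command goes out as its name plus "<arg> <byte-count>\n<value>" pairs,
#   -> "branches\n" "nodes 40\n" + 40 bytes of hex
# and call() expects a decimal length line followed by exactly that many
# bytes of reply,
#   <- "123\n" + 123 bytes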
1870 def lock(self):
1883 def lock(self):
1871 self.call("lock")
1884 self.call("lock")
1872 return remotelock(self)
1885 return remotelock(self)
1873
1886
1874 def unlock(self):
1887 def unlock(self):
1875 self.call("unlock")
1888 self.call("unlock")
1876
1889
1877 def heads(self):
1890 def heads(self):
1878 d = self.call("heads")
1891 d = self.call("heads")
1879 try:
1892 try:
1880 return map(bin, d[:-1].split(" "))
1893 return map(bin, d[:-1].split(" "))
1881 except:
1894 except:
1882 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1895 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1883
1896
1884 def branches(self, nodes):
1897 def branches(self, nodes):
1885 n = " ".join(map(hex, nodes))
1898 n = " ".join(map(hex, nodes))
1886 d = self.call("branches", nodes=n)
1899 d = self.call("branches", nodes=n)
1887 try:
1900 try:
1888 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1901 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1889 return br
1902 return br
1890 except:
1903 except:
1891 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1904 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1892
1905
1893 def between(self, pairs):
1906 def between(self, pairs):
1894 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1907 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1895 d = self.call("between", pairs=n)
1908 d = self.call("between", pairs=n)
1896 try:
1909 try:
1897 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1910 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1898 return p
1911 return p
1899 except:
1912 except:
1900 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1913 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1901
1914
1902 def changegroup(self, nodes):
1915 def changegroup(self, nodes):
1903 n = " ".join(map(hex, nodes))
1916 n = " ".join(map(hex, nodes))
1904 f = self.do_cmd("changegroup", roots=n)
1917 f = self.do_cmd("changegroup", roots=n)
1905 return self.pipei
1918 return self.pipei
1906
1919
1907 def addchangegroup(self, cg):
1920 def addchangegroup(self, cg):
1908 d = self.call("addchangegroup")
1921 d = self.call("addchangegroup")
1909 if d:
1922 if d:
1910 raise RepoError("push refused: %s" % d)
1923 raise RepoError("push refused: %s" % d)
1911
1924
1912 while 1:
1925 while 1:
1913 d = cg.read(4096)
1926 d = cg.read(4096)
1914 if not d: break
1927 if not d: break
1915 self.pipeo.write(d)
1928 self.pipeo.write(d)
1916 self.readerr()
1929 self.readerr()
1917
1930
1918 self.pipeo.flush()
1931 self.pipeo.flush()
1919
1932
1920 self.readerr()
1933 self.readerr()
1921 l = int(self.pipei.readline())
1934 l = int(self.pipei.readline())
1922 return self.pipei.read(l) != ""
1935 return self.pipei.read(l) != ""
1923
1936
1924 def repository(ui, path=None, create=0):
1937 def repository(ui, path=None, create=0):
1925 if path:
1938 if path:
1926 if path.startswith("http://"):
1939 if path.startswith("http://"):
1927 return httprepository(ui, path)
1940 return httprepository(ui, path)
1928 if path.startswith("hg://"):
1941 if path.startswith("hg://"):
1929 return httprepository(ui, path.replace("hg://", "http://"))
1942 return httprepository(ui, path.replace("hg://", "http://"))
1930 if path.startswith("old-http://"):
1943 if path.startswith("old-http://"):
1931 return localrepository(ui, path.replace("old-http://", "http://"))
1944 return localrepository(ui, path.replace("old-http://", "http://"))
1932 if path.startswith("ssh://"):
1945 if path.startswith("ssh://"):
1933 return sshrepository(ui, path)
1946 return sshrepository(ui, path)
1934
1947
1935 return localrepository(ui, path, create)
1948 return localrepository(ui, path, create)
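# editorial usage sketch (hypothetical paths):
#   repository(u, "http://hg.example.com/proj")   # -> httprepository
#   repository(u, "ssh://hg.example.com/proj")    # -> sshrepository
#   repository(u, "/home/alice/proj", create=1)   # -> new localrepository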
@@ -1,783 +1,783 b''
1 # hgweb.py - web interface to a mercurial repository
1 # hgweb.py - web interface to a mercurial repository
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, cgi, time, re, difflib, sys, zlib
9 import os, cgi, time, re, difflib, sys, zlib
10 from mercurial.hg import *
10 from mercurial.hg import *
11 from mercurial.ui import *
11 from mercurial.ui import *
12
12
13 def templatepath():
13 def templatepath():
14 for f in "templates", "../templates":
14 for f in "templates", "../templates":
15 p = os.path.join(os.path.dirname(__file__), f)
15 p = os.path.join(os.path.dirname(__file__), f)
16 if os.path.isdir(p): return p
16 if os.path.isdir(p): return p
17
17
18 def age(t):
18 def age(t):
19 def plural(t, c):
19 def plural(t, c):
20 if c == 1: return t
20 if c == 1: return t
21 return t + "s"
21 return t + "s"
22 def fmt(t, c):
22 def fmt(t, c):
23 return "%d %s" % (c, plural(t, c))
23 return "%d %s" % (c, plural(t, c))
24
24
25 now = time.time()
25 now = time.time()
26 delta = max(1, int(now - t))
26 delta = max(1, int(now - t))
27
27
28 scales = [["second", 1],
28 scales = [["second", 1],
29 ["minute", 60],
29 ["minute", 60],
30 ["hour", 3600],
30 ["hour", 3600],
31 ["day", 3600 * 24],
31 ["day", 3600 * 24],
32 ["week", 3600 * 24 * 7],
32 ["week", 3600 * 24 * 7],
33 ["month", 3600 * 24 * 30],
33 ["month", 3600 * 24 * 30],
34 ["year", 3600 * 24 * 365]]
34 ["year", 3600 * 24 * 365]]
35
35
36 scales.reverse()
36 scales.reverse()
37
37
38 for t, s in scales:
38 for t, s in scales:
39 n = delta / s
39 n = delta / s
40 if n >= 2 or s == 1: return fmt(t, n)
40 if n >= 2 or s == 1: return fmt(t, n)
41
41
42 def nl2br(text):
42 def nl2br(text):
43 return text.replace('\n', '<br/>\n')
43 return text.replace('\n', '<br/>\n')
44
44
45 def obfuscate(text):
45 def obfuscate(text):
46 return ''.join([ '&#%d;' % ord(c) for c in text ])
46 return ''.join([ '&#%d;' % ord(c) for c in text ])
47
47
48 def up(p):
48 def up(p):
49 if p[0] != "/": p = "/" + p
49 if p[0] != "/": p = "/" + p
50 if p[-1] == "/": p = p[:-1]
50 if p[-1] == "/": p = p[:-1]
51 up = os.path.dirname(p)
51 up = os.path.dirname(p)
52 if up == "/":
52 if up == "/":
53 return "/"
53 return "/"
54 return up + "/"
54 return up + "/"
55
55
56 def httphdr(type):
56 def httphdr(type):
57 sys.stdout.write('Content-type: %s\n\n' % type)
57 sys.stdout.write('Content-type: %s\n\n' % type)
58
58
59 def write(*things):
59 def write(*things):
60 for thing in things:
60 for thing in things:
61 if hasattr(thing, "__iter__"):
61 if hasattr(thing, "__iter__"):
62 for part in thing:
62 for part in thing:
63 write(part)
63 write(part)
64 else:
64 else:
65 sys.stdout.write(str(thing))
65 sys.stdout.write(str(thing))
66
66
67 def template(tmpl, filters = {}, **map):
67 def template(tmpl, filters = {}, **map):
68 while tmpl:
68 while tmpl:
69 m = re.search(r"#([a-zA-Z0-9]+)((\|[a-zA-Z0-9]+)*)#", tmpl)
69 m = re.search(r"#([a-zA-Z0-9]+)((\|[a-zA-Z0-9]+)*)#", tmpl)
70 if m:
70 if m:
71 yield tmpl[:m.start(0)]
71 yield tmpl[:m.start(0)]
72 v = map.get(m.group(1), "")
72 v = map.get(m.group(1), "")
73 v = callable(v) and v() or v
73 v = callable(v) and v() or v
74
74
75 fl = m.group(2)
75 fl = m.group(2)
76 if fl:
76 if fl:
77 for f in fl.split("|")[1:]:
77 for f in fl.split("|")[1:]:
78 v = filters[f](v)
78 v = filters[f](v)
79
79
80 yield v
80 yield v
81 tmpl = tmpl[m.end(0):]
81 tmpl = tmpl[m.end(0):]
82 else:
82 else:
83 yield tmpl
83 yield tmpl
84 return
84 return
85
85
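# editorial sketch of the #key|filter# syntax handled above (made-up values):
#   flt = {"short": lambda x: x[:12]}
#   "".join(template("changeset #node|short#\n", flt,
#                    node="a1b2c3d4e5f6" + "0" * 28))
#   # -> "changeset a1b2c3d4e5f6\n"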
86 class templater:
86 class templater:
87 def __init__(self, mapfile, filters = {}, defaults = {}):
87 def __init__(self, mapfile, filters = {}, defaults = {}):
88 self.cache = {}
88 self.cache = {}
89 self.map = {}
89 self.map = {}
90 self.base = os.path.dirname(mapfile)
90 self.base = os.path.dirname(mapfile)
91 self.filters = filters
91 self.filters = filters
92 self.defaults = defaults
92 self.defaults = defaults
93
93
94 for l in file(mapfile):
94 for l in file(mapfile):
95 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
95 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
96 if m:
96 if m:
97 self.cache[m.group(1)] = m.group(2)
97 self.cache[m.group(1)] = m.group(2)
98 else:
98 else:
99 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
99 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
100 if m:
100 if m:
101 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
101 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
102 else:
102 else:
103 raise "unknown map entry '%s'" % l
103 raise "unknown map entry '%s'" % l
104
104
105 def __call__(self, t, **map):
105 def __call__(self, t, **map):
106 m = self.defaults.copy()
106 m = self.defaults.copy()
107 m.update(map)
107 m.update(map)
108 try:
108 try:
109 tmpl = self.cache[t]
109 tmpl = self.cache[t]
110 except KeyError:
110 except KeyError:
111 tmpl = self.cache[t] = file(self.map[t]).read()
111 tmpl = self.cache[t] = file(self.map[t]).read()
112 return template(tmpl, self.filters, **m)
112 return template(tmpl, self.filters, **m)
113
113
114 def rfc822date(x):
114 def rfc822date(x):
115 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(x))
115 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(x))
116
116
117 class hgweb:
117 class hgweb:
118 maxchanges = 10
118 maxchanges = 10
119 maxfiles = 10
119 maxfiles = 10
120
120
121 def __init__(self, path, name, templates = ""):
121 def __init__(self, path, name, templates = ""):
122 self.templates = templates or templatepath()
122 self.templates = templates or templatepath()
123 self.reponame = name
123 self.reponame = name
124 self.path = path
124 self.path = path
125 self.mtime = -1
125 self.mtime = -1
126 self.viewonly = 0
126 self.viewonly = 0
127
127
128 self.filters = {
128 self.filters = {
129 "escape": cgi.escape,
129 "escape": cgi.escape,
130 "age": age,
130 "age": age,
131 "date": (lambda x: time.asctime(time.gmtime(x))),
131 "date": (lambda x: time.asctime(time.gmtime(x))),
132 "addbreaks": nl2br,
132 "addbreaks": nl2br,
133 "obfuscate": obfuscate,
133 "obfuscate": obfuscate,
134 "short": (lambda x: x[:12]),
134 "short": (lambda x: x[:12]),
135 "firstline": (lambda x: x.splitlines(1)[0]),
135 "firstline": (lambda x: x.splitlines(1)[0]),
136 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
136 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
137 "rfc822date": rfc822date,
137 "rfc822date": rfc822date,
138 }
138 }
139
139
140 def refresh(self):
140 def refresh(self):
141 s = os.stat(os.path.join(self.path, ".hg", "00changelog.i"))
141 s = os.stat(os.path.join(self.path, ".hg", "00changelog.i"))
142 if s.st_mtime != self.mtime:
142 if s.st_mtime != self.mtime:
143 self.mtime = s.st_mtime
143 self.mtime = s.st_mtime
144 self.repo = repository(ui(), self.path)
144 self.repo = repository(ui(), self.path)
145
145
146 def date(self, cs):
146 def date(self, cs):
147 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
147 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
148
148
149 def listfiles(self, files, mf):
149 def listfiles(self, files, mf):
150 for f in files[:self.maxfiles]:
150 for f in files[:self.maxfiles]:
151 yield self.t("filenodelink", node = hex(mf[f]), file = f)
151 yield self.t("filenodelink", node = hex(mf[f]), file = f)
152 if len(files) > self.maxfiles:
152 if len(files) > self.maxfiles:
153 yield self.t("fileellipses")
153 yield self.t("fileellipses")
154
154
155 def listfilediffs(self, files, changeset):
155 def listfilediffs(self, files, changeset):
156 for f in files[:self.maxfiles]:
156 for f in files[:self.maxfiles]:
157 yield self.t("filedifflink", node = hex(changeset), file = f)
157 yield self.t("filedifflink", node = hex(changeset), file = f)
158 if len(files) > self.maxfiles:
158 if len(files) > self.maxfiles:
159 yield self.t("fileellipses")
159 yield self.t("fileellipses")
160
160
161 def parents(self, t1, nodes=[], rev=None,**args):
161 def parents(self, t1, nodes=[], rev=None,**args):
162 if not rev: rev = lambda x: ""
162 if not rev: rev = lambda x: ""
163 for node in nodes:
163 for node in nodes:
164 if node != nullid:
164 if node != nullid:
165 yield self.t(t1, node = hex(node), rev = rev(node), **args)
165 yield self.t(t1, node = hex(node), rev = rev(node), **args)
166
166
167 def showtag(self, t1, node=nullid, **args):
167 def showtag(self, t1, node=nullid, **args):
168 for t in self.repo.nodetags(node):
168 for t in self.repo.nodetags(node):
169 yield self.t(t1, tag = t, **args)
169 yield self.t(t1, tag = t, **args)
170
170
171 def diff(self, node1, node2, files):
171 def diff(self, node1, node2, files):
172 def filterfiles(list, files):
172 def filterfiles(list, files):
173 l = [ x for x in list if x in files ]
173 l = [ x for x in list if x in files ]
174
174
175 for f in files:
175 for f in files:
176 if f[-1] != os.sep: f += os.sep
176 if f[-1] != os.sep: f += os.sep
177 l += [ x for x in list if x.startswith(f) ]
177 l += [ x for x in list if x.startswith(f) ]
178 return l
178 return l
179
179
180 parity = [0]
180 parity = [0]
181 def diffblock(diff, f, fn):
181 def diffblock(diff, f, fn):
182 yield self.t("diffblock",
182 yield self.t("diffblock",
183 lines = prettyprintlines(diff),
183 lines = prettyprintlines(diff),
184 parity = parity[0],
184 parity = parity[0],
185 file = f,
185 file = f,
186 filenode = hex(fn or nullid))
186 filenode = hex(fn or nullid))
187 parity[0] = 1 - parity[0]
187 parity[0] = 1 - parity[0]
188
188
189 def prettyprintlines(diff):
189 def prettyprintlines(diff):
190 for l in diff.splitlines(1):
190 for l in diff.splitlines(1):
191 if l.startswith('+'):
191 if l.startswith('+'):
192 yield self.t("difflineplus", line = l)
192 yield self.t("difflineplus", line = l)
193 elif l.startswith('-'):
193 elif l.startswith('-'):
194 yield self.t("difflineminus", line = l)
194 yield self.t("difflineminus", line = l)
195 elif l.startswith('@'):
195 elif l.startswith('@'):
196 yield self.t("difflineat", line = l)
196 yield self.t("difflineat", line = l)
197 else:
197 else:
198 yield self.t("diffline", line = l)
198 yield self.t("diffline", line = l)
199
199
200 r = self.repo
200 r = self.repo
201 cl = r.changelog
201 cl = r.changelog
202 mf = r.manifest
202 mf = r.manifest
203 change1 = cl.read(node1)
203 change1 = cl.read(node1)
204 change2 = cl.read(node2)
204 change2 = cl.read(node2)
205 mmap1 = mf.read(change1[0])
205 mmap1 = mf.read(change1[0])
206 mmap2 = mf.read(change2[0])
206 mmap2 = mf.read(change2[0])
207 date1 = self.date(change1)
207 date1 = self.date(change1)
208 date2 = self.date(change2)
208 date2 = self.date(change2)
209
209
210 c, a, d, u = r.changes(node1, node2)
210 c, a, d, u = r.changes(node1, node2)
211 if files:
211 if files:
212 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
212 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
213
213
214 for f in c:
214 for f in c:
215 to = r.file(f).read(mmap1[f])
215 to = r.file(f).read(mmap1[f])
216 tn = r.file(f).read(mmap2[f])
216 tn = r.file(f).read(mmap2[f])
217 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
217 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
218 for f in a:
218 for f in a:
219 to = None
219 to = None
220 tn = r.file(f).read(mmap2[f])
220 tn = r.file(f).read(mmap2[f])
221 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
221 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
222 for f in d:
222 for f in d:
223 to = r.file(f).read(mmap1[f])
223 to = r.file(f).read(mmap1[f])
224 tn = None
224 tn = None
225 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
225 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
226
226
227 def header(self):
227 def header(self):
228 yield self.t("header")
228 yield self.t("header")
229
229
230 def footer(self):
230 def footer(self):
231 yield self.t("footer")
231 yield self.t("footer")
232
232
233 def changelog(self, pos):
233 def changelog(self, pos):
234 def changenav():
234 def changenav():
235 def seq(factor = 1):
235 def seq(factor = 1):
236 yield 1 * factor
236 yield 1 * factor
237 yield 3 * factor
237 yield 3 * factor
238 #yield 5 * factor
238 #yield 5 * factor
239 for f in seq(factor * 10):
239 for f in seq(factor * 10):
240 yield f
240 yield f
241
241
242 l = []
242 l = []
243 for f in seq():
243 for f in seq():
244 if f < self.maxchanges / 2: continue
244 if f < self.maxchanges / 2: continue
245 if f > count: break
245 if f > count: break
246 r = "%d" % f
246 r = "%d" % f
247 if pos + f < count: l.append(("+" + r, pos + f))
247 if pos + f < count: l.append(("+" + r, pos + f))
248 if pos - f >= 0: l.insert(0, ("-" + r, pos - f))
248 if pos - f >= 0: l.insert(0, ("-" + r, pos - f))
249
249
250 yield self.t("naventry", rev = 0, label="(0)")
250 yield self.t("naventry", rev = 0, label="(0)")
251
251
252 for label, rev in l:
252 for label, rev in l:
253 yield self.t("naventry", label = label, rev = rev)
253 yield self.t("naventry", label = label, rev = rev)
254
254
255 yield self.t("naventry", label="tip")
255 yield self.t("naventry", label="tip")
256
256
257 def changelist():
257 def changelist():
258 parity = (start - end) & 1
258 parity = (start - end) & 1
259 cl = self.repo.changelog
259 cl = self.repo.changelog
260 l = [] # build a list in forward order for efficiency
260 l = [] # build a list in forward order for efficiency
261 for i in range(start, end):
261 for i in range(start, end):
262 n = cl.node(i)
262 n = cl.node(i)
263 changes = cl.read(n)
263 changes = cl.read(n)
264 hn = hex(n)
264 hn = hex(n)
265 t = float(changes[2].split(' ')[0])
265 t = float(changes[2].split(' ')[0])
266
266
267 l.insert(0, self.t(
267 l.insert(0, self.t(
268 'changelogentry',
268 'changelogentry',
269 parity = parity,
269 parity = parity,
270 author = changes[1],
270 author = changes[1],
271 parent = self.parents("changelogparent",
271 parent = self.parents("changelogparent",
272 cl.parents(n), cl.rev),
272 cl.parents(n), cl.rev),
273 changelogtag = self.showtag("changelogtag",n),
273 changelogtag = self.showtag("changelogtag",n),
274 manifest = hex(changes[0]),
274 manifest = hex(changes[0]),
275 desc = changes[4],
275 desc = changes[4],
276 date = t,
276 date = t,
277 files = self.listfilediffs(changes[3], n),
277 files = self.listfilediffs(changes[3], n),
278 rev = i,
278 rev = i,
279 node = hn))
279 node = hn))
280 parity = 1 - parity
280 parity = 1 - parity
281
281
282 yield l
282 yield l
283
283
284 cl = self.repo.changelog
284 cl = self.repo.changelog
285 mf = cl.read(cl.tip())[0]
285 mf = cl.read(cl.tip())[0]
286 count = cl.count()
286 count = cl.count()
287 start = max(0, pos - self.maxchanges + 1)
287 start = max(0, pos - self.maxchanges + 1)
288 end = min(count, start + self.maxchanges)
288 end = min(count, start + self.maxchanges)
289 pos = end - 1
289 pos = end - 1
290
290
291 yield self.t('changelog',
291 yield self.t('changelog',
292 changenav = changenav,
292 changenav = changenav,
293 manifest = hex(mf),
293 manifest = hex(mf),
294 rev = pos, changesets = count, entries = changelist)
294 rev = pos, changesets = count, entries = changelist)
295
295
296 def search(self, query):
296 def search(self, query):
297
297
298 def changelist():
298 def changelist():
299 cl = self.repo.changelog
299 cl = self.repo.changelog
300 count = 0
300 count = 0
301 qw = query.lower().split()
301 qw = query.lower().split()
302
302
303 def revgen():
303 def revgen():
304 for i in range(cl.count() - 1, -1, -100): # newest-first, 100 revs per batch
304 for i in range(cl.count() - 1, -1, -100): # newest-first, 100 revs per batch
305 l = []
305 l = []
306 for j in range(max(0, i - 99), i + 1): # include rev i so the tip is searched too
306 for j in range(max(0, i - 99), i + 1): # include rev i so the tip is searched too
307 n = cl.node(j)
307 n = cl.node(j)
308 changes = cl.read(n)
308 changes = cl.read(n)
309 l.insert(0, (n, j, changes))
309 l.insert(0, (n, j, changes))
310 for e in l:
310 for e in l:
311 yield e
311 yield e
312
312
313 for n, i, changes in revgen():
313 for n, i, changes in revgen():
314 miss = 0
314 miss = 0
315 for q in qw:
315 for q in qw:
316 if not (q in changes[1].lower() or
316 if not (q in changes[1].lower() or
317 q in changes[4].lower() or
317 q in changes[4].lower() or
318 q in " ".join(changes[3][:20]).lower()):
318 q in " ".join(changes[3][:20]).lower()):
319 miss = 1
319 miss = 1
320 break
320 break
321 if miss: continue
321 if miss: continue
322
322
323 count += 1
323 count += 1
324 hn = hex(n)
324 hn = hex(n)
325 t = float(changes[2].split(' ')[0])
325 t = float(changes[2].split(' ')[0])
326
326
327 yield self.t(
327 yield self.t(
328 'searchentry',
328 'searchentry',
329 parity = count & 1,
329 parity = count & 1,
330 author = changes[1],
330 author = changes[1],
331 parent = self.parents("changelogparent",
331 parent = self.parents("changelogparent",
332 cl.parents(n), cl.rev),
332 cl.parents(n), cl.rev),
333 changelogtag = self.showtag("changelogtag",n),
333 changelogtag = self.showtag("changelogtag",n),
334 manifest = hex(changes[0]),
334 manifest = hex(changes[0]),
335 desc = changes[4],
335 desc = changes[4],
336 date = t,
336 date = t,
337 files = self.listfilediffs(changes[3], n),
337 files = self.listfilediffs(changes[3], n),
338 rev = i,
338 rev = i,
339 node = hn)
339 node = hn)
340
340
341 if count >= self.maxchanges: break
341 if count >= self.maxchanges: break
342
342
343 cl = self.repo.changelog
343 cl = self.repo.changelog
344 mf = cl.read(cl.tip())[0]
344 mf = cl.read(cl.tip())[0]
345
345
346 yield self.t('search',
346 yield self.t('search',
347 query = query,
347 query = query,
348 manifest = hex(mf),
348 manifest = hex(mf),
349 entries = changelist)
349 entries = changelist)
350
350
351 def changeset(self, nodeid):
351 def changeset(self, nodeid):
352 n = bin(nodeid)
352 n = bin(nodeid)
353 cl = self.repo.changelog
353 cl = self.repo.changelog
354 changes = cl.read(n)
354 changes = cl.read(n)
355 p1 = cl.parents(n)[0]
355 p1 = cl.parents(n)[0]
356 t = float(changes[2].split(' ')[0])
356 t = float(changes[2].split(' ')[0])
357
357
358 files = []
358 files = []
359 mf = self.repo.manifest.read(changes[0])
359 mf = self.repo.manifest.read(changes[0])
360 for f in changes[3]:
360 for f in changes[3]:
361 files.append(self.t("filenodelink",
361 files.append(self.t("filenodelink",
362 filenode = hex(mf.get(f, nullid)), file = f))
362 filenode = hex(mf.get(f, nullid)), file = f))
363
363
364 def diff():
364 def diff():
365 yield self.diff(p1, n, None)
365 yield self.diff(p1, n, None)
366
366
367 yield self.t('changeset',
367 yield self.t('changeset',
368 diff = diff,
368 diff = diff,
369 rev = cl.rev(n),
369 rev = cl.rev(n),
370 node = nodeid,
370 node = nodeid,
371 parent = self.parents("changesetparent",
371 parent = self.parents("changesetparent",
372 cl.parents(n), cl.rev),
372 cl.parents(n), cl.rev),
373 changesettag = self.showtag("changesettag",n),
373 changesettag = self.showtag("changesettag",n),
374 manifest = hex(changes[0]),
374 manifest = hex(changes[0]),
375 author = changes[1],
375 author = changes[1],
376 desc = changes[4],
376 desc = changes[4],
377 date = t,
377 date = t,
378 files = files)
378 files = files)
379
379
380 def filelog(self, f, filenode):
380 def filelog(self, f, filenode):
381 cl = self.repo.changelog
381 cl = self.repo.changelog
382 fl = self.repo.file(f)
382 fl = self.repo.file(f)
383 count = fl.count()
383 count = fl.count()
384
384
385 def entries():
385 def entries():
386 l = []
386 l = []
387 parity = (count - 1) & 1
387 parity = (count - 1) & 1
388
388
389 for i in range(count):
389 for i in range(count):
390
390
391 n = fl.node(i)
391 n = fl.node(i)
392 lr = fl.linkrev(n)
392 lr = fl.linkrev(n)
393 cn = cl.node(lr)
393 cn = cl.node(lr)
394 cs = cl.read(cl.node(lr))
394 cs = cl.read(cl.node(lr))
395 t = float(cs[2].split(' ')[0])
395 t = float(cs[2].split(' ')[0])
396
396
397 l.insert(0, self.t("filelogentry",
397 l.insert(0, self.t("filelogentry",
398 parity = parity,
398 parity = parity,
399 filenode = hex(n),
399 filenode = hex(n),
400 filerev = i,
400 filerev = i,
401 file = f,
401 file = f,
402 node = hex(cn),
402 node = hex(cn),
403 author = cs[1],
403 author = cs[1],
404 date = t,
404 date = t,
405 parent = self.parents("filelogparent",
405 parent = self.parents("filelogparent",
406 fl.parents(n), fl.rev, file=f),
406 fl.parents(n), fl.rev, file=f),
407 desc = cs[4]))
407 desc = cs[4]))
408 parity = 1 - parity
408 parity = 1 - parity
409
409
410 yield l
410 yield l
411
411
412 yield self.t("filelog",
412 yield self.t("filelog",
413 file = f,
413 file = f,
414 filenode = filenode,
414 filenode = filenode,
415 entries = entries)
415 entries = entries)
416
416
417 def filerevision(self, f, node):
417 def filerevision(self, f, node):
418 fl = self.repo.file(f)
418 fl = self.repo.file(f)
419 n = bin(node)
419 n = bin(node)
420 text = fl.read(n)
420 text = fl.read(n)
421 changerev = fl.linkrev(n)
421 changerev = fl.linkrev(n)
422 cl = self.repo.changelog
422 cl = self.repo.changelog
423 cn = cl.node(changerev)
423 cn = cl.node(changerev)
424 cs = cl.read(cn)
424 cs = cl.read(cn)
425 t = float(cs[2].split(' ')[0])
425 t = float(cs[2].split(' ')[0])
426 mfn = cs[0]
426 mfn = cs[0]
427
427
428 def lines():
428 def lines():
429 for l, t in enumerate(text.splitlines(1)):
429 for l, t in enumerate(text.splitlines(1)):
430 yield self.t("fileline", line = t,
430 yield self.t("fileline", line = t,
431 linenumber = "% 6d" % (l + 1),
431 linenumber = "% 6d" % (l + 1),
432 parity = l & 1)
432 parity = l & 1)
433
433
434 yield self.t("filerevision", file = f,
434 yield self.t("filerevision", file = f,
435 filenode = node,
435 filenode = node,
436 path = up(f),
436 path = up(f),
437 text = lines(),
437 text = lines(),
438 rev = changerev,
438 rev = changerev,
439 node = hex(cn),
439 node = hex(cn),
440 manifest = hex(mfn),
440 manifest = hex(mfn),
441 author = cs[1],
441 author = cs[1],
442 date = t,
442 date = t,
443 parent = self.parents("filerevparent",
443 parent = self.parents("filerevparent",
444 fl.parents(n), fl.rev, file=f),
444 fl.parents(n), fl.rev, file=f),
445 permissions = self.repo.manifest.readflags(mfn)[f])
445 permissions = self.repo.manifest.readflags(mfn)[f])
446
446
447 def fileannotate(self, f, node):
447 def fileannotate(self, f, node):
448 bcache = {}
448 bcache = {}
449 ncache = {}
449 ncache = {}
450 fl = self.repo.file(f)
450 fl = self.repo.file(f)
451 n = bin(node)
451 n = bin(node)
452 changerev = fl.linkrev(n)
452 changerev = fl.linkrev(n)
453
453
454 cl = self.repo.changelog
454 cl = self.repo.changelog
455 cn = cl.node(changerev)
455 cn = cl.node(changerev)
456 cs = cl.read(cn)
456 cs = cl.read(cn)
457 t = float(cs[2].split(' ')[0])
457 t = float(cs[2].split(' ')[0])
458 mfn = cs[0]
458 mfn = cs[0]
459
459
460 def annotate():
460 def annotate():
461 parity = 1
461 parity = 1
462 last = None
462 last = None
463 for r, l in fl.annotate(n):
463 for r, l in fl.annotate(n):
464 try:
464 try:
465 cnode = ncache[r]
465 cnode = ncache[r]
466 except KeyError:
466 except KeyError:
467 cnode = ncache[r] = self.repo.changelog.node(r)
467 cnode = ncache[r] = self.repo.changelog.node(r)
468
468
469 try:
469 try:
470 name = bcache[r]
470 name = bcache[r]
471 except KeyError:
471 except KeyError:
472 cl = self.repo.changelog.read(cnode)
472 cl = self.repo.changelog.read(cnode)
473 name = cl[1]
473 name = cl[1]
474 pos = name.find('@') # separate index variable; don't clobber the filename f
474 pos = name.find('@') # separate index variable; don't clobber the filename f
475 if pos >= 0:
475 if pos >= 0:
476 name = name[:pos]
476 name = name[:pos]
477 pos = name.find('<')
477 pos = name.find('<')
478 if pos >= 0:
478 if pos >= 0:
479 name = name[pos+1:]
479 name = name[pos+1:]
480 bcache[r] = name
480 bcache[r] = name
481
481
482 if last != cnode:
482 if last != cnode:
483 parity = 1 - parity
483 parity = 1 - parity
484 last = cnode
484 last = cnode
485
485
486 yield self.t("annotateline",
486 yield self.t("annotateline",
487 parity = parity,
487 parity = parity,
488 node = hex(cnode),
488 node = hex(cnode),
489 rev = r,
489 rev = r,
490 author = name,
490 author = name,
491 file = f,
491 file = f,
492 line = l)
492 line = l)
493
493
494 yield self.t("fileannotate",
494 yield self.t("fileannotate",
495 file = f,
495 file = f,
496 filenode = node,
496 filenode = node,
497 annotate = annotate,
497 annotate = annotate,
498 path = up(f),
498 path = up(f),
499 rev = changerev,
499 rev = changerev,
500 node = hex(cn),
500 node = hex(cn),
501 manifest = hex(mfn),
501 manifest = hex(mfn),
502 author = cs[1],
502 author = cs[1],
503 date = t,
503 date = t,
504 parent = self.parents("fileannotateparent",
504 parent = self.parents("fileannotateparent",
505 fl.parents(n), fl.rev, file=f),
505 fl.parents(n), fl.rev, file=f),
506 permissions = self.repo.manifest.readflags(mfn)[f])
506 permissions = self.repo.manifest.readflags(mfn)[f])
507
507
508 def manifest(self, mnode, path):
508 def manifest(self, mnode, path):
509 mf = self.repo.manifest.read(bin(mnode))
509 mf = self.repo.manifest.read(bin(mnode))
510 rev = self.repo.manifest.rev(bin(mnode))
510 rev = self.repo.manifest.rev(bin(mnode))
511 node = self.repo.changelog.node(rev)
511 node = self.repo.changelog.node(rev)
512 mff=self.repo.manifest.readflags(bin(mnode))
512 mff=self.repo.manifest.readflags(bin(mnode))
513
513
514 files = {}
514 files = {}
515
515
516 p = path[1:]
516 p = path[1:]
517 l = len(p)
517 l = len(p)
518
518
519 for f,n in mf.items():
519 for f,n in mf.items():
520 if f[:l] != p:
520 if f[:l] != p:
521 continue
521 continue
522 remain = f[l:]
522 remain = f[l:]
523 if "/" in remain:
523 if "/" in remain:
524 short = remain[:remain.find("/") + 1] # bleah
524 short = remain[:remain.find("/") + 1] # bleah
525 files[short] = (f, None)
525 files[short] = (f, None)
526 else:
526 else:
527 short = os.path.basename(remain)
527 short = os.path.basename(remain)
528 files[short] = (f, n)
528 files[short] = (f, n)
529
529
530 def filelist():
530 def filelist():
531 parity = 0
531 parity = 0
532 fl = files.keys()
532 fl = files.keys()
533 fl.sort()
533 fl.sort()
534 for f in fl:
534 for f in fl:
535 full, fnode = files[f]
535 full, fnode = files[f]
536 if fnode:
536 if fnode:
537 yield self.t("manifestfileentry",
537 yield self.t("manifestfileentry",
538 file = full,
538 file = full,
539 manifest = mnode,
539 manifest = mnode,
540 filenode = hex(fnode),
540 filenode = hex(fnode),
541 parity = parity,
541 parity = parity,
542 basename = f,
542 basename = f,
543 permissions = mff[full])
543 permissions = mff[full])
544 else:
544 else:
545 yield self.t("manifestdirentry",
545 yield self.t("manifestdirentry",
546 parity = parity,
546 parity = parity,
547 path = os.path.join(path, f),
547 path = os.path.join(path, f),
548 manifest = mnode, basename = f[:-1])
548 manifest = mnode, basename = f[:-1])
549 parity = 1 - parity
549 parity = 1 - parity
550
550
551 yield self.t("manifest",
551 yield self.t("manifest",
552 manifest = mnode,
552 manifest = mnode,
553 rev = rev,
553 rev = rev,
554 node = hex(node),
554 node = hex(node),
555 path = path,
555 path = path,
556 up = up(path),
556 up = up(path),
557 entries = filelist)
557 entries = filelist)
558
558
559 def tags(self):
559 def tags(self):
560 cl = self.repo.changelog
560 cl = self.repo.changelog
561 mf = cl.read(cl.tip())[0]
561 mf = cl.read(cl.tip())[0]
562
562
563 i = self.repo.tagslist()
563 i = self.repo.tagslist()
564 i.reverse()
564 i.reverse()
565
565
566 def entries():
566 def entries():
567 parity = 0
567 parity = 0
568 for k,n in i:
568 for k,n in i:
569 yield self.t("tagentry",
569 yield self.t("tagentry",
570 parity = parity,
570 parity = parity,
571 tag = k,
571 tag = k,
572 node = hex(n))
572 node = hex(n))
573 parity = 1 - parity
573 parity = 1 - parity
574
574
575 yield self.t("tags",
575 yield self.t("tags",
576 manifest = hex(mf),
576 manifest = hex(mf),
577 entries = entries)
577 entries = entries)
578
578
579 def filediff(self, file, changeset):
579 def filediff(self, file, changeset):
580 n = bin(changeset)
580 n = bin(changeset)
581 cl = self.repo.changelog
581 cl = self.repo.changelog
582 p1 = cl.parents(n)[0]
582 p1 = cl.parents(n)[0]
583 cs = cl.read(n)
583 cs = cl.read(n)
584 mf = self.repo.manifest.read(cs[0])
584 mf = self.repo.manifest.read(cs[0])
585
585
586 def diff():
586 def diff():
587 yield self.diff(p1, n, file)
587 yield self.diff(p1, n, file)
588
588
589 yield self.t("filediff",
589 yield self.t("filediff",
590 file = file,
590 file = file,
591 filenode = hex(mf.get(file, nullid)),
591 filenode = hex(mf.get(file, nullid)),
592 node = changeset,
592 node = changeset,
593 rev = self.repo.changelog.rev(n),
593 rev = self.repo.changelog.rev(n),
594 parent = self.parents("filediffparent",
594 parent = self.parents("filediffparent",
595 cl.parents(n), cl.rev),
595 cl.parents(n), cl.rev),
596 diff = diff)
596 diff = diff)
597
597
598 # add tags to things
598 # add tags to things
599 # tags -> list of changesets corresponding to tags
599 # tags -> list of changesets corresponding to tags
600 # find tag, changeset, file
600 # find tag, changeset, file
601
601
602 def run(self):
602 def run(self):
603 self.refresh()
603 self.refresh()
604 args = cgi.parse()
604 args = cgi.parse()
605
605
606 m = os.path.join(self.templates, "map")
606 m = os.path.join(self.templates, "map")
607 if args.has_key('style'):
607 if args.has_key('style'):
608 b = os.path.basename("map-" + args['style'][0])
608 b = os.path.basename("map-" + args['style'][0])
609 p = os.path.join(self.templates, b)
609 p = os.path.join(self.templates, b)
610 if os.path.isfile(p): m = p
610 if os.path.isfile(p): m = p
611
611
612 port = os.environ["SERVER_PORT"]
612 port = os.environ["SERVER_PORT"]
613 port = port != "80" and (":" + port) or ""
613 port = port != "80" and (":" + port) or ""
614 uri = os.environ["REQUEST_URI"]
614 uri = os.environ["REQUEST_URI"]
615 if "?" in uri: uri = uri.split("?")[0]
615 if "?" in uri: uri = uri.split("?")[0]
616 url = "http://%s%s%s" % (os.environ["SERVER_NAME"], port, uri)
616 url = "http://%s%s%s" % (os.environ["SERVER_NAME"], port, uri)
617
617
618 self.t = templater(m, self.filters,
618 self.t = templater(m, self.filters,
619 {"url":url,
619 {"url":url,
620 "repo":self.reponame,
620 "repo":self.reponame,
621 "header":self.header(),
621 "header":self.header(),
622 "footer":self.footer(),
622 "footer":self.footer(),
623 })
623 })
624
624
625 if not args.has_key('cmd') or args['cmd'][0] == 'changelog':
625 if not args.has_key('cmd') or args['cmd'][0] == 'changelog':
626 c = self.repo.changelog.count() - 1
626 c = self.repo.changelog.count() - 1
627 hi = c
627 hi = c
628 if args.has_key('rev'):
628 if args.has_key('rev'):
629 hi = args['rev'][0]
629 hi = args['rev'][0]
630 try:
630 try:
631 hi = self.repo.changelog.rev(self.repo.lookup(hi))
631 hi = self.repo.changelog.rev(self.repo.lookup(hi))
632 except RepoError:
632 except RepoError:
633 write(self.search(hi))
633 write(self.search(hi))
634 return
634 return
635
635
636 write(self.changelog(hi))
636 write(self.changelog(hi))
637
637
638 elif args['cmd'][0] == 'changeset':
638 elif args['cmd'][0] == 'changeset':
639 write(self.changeset(args['node'][0]))
639 write(self.changeset(args['node'][0]))
640
640
641 elif args['cmd'][0] == 'manifest':
641 elif args['cmd'][0] == 'manifest':
642 write(self.manifest(args['manifest'][0], args['path'][0]))
642 write(self.manifest(args['manifest'][0], args['path'][0]))
643
643
644 elif args['cmd'][0] == 'tags':
644 elif args['cmd'][0] == 'tags':
645 write(self.tags())
645 write(self.tags())
646
646
647 elif args['cmd'][0] == 'filediff':
647 elif args['cmd'][0] == 'filediff':
648 write(self.filediff(args['file'][0], args['node'][0]))
648 write(self.filediff(args['file'][0], args['node'][0]))
649
649
650 elif args['cmd'][0] == 'file':
650 elif args['cmd'][0] == 'file':
651 write(self.filerevision(args['file'][0], args['filenode'][0]))
651 write(self.filerevision(args['file'][0], args['filenode'][0]))
652
652
653 elif args['cmd'][0] == 'annotate':
653 elif args['cmd'][0] == 'annotate':
654 write(self.fileannotate(args['file'][0], args['filenode'][0]))
654 write(self.fileannotate(args['file'][0], args['filenode'][0]))
655
655
656 elif args['cmd'][0] == 'filelog':
656 elif args['cmd'][0] == 'filelog':
657 write(self.filelog(args['file'][0], args['filenode'][0]))
657 write(self.filelog(args['file'][0], args['filenode'][0]))
658
658
659 elif args['cmd'][0] == 'heads':
659 elif args['cmd'][0] == 'heads':
660 httphdr("text/plain")
660 httphdr("application/mercurial-0.1")
661 h = self.repo.heads()
661 h = self.repo.heads()
662 sys.stdout.write(" ".join(map(hex, h)) + "\n")
662 sys.stdout.write(" ".join(map(hex, h)) + "\n")
663
663
664 elif args['cmd'][0] == 'branches':
664 elif args['cmd'][0] == 'branches':
665 httphdr("text/plain")
665 httphdr("application/mercurial-0.1")
666 nodes = []
666 nodes = []
667 if args.has_key('nodes'):
667 if args.has_key('nodes'):
668 nodes = map(bin, args['nodes'][0].split(" "))
668 nodes = map(bin, args['nodes'][0].split(" "))
669 for b in self.repo.branches(nodes):
669 for b in self.repo.branches(nodes):
670 sys.stdout.write(" ".join(map(hex, b)) + "\n")
670 sys.stdout.write(" ".join(map(hex, b)) + "\n")
671
671
672 elif args['cmd'][0] == 'between':
672 elif args['cmd'][0] == 'between':
673 httphdr("text/plain")
673 httphdr("application/hg-0.1")
674 nodes = []
674 nodes = []
675 if args.has_key('pairs'):
675 if args.has_key('pairs'):
676 pairs = [ map(bin, p.split("-"))
676 pairs = [ map(bin, p.split("-"))
677 for p in args['pairs'][0].split(" ") ]
677 for p in args['pairs'][0].split(" ") ]
678 for b in self.repo.between(pairs):
678 for b in self.repo.between(pairs):
679 sys.stdout.write(" ".join(map(hex, b)) + "\n")
679 sys.stdout.write(" ".join(map(hex, b)) + "\n")
680
680
681 elif args['cmd'][0] == 'changegroup':
681 elif args['cmd'][0] == 'changegroup':
682 httphdr("application/hg-changegroup")
682 httphdr("application/mercurial-0.1")
683 nodes = []
683 nodes = []
684 if self.viewonly:
684 if self.viewonly:
685 return
685 return
686
686
687 if args.has_key('roots'):
687 if args.has_key('roots'):
688 nodes = map(bin, args['roots'][0].split(" "))
688 nodes = map(bin, args['roots'][0].split(" "))
689
689
690 z = zlib.compressobj()
690 z = zlib.compressobj()
691 f = self.repo.changegroup(nodes)
691 f = self.repo.changegroup(nodes)
692 while 1:
692 while 1:
693 chunk = f.read(4096)
693 chunk = f.read(4096)
694 if not chunk: break
694 if not chunk: break
695 sys.stdout.write(z.compress(chunk))
695 sys.stdout.write(z.compress(chunk))
696
696
697 sys.stdout.write(z.flush())
697 sys.stdout.write(z.flush())
698
698
699 else:
699 else:
700 write(self.t("error"))
700 write(self.t("error"))
701
701
702 def create_server(path, name, templates, address, port,
702 def create_server(path, name, templates, address, port,
703 accesslog = sys.stdout, errorlog = sys.stderr):
703 accesslog = sys.stdout, errorlog = sys.stderr):
704
704
705 import BaseHTTPServer
705 import BaseHTTPServer
706
706
707 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
707 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
708 def log_error(self, format, *args):
708 def log_error(self, format, *args):
709 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
709 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
710 self.log_date_time_string(),
710 self.log_date_time_string(),
711 format % args))
711 format % args))
712
712
713 def log_message(self, format, *args):
713 def log_message(self, format, *args):
714 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
714 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
715 self.log_date_time_string(),
715 self.log_date_time_string(),
716 format % args))
716 format % args))
717
717
718 def do_POST(self):
718 def do_POST(self):
719 try:
719 try:
720 self.do_hgweb()
720 self.do_hgweb()
721 except socket.error, inst:
721 except socket.error, inst:
722 if inst.args[0] != 32: raise # errno 32 (EPIPE): client closed the connection
722 if inst.args[0] != 32: raise # errno 32 (EPIPE): client closed the connection
723
723
724 def do_GET(self):
724 def do_GET(self):
725 self.do_POST()
725 self.do_POST()
726
726
727 def do_hgweb(self):
727 def do_hgweb(self):
728 query = ""
728 query = ""
729 p = self.path.find("?")
729 p = self.path.find("?")
730 if p >= 0: # str.find returns -1 when there is no query string
730 if p >= 0: # str.find returns -1 when there is no query string
731 query = self.path[p + 1:]
731 query = self.path[p + 1:]
732 query = query.replace('+', ' ')
732 query = query.replace('+', ' ')
733
733
734 env = {}
734 env = {}
735 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
735 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
736 env['REQUEST_METHOD'] = self.command
736 env['REQUEST_METHOD'] = self.command
737 env['SERVER_NAME'] = self.server.server_name
737 env['SERVER_NAME'] = self.server.server_name
738 env['SERVER_PORT'] = str(self.server.server_port)
738 env['SERVER_PORT'] = str(self.server.server_port)
739 env['REQUEST_URI'] = "/"
739 env['REQUEST_URI'] = "/"
740 if query:
740 if query:
741 env['QUERY_STRING'] = query
741 env['QUERY_STRING'] = query
742 host = self.address_string()
742 host = self.address_string()
743 if host != self.client_address[0]:
743 if host != self.client_address[0]:
744 env['REMOTE_HOST'] = host
744 env['REMOTE_HOST'] = host
745 env['REMOTE_ADDR'] = self.client_address[0]
745 env['REMOTE_ADDR'] = self.client_address[0]
746
746
747 if self.headers.typeheader is None:
747 if self.headers.typeheader is None:
748 env['CONTENT_TYPE'] = self.headers.type
748 env['CONTENT_TYPE'] = self.headers.type
749 else:
749 else:
750 env['CONTENT_TYPE'] = self.headers.typeheader
750 env['CONTENT_TYPE'] = self.headers.typeheader
751 length = self.headers.getheader('content-length')
751 length = self.headers.getheader('content-length')
752 if length:
752 if length:
753 env['CONTENT_LENGTH'] = length
753 env['CONTENT_LENGTH'] = length
754 accept = []
754 accept = []
755 for line in self.headers.getallmatchingheaders('accept'):
755 for line in self.headers.getallmatchingheaders('accept'):
756 if line[:1] in "\t\n\r ":
756 if line[:1] in "\t\n\r ":
757 accept.append(line.strip())
757 accept.append(line.strip())
758 else:
758 else:
759 accept = accept + line[7:].split(',')
759 accept = accept + line[7:].split(',')
760 env['HTTP_ACCEPT'] = ','.join(accept)
760 env['HTTP_ACCEPT'] = ','.join(accept)
761
761
762 os.environ.update(env)
762 os.environ.update(env)
763
763
764 save = sys.argv, sys.stdin, sys.stdout, sys.stderr
764 save = sys.argv, sys.stdin, sys.stdout, sys.stderr
765 try:
765 try:
766 sys.stdin = self.rfile
766 sys.stdin = self.rfile
767 sys.stdout = self.wfile
767 sys.stdout = self.wfile
768 sys.argv = ["hgweb.py"]
768 sys.argv = ["hgweb.py"]
769 if '=' not in query:
769 if '=' not in query:
770 sys.argv.append(query)
770 sys.argv.append(query)
771 self.send_response(200, "Script output follows")
771 self.send_response(200, "Script output follows")
772 hg.run()
772 hg.run()
773 finally:
773 finally:
774 sys.argv, sys.stdin, sys.stdout, sys.stderr = save
774 sys.argv, sys.stdin, sys.stdout, sys.stderr = save
775
775
776 hg = hgweb(path, name, templates)
776 hg = hgweb(path, name, templates)
777 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
777 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
778
778
779 def server(path, name, templates, address, port,
779 def server(path, name, templates, address, port,
780 accesslog = sys.stdout, errorlog = sys.stderr):
780 accesslog = sys.stdout, errorlog = sys.stderr):
781 httpd = create_server(path, name, templates, address, port,
781 httpd = create_server(path, name, templates, address, port,
782 accesslog, errorlog)
782 accesslog, errorlog)
783 httpd.serve_forever()
783 httpd.serve_forever()
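As a usage note, here is a minimal sketch of driving the standalone server entry point defined above. The import path, repository location, template directory and port are assumptions chosen for illustration, not values taken from this change:

    from mercurial.hgweb import server   # assumed module path

    # publish /var/hg/myrepo (placeholder) on every interface, port 8000;
    # access and error logs default to stdout/stderr as in server() above
    server("/var/hg/myrepo", "myrepo",
           "/usr/share/mercurial/templates", "", 8000)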