hgweb: use ui:username rather than web:contact...
mpm@selenic.com
r1260:4603eef6 default
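The change below makes hgwebdir resolve a repository's contact from ui.username first, keeping web.contact and web.author only as fallbacks. A minimal sketch of that lookup order, where get is a stand-in for a ui.config(section, key, default)-style call (not the actual Mercurial API), is:

    # sketch only: 'get' stands in for ui.config(section, key, default=None)
    def get_contact(get):
        return (get("ui", "username")             # preferred after this change
                or get("web", "contact")          # deprecated fallback
                or get("web", "author", "unknown"))

    # example with a dict-backed stand-in for ui.config
    def make_get(conf):
        return lambda section, key, default=None: conf.get((section, key), default)

    print(get_contact(make_get({("ui", "username"): "Jane Doe <jane@example.com>"})))
    # -> Jane Doe <jane@example.com>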
@@ -1,986 +1,987 b''
1 # hgweb.py - web interface to a mercurial repository
1 # hgweb.py - web interface to a mercurial repository
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, cgi, sys
9 import os, cgi, sys
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer")
13 from node import *
13 from node import *
14
14
15 def templatepath():
15 def templatepath():
16 for f in "templates", "../templates":
16 for f in "templates", "../templates":
17 p = os.path.join(os.path.dirname(__file__), f)
17 p = os.path.join(os.path.dirname(__file__), f)
18 if os.path.isdir(p):
18 if os.path.isdir(p):
19 return p
19 return p
20
20
21 def age(t):
21 def age(t):
22 def plural(t, c):
22 def plural(t, c):
23 if c == 1:
23 if c == 1:
24 return t
24 return t
25 return t + "s"
25 return t + "s"
26 def fmt(t, c):
26 def fmt(t, c):
27 return "%d %s" % (c, plural(t, c))
27 return "%d %s" % (c, plural(t, c))
28
28
29 now = time.time()
29 now = time.time()
30 delta = max(1, int(now - t))
30 delta = max(1, int(now - t))
31
31
32 scales = [["second", 1],
32 scales = [["second", 1],
33 ["minute", 60],
33 ["minute", 60],
34 ["hour", 3600],
34 ["hour", 3600],
35 ["day", 3600 * 24],
35 ["day", 3600 * 24],
36 ["week", 3600 * 24 * 7],
36 ["week", 3600 * 24 * 7],
37 ["month", 3600 * 24 * 30],
37 ["month", 3600 * 24 * 30],
38 ["year", 3600 * 24 * 365]]
38 ["year", 3600 * 24 * 365]]
39
39
40 scales.reverse()
40 scales.reverse()
41
41
42 for t, s in scales:
42 for t, s in scales:
43 n = delta / s
43 n = delta / s
44 if n >= 2 or s == 1:
44 if n >= 2 or s == 1:
45 return fmt(t, n)
45 return fmt(t, n)
46
46
47 def nl2br(text):
47 def nl2br(text):
48 return text.replace('\n', '<br/>\n')
48 return text.replace('\n', '<br/>\n')
49
49
50 def obfuscate(text):
50 def obfuscate(text):
51 return ''.join(['&#%d;' % ord(c) for c in text])
51 return ''.join(['&#%d;' % ord(c) for c in text])
52
52
53 def up(p):
53 def up(p):
54 if p[0] != "/":
54 if p[0] != "/":
55 p = "/" + p
55 p = "/" + p
56 if p[-1] == "/":
56 if p[-1] == "/":
57 p = p[:-1]
57 p = p[:-1]
58 up = os.path.dirname(p)
58 up = os.path.dirname(p)
59 if up == "/":
59 if up == "/":
60 return "/"
60 return "/"
61 return up + "/"
61 return up + "/"
62
62
63 class hgrequest:
63 class hgrequest:
64 def __init__(self, inp=None, out=None, env=None):
64 def __init__(self, inp=None, out=None, env=None):
65 self.inp = inp or sys.stdin
65 self.inp = inp or sys.stdin
66 self.out = out or sys.stdout
66 self.out = out or sys.stdout
67 self.env = env or os.environ
67 self.env = env or os.environ
68 self.form = cgi.parse(self.inp, self.env)
68 self.form = cgi.parse(self.inp, self.env)
69
69
70 def write(self, *things):
70 def write(self, *things):
71 for thing in things:
71 for thing in things:
72 if hasattr(thing, "__iter__"):
72 if hasattr(thing, "__iter__"):
73 for part in thing:
73 for part in thing:
74 self.write(part)
74 self.write(part)
75 else:
75 else:
76 try:
76 try:
77 self.out.write(str(thing))
77 self.out.write(str(thing))
78 except socket.error, inst:
78 except socket.error, inst:
79 if inst[0] != errno.ECONNRESET:
79 if inst[0] != errno.ECONNRESET:
80 raise
80 raise
81
81
82 def header(self, headers=[('Content-type','text/html')]):
82 def header(self, headers=[('Content-type','text/html')]):
83 for header in headers:
83 for header in headers:
84 self.out.write("%s: %s\r\n" % header)
84 self.out.write("%s: %s\r\n" % header)
85 self.out.write("\r\n")
85 self.out.write("\r\n")
86
86
87 def httphdr(self, type, file="", size=0):
87 def httphdr(self, type, file="", size=0):
88
88
89 headers = [('Content-type', type)]
89 headers = [('Content-type', type)]
90 if file:
90 if file:
91 headers.append(('Content-disposition', 'attachment; filename=%s' % file))
91 headers.append(('Content-disposition', 'attachment; filename=%s' % file))
92 if size > 0:
92 if size > 0:
93 headers.append(('Content-length', str(size)))
93 headers.append(('Content-length', str(size)))
94 self.header(headers)
94 self.header(headers)
95
95
96 class templater:
96 class templater:
97 def __init__(self, mapfile, filters={}, defaults={}):
97 def __init__(self, mapfile, filters={}, defaults={}):
98 self.cache = {}
98 self.cache = {}
99 self.map = {}
99 self.map = {}
100 self.base = os.path.dirname(mapfile)
100 self.base = os.path.dirname(mapfile)
101 self.filters = filters
101 self.filters = filters
102 self.defaults = defaults
102 self.defaults = defaults
103
103
104 for l in file(mapfile):
104 for l in file(mapfile):
105 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
105 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
106 if m:
106 if m:
107 self.cache[m.group(1)] = m.group(2)
107 self.cache[m.group(1)] = m.group(2)
108 else:
108 else:
109 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
109 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
110 if m:
110 if m:
111 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
111 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
112 else:
112 else:
113 raise LookupError("unknown map entry '%s'" % l)
113 raise LookupError("unknown map entry '%s'" % l)
114
114
115 def __call__(self, t, **map):
115 def __call__(self, t, **map):
116 m = self.defaults.copy()
116 m = self.defaults.copy()
117 m.update(map)
117 m.update(map)
118 try:
118 try:
119 tmpl = self.cache[t]
119 tmpl = self.cache[t]
120 except KeyError:
120 except KeyError:
121 tmpl = self.cache[t] = file(self.map[t]).read()
121 tmpl = self.cache[t] = file(self.map[t]).read()
122 return self.template(tmpl, self.filters, **m)
122 return self.template(tmpl, self.filters, **m)
123
123
124 def template(self, tmpl, filters={}, **map):
124 def template(self, tmpl, filters={}, **map):
125 while tmpl:
125 while tmpl:
126 m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
126 m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
127 if m:
127 if m:
128 yield tmpl[:m.start(0)]
128 yield tmpl[:m.start(0)]
129 v = map.get(m.group(1), "")
129 v = map.get(m.group(1), "")
130 v = callable(v) and v(**map) or v
130 v = callable(v) and v(**map) or v
131
131
132 format = m.group(2)
132 format = m.group(2)
133 fl = m.group(4)
133 fl = m.group(4)
134
134
135 if format:
135 if format:
136 q = v.__iter__
136 q = v.__iter__
137 for i in q():
137 for i in q():
138 lm = map.copy()
138 lm = map.copy()
139 lm.update(i)
139 lm.update(i)
140 yield self(format[1:], **lm)
140 yield self(format[1:], **lm)
141
141
142 v = ""
142 v = ""
143
143
144 elif fl:
144 elif fl:
145 for f in fl.split("|")[1:]:
145 for f in fl.split("|")[1:]:
146 v = filters[f](v)
146 v = filters[f](v)
147
147
148 yield v
148 yield v
149 tmpl = tmpl[m.end(0):]
149 tmpl = tmpl[m.end(0):]
150 else:
150 else:
151 yield tmpl
151 yield tmpl
152 return
152 return
153
153
154 def rfc822date(x):
154 def rfc822date(x):
155 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(x))
155 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(x))
156
156
157 common_filters = {
157 common_filters = {
158 "escape": cgi.escape,
158 "escape": cgi.escape,
159 "age": age,
159 "age": age,
160 "date": (lambda x: time.asctime(time.gmtime(x))),
160 "date": (lambda x: time.asctime(time.gmtime(x))),
161 "addbreaks": nl2br,
161 "addbreaks": nl2br,
162 "obfuscate": obfuscate,
162 "obfuscate": obfuscate,
163 "short": (lambda x: x[:12]),
163 "short": (lambda x: x[:12]),
164 "firstline": (lambda x: x.splitlines(1)[0]),
164 "firstline": (lambda x: x.splitlines(1)[0]),
165 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
165 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
166 "rfc822date": rfc822date,
166 "rfc822date": rfc822date,
167 }
167 }
168
168
169
169
170
170
171 class hgweb:
171 class hgweb:
172 def __init__(self, repo, name=None):
172 def __init__(self, repo, name=None):
173 if type(repo) == type(""):
173 if type(repo) == type(""):
174 self.repo = hg.repository(ui.ui(), repo)
174 self.repo = hg.repository(ui.ui(), repo)
175 else:
175 else:
176 self.repo = repo
176 self.repo = repo
177
177
178 self.mtime = -1
178 self.mtime = -1
179 self.reponame = name
179 self.reponame = name
180 self.archives = 'zip', 'gz', 'bz2'
180 self.archives = 'zip', 'gz', 'bz2'
181
181
182 def refresh(self):
182 def refresh(self):
183 s = os.stat(os.path.join(self.repo.root, ".hg", "00changelog.i"))
183 s = os.stat(os.path.join(self.repo.root, ".hg", "00changelog.i"))
184 if s.st_mtime != self.mtime:
184 if s.st_mtime != self.mtime:
185 self.mtime = s.st_mtime
185 self.mtime = s.st_mtime
186 self.repo = hg.repository(self.repo.ui, self.repo.root)
186 self.repo = hg.repository(self.repo.ui, self.repo.root)
187 self.maxchanges = self.repo.ui.config("web", "maxchanges", 10)
187 self.maxchanges = self.repo.ui.config("web", "maxchanges", 10)
188 self.maxfiles = self.repo.ui.config("web", "maxchanges", 10)
188 self.maxfiles = self.repo.ui.config("web", "maxchanges", 10)
189 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
189 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
190
190
191 def date(self, cs):
191 def date(self, cs):
192 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
192 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
193
193
194 def listfiles(self, files, mf):
194 def listfiles(self, files, mf):
195 for f in files[:self.maxfiles]:
195 for f in files[:self.maxfiles]:
196 yield self.t("filenodelink", node=hex(mf[f]), file=f)
196 yield self.t("filenodelink", node=hex(mf[f]), file=f)
197 if len(files) > self.maxfiles:
197 if len(files) > self.maxfiles:
198 yield self.t("fileellipses")
198 yield self.t("fileellipses")
199
199
200 def listfilediffs(self, files, changeset):
200 def listfilediffs(self, files, changeset):
201 for f in files[:self.maxfiles]:
201 for f in files[:self.maxfiles]:
202 yield self.t("filedifflink", node=hex(changeset), file=f)
202 yield self.t("filedifflink", node=hex(changeset), file=f)
203 if len(files) > self.maxfiles:
203 if len(files) > self.maxfiles:
204 yield self.t("fileellipses")
204 yield self.t("fileellipses")
205
205
206 def parents(self, t1, nodes=[], rev=None,**args):
206 def parents(self, t1, nodes=[], rev=None,**args):
207 if not rev:
207 if not rev:
208 rev = lambda x: ""
208 rev = lambda x: ""
209 for node in nodes:
209 for node in nodes:
210 if node != nullid:
210 if node != nullid:
211 yield self.t(t1, node=hex(node), rev=rev(node), **args)
211 yield self.t(t1, node=hex(node), rev=rev(node), **args)
212
212
213 def showtag(self, t1, node=nullid, **args):
213 def showtag(self, t1, node=nullid, **args):
214 for t in self.repo.nodetags(node):
214 for t in self.repo.nodetags(node):
215 yield self.t(t1, tag=t, **args)
215 yield self.t(t1, tag=t, **args)
216
216
217 def diff(self, node1, node2, files):
217 def diff(self, node1, node2, files):
218 def filterfiles(list, files):
218 def filterfiles(list, files):
219 l = [x for x in list if x in files]
219 l = [x for x in list if x in files]
220
220
221 for f in files:
221 for f in files:
222 if f[-1] != os.sep:
222 if f[-1] != os.sep:
223 f += os.sep
223 f += os.sep
224 l += [x for x in list if x.startswith(f)]
224 l += [x for x in list if x.startswith(f)]
225 return l
225 return l
226
226
227 parity = [0]
227 parity = [0]
228 def diffblock(diff, f, fn):
228 def diffblock(diff, f, fn):
229 yield self.t("diffblock",
229 yield self.t("diffblock",
230 lines=prettyprintlines(diff),
230 lines=prettyprintlines(diff),
231 parity=parity[0],
231 parity=parity[0],
232 file=f,
232 file=f,
233 filenode=hex(fn or nullid))
233 filenode=hex(fn or nullid))
234 parity[0] = 1 - parity[0]
234 parity[0] = 1 - parity[0]
235
235
236 def prettyprintlines(diff):
236 def prettyprintlines(diff):
237 for l in diff.splitlines(1):
237 for l in diff.splitlines(1):
238 if l.startswith('+'):
238 if l.startswith('+'):
239 yield self.t("difflineplus", line=l)
239 yield self.t("difflineplus", line=l)
240 elif l.startswith('-'):
240 elif l.startswith('-'):
241 yield self.t("difflineminus", line=l)
241 yield self.t("difflineminus", line=l)
242 elif l.startswith('@'):
242 elif l.startswith('@'):
243 yield self.t("difflineat", line=l)
243 yield self.t("difflineat", line=l)
244 else:
244 else:
245 yield self.t("diffline", line=l)
245 yield self.t("diffline", line=l)
246
246
247 r = self.repo
247 r = self.repo
248 cl = r.changelog
248 cl = r.changelog
249 mf = r.manifest
249 mf = r.manifest
250 change1 = cl.read(node1)
250 change1 = cl.read(node1)
251 change2 = cl.read(node2)
251 change2 = cl.read(node2)
252 mmap1 = mf.read(change1[0])
252 mmap1 = mf.read(change1[0])
253 mmap2 = mf.read(change2[0])
253 mmap2 = mf.read(change2[0])
254 date1 = self.date(change1)
254 date1 = self.date(change1)
255 date2 = self.date(change2)
255 date2 = self.date(change2)
256
256
257 c, a, d, u = r.changes(node1, node2)
257 c, a, d, u = r.changes(node1, node2)
258 if files:
258 if files:
259 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
259 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
260
260
261 for f in c:
261 for f in c:
262 to = r.file(f).read(mmap1[f])
262 to = r.file(f).read(mmap1[f])
263 tn = r.file(f).read(mmap2[f])
263 tn = r.file(f).read(mmap2[f])
264 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
264 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
265 for f in a:
265 for f in a:
266 to = None
266 to = None
267 tn = r.file(f).read(mmap2[f])
267 tn = r.file(f).read(mmap2[f])
268 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
268 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
269 for f in d:
269 for f in d:
270 to = r.file(f).read(mmap1[f])
270 to = r.file(f).read(mmap1[f])
271 tn = None
271 tn = None
272 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
272 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
273
273
274 def changelog(self, pos):
274 def changelog(self, pos):
275 def changenav(**map):
275 def changenav(**map):
276 def seq(factor=1):
276 def seq(factor=1):
277 yield 1 * factor
277 yield 1 * factor
278 yield 3 * factor
278 yield 3 * factor
279 #yield 5 * factor
279 #yield 5 * factor
280 for f in seq(factor * 10):
280 for f in seq(factor * 10):
281 yield f
281 yield f
282
282
283 l = []
283 l = []
284 for f in seq():
284 for f in seq():
285 if f < self.maxchanges / 2:
285 if f < self.maxchanges / 2:
286 continue
286 continue
287 if f > count:
287 if f > count:
288 break
288 break
289 r = "%d" % f
289 r = "%d" % f
290 if pos + f < count:
290 if pos + f < count:
291 l.append(("+" + r, pos + f))
291 l.append(("+" + r, pos + f))
292 if pos - f >= 0:
292 if pos - f >= 0:
293 l.insert(0, ("-" + r, pos - f))
293 l.insert(0, ("-" + r, pos - f))
294
294
295 yield {"rev": 0, "label": "(0)"}
295 yield {"rev": 0, "label": "(0)"}
296
296
297 for label, rev in l:
297 for label, rev in l:
298 yield {"label": label, "rev": rev}
298 yield {"label": label, "rev": rev}
299
299
300 yield {"label": "tip", "rev": ""}
300 yield {"label": "tip", "rev": ""}
301
301
302 def changelist(**map):
302 def changelist(**map):
303 parity = (start - end) & 1
303 parity = (start - end) & 1
304 cl = self.repo.changelog
304 cl = self.repo.changelog
305 l = [] # build a list in forward order for efficiency
305 l = [] # build a list in forward order for efficiency
306 for i in range(start, end):
306 for i in range(start, end):
307 n = cl.node(i)
307 n = cl.node(i)
308 changes = cl.read(n)
308 changes = cl.read(n)
309 hn = hex(n)
309 hn = hex(n)
310 t = float(changes[2].split(' ')[0])
310 t = float(changes[2].split(' ')[0])
311
311
312 l.insert(0, {"parity": parity,
312 l.insert(0, {"parity": parity,
313 "author": changes[1],
313 "author": changes[1],
314 "parent": self.parents("changelogparent",
314 "parent": self.parents("changelogparent",
315 cl.parents(n), cl.rev),
315 cl.parents(n), cl.rev),
316 "changelogtag": self.showtag("changelogtag",n),
316 "changelogtag": self.showtag("changelogtag",n),
317 "manifest": hex(changes[0]),
317 "manifest": hex(changes[0]),
318 "desc": changes[4],
318 "desc": changes[4],
319 "date": t,
319 "date": t,
320 "files": self.listfilediffs(changes[3], n),
320 "files": self.listfilediffs(changes[3], n),
321 "rev": i,
321 "rev": i,
322 "node": hn})
322 "node": hn})
323 parity = 1 - parity
323 parity = 1 - parity
324
324
325 for e in l:
325 for e in l:
326 yield e
326 yield e
327
327
328 cl = self.repo.changelog
328 cl = self.repo.changelog
329 mf = cl.read(cl.tip())[0]
329 mf = cl.read(cl.tip())[0]
330 count = cl.count()
330 count = cl.count()
331 start = max(0, pos - self.maxchanges + 1)
331 start = max(0, pos - self.maxchanges + 1)
332 end = min(count, start + self.maxchanges)
332 end = min(count, start + self.maxchanges)
333 pos = end - 1
333 pos = end - 1
334
334
335 yield self.t('changelog',
335 yield self.t('changelog',
336 changenav=changenav,
336 changenav=changenav,
337 manifest=hex(mf),
337 manifest=hex(mf),
338 rev=pos, changesets=count, entries=changelist)
338 rev=pos, changesets=count, entries=changelist)
339
339
340 def search(self, query):
340 def search(self, query):
341
341
342 def changelist(**map):
342 def changelist(**map):
343 cl = self.repo.changelog
343 cl = self.repo.changelog
344 count = 0
344 count = 0
345 qw = query.lower().split()
345 qw = query.lower().split()
346
346
347 def revgen():
347 def revgen():
348 for i in range(cl.count() - 1, 0, -100):
348 for i in range(cl.count() - 1, 0, -100):
349 l = []
349 l = []
350 for j in range(max(0, i - 100), i):
350 for j in range(max(0, i - 100), i):
351 n = cl.node(j)
351 n = cl.node(j)
352 changes = cl.read(n)
352 changes = cl.read(n)
353 l.append((n, j, changes))
353 l.append((n, j, changes))
354 l.reverse()
354 l.reverse()
355 for e in l:
355 for e in l:
356 yield e
356 yield e
357
357
358 for n, i, changes in revgen():
358 for n, i, changes in revgen():
359 miss = 0
359 miss = 0
360 for q in qw:
360 for q in qw:
361 if not (q in changes[1].lower() or
361 if not (q in changes[1].lower() or
362 q in changes[4].lower() or
362 q in changes[4].lower() or
363 q in " ".join(changes[3][:20]).lower()):
363 q in " ".join(changes[3][:20]).lower()):
364 miss = 1
364 miss = 1
365 break
365 break
366 if miss:
366 if miss:
367 continue
367 continue
368
368
369 count += 1
369 count += 1
370 hn = hex(n)
370 hn = hex(n)
371 t = float(changes[2].split(' ')[0])
371 t = float(changes[2].split(' ')[0])
372
372
373 yield self.t('searchentry',
373 yield self.t('searchentry',
374 parity=count & 1,
374 parity=count & 1,
375 author=changes[1],
375 author=changes[1],
376 parent=self.parents("changelogparent",
376 parent=self.parents("changelogparent",
377 cl.parents(n), cl.rev),
377 cl.parents(n), cl.rev),
378 changelogtag=self.showtag("changelogtag",n),
378 changelogtag=self.showtag("changelogtag",n),
379 manifest=hex(changes[0]),
379 manifest=hex(changes[0]),
380 desc=changes[4],
380 desc=changes[4],
381 date=t,
381 date=t,
382 files=self.listfilediffs(changes[3], n),
382 files=self.listfilediffs(changes[3], n),
383 rev=i,
383 rev=i,
384 node=hn)
384 node=hn)
385
385
386 if count >= self.maxchanges:
386 if count >= self.maxchanges:
387 break
387 break
388
388
389 cl = self.repo.changelog
389 cl = self.repo.changelog
390 mf = cl.read(cl.tip())[0]
390 mf = cl.read(cl.tip())[0]
391
391
392 yield self.t('search',
392 yield self.t('search',
393 query=query,
393 query=query,
394 manifest=hex(mf),
394 manifest=hex(mf),
395 entries=changelist)
395 entries=changelist)
396
396
397 def changeset(self, nodeid):
397 def changeset(self, nodeid):
398 n = bin(nodeid)
398 n = bin(nodeid)
399 cl = self.repo.changelog
399 cl = self.repo.changelog
400 changes = cl.read(n)
400 changes = cl.read(n)
401 p1 = cl.parents(n)[0]
401 p1 = cl.parents(n)[0]
402 t = float(changes[2].split(' ')[0])
402 t = float(changes[2].split(' ')[0])
403
403
404 files = []
404 files = []
405 mf = self.repo.manifest.read(changes[0])
405 mf = self.repo.manifest.read(changes[0])
406 for f in changes[3]:
406 for f in changes[3]:
407 files.append(self.t("filenodelink",
407 files.append(self.t("filenodelink",
408 filenode=hex(mf.get(f, nullid)), file=f))
408 filenode=hex(mf.get(f, nullid)), file=f))
409
409
410 def diff(**map):
410 def diff(**map):
411 yield self.diff(p1, n, None)
411 yield self.diff(p1, n, None)
412
412
413 def archivelist():
413 def archivelist():
414 for i in self.archives:
414 for i in self.archives:
415 if self.repo.ui.configbool("web", "allow" + i, False):
415 if self.repo.ui.configbool("web", "allow" + i, False):
416 yield {"type" : i, "node" : nodeid}
416 yield {"type" : i, "node" : nodeid}
417
417
418 yield self.t('changeset',
418 yield self.t('changeset',
419 diff=diff,
419 diff=diff,
420 rev=cl.rev(n),
420 rev=cl.rev(n),
421 node=nodeid,
421 node=nodeid,
422 parent=self.parents("changesetparent",
422 parent=self.parents("changesetparent",
423 cl.parents(n), cl.rev),
423 cl.parents(n), cl.rev),
424 changesettag=self.showtag("changesettag",n),
424 changesettag=self.showtag("changesettag",n),
425 manifest=hex(changes[0]),
425 manifest=hex(changes[0]),
426 author=changes[1],
426 author=changes[1],
427 desc=changes[4],
427 desc=changes[4],
428 date=t,
428 date=t,
429 files=files,
429 files=files,
430 archives=archivelist())
430 archives=archivelist())
431
431
432 def filelog(self, f, filenode):
432 def filelog(self, f, filenode):
433 cl = self.repo.changelog
433 cl = self.repo.changelog
434 fl = self.repo.file(f)
434 fl = self.repo.file(f)
435 count = fl.count()
435 count = fl.count()
436
436
437 def entries(**map):
437 def entries(**map):
438 l = []
438 l = []
439 parity = (count - 1) & 1
439 parity = (count - 1) & 1
440
440
441 for i in range(count):
441 for i in range(count):
442 n = fl.node(i)
442 n = fl.node(i)
443 lr = fl.linkrev(n)
443 lr = fl.linkrev(n)
444 cn = cl.node(lr)
444 cn = cl.node(lr)
445 cs = cl.read(cl.node(lr))
445 cs = cl.read(cl.node(lr))
446 t = float(cs[2].split(' ')[0])
446 t = float(cs[2].split(' ')[0])
447
447
448 l.insert(0, {"parity": parity,
448 l.insert(0, {"parity": parity,
449 "filenode": hex(n),
449 "filenode": hex(n),
450 "filerev": i,
450 "filerev": i,
451 "file": f,
451 "file": f,
452 "node": hex(cn),
452 "node": hex(cn),
453 "author": cs[1],
453 "author": cs[1],
454 "date": t,
454 "date": t,
455 "parent": self.parents("filelogparent",
455 "parent": self.parents("filelogparent",
456 fl.parents(n),
456 fl.parents(n),
457 fl.rev, file=f),
457 fl.rev, file=f),
458 "desc": cs[4]})
458 "desc": cs[4]})
459 parity = 1 - parity
459 parity = 1 - parity
460
460
461 for e in l:
461 for e in l:
462 yield e
462 yield e
463
463
464 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
464 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
465
465
466 def filerevision(self, f, node):
466 def filerevision(self, f, node):
467 fl = self.repo.file(f)
467 fl = self.repo.file(f)
468 n = bin(node)
468 n = bin(node)
469 text = fl.read(n)
469 text = fl.read(n)
470 changerev = fl.linkrev(n)
470 changerev = fl.linkrev(n)
471 cl = self.repo.changelog
471 cl = self.repo.changelog
472 cn = cl.node(changerev)
472 cn = cl.node(changerev)
473 cs = cl.read(cn)
473 cs = cl.read(cn)
474 t = float(cs[2].split(' ')[0])
474 t = float(cs[2].split(' ')[0])
475 mfn = cs[0]
475 mfn = cs[0]
476
476
477 def lines():
477 def lines():
478 for l, t in enumerate(text.splitlines(1)):
478 for l, t in enumerate(text.splitlines(1)):
479 yield {"line": t,
479 yield {"line": t,
480 "linenumber": "% 6d" % (l + 1),
480 "linenumber": "% 6d" % (l + 1),
481 "parity": l & 1}
481 "parity": l & 1}
482
482
483 yield self.t("filerevision",
483 yield self.t("filerevision",
484 file=f,
484 file=f,
485 filenode=node,
485 filenode=node,
486 path=up(f),
486 path=up(f),
487 text=lines(),
487 text=lines(),
488 rev=changerev,
488 rev=changerev,
489 node=hex(cn),
489 node=hex(cn),
490 manifest=hex(mfn),
490 manifest=hex(mfn),
491 author=cs[1],
491 author=cs[1],
492 date=t,
492 date=t,
493 parent=self.parents("filerevparent",
493 parent=self.parents("filerevparent",
494 fl.parents(n), fl.rev, file=f),
494 fl.parents(n), fl.rev, file=f),
495 permissions=self.repo.manifest.readflags(mfn)[f])
495 permissions=self.repo.manifest.readflags(mfn)[f])
496
496
497 def fileannotate(self, f, node):
497 def fileannotate(self, f, node):
498 bcache = {}
498 bcache = {}
499 ncache = {}
499 ncache = {}
500 fl = self.repo.file(f)
500 fl = self.repo.file(f)
501 n = bin(node)
501 n = bin(node)
502 changerev = fl.linkrev(n)
502 changerev = fl.linkrev(n)
503
503
504 cl = self.repo.changelog
504 cl = self.repo.changelog
505 cn = cl.node(changerev)
505 cn = cl.node(changerev)
506 cs = cl.read(cn)
506 cs = cl.read(cn)
507 t = float(cs[2].split(' ')[0])
507 t = float(cs[2].split(' ')[0])
508 mfn = cs[0]
508 mfn = cs[0]
509
509
510 def annotate(**map):
510 def annotate(**map):
511 parity = 1
511 parity = 1
512 last = None
512 last = None
513 for r, l in fl.annotate(n):
513 for r, l in fl.annotate(n):
514 try:
514 try:
515 cnode = ncache[r]
515 cnode = ncache[r]
516 except KeyError:
516 except KeyError:
517 cnode = ncache[r] = self.repo.changelog.node(r)
517 cnode = ncache[r] = self.repo.changelog.node(r)
518
518
519 try:
519 try:
520 name = bcache[r]
520 name = bcache[r]
521 except KeyError:
521 except KeyError:
522 cl = self.repo.changelog.read(cnode)
522 cl = self.repo.changelog.read(cnode)
523 bcache[r] = name = self.repo.ui.shortuser(cl[1])
523 bcache[r] = name = self.repo.ui.shortuser(cl[1])
524
524
525 if last != cnode:
525 if last != cnode:
526 parity = 1 - parity
526 parity = 1 - parity
527 last = cnode
527 last = cnode
528
528
529 yield {"parity": parity,
529 yield {"parity": parity,
530 "node": hex(cnode),
530 "node": hex(cnode),
531 "rev": r,
531 "rev": r,
532 "author": name,
532 "author": name,
533 "file": f,
533 "file": f,
534 "line": l}
534 "line": l}
535
535
536 yield self.t("fileannotate",
536 yield self.t("fileannotate",
537 file=f,
537 file=f,
538 filenode=node,
538 filenode=node,
539 annotate=annotate,
539 annotate=annotate,
540 path=up(f),
540 path=up(f),
541 rev=changerev,
541 rev=changerev,
542 node=hex(cn),
542 node=hex(cn),
543 manifest=hex(mfn),
543 manifest=hex(mfn),
544 author=cs[1],
544 author=cs[1],
545 date=t,
545 date=t,
546 parent=self.parents("fileannotateparent",
546 parent=self.parents("fileannotateparent",
547 fl.parents(n), fl.rev, file=f),
547 fl.parents(n), fl.rev, file=f),
548 permissions=self.repo.manifest.readflags(mfn)[f])
548 permissions=self.repo.manifest.readflags(mfn)[f])
549
549
550 def manifest(self, mnode, path):
550 def manifest(self, mnode, path):
551 mf = self.repo.manifest.read(bin(mnode))
551 mf = self.repo.manifest.read(bin(mnode))
552 rev = self.repo.manifest.rev(bin(mnode))
552 rev = self.repo.manifest.rev(bin(mnode))
553 node = self.repo.changelog.node(rev)
553 node = self.repo.changelog.node(rev)
554 mff=self.repo.manifest.readflags(bin(mnode))
554 mff=self.repo.manifest.readflags(bin(mnode))
555
555
556 files = {}
556 files = {}
557
557
558 p = path[1:]
558 p = path[1:]
559 l = len(p)
559 l = len(p)
560
560
561 for f,n in mf.items():
561 for f,n in mf.items():
562 if f[:l] != p:
562 if f[:l] != p:
563 continue
563 continue
564 remain = f[l:]
564 remain = f[l:]
565 if "/" in remain:
565 if "/" in remain:
566 short = remain[:remain.find("/") + 1] # bleah
566 short = remain[:remain.find("/") + 1] # bleah
567 files[short] = (f, None)
567 files[short] = (f, None)
568 else:
568 else:
569 short = os.path.basename(remain)
569 short = os.path.basename(remain)
570 files[short] = (f, n)
570 files[short] = (f, n)
571
571
572 def filelist(**map):
572 def filelist(**map):
573 parity = 0
573 parity = 0
574 fl = files.keys()
574 fl = files.keys()
575 fl.sort()
575 fl.sort()
576 for f in fl:
576 for f in fl:
577 full, fnode = files[f]
577 full, fnode = files[f]
578 if not fnode:
578 if not fnode:
579 continue
579 continue
580
580
581 yield {"file": full,
581 yield {"file": full,
582 "manifest": mnode,
582 "manifest": mnode,
583 "filenode": hex(fnode),
583 "filenode": hex(fnode),
584 "parity": parity,
584 "parity": parity,
585 "basename": f,
585 "basename": f,
586 "permissions": mff[full]}
586 "permissions": mff[full]}
587 parity = 1 - parity
587 parity = 1 - parity
588
588
589 def dirlist(**map):
589 def dirlist(**map):
590 parity = 0
590 parity = 0
591 fl = files.keys()
591 fl = files.keys()
592 fl.sort()
592 fl.sort()
593 for f in fl:
593 for f in fl:
594 full, fnode = files[f]
594 full, fnode = files[f]
595 if fnode:
595 if fnode:
596 continue
596 continue
597
597
598 yield {"parity": parity,
598 yield {"parity": parity,
599 "path": os.path.join(path, f),
599 "path": os.path.join(path, f),
600 "manifest": mnode,
600 "manifest": mnode,
601 "basename": f[:-1]}
601 "basename": f[:-1]}
602 parity = 1 - parity
602 parity = 1 - parity
603
603
604 yield self.t("manifest",
604 yield self.t("manifest",
605 manifest=mnode,
605 manifest=mnode,
606 rev=rev,
606 rev=rev,
607 node=hex(node),
607 node=hex(node),
608 path=path,
608 path=path,
609 up=up(path),
609 up=up(path),
610 fentries=filelist,
610 fentries=filelist,
611 dentries=dirlist)
611 dentries=dirlist)
612
612
613 def tags(self):
613 def tags(self):
614 cl = self.repo.changelog
614 cl = self.repo.changelog
615 mf = cl.read(cl.tip())[0]
615 mf = cl.read(cl.tip())[0]
616
616
617 i = self.repo.tagslist()
617 i = self.repo.tagslist()
618 i.reverse()
618 i.reverse()
619
619
620 def entries(**map):
620 def entries(**map):
621 parity = 0
621 parity = 0
622 for k,n in i:
622 for k,n in i:
623 yield {"parity": parity,
623 yield {"parity": parity,
624 "tag": k,
624 "tag": k,
625 "node": hex(n)}
625 "node": hex(n)}
626 parity = 1 - parity
626 parity = 1 - parity
627
627
628 yield self.t("tags",
628 yield self.t("tags",
629 manifest=hex(mf),
629 manifest=hex(mf),
630 entries=entries)
630 entries=entries)
631
631
632 def filediff(self, file, changeset):
632 def filediff(self, file, changeset):
633 n = bin(changeset)
633 n = bin(changeset)
634 cl = self.repo.changelog
634 cl = self.repo.changelog
635 p1 = cl.parents(n)[0]
635 p1 = cl.parents(n)[0]
636 cs = cl.read(n)
636 cs = cl.read(n)
637 mf = self.repo.manifest.read(cs[0])
637 mf = self.repo.manifest.read(cs[0])
638
638
639 def diff(**map):
639 def diff(**map):
640 yield self.diff(p1, n, file)
640 yield self.diff(p1, n, file)
641
641
642 yield self.t("filediff",
642 yield self.t("filediff",
643 file=file,
643 file=file,
644 filenode=hex(mf.get(file, nullid)),
644 filenode=hex(mf.get(file, nullid)),
645 node=changeset,
645 node=changeset,
646 rev=self.repo.changelog.rev(n),
646 rev=self.repo.changelog.rev(n),
647 parent=self.parents("filediffparent",
647 parent=self.parents("filediffparent",
648 cl.parents(n), cl.rev),
648 cl.parents(n), cl.rev),
649 diff=diff)
649 diff=diff)
650
650
651 def archive(self, req, cnode, type):
651 def archive(self, req, cnode, type):
652 cs = self.repo.changelog.read(cnode)
652 cs = self.repo.changelog.read(cnode)
653 mnode = cs[0]
653 mnode = cs[0]
654 mf = self.repo.manifest.read(mnode)
654 mf = self.repo.manifest.read(mnode)
655 rev = self.repo.manifest.rev(mnode)
655 rev = self.repo.manifest.rev(mnode)
656 reponame = re.sub(r"\W+", "-", self.reponame)
656 reponame = re.sub(r"\W+", "-", self.reponame)
657 name = "%s-%s/" % (reponame, short(cnode))
657 name = "%s-%s/" % (reponame, short(cnode))
658
658
659 files = mf.keys()
659 files = mf.keys()
660 files.sort()
660 files.sort()
661
661
662 if type == 'zip':
662 if type == 'zip':
663 tmp = tempfile.mkstemp()[1]
663 tmp = tempfile.mkstemp()[1]
664 try:
664 try:
665 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
665 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
666
666
667 for f in files:
667 for f in files:
668 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
668 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
669 zf.close()
669 zf.close()
670
670
671 f = open(tmp, 'r')
671 f = open(tmp, 'r')
672 req.httphdr('application/zip', name[:-1] + '.zip',
672 req.httphdr('application/zip', name[:-1] + '.zip',
673 os.path.getsize(tmp))
673 os.path.getsize(tmp))
674 req.write(f.read())
674 req.write(f.read())
675 f.close()
675 f.close()
676 finally:
676 finally:
677 os.unlink(tmp)
677 os.unlink(tmp)
678
678
679 else:
679 else:
680 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
680 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
681 mff = self.repo.manifest.readflags(mnode)
681 mff = self.repo.manifest.readflags(mnode)
682 mtime = int(time.time())
682 mtime = int(time.time())
683
683
684 if type == "gz":
684 if type == "gz":
685 encoding = "gzip"
685 encoding = "gzip"
686 else:
686 else:
687 encoding = "x-bzip2"
687 encoding = "x-bzip2"
688 req.header([('Content-type', 'application/x-tar'),
688 req.header([('Content-type', 'application/x-tar'),
689 ('Content-disposition', 'attachment; filename=%s%s%s' %
689 ('Content-disposition', 'attachment; filename=%s%s%s' %
690 (name[:-1], '.tar.', type)),
690 (name[:-1], '.tar.', type)),
691 ('Content-encoding', encoding)])
691 ('Content-encoding', encoding)])
692 for fname in files:
692 for fname in files:
693 rcont = self.repo.file(fname).read(mf[fname])
693 rcont = self.repo.file(fname).read(mf[fname])
694 finfo = tarfile.TarInfo(name + fname)
694 finfo = tarfile.TarInfo(name + fname)
695 finfo.mtime = mtime
695 finfo.mtime = mtime
696 finfo.size = len(rcont)
696 finfo.size = len(rcont)
697 finfo.mode = mff[fname] and 0755 or 0644
697 finfo.mode = mff[fname] and 0755 or 0644
698 tf.addfile(finfo, StringIO.StringIO(rcont))
698 tf.addfile(finfo, StringIO.StringIO(rcont))
699 tf.close()
699 tf.close()
700
700
701 # add tags to things
701 # add tags to things
702 # tags -> list of changesets corresponding to tags
702 # tags -> list of changesets corresponding to tags
703 # find tag, changeset, file
703 # find tag, changeset, file
704
704
705 def run(self, req=hgrequest()):
705 def run(self, req=hgrequest()):
706 def header(**map):
706 def header(**map):
707 yield self.t("header", **map)
707 yield self.t("header", **map)
708
708
709 def footer(**map):
709 def footer(**map):
710 yield self.t("footer", **map)
710 yield self.t("footer", **map)
711
711
712 self.refresh()
712 self.refresh()
713
713
714 t = self.repo.ui.config("web", "templates", templatepath())
714 t = self.repo.ui.config("web", "templates", templatepath())
715 m = os.path.join(t, "map")
715 m = os.path.join(t, "map")
716 style = self.repo.ui.config("web", "style", "")
716 style = self.repo.ui.config("web", "style", "")
717 if req.form.has_key('style'):
717 if req.form.has_key('style'):
718 style = req.form['style'][0]
718 style = req.form['style'][0]
719 if style:
719 if style:
720 b = os.path.basename("map-" + style)
720 b = os.path.basename("map-" + style)
721 p = os.path.join(t, b)
721 p = os.path.join(t, b)
722 if os.path.isfile(p):
722 if os.path.isfile(p):
723 m = p
723 m = p
724
724
725 port = req.env["SERVER_PORT"]
725 port = req.env["SERVER_PORT"]
726 port = port != "80" and (":" + port) or ""
726 port = port != "80" and (":" + port) or ""
727 uri = req.env["REQUEST_URI"]
727 uri = req.env["REQUEST_URI"]
728 if "?" in uri:
728 if "?" in uri:
729 uri = uri.split("?")[0]
729 uri = uri.split("?")[0]
730 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
730 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
731 if not self.reponame:
731 if not self.reponame:
732 self.reponame = (self.repo.ui.config("web", "name")
732 self.reponame = (self.repo.ui.config("web", "name")
733 or uri.strip('/') or self.repo.root)
733 or uri.strip('/') or self.repo.root)
734
734
735 self.t = templater(m, common_filters,
735 self.t = templater(m, common_filters,
736 {"url": url,
736 {"url": url,
737 "repo": self.reponame,
737 "repo": self.reponame,
738 "header": header,
738 "header": header,
739 "footer": footer,
739 "footer": footer,
740 })
740 })
741
741
742 if not req.form.has_key('cmd'):
742 if not req.form.has_key('cmd'):
743 req.form['cmd'] = [self.t.cache['default'],]
743 req.form['cmd'] = [self.t.cache['default'],]
744
744
745 if req.form['cmd'][0] == 'changelog':
745 if req.form['cmd'][0] == 'changelog':
746 c = self.repo.changelog.count() - 1
746 c = self.repo.changelog.count() - 1
747 hi = c
747 hi = c
748 if req.form.has_key('rev'):
748 if req.form.has_key('rev'):
749 hi = req.form['rev'][0]
749 hi = req.form['rev'][0]
750 try:
750 try:
751 hi = self.repo.changelog.rev(self.repo.lookup(hi))
751 hi = self.repo.changelog.rev(self.repo.lookup(hi))
752 except hg.RepoError:
752 except hg.RepoError:
753 req.write(self.search(hi))
753 req.write(self.search(hi))
754 return
754 return
755
755
756 req.write(self.changelog(hi))
756 req.write(self.changelog(hi))
757
757
758 elif req.form['cmd'][0] == 'changeset':
758 elif req.form['cmd'][0] == 'changeset':
759 req.write(self.changeset(req.form['node'][0]))
759 req.write(self.changeset(req.form['node'][0]))
760
760
761 elif req.form['cmd'][0] == 'manifest':
761 elif req.form['cmd'][0] == 'manifest':
762 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
762 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
763
763
764 elif req.form['cmd'][0] == 'tags':
764 elif req.form['cmd'][0] == 'tags':
765 req.write(self.tags())
765 req.write(self.tags())
766
766
767 elif req.form['cmd'][0] == 'filediff':
767 elif req.form['cmd'][0] == 'filediff':
768 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
768 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
769
769
770 elif req.form['cmd'][0] == 'file':
770 elif req.form['cmd'][0] == 'file':
771 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
771 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
772
772
773 elif req.form['cmd'][0] == 'annotate':
773 elif req.form['cmd'][0] == 'annotate':
774 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
774 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
775
775
776 elif req.form['cmd'][0] == 'filelog':
776 elif req.form['cmd'][0] == 'filelog':
777 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
777 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
778
778
779 elif req.form['cmd'][0] == 'heads':
779 elif req.form['cmd'][0] == 'heads':
780 req.httphdr("application/mercurial-0.1")
780 req.httphdr("application/mercurial-0.1")
781 h = self.repo.heads()
781 h = self.repo.heads()
782 req.write(" ".join(map(hex, h)) + "\n")
782 req.write(" ".join(map(hex, h)) + "\n")
783
783
784 elif req.form['cmd'][0] == 'branches':
784 elif req.form['cmd'][0] == 'branches':
785 req.httphdr("application/mercurial-0.1")
785 req.httphdr("application/mercurial-0.1")
786 nodes = []
786 nodes = []
787 if req.form.has_key('nodes'):
787 if req.form.has_key('nodes'):
788 nodes = map(bin, req.form['nodes'][0].split(" "))
788 nodes = map(bin, req.form['nodes'][0].split(" "))
789 for b in self.repo.branches(nodes):
789 for b in self.repo.branches(nodes):
790 req.write(" ".join(map(hex, b)) + "\n")
790 req.write(" ".join(map(hex, b)) + "\n")
791
791
792 elif req.form['cmd'][0] == 'between':
792 elif req.form['cmd'][0] == 'between':
793 req.httphdr("application/mercurial-0.1")
793 req.httphdr("application/mercurial-0.1")
794 nodes = []
794 nodes = []
795 if req.form.has_key('pairs'):
795 if req.form.has_key('pairs'):
796 pairs = [map(bin, p.split("-"))
796 pairs = [map(bin, p.split("-"))
797 for p in req.form['pairs'][0].split(" ")]
797 for p in req.form['pairs'][0].split(" ")]
798 for b in self.repo.between(pairs):
798 for b in self.repo.between(pairs):
799 req.write(" ".join(map(hex, b)) + "\n")
799 req.write(" ".join(map(hex, b)) + "\n")
800
800
801 elif req.form['cmd'][0] == 'changegroup':
801 elif req.form['cmd'][0] == 'changegroup':
802 req.httphdr("application/mercurial-0.1")
802 req.httphdr("application/mercurial-0.1")
803 nodes = []
803 nodes = []
804 if not self.allowpull:
804 if not self.allowpull:
805 return
805 return
806
806
807 if req.form.has_key('roots'):
807 if req.form.has_key('roots'):
808 nodes = map(bin, req.form['roots'][0].split(" "))
808 nodes = map(bin, req.form['roots'][0].split(" "))
809
809
810 z = zlib.compressobj()
810 z = zlib.compressobj()
811 f = self.repo.changegroup(nodes)
811 f = self.repo.changegroup(nodes)
812 while 1:
812 while 1:
813 chunk = f.read(4096)
813 chunk = f.read(4096)
814 if not chunk:
814 if not chunk:
815 break
815 break
816 req.write(z.compress(chunk))
816 req.write(z.compress(chunk))
817
817
818 req.write(z.flush())
818 req.write(z.flush())
819
819
820 elif req.form['cmd'][0] == 'archive':
820 elif req.form['cmd'][0] == 'archive':
821 changeset = bin(req.form['node'][0])
821 changeset = bin(req.form['node'][0])
822 type = req.form['type'][0]
822 type = req.form['type'][0]
823 if (type in self.archives and
823 if (type in self.archives and
824 self.repo.ui.configbool("web", "allow" + type, False)):
824 self.repo.ui.configbool("web", "allow" + type, False)):
825 self.archive(req, changeset, type)
825 self.archive(req, changeset, type)
826 return
826 return
827
827
828 req.write(self.t("error"))
828 req.write(self.t("error"))
829
829
830 else:
830 else:
831 req.write(self.t("error"))
831 req.write(self.t("error"))
832
832
833 def create_server(repo):
833 def create_server(repo):
834
834
835 def openlog(opt, default):
835 def openlog(opt, default):
836 if opt and opt != '-':
836 if opt and opt != '-':
837 return open(opt, 'w')
837 return open(opt, 'w')
838 return default
838 return default
839
839
840 address = repo.ui.config("web", "address", "")
840 address = repo.ui.config("web", "address", "")
841 port = int(repo.ui.config("web", "port", 8000))
841 port = int(repo.ui.config("web", "port", 8000))
842 use_ipv6 = repo.ui.configbool("web", "ipv6")
842 use_ipv6 = repo.ui.configbool("web", "ipv6")
843 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
843 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
844 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
844 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
845
845
846 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
846 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
847 address_family = getattr(socket, 'AF_INET6', None)
847 address_family = getattr(socket, 'AF_INET6', None)
848
848
849 def __init__(self, *args, **kwargs):
849 def __init__(self, *args, **kwargs):
850 if self.address_family is None:
850 if self.address_family is None:
851 raise hg.RepoError('IPv6 not available on this system')
851 raise hg.RepoError('IPv6 not available on this system')
852 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
852 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
853
853
854 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
854 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
855 def log_error(self, format, *args):
855 def log_error(self, format, *args):
856 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
856 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
857 self.log_date_time_string(),
857 self.log_date_time_string(),
858 format % args))
858 format % args))
859
859
860 def log_message(self, format, *args):
860 def log_message(self, format, *args):
861 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
861 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
862 self.log_date_time_string(),
862 self.log_date_time_string(),
863 format % args))
863 format % args))
864
864
865 def do_POST(self):
865 def do_POST(self):
866 try:
866 try:
867 self.do_hgweb()
867 self.do_hgweb()
868 except socket.error, inst:
868 except socket.error, inst:
869 if inst[0] != errno.EPIPE:
869 if inst[0] != errno.EPIPE:
870 raise
870 raise
871
871
872 def do_GET(self):
872 def do_GET(self):
873 self.do_POST()
873 self.do_POST()
874
874
875 def do_hgweb(self):
875 def do_hgweb(self):
876 query = ""
876 query = ""
877 p = self.path.find("?")
877 p = self.path.find("?")
878 if p:
878 if p:
879 query = self.path[p + 1:]
879 query = self.path[p + 1:]
880 query = query.replace('+', ' ')
880 query = query.replace('+', ' ')
881
881
882 env = {}
882 env = {}
883 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
883 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
884 env['REQUEST_METHOD'] = self.command
884 env['REQUEST_METHOD'] = self.command
885 env['SERVER_NAME'] = self.server.server_name
885 env['SERVER_NAME'] = self.server.server_name
886 env['SERVER_PORT'] = str(self.server.server_port)
886 env['SERVER_PORT'] = str(self.server.server_port)
887 env['REQUEST_URI'] = "/"
887 env['REQUEST_URI'] = "/"
888 if query:
888 if query:
889 env['QUERY_STRING'] = query
889 env['QUERY_STRING'] = query
890 host = self.address_string()
890 host = self.address_string()
891 if host != self.client_address[0]:
891 if host != self.client_address[0]:
892 env['REMOTE_HOST'] = host
892 env['REMOTE_HOST'] = host
893 env['REMOTE_ADDR'] = self.client_address[0]
893 env['REMOTE_ADDR'] = self.client_address[0]
894
894
895 if self.headers.typeheader is None:
895 if self.headers.typeheader is None:
896 env['CONTENT_TYPE'] = self.headers.type
896 env['CONTENT_TYPE'] = self.headers.type
897 else:
897 else:
898 env['CONTENT_TYPE'] = self.headers.typeheader
898 env['CONTENT_TYPE'] = self.headers.typeheader
899 length = self.headers.getheader('content-length')
899 length = self.headers.getheader('content-length')
900 if length:
900 if length:
901 env['CONTENT_LENGTH'] = length
901 env['CONTENT_LENGTH'] = length
902 accept = []
902 accept = []
903 for line in self.headers.getallmatchingheaders('accept'):
903 for line in self.headers.getallmatchingheaders('accept'):
904 if line[:1] in "\t\n\r ":
904 if line[:1] in "\t\n\r ":
905 accept.append(line.strip())
905 accept.append(line.strip())
906 else:
906 else:
907 accept = accept + line[7:].split(',')
907 accept = accept + line[7:].split(',')
908 env['HTTP_ACCEPT'] = ','.join(accept)
908 env['HTTP_ACCEPT'] = ','.join(accept)
909
909
910 req = hgrequest(self.rfile, self.wfile, env)
910 req = hgrequest(self.rfile, self.wfile, env)
911 self.send_response(200, "Script output follows")
911 self.send_response(200, "Script output follows")
912 hg.run(req)
912 hg.run(req)
913
913
914 hg = hgweb(repo)
914 hg = hgweb(repo)
915 if use_ipv6:
915 if use_ipv6:
916 return IPv6HTTPServer((address, port), hgwebhandler)
916 return IPv6HTTPServer((address, port), hgwebhandler)
917 else:
917 else:
918 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
918 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
919
919
920 def server(path, name, templates, address, port, use_ipv6=False,
920 def server(path, name, templates, address, port, use_ipv6=False,
921 accesslog=sys.stdout, errorlog=sys.stderr):
921 accesslog=sys.stdout, errorlog=sys.stderr):
922 httpd = create_server(path, name, templates, address, port, use_ipv6,
922 httpd = create_server(path, name, templates, address, port, use_ipv6,
923 accesslog, errorlog)
923 accesslog, errorlog)
924 httpd.serve_forever()
924 httpd.serve_forever()
925
925
926 # This is a stopgap
926 # This is a stopgap
927 class hgwebdir:
927 class hgwebdir:
928 def __init__(self, config):
928 def __init__(self, config):
929 def cleannames(items):
929 def cleannames(items):
930 return [(name.strip('/'), path) for name, path in items]
930 return [(name.strip('/'), path) for name, path in items]
931
931
932 if type(config) == type([]):
932 if type(config) == type([]):
933 self.repos = cleannames(config)
933 self.repos = cleannames(config)
934 elif type(config) == type({}):
934 elif type(config) == type({}):
935 self.repos = cleannames(config.items())
935 self.repos = cleannames(config.items())
936 self.repos.sort()
936 self.repos.sort()
937 else:
937 else:
938 cp = ConfigParser.SafeConfigParser()
938 cp = ConfigParser.SafeConfigParser()
939 cp.read(config)
939 cp.read(config)
940 self.repos = cleannames(cp.items("paths"))
940 self.repos = cleannames(cp.items("paths"))
941 self.repos.sort()
941 self.repos.sort()
942
942
943 def run(self, req=hgrequest()):
943 def run(self, req=hgrequest()):
944 def header(**map):
944 def header(**map):
945 yield tmpl("header", **map)
945 yield tmpl("header", **map)
946
946
947 def footer(**map):
947 def footer(**map):
948 yield tmpl("footer", **map)
948 yield tmpl("footer", **map)
949
949
950 m = os.path.join(templatepath(), "map")
950 m = os.path.join(templatepath(), "map")
951 tmpl = templater(m, common_filters,
951 tmpl = templater(m, common_filters,
952 {"header": header, "footer": footer})
952 {"header": header, "footer": footer})
953
953
954 def entries(**map):
954 def entries(**map):
955 parity = 0
955 parity = 0
956 for name, path in self.repos:
956 for name, path in self.repos:
957 u = ui.ui()
957 u = ui.ui()
958 try:
958 try:
959 u.readconfig(file(os.path.join(path, '.hg', 'hgrc')))
959 u.readconfig(file(os.path.join(path, '.hg', 'hgrc')))
960 except IOError:
960 except IOError:
961 pass
961 pass
962 get = u.config
962 get = u.config
963
963
964 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
964 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
965 .replace("//", "/"))
965 .replace("//", "/"))
966
966
967 yield dict(contact=get("web", "contact") or
967 yield dict(contact=(get("ui", "username") or # preferred
968 get("web", "author", "unknown"),
968 get("web", "contact") or # deprecated
969 get("web", "author", "unknown")), # also
969 name=get("web", "name", name),
970 name=get("web", "name", name),
970 url=url,
971 url=url,
971 parity=parity,
972 parity=parity,
972 shortdesc=get("web", "description", "unknown"),
973 shortdesc=get("web", "description", "unknown"),
973 lastupdate=os.stat(os.path.join(path, ".hg",
974 lastupdate=os.stat(os.path.join(path, ".hg",
974 "00changelog.d")).st_mtime)
975 "00changelog.d")).st_mtime)
975
976
976 parity = 1 - parity
977 parity = 1 - parity
977
978
978 virtual = req.env.get("PATH_INFO", "").strip('/')
979 virtual = req.env.get("PATH_INFO", "").strip('/')
979 if virtual:
980 if virtual:
980 real = dict(self.repos).get(virtual)
981 real = dict(self.repos).get(virtual)
981 if real:
982 if real:
982 hgweb(real).run(req)
983 hgweb(real).run(req)
983 else:
984 else:
984 req.write(tmpl("notfound", repo=virtual))
985 req.write(tmpl("notfound", repo=virtual))
985 else:
986 else:
986 req.write(tmpl("index", entries=entries))
987 req.write(tmpl("index", entries=entries))
@@ -1,1424 +1,1421 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock transaction tempfile stat mdiff")
12 demandload(globals(), "re lock transaction tempfile stat mdiff")
13
13
14 class localrepository:
14 class localrepository:
15 def __init__(self, ui, path=None, create=0):
15 def __init__(self, ui, path=None, create=0):
16 if not path:
16 if not path:
17 p = os.getcwd()
17 p = os.getcwd()
18 while not os.path.isdir(os.path.join(p, ".hg")):
18 while not os.path.isdir(os.path.join(p, ".hg")):
19 oldp = p
19 oldp = p
20 p = os.path.dirname(p)
20 p = os.path.dirname(p)
21 if p == oldp: raise repo.RepoError("no repo found")
21 if p == oldp: raise repo.RepoError("no repo found")
22 path = p
22 path = p
23 self.path = os.path.join(path, ".hg")
23 self.path = os.path.join(path, ".hg")
24
24
25 if not create and not os.path.isdir(self.path):
25 if not create and not os.path.isdir(self.path):
26 raise repo.RepoError("repository %s not found" % self.path)
26 raise repo.RepoError("repository %s not found" % self.path)
27
27
28 self.root = os.path.abspath(path)
28 self.root = os.path.abspath(path)
29 self.ui = ui
29 self.ui = ui
30 self.opener = util.opener(self.path)
30 self.opener = util.opener(self.path)
31 self.wopener = util.opener(self.root)
31 self.wopener = util.opener(self.root)
32 self.manifest = manifest.manifest(self.opener)
32 self.manifest = manifest.manifest(self.opener)
33 self.changelog = changelog.changelog(self.opener)
33 self.changelog = changelog.changelog(self.opener)
34 self.tagscache = None
34 self.tagscache = None
35 self.nodetagscache = None
35 self.nodetagscache = None
36 self.encodepats = None
36 self.encodepats = None
37 self.decodepats = None
37 self.decodepats = None
38
38
39 if create:
39 if create:
40 os.mkdir(self.path)
40 os.mkdir(self.path)
41 os.mkdir(self.join("data"))
41 os.mkdir(self.join("data"))
42 f = self.opener("hgrc", "w")
43 f.write("[web]\n")
44 f.write("contact = %s\n" % ui.shortuser(ui.username()))
45
42
46 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
43 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
47 try:
44 try:
48 self.ui.readconfig(self.opener("hgrc"))
45 self.ui.readconfig(self.opener("hgrc"))
49 except IOError: pass
46 except IOError: pass
50
47
51 def hook(self, name, **args):
48 def hook(self, name, **args):
52 s = self.ui.config("hooks", name)
49 s = self.ui.config("hooks", name)
53 if s:
50 if s:
54 self.ui.note("running hook %s: %s\n" % (name, s))
51 self.ui.note("running hook %s: %s\n" % (name, s))
55 old = {}
52 old = {}
56 for k, v in args.items():
53 for k, v in args.items():
57 k = k.upper()
54 k = k.upper()
58 old[k] = os.environ.get(k, None)
55 old[k] = os.environ.get(k, None)
59 os.environ[k] = v
56 os.environ[k] = v
60
57
61 r = os.system(s)
58 r = os.system(s)
62
59
63 for k, v in old.items():
60 for k, v in old.items():
64 if v != None:
61 if v != None:
65 os.environ[k] = v
62 os.environ[k] = v
66 else:
63 else:
67 del os.environ[k]
64 del os.environ[k]
68
65
69 if r:
66 if r:
70 self.ui.warn("abort: %s hook failed with status %d!\n" %
67 self.ui.warn("abort: %s hook failed with status %d!\n" %
71 (name, r))
68 (name, r))
72 return False
69 return False
73 return True
70 return True
74
71
75 def tags(self):
72 def tags(self):
76 '''return a mapping of tag to node'''
73 '''return a mapping of tag to node'''
77 if not self.tagscache:
74 if not self.tagscache:
78 self.tagscache = {}
75 self.tagscache = {}
79 def addtag(self, k, n):
76 def addtag(self, k, n):
80 try:
77 try:
81 bin_n = bin(n)
78 bin_n = bin(n)
82 except TypeError:
79 except TypeError:
83 bin_n = ''
80 bin_n = ''
84 self.tagscache[k.strip()] = bin_n
81 self.tagscache[k.strip()] = bin_n
85
82
86 try:
83 try:
87 # read each head of the tags file, ending with the tip
84 # read each head of the tags file, ending with the tip
88 # and add each tag found to the map, with "newer" ones
85 # and add each tag found to the map, with "newer" ones
89 # taking precedence
86 # taking precedence
90 fl = self.file(".hgtags")
87 fl = self.file(".hgtags")
91 h = fl.heads()
88 h = fl.heads()
92 h.reverse()
89 h.reverse()
93 for r in h:
90 for r in h:
94 for l in fl.read(r).splitlines():
91 for l in fl.read(r).splitlines():
95 if l:
92 if l:
96 n, k = l.split(" ", 1)
93 n, k = l.split(" ", 1)
97 addtag(self, k, n)
94 addtag(self, k, n)
98 except KeyError:
95 except KeyError:
99 pass
96 pass
100
97
101 try:
98 try:
102 f = self.opener("localtags")
99 f = self.opener("localtags")
103 for l in f:
100 for l in f:
104 n, k = l.split(" ", 1)
101 n, k = l.split(" ", 1)
105 addtag(self, k, n)
102 addtag(self, k, n)
106 except IOError:
103 except IOError:
107 pass
104 pass
108
105
109 self.tagscache['tip'] = self.changelog.tip()
106 self.tagscache['tip'] = self.changelog.tip()
110
107
111 return self.tagscache
108 return self.tagscache
112
109
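
tags() builds its map from .hgtags, reading every head of that file with newer heads taking precedence, then an optional .hg/localtags, and always adds 'tip' pointing at the changelog tip. Each line is a 40-character hex node followed by a single space and the tag name, for example (with an invented hash):

1234567890abcdef1234567890abcdef12345678 v0.6

A line whose node does not parse with bin() maps the tag to an empty string rather than raising, as addtag() above shows.
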
113 def tagslist(self):
110 def tagslist(self):
114 '''return a list of tags ordered by revision'''
111 '''return a list of tags ordered by revision'''
115 l = []
112 l = []
116 for t, n in self.tags().items():
113 for t, n in self.tags().items():
117 try:
114 try:
118 r = self.changelog.rev(n)
115 r = self.changelog.rev(n)
119 except:
116 except:
120 r = -2 # sort to the beginning of the list if unknown
117 r = -2 # sort to the beginning of the list if unknown
121 l.append((r,t,n))
118 l.append((r,t,n))
122 l.sort()
119 l.sort()
123 return [(t,n) for r,t,n in l]
120 return [(t,n) for r,t,n in l]
124
121
125 def nodetags(self, node):
122 def nodetags(self, node):
126 '''return the tags associated with a node'''
123 '''return the tags associated with a node'''
127 if not self.nodetagscache:
124 if not self.nodetagscache:
128 self.nodetagscache = {}
125 self.nodetagscache = {}
129 for t,n in self.tags().items():
126 for t,n in self.tags().items():
130 self.nodetagscache.setdefault(n,[]).append(t)
127 self.nodetagscache.setdefault(n,[]).append(t)
131 return self.nodetagscache.get(node, [])
128 return self.nodetagscache.get(node, [])
132
129
133 def lookup(self, key):
130 def lookup(self, key):
134 try:
131 try:
135 return self.tags()[key]
132 return self.tags()[key]
136 except KeyError:
133 except KeyError:
137 try:
134 try:
138 return self.changelog.lookup(key)
135 return self.changelog.lookup(key)
139 except:
136 except:
140 raise repo.RepoError("unknown revision '%s'" % key)
137 raise repo.RepoError("unknown revision '%s'" % key)
141
138
142 def dev(self):
139 def dev(self):
143 return os.stat(self.path).st_dev
140 return os.stat(self.path).st_dev
144
141
145 def local(self):
142 def local(self):
146 return True
143 return True
147
144
148 def join(self, f):
145 def join(self, f):
149 return os.path.join(self.path, f)
146 return os.path.join(self.path, f)
150
147
151 def wjoin(self, f):
148 def wjoin(self, f):
152 return os.path.join(self.root, f)
149 return os.path.join(self.root, f)
153
150
154 def file(self, f):
151 def file(self, f):
155 if f[0] == '/': f = f[1:]
152 if f[0] == '/': f = f[1:]
156 return filelog.filelog(self.opener, f)
153 return filelog.filelog(self.opener, f)
157
154
158 def getcwd(self):
155 def getcwd(self):
159 return self.dirstate.getcwd()
156 return self.dirstate.getcwd()
160
157
161 def wfile(self, f, mode='r'):
158 def wfile(self, f, mode='r'):
162 return self.wopener(f, mode)
159 return self.wopener(f, mode)
163
160
164 def wread(self, filename):
161 def wread(self, filename):
165 if self.encodepats == None:
162 if self.encodepats == None:
166 l = []
163 l = []
167 for pat, cmd in self.ui.configitems("encode"):
164 for pat, cmd in self.ui.configitems("encode"):
168 mf = util.matcher("", "/", [pat], [], [])[1]
165 mf = util.matcher("", "/", [pat], [], [])[1]
169 l.append((mf, cmd))
166 l.append((mf, cmd))
170 self.encodepats = l
167 self.encodepats = l
171
168
172 data = self.wopener(filename, 'r').read()
169 data = self.wopener(filename, 'r').read()
173
170
174 for mf, cmd in self.encodepats:
171 for mf, cmd in self.encodepats:
175 if mf(filename):
172 if mf(filename):
176 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
173 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
177 data = util.filter(data, cmd)
174 data = util.filter(data, cmd)
178 break
175 break
179
176
180 return data
177 return data
181
178
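
wread() above and wwrite() just below apply the [encode] and [decode] sections of hgrc: each entry maps a file pattern to a command, and the first matching pattern's command filters the data (via util.filter, which appears to pipe it through the shell command) when reading from or writing to the working directory. A classic, hypothetical use is keeping gzipped files uncompressed in history:

[encode]
*.gz = gunzip

[decode]
*.gz = gzip

The pattern syntax is whatever util.matcher accepts; the commands shown are only an example.
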
182 def wwrite(self, filename, data, fd=None):
179 def wwrite(self, filename, data, fd=None):
183 if self.decodepats == None:
180 if self.decodepats == None:
184 l = []
181 l = []
185 for pat, cmd in self.ui.configitems("decode"):
182 for pat, cmd in self.ui.configitems("decode"):
186 mf = util.matcher("", "/", [pat], [], [])[1]
183 mf = util.matcher("", "/", [pat], [], [])[1]
187 l.append((mf, cmd))
184 l.append((mf, cmd))
188 self.decodepats = l
185 self.decodepats = l
189
186
190 for mf, cmd in self.decodepats:
187 for mf, cmd in self.decodepats:
191 if mf(filename):
188 if mf(filename):
192 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
189 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
193 data = util.filter(data, cmd)
190 data = util.filter(data, cmd)
194 break
191 break
195
192
196 if fd:
193 if fd:
197 return fd.write(data)
194 return fd.write(data)
198 return self.wopener(filename, 'w').write(data)
195 return self.wopener(filename, 'w').write(data)
199
196
200 def transaction(self):
197 def transaction(self):
201 # save dirstate for undo
198 # save dirstate for undo
202 try:
199 try:
203 ds = self.opener("dirstate").read()
200 ds = self.opener("dirstate").read()
204 except IOError:
201 except IOError:
205 ds = ""
202 ds = ""
206 self.opener("journal.dirstate", "w").write(ds)
203 self.opener("journal.dirstate", "w").write(ds)
207
204
208 def after():
205 def after():
209 util.rename(self.join("journal"), self.join("undo"))
206 util.rename(self.join("journal"), self.join("undo"))
210 util.rename(self.join("journal.dirstate"),
207 util.rename(self.join("journal.dirstate"),
211 self.join("undo.dirstate"))
208 self.join("undo.dirstate"))
212
209
213 return transaction.transaction(self.ui.warn, self.opener,
210 return transaction.transaction(self.ui.warn, self.opener,
214 self.join("journal"), after)
211 self.join("journal"), after)
215
212
216 def recover(self):
213 def recover(self):
217 lock = self.lock()
214 lock = self.lock()
218 if os.path.exists(self.join("journal")):
215 if os.path.exists(self.join("journal")):
219 self.ui.status("rolling back interrupted transaction\n")
216 self.ui.status("rolling back interrupted transaction\n")
220 return transaction.rollback(self.opener, self.join("journal"))
217 return transaction.rollback(self.opener, self.join("journal"))
221 else:
218 else:
222 self.ui.warn("no interrupted transaction available\n")
219 self.ui.warn("no interrupted transaction available\n")
223
220
224 def undo(self):
221 def undo(self):
225 lock = self.lock()
222 lock = self.lock()
226 if os.path.exists(self.join("undo")):
223 if os.path.exists(self.join("undo")):
227 self.ui.status("rolling back last transaction\n")
224 self.ui.status("rolling back last transaction\n")
228 transaction.rollback(self.opener, self.join("undo"))
225 transaction.rollback(self.opener, self.join("undo"))
229 self.dirstate = None
226 self.dirstate = None
230 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
227 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
231 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
228 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
232 else:
229 else:
233 self.ui.warn("no undo information available\n")
230 self.ui.warn("no undo information available\n")
234
231
235 def lock(self, wait=1):
232 def lock(self, wait=1):
236 try:
233 try:
237 return lock.lock(self.join("lock"), 0)
234 return lock.lock(self.join("lock"), 0)
238 except lock.LockHeld, inst:
235 except lock.LockHeld, inst:
239 if wait:
236 if wait:
240 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
237 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
241 return lock.lock(self.join("lock"), wait)
238 return lock.lock(self.join("lock"), wait)
242 raise inst
239 raise inst
243
240
244 def rawcommit(self, files, text, user, date, p1=None, p2=None):
241 def rawcommit(self, files, text, user, date, p1=None, p2=None):
245 orig_parent = self.dirstate.parents()[0] or nullid
242 orig_parent = self.dirstate.parents()[0] or nullid
246 p1 = p1 or self.dirstate.parents()[0] or nullid
243 p1 = p1 or self.dirstate.parents()[0] or nullid
247 p2 = p2 or self.dirstate.parents()[1] or nullid
244 p2 = p2 or self.dirstate.parents()[1] or nullid
248 c1 = self.changelog.read(p1)
245 c1 = self.changelog.read(p1)
249 c2 = self.changelog.read(p2)
246 c2 = self.changelog.read(p2)
250 m1 = self.manifest.read(c1[0])
247 m1 = self.manifest.read(c1[0])
251 mf1 = self.manifest.readflags(c1[0])
248 mf1 = self.manifest.readflags(c1[0])
252 m2 = self.manifest.read(c2[0])
249 m2 = self.manifest.read(c2[0])
253 changed = []
250 changed = []
254
251
255 if orig_parent == p1:
252 if orig_parent == p1:
256 update_dirstate = 1
253 update_dirstate = 1
257 else:
254 else:
258 update_dirstate = 0
255 update_dirstate = 0
259
256
260 tr = self.transaction()
257 tr = self.transaction()
261 mm = m1.copy()
258 mm = m1.copy()
262 mfm = mf1.copy()
259 mfm = mf1.copy()
263 linkrev = self.changelog.count()
260 linkrev = self.changelog.count()
264 for f in files:
261 for f in files:
265 try:
262 try:
266 t = self.wread(f)
263 t = self.wread(f)
267 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
264 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
268 r = self.file(f)
265 r = self.file(f)
269 mfm[f] = tm
266 mfm[f] = tm
270
267
271 fp1 = m1.get(f, nullid)
268 fp1 = m1.get(f, nullid)
272 fp2 = m2.get(f, nullid)
269 fp2 = m2.get(f, nullid)
273
270
274 # is the same revision on two branches of a merge?
271 # is the same revision on two branches of a merge?
275 if fp2 == fp1:
272 if fp2 == fp1:
276 fp2 = nullid
273 fp2 = nullid
277
274
278 if fp2 != nullid:
275 if fp2 != nullid:
279 # is one parent an ancestor of the other?
276 # is one parent an ancestor of the other?
280 fpa = r.ancestor(fp1, fp2)
277 fpa = r.ancestor(fp1, fp2)
281 if fpa == fp1:
278 if fpa == fp1:
282 fp1, fp2 = fp2, nullid
279 fp1, fp2 = fp2, nullid
283 elif fpa == fp2:
280 elif fpa == fp2:
284 fp2 = nullid
281 fp2 = nullid
285
282
286 # is the file unmodified from the parent?
283 # is the file unmodified from the parent?
287 if t == r.read(fp1):
284 if t == r.read(fp1):
288 # record the proper existing parent in manifest
285 # record the proper existing parent in manifest
289 # no need to add a revision
286 # no need to add a revision
290 mm[f] = fp1
287 mm[f] = fp1
291 continue
288 continue
292
289
293 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
290 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
294 changed.append(f)
291 changed.append(f)
295 if update_dirstate:
292 if update_dirstate:
296 self.dirstate.update([f], "n")
293 self.dirstate.update([f], "n")
297 except IOError:
294 except IOError:
298 try:
295 try:
299 del mm[f]
296 del mm[f]
300 del mfm[f]
297 del mfm[f]
301 if update_dirstate:
298 if update_dirstate:
302 self.dirstate.forget([f])
299 self.dirstate.forget([f])
303 except:
300 except:
304 # deleted from p2?
301 # deleted from p2?
305 pass
302 pass
306
303
307 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
304 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
308 user = user or self.ui.username()
305 user = user or self.ui.username()
309 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
306 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
310 tr.close()
307 tr.close()
311 if update_dirstate:
308 if update_dirstate:
312 self.dirstate.setparents(n, nullid)
309 self.dirstate.setparents(n, nullid)
313
310
314 def commit(self, files = None, text = "", user = None, date = None,
311 def commit(self, files = None, text = "", user = None, date = None,
315 match = util.always, force=False):
312 match = util.always, force=False):
316 commit = []
313 commit = []
317 remove = []
314 remove = []
318 changed = []
315 changed = []
319
316
320 if files:
317 if files:
321 for f in files:
318 for f in files:
322 s = self.dirstate.state(f)
319 s = self.dirstate.state(f)
323 if s in 'nmai':
320 if s in 'nmai':
324 commit.append(f)
321 commit.append(f)
325 elif s == 'r':
322 elif s == 'r':
326 remove.append(f)
323 remove.append(f)
327 else:
324 else:
328 self.ui.warn("%s not tracked!\n" % f)
325 self.ui.warn("%s not tracked!\n" % f)
329 else:
326 else:
330 (c, a, d, u) = self.changes(match=match)
327 (c, a, d, u) = self.changes(match=match)
331 commit = c + a
328 commit = c + a
332 remove = d
329 remove = d
333
330
334 p1, p2 = self.dirstate.parents()
331 p1, p2 = self.dirstate.parents()
335 c1 = self.changelog.read(p1)
332 c1 = self.changelog.read(p1)
336 c2 = self.changelog.read(p2)
333 c2 = self.changelog.read(p2)
337 m1 = self.manifest.read(c1[0])
334 m1 = self.manifest.read(c1[0])
338 mf1 = self.manifest.readflags(c1[0])
335 mf1 = self.manifest.readflags(c1[0])
339 m2 = self.manifest.read(c2[0])
336 m2 = self.manifest.read(c2[0])
340
337
341 if not commit and not remove and not force and p2 == nullid:
338 if not commit and not remove and not force and p2 == nullid:
342 self.ui.status("nothing changed\n")
339 self.ui.status("nothing changed\n")
343 return None
340 return None
344
341
345 if not self.hook("precommit"):
342 if not self.hook("precommit"):
346 return None
343 return None
347
344
348 lock = self.lock()
345 lock = self.lock()
349 tr = self.transaction()
346 tr = self.transaction()
350
347
351 # check in files
348 # check in files
352 new = {}
349 new = {}
353 linkrev = self.changelog.count()
350 linkrev = self.changelog.count()
354 commit.sort()
351 commit.sort()
355 for f in commit:
352 for f in commit:
356 self.ui.note(f + "\n")
353 self.ui.note(f + "\n")
357 try:
354 try:
358 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
355 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
359 t = self.wread(f)
356 t = self.wread(f)
360 except IOError:
357 except IOError:
361 self.ui.warn("trouble committing %s!\n" % f)
358 self.ui.warn("trouble committing %s!\n" % f)
362 raise
359 raise
363
360
364 r = self.file(f)
361 r = self.file(f)
365
362
366 meta = {}
363 meta = {}
367 cp = self.dirstate.copied(f)
364 cp = self.dirstate.copied(f)
368 if cp:
365 if cp:
369 meta["copy"] = cp
366 meta["copy"] = cp
370 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
367 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
371 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
368 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
372 fp1, fp2 = nullid, nullid
369 fp1, fp2 = nullid, nullid
373 else:
370 else:
374 fp1 = m1.get(f, nullid)
371 fp1 = m1.get(f, nullid)
375 fp2 = m2.get(f, nullid)
372 fp2 = m2.get(f, nullid)
376
373
377 # is the same revision on two branches of a merge?
374 # is the same revision on two branches of a merge?
378 if fp2 == fp1:
375 if fp2 == fp1:
379 fp2 = nullid
376 fp2 = nullid
380
377
381 if fp2 != nullid:
378 if fp2 != nullid:
382 # is one parent an ancestor of the other?
379 # is one parent an ancestor of the other?
383 fpa = r.ancestor(fp1, fp2)
380 fpa = r.ancestor(fp1, fp2)
384 if fpa == fp1:
381 if fpa == fp1:
385 fp1, fp2 = fp2, nullid
382 fp1, fp2 = fp2, nullid
386 elif fpa == fp2:
383 elif fpa == fp2:
387 fp2 = nullid
384 fp2 = nullid
388
385
389 # is the file unmodified from the parent?
386 # is the file unmodified from the parent?
390 if not meta and t == r.read(fp1):
387 if not meta and t == r.read(fp1):
391 # record the proper existing parent in manifest
388 # record the proper existing parent in manifest
392 # no need to add a revision
389 # no need to add a revision
393 new[f] = fp1
390 new[f] = fp1
394 continue
391 continue
395
392
396 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
393 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
397 # remember what we've added so that we can later calculate
394 # remember what we've added so that we can later calculate
398 # the files to pull from a set of changesets
395 # the files to pull from a set of changesets
399 changed.append(f)
396 changed.append(f)
400
397
401 # update manifest
398 # update manifest
402 m1.update(new)
399 m1.update(new)
403 for f in remove:
400 for f in remove:
404 if f in m1:
401 if f in m1:
405 del m1[f]
402 del m1[f]
406 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
403 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
407 (new, remove))
404 (new, remove))
408
405
409 # add changeset
406 # add changeset
410 new = new.keys()
407 new = new.keys()
411 new.sort()
408 new.sort()
412
409
413 if not text:
410 if not text:
414 edittext = ""
411 edittext = ""
415 if p2 != nullid:
412 if p2 != nullid:
416 edittext += "HG: branch merge\n"
413 edittext += "HG: branch merge\n"
417 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
414 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
418 edittext += "".join(["HG: changed %s\n" % f for f in changed])
415 edittext += "".join(["HG: changed %s\n" % f for f in changed])
419 edittext += "".join(["HG: removed %s\n" % f for f in remove])
416 edittext += "".join(["HG: removed %s\n" % f for f in remove])
420 if not changed and not remove:
417 if not changed and not remove:
421 edittext += "HG: no files changed\n"
418 edittext += "HG: no files changed\n"
422 edittext = self.ui.edit(edittext)
419 edittext = self.ui.edit(edittext)
423 if not edittext.rstrip():
420 if not edittext.rstrip():
424 return None
421 return None
425 text = edittext
422 text = edittext
426
423
427 user = user or self.ui.username()
424 user = user or self.ui.username()
428 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
425 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
429 tr.close()
426 tr.close()
430
427
431 self.dirstate.setparents(n)
428 self.dirstate.setparents(n)
432 self.dirstate.update(new, "n")
429 self.dirstate.update(new, "n")
433 self.dirstate.forget(remove)
430 self.dirstate.forget(remove)
434
431
435 if not self.hook("commit", node=hex(n)):
432 if not self.hook("commit", node=hex(n)):
436 return None
433 return None
437 return n
434 return n
438
435
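
In summary, commit() returns the new changeset node on success and None when there is nothing to commit, when the precommit hook rejects the commit, when an interactively edited message comes back empty, or when the commit hook fails afterwards. A hedged usage sketch (repo stands for an existing localrepository instance; the file name, message and user are invented):

node = repo.commit(files=['README'], text='update docs', user='jdoe')
if node is None:
    print 'nothing committed or commit refused by a hook'
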
439 def walk(self, node=None, files=[], match=util.always):
436 def walk(self, node=None, files=[], match=util.always):
440 if node:
437 if node:
441 for fn in self.manifest.read(self.changelog.read(node)[0]):
438 for fn in self.manifest.read(self.changelog.read(node)[0]):
442 if match(fn): yield 'm', fn
439 if match(fn): yield 'm', fn
443 else:
440 else:
444 for src, fn in self.dirstate.walk(files, match):
441 for src, fn in self.dirstate.walk(files, match):
445 yield src, fn
442 yield src, fn
446
443
447 def changes(self, node1 = None, node2 = None, files = [],
444 def changes(self, node1 = None, node2 = None, files = [],
448 match = util.always):
445 match = util.always):
449 mf2, u = None, []
446 mf2, u = None, []
450
447
451 def fcmp(fn, mf):
448 def fcmp(fn, mf):
452 t1 = self.wread(fn)
449 t1 = self.wread(fn)
453 t2 = self.file(fn).read(mf.get(fn, nullid))
450 t2 = self.file(fn).read(mf.get(fn, nullid))
454 return cmp(t1, t2)
451 return cmp(t1, t2)
455
452
456 def mfmatches(node):
453 def mfmatches(node):
457 mf = dict(self.manifest.read(node))
454 mf = dict(self.manifest.read(node))
458 for fn in mf.keys():
455 for fn in mf.keys():
459 if not match(fn):
456 if not match(fn):
460 del mf[fn]
457 del mf[fn]
461 return mf
458 return mf
462
459
463 # are we comparing the working directory?
460 # are we comparing the working directory?
464 if not node2:
461 if not node2:
465 l, c, a, d, u = self.dirstate.changes(files, match)
462 l, c, a, d, u = self.dirstate.changes(files, match)
466
463
467 # are we comparing working dir against its parent?
464 # are we comparing working dir against its parent?
468 if not node1:
465 if not node1:
469 if l:
466 if l:
470 # do a full compare of any files that might have changed
467 # do a full compare of any files that might have changed
471 change = self.changelog.read(self.dirstate.parents()[0])
468 change = self.changelog.read(self.dirstate.parents()[0])
472 mf2 = mfmatches(change[0])
469 mf2 = mfmatches(change[0])
473 for f in l:
470 for f in l:
474 if fcmp(f, mf2):
471 if fcmp(f, mf2):
475 c.append(f)
472 c.append(f)
476
473
477 for l in c, a, d, u:
474 for l in c, a, d, u:
478 l.sort()
475 l.sort()
479
476
480 return (c, a, d, u)
477 return (c, a, d, u)
481
478
482 # are we comparing working dir against non-tip?
479 # are we comparing working dir against non-tip?
483 # generate a pseudo-manifest for the working dir
480 # generate a pseudo-manifest for the working dir
484 if not node2:
481 if not node2:
485 if not mf2:
482 if not mf2:
486 change = self.changelog.read(self.dirstate.parents()[0])
483 change = self.changelog.read(self.dirstate.parents()[0])
487 mf2 = mfmatches(change[0])
484 mf2 = mfmatches(change[0])
488 for f in a + c + l:
485 for f in a + c + l:
489 mf2[f] = ""
486 mf2[f] = ""
490 for f in d:
487 for f in d:
491 if f in mf2: del mf2[f]
488 if f in mf2: del mf2[f]
492 else:
489 else:
493 change = self.changelog.read(node2)
490 change = self.changelog.read(node2)
494 mf2 = mfmatches(change[0])
491 mf2 = mfmatches(change[0])
495
492
496 # flush lists from dirstate before comparing manifests
493 # flush lists from dirstate before comparing manifests
497 c, a = [], []
494 c, a = [], []
498
495
499 change = self.changelog.read(node1)
496 change = self.changelog.read(node1)
500 mf1 = mfmatches(change[0])
497 mf1 = mfmatches(change[0])
501
498
502 for fn in mf2:
499 for fn in mf2:
503 if mf1.has_key(fn):
500 if mf1.has_key(fn):
504 if mf1[fn] != mf2[fn]:
501 if mf1[fn] != mf2[fn]:
505 if mf2[fn] != "" or fcmp(fn, mf1):
502 if mf2[fn] != "" or fcmp(fn, mf1):
506 c.append(fn)
503 c.append(fn)
507 del mf1[fn]
504 del mf1[fn]
508 else:
505 else:
509 a.append(fn)
506 a.append(fn)
510
507
511 d = mf1.keys()
508 d = mf1.keys()
512
509
513 for l in c, a, d, u:
510 for l in c, a, d, u:
514 l.sort()
511 l.sort()
515
512
516 return (c, a, d, u)
513 return (c, a, d, u)
517
514
518 def add(self, list):
515 def add(self, list):
519 for f in list:
516 for f in list:
520 p = self.wjoin(f)
517 p = self.wjoin(f)
521 if not os.path.exists(p):
518 if not os.path.exists(p):
522 self.ui.warn("%s does not exist!\n" % f)
519 self.ui.warn("%s does not exist!\n" % f)
523 elif not os.path.isfile(p):
520 elif not os.path.isfile(p):
524 self.ui.warn("%s not added: only files supported currently\n" % f)
521 self.ui.warn("%s not added: only files supported currently\n" % f)
525 elif self.dirstate.state(f) in 'an':
522 elif self.dirstate.state(f) in 'an':
526 self.ui.warn("%s already tracked!\n" % f)
523 self.ui.warn("%s already tracked!\n" % f)
527 else:
524 else:
528 self.dirstate.update([f], "a")
525 self.dirstate.update([f], "a")
529
526
530 def forget(self, list):
527 def forget(self, list):
531 for f in list:
528 for f in list:
532 if self.dirstate.state(f) not in 'ai':
529 if self.dirstate.state(f) not in 'ai':
533 self.ui.warn("%s not added!\n" % f)
530 self.ui.warn("%s not added!\n" % f)
534 else:
531 else:
535 self.dirstate.forget([f])
532 self.dirstate.forget([f])
536
533
537 def remove(self, list):
534 def remove(self, list):
538 for f in list:
535 for f in list:
539 p = self.wjoin(f)
536 p = self.wjoin(f)
540 if os.path.exists(p):
537 if os.path.exists(p):
541 self.ui.warn("%s still exists!\n" % f)
538 self.ui.warn("%s still exists!\n" % f)
542 elif self.dirstate.state(f) == 'a':
539 elif self.dirstate.state(f) == 'a':
543 self.ui.warn("%s never committed!\n" % f)
540 self.ui.warn("%s never committed!\n" % f)
544 self.dirstate.forget([f])
541 self.dirstate.forget([f])
545 elif f not in self.dirstate:
542 elif f not in self.dirstate:
546 self.ui.warn("%s not tracked!\n" % f)
543 self.ui.warn("%s not tracked!\n" % f)
547 else:
544 else:
548 self.dirstate.update([f], "r")
545 self.dirstate.update([f], "r")
549
546
550 def copy(self, source, dest):
547 def copy(self, source, dest):
551 p = self.wjoin(dest)
548 p = self.wjoin(dest)
552 if not os.path.exists(p):
549 if not os.path.exists(p):
553 self.ui.warn("%s does not exist!\n" % dest)
550 self.ui.warn("%s does not exist!\n" % dest)
554 elif not os.path.isfile(p):
551 elif not os.path.isfile(p):
555 self.ui.warn("copy failed: %s is not a file\n" % dest)
552 self.ui.warn("copy failed: %s is not a file\n" % dest)
556 else:
553 else:
557 if self.dirstate.state(dest) == '?':
554 if self.dirstate.state(dest) == '?':
558 self.dirstate.update([dest], "a")
555 self.dirstate.update([dest], "a")
559 self.dirstate.copy(source, dest)
556 self.dirstate.copy(source, dest)
560
557
561 def heads(self):
558 def heads(self):
562 return self.changelog.heads()
559 return self.changelog.heads()
563
560
564 # branchlookup returns a dict giving a list of branches for
561 # branchlookup returns a dict giving a list of branches for
565 # each head. A branch is defined as the tag of a node or
562 # each head. A branch is defined as the tag of a node or
566 # the branch of the node's parents. If a node has multiple
563 # the branch of the node's parents. If a node has multiple
567 # branch tags, tags are eliminated if they are visible from other
564 # branch tags, tags are eliminated if they are visible from other
568 # branch tags.
565 # branch tags.
569 #
566 #
570 # So, for this graph: a->b->c->d->e
567 # So, for this graph: a->b->c->d->e
571 # \ /
568 # \ /
572 # aa -----/
569 # aa -----/
573 # a has tag 2.6.12
570 # a has tag 2.6.12
574 # d has tag 2.6.13
571 # d has tag 2.6.13
575 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
572 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
576 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
573 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
577 # from the list.
574 # from the list.
578 #
575 #
579 # It is possible that more than one head will have the same branch tag.
576 # It is possible that more than one head will have the same branch tag.
580 # callers need to check the result for multiple heads under the same
577 # callers need to check the result for multiple heads under the same
581 # branch tag if that is a problem for them (ie checkout of a specific
578 # branch tag if that is a problem for them (ie checkout of a specific
582 # branch).
579 # branch).
583 #
580 #
584 # passing in a specific branch will limit the depth of the search
581 # passing in a specific branch will limit the depth of the search
585 # through the parents. It won't limit the branches returned in the
582 # through the parents. It won't limit the branches returned in the
586 # result though.
583 # result though.
587 def branchlookup(self, heads=None, branch=None):
584 def branchlookup(self, heads=None, branch=None):
588 if not heads:
585 if not heads:
589 heads = self.heads()
586 heads = self.heads()
590 headt = [ h for h in heads ]
587 headt = [ h for h in heads ]
591 chlog = self.changelog
588 chlog = self.changelog
592 branches = {}
589 branches = {}
593 merges = []
590 merges = []
594 seenmerge = {}
591 seenmerge = {}
595
592
596 # traverse the tree once for each head, recording in the branches
593 # traverse the tree once for each head, recording in the branches
597 # dict which tags are visible from this head. The branches
594 # dict which tags are visible from this head. The branches
598 # dict also records which tags are visible from each tag
595 # dict also records which tags are visible from each tag
599 # while we traverse.
596 # while we traverse.
600 while headt or merges:
597 while headt or merges:
601 if merges:
598 if merges:
602 n, found = merges.pop()
599 n, found = merges.pop()
603 visit = [n]
600 visit = [n]
604 else:
601 else:
605 h = headt.pop()
602 h = headt.pop()
606 visit = [h]
603 visit = [h]
607 found = [h]
604 found = [h]
608 seen = {}
605 seen = {}
609 while visit:
606 while visit:
610 n = visit.pop()
607 n = visit.pop()
611 if n in seen:
608 if n in seen:
612 continue
609 continue
613 pp = chlog.parents(n)
610 pp = chlog.parents(n)
614 tags = self.nodetags(n)
611 tags = self.nodetags(n)
615 if tags:
612 if tags:
616 for x in tags:
613 for x in tags:
617 if x == 'tip':
614 if x == 'tip':
618 continue
615 continue
619 for f in found:
616 for f in found:
620 branches.setdefault(f, {})[n] = 1
617 branches.setdefault(f, {})[n] = 1
621 branches.setdefault(n, {})[n] = 1
618 branches.setdefault(n, {})[n] = 1
622 break
619 break
623 if n not in found:
620 if n not in found:
624 found.append(n)
621 found.append(n)
625 if branch in tags:
622 if branch in tags:
626 continue
623 continue
627 seen[n] = 1
624 seen[n] = 1
628 if pp[1] != nullid and n not in seenmerge:
625 if pp[1] != nullid and n not in seenmerge:
629 merges.append((pp[1], [x for x in found]))
626 merges.append((pp[1], [x for x in found]))
630 seenmerge[n] = 1
627 seenmerge[n] = 1
631 if pp[0] != nullid:
628 if pp[0] != nullid:
632 visit.append(pp[0])
629 visit.append(pp[0])
633 # traverse the branches dict, eliminating branch tags from each
630 # traverse the branches dict, eliminating branch tags from each
634 # head that are visible from another branch tag for that head.
631 # head that are visible from another branch tag for that head.
635 out = {}
632 out = {}
636 viscache = {}
633 viscache = {}
637 for h in heads:
634 for h in heads:
638 def visible(node):
635 def visible(node):
639 if node in viscache:
636 if node in viscache:
640 return viscache[node]
637 return viscache[node]
641 ret = {}
638 ret = {}
642 visit = [node]
639 visit = [node]
643 while visit:
640 while visit:
644 x = visit.pop()
641 x = visit.pop()
645 if x in viscache:
642 if x in viscache:
646 ret.update(viscache[x])
643 ret.update(viscache[x])
647 elif x not in ret:
644 elif x not in ret:
648 ret[x] = 1
645 ret[x] = 1
649 if x in branches:
646 if x in branches:
650 visit[len(visit):] = branches[x].keys()
647 visit[len(visit):] = branches[x].keys()
651 viscache[node] = ret
648 viscache[node] = ret
652 return ret
649 return ret
653 if h not in branches:
650 if h not in branches:
654 continue
651 continue
655 # O(n^2), but somewhat limited. This only searches the
652 # O(n^2), but somewhat limited. This only searches the
656 # tags visible from a specific head, not all the tags in the
653 # tags visible from a specific head, not all the tags in the
657 # whole repo.
654 # whole repo.
658 for b in branches[h]:
655 for b in branches[h]:
659 vis = False
656 vis = False
660 for bb in branches[h].keys():
657 for bb in branches[h].keys():
661 if b != bb:
658 if b != bb:
662 if b in visible(bb):
659 if b in visible(bb):
663 vis = True
660 vis = True
664 break
661 break
665 if not vis:
662 if not vis:
666 l = out.setdefault(h, [])
663 l = out.setdefault(h, [])
667 l[len(l):] = self.nodetags(b)
664 l[len(l):] = self.nodetags(b)
668 return out
665 return out
669
666
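
Concretely, branchlookup() returns a dict keyed by head node whose values are lists of tag names. For the example graph sketched in the comment above (a tagged 2.6.12, d tagged 2.6.13, single head e), the result would be along the lines of:

{ <node of e>: ['2.6.13'] }    # 2.6.12 is dropped because it is visible from the 2.6.13 node
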
670 def branches(self, nodes):
667 def branches(self, nodes):
671 if not nodes: nodes = [self.changelog.tip()]
668 if not nodes: nodes = [self.changelog.tip()]
672 b = []
669 b = []
673 for n in nodes:
670 for n in nodes:
674 t = n
671 t = n
675 while n:
672 while n:
676 p = self.changelog.parents(n)
673 p = self.changelog.parents(n)
677 if p[1] != nullid or p[0] == nullid:
674 if p[1] != nullid or p[0] == nullid:
678 b.append((t, n, p[0], p[1]))
675 b.append((t, n, p[0], p[1]))
679 break
676 break
680 n = p[0]
677 n = p[0]
681 return b
678 return b
682
679
683 def between(self, pairs):
680 def between(self, pairs):
684 r = []
681 r = []
685
682
686 for top, bottom in pairs:
683 for top, bottom in pairs:
687 n, l, i = top, [], 0
684 n, l, i = top, [], 0
688 f = 1
685 f = 1
689
686
690 while n != bottom:
687 while n != bottom:
691 p = self.changelog.parents(n)[0]
688 p = self.changelog.parents(n)[0]
692 if i == f:
689 if i == f:
693 l.append(n)
690 l.append(n)
694 f = f * 2
691 f = f * 2
695 n = p
692 n = p
696 i += 1
693 i += 1
697
694
698 r.append(l)
695 r.append(l)
699
696
700 return r
697 return r
701
698
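
between() walks first parents from each top node toward the corresponding bottom and keeps the nodes it passes at distances 1, 2, 4, 8, ... -- the exponentially spaced samples that findincoming() later narrows down with its binary search. A self-contained sketch of the sampling pattern, using small integers in place of nodes in a purely linear history:

def sample(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n -= 1      # stand-in for following the first parent
        i += 1
    return l

print sample(20, 0)     # [19, 18, 16, 12, 4]
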
702 def newer(self, nodes):
699 def newer(self, nodes):
703 m = {}
700 m = {}
704 nl = []
701 nl = []
705 pm = {}
702 pm = {}
706 cl = self.changelog
703 cl = self.changelog
707 t = l = cl.count()
704 t = l = cl.count()
708
705
709 # find the lowest numbered node
706 # find the lowest numbered node
710 for n in nodes:
707 for n in nodes:
711 l = min(l, cl.rev(n))
708 l = min(l, cl.rev(n))
712 m[n] = 1
709 m[n] = 1
713
710
714 for i in xrange(l, t):
711 for i in xrange(l, t):
715 n = cl.node(i)
712 n = cl.node(i)
716 if n in m: # explicitly listed
713 if n in m: # explicitly listed
717 pm[n] = 1
714 pm[n] = 1
718 nl.append(n)
715 nl.append(n)
719 continue
716 continue
720 for p in cl.parents(n):
717 for p in cl.parents(n):
721 if p in pm: # parent listed
718 if p in pm: # parent listed
722 pm[n] = 1
719 pm[n] = 1
723 nl.append(n)
720 nl.append(n)
724 break
721 break
725
722
726 return nl
723 return nl
727
724
728 def findincoming(self, remote, base=None, heads=None):
725 def findincoming(self, remote, base=None, heads=None):
729 m = self.changelog.nodemap
726 m = self.changelog.nodemap
730 search = []
727 search = []
731 fetch = {}
728 fetch = {}
732 seen = {}
729 seen = {}
733 seenbranch = {}
730 seenbranch = {}
734 if base == None:
731 if base == None:
735 base = {}
732 base = {}
736
733
737 # assume we're closer to the tip than the root
734 # assume we're closer to the tip than the root
738 # and start by examining the heads
735 # and start by examining the heads
739 self.ui.status("searching for changes\n")
736 self.ui.status("searching for changes\n")
740
737
741 if not heads:
738 if not heads:
742 heads = remote.heads()
739 heads = remote.heads()
743
740
744 unknown = []
741 unknown = []
745 for h in heads:
742 for h in heads:
746 if h not in m:
743 if h not in m:
747 unknown.append(h)
744 unknown.append(h)
748 else:
745 else:
749 base[h] = 1
746 base[h] = 1
750
747
751 if not unknown:
748 if not unknown:
752 return None
749 return None
753
750
754 rep = {}
751 rep = {}
755 reqcnt = 0
752 reqcnt = 0
756
753
757 # search through remote branches
754 # search through remote branches
758 # a 'branch' here is a linear segment of history, with four parts:
755 # a 'branch' here is a linear segment of history, with four parts:
759 # head, root, first parent, second parent
756 # head, root, first parent, second parent
760 # (a branch always has two parents (or none) by definition)
757 # (a branch always has two parents (or none) by definition)
761 unknown = remote.branches(unknown)
758 unknown = remote.branches(unknown)
762 while unknown:
759 while unknown:
763 r = []
760 r = []
764 while unknown:
761 while unknown:
765 n = unknown.pop(0)
762 n = unknown.pop(0)
766 if n[0] in seen:
763 if n[0] in seen:
767 continue
764 continue
768
765
769 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
766 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
770 if n[0] == nullid:
767 if n[0] == nullid:
771 break
768 break
772 if n in seenbranch:
769 if n in seenbranch:
773 self.ui.debug("branch already found\n")
770 self.ui.debug("branch already found\n")
774 continue
771 continue
775 if n[1] and n[1] in m: # do we know the base?
772 if n[1] and n[1] in m: # do we know the base?
776 self.ui.debug("found incomplete branch %s:%s\n"
773 self.ui.debug("found incomplete branch %s:%s\n"
777 % (short(n[0]), short(n[1])))
774 % (short(n[0]), short(n[1])))
778 search.append(n) # schedule branch range for scanning
775 search.append(n) # schedule branch range for scanning
779 seenbranch[n] = 1
776 seenbranch[n] = 1
780 else:
777 else:
781 if n[1] not in seen and n[1] not in fetch:
778 if n[1] not in seen and n[1] not in fetch:
782 if n[2] in m and n[3] in m:
779 if n[2] in m and n[3] in m:
783 self.ui.debug("found new changeset %s\n" %
780 self.ui.debug("found new changeset %s\n" %
784 short(n[1]))
781 short(n[1]))
785 fetch[n[1]] = 1 # earliest unknown
782 fetch[n[1]] = 1 # earliest unknown
786 base[n[2]] = 1 # latest known
783 base[n[2]] = 1 # latest known
787 continue
784 continue
788
785
789 for a in n[2:4]:
786 for a in n[2:4]:
790 if a not in rep:
787 if a not in rep:
791 r.append(a)
788 r.append(a)
792 rep[a] = 1
789 rep[a] = 1
793
790
794 seen[n[0]] = 1
791 seen[n[0]] = 1
795
792
796 if r:
793 if r:
797 reqcnt += 1
794 reqcnt += 1
798 self.ui.debug("request %d: %s\n" %
795 self.ui.debug("request %d: %s\n" %
799 (reqcnt, " ".join(map(short, r))))
796 (reqcnt, " ".join(map(short, r))))
800 for p in range(0, len(r), 10):
797 for p in range(0, len(r), 10):
801 for b in remote.branches(r[p:p+10]):
798 for b in remote.branches(r[p:p+10]):
802 self.ui.debug("received %s:%s\n" %
799 self.ui.debug("received %s:%s\n" %
803 (short(b[0]), short(b[1])))
800 (short(b[0]), short(b[1])))
804 if b[0] in m:
801 if b[0] in m:
805 self.ui.debug("found base node %s\n" % short(b[0]))
802 self.ui.debug("found base node %s\n" % short(b[0]))
806 base[b[0]] = 1
803 base[b[0]] = 1
807 elif b[0] not in seen:
804 elif b[0] not in seen:
808 unknown.append(b)
805 unknown.append(b)
809
806
810 # do binary search on the branches we found
807 # do binary search on the branches we found
811 while search:
808 while search:
812 n = search.pop(0)
809 n = search.pop(0)
813 reqcnt += 1
810 reqcnt += 1
814 l = remote.between([(n[0], n[1])])[0]
811 l = remote.between([(n[0], n[1])])[0]
815 l.append(n[1])
812 l.append(n[1])
816 p = n[0]
813 p = n[0]
817 f = 1
814 f = 1
818 for i in l:
815 for i in l:
819 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
816 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
820 if i in m:
817 if i in m:
821 if f <= 2:
818 if f <= 2:
822 self.ui.debug("found new branch changeset %s\n" %
819 self.ui.debug("found new branch changeset %s\n" %
823 short(p))
820 short(p))
824 fetch[p] = 1
821 fetch[p] = 1
825 base[i] = 1
822 base[i] = 1
826 else:
823 else:
827 self.ui.debug("narrowed branch search to %s:%s\n"
824 self.ui.debug("narrowed branch search to %s:%s\n"
828 % (short(p), short(i)))
825 % (short(p), short(i)))
829 search.append((p, i))
826 search.append((p, i))
830 break
827 break
831 p, f = i, f * 2
828 p, f = i, f * 2
832
829
833 # sanity check our fetch list
830 # sanity check our fetch list
834 for f in fetch.keys():
831 for f in fetch.keys():
835 if f in m:
832 if f in m:
836 raise repo.RepoError("already have changeset " + short(f[:4]))
833 raise repo.RepoError("already have changeset " + short(f[:4]))
837
834
838 if base.keys() == [nullid]:
835 if base.keys() == [nullid]:
839 self.ui.warn("warning: pulling from an unrelated repository!\n")
836 self.ui.warn("warning: pulling from an unrelated repository!\n")
840
837
841 self.ui.note("found new changesets starting at " +
838 self.ui.note("found new changesets starting at " +
842 " ".join([short(f) for f in fetch]) + "\n")
839 " ".join([short(f) for f in fetch]) + "\n")
843
840
844 self.ui.debug("%d total queries\n" % reqcnt)
841 self.ui.debug("%d total queries\n" % reqcnt)
845
842
846 return fetch.keys()
843 return fetch.keys()
847
844
848 def findoutgoing(self, remote, base=None, heads=None):
845 def findoutgoing(self, remote, base=None, heads=None):
849 if base == None:
846 if base == None:
850 base = {}
847 base = {}
851 self.findincoming(remote, base, heads)
848 self.findincoming(remote, base, heads)
852
849
853 self.ui.debug("common changesets up to "
850 self.ui.debug("common changesets up to "
854 + " ".join(map(short, base.keys())) + "\n")
851 + " ".join(map(short, base.keys())) + "\n")
855
852
856 remain = dict.fromkeys(self.changelog.nodemap)
853 remain = dict.fromkeys(self.changelog.nodemap)
857
854
858 # prune everything remote has from the tree
855 # prune everything remote has from the tree
859 del remain[nullid]
856 del remain[nullid]
860 remove = base.keys()
857 remove = base.keys()
861 while remove:
858 while remove:
862 n = remove.pop(0)
859 n = remove.pop(0)
863 if n in remain:
860 if n in remain:
864 del remain[n]
861 del remain[n]
865 for p in self.changelog.parents(n):
862 for p in self.changelog.parents(n):
866 remove.append(p)
863 remove.append(p)
867
864
868 # find every node whose parents have been pruned
865 # find every node whose parents have been pruned
869 subset = []
866 subset = []
870 for n in remain:
867 for n in remain:
871 p1, p2 = self.changelog.parents(n)
868 p1, p2 = self.changelog.parents(n)
872 if p1 not in remain and p2 not in remain:
869 if p1 not in remain and p2 not in remain:
873 subset.append(n)
870 subset.append(n)
874
871
875 # this is the set of all roots we have to push
872 # this is the set of all roots we have to push
876 return subset
873 return subset
877
874
878 def pull(self, remote):
875 def pull(self, remote):
879 lock = self.lock()
876 lock = self.lock()
880
877
881 # if we have an empty repo, fetch everything
878 # if we have an empty repo, fetch everything
882 if self.changelog.tip() == nullid:
879 if self.changelog.tip() == nullid:
883 self.ui.status("requesting all changes\n")
880 self.ui.status("requesting all changes\n")
884 fetch = [nullid]
881 fetch = [nullid]
885 else:
882 else:
886 fetch = self.findincoming(remote)
883 fetch = self.findincoming(remote)
887
884
888 if not fetch:
885 if not fetch:
889 self.ui.status("no changes found\n")
886 self.ui.status("no changes found\n")
890 return 1
887 return 1
891
888
892 cg = remote.changegroup(fetch)
889 cg = remote.changegroup(fetch)
893 return self.addchangegroup(cg)
890 return self.addchangegroup(cg)
894
891
895 def push(self, remote, force=False):
892 def push(self, remote, force=False):
896 lock = remote.lock()
893 lock = remote.lock()
897
894
898 base = {}
895 base = {}
899 heads = remote.heads()
896 heads = remote.heads()
900 inc = self.findincoming(remote, base, heads)
897 inc = self.findincoming(remote, base, heads)
901 if not force and inc:
898 if not force and inc:
902 self.ui.warn("abort: unsynced remote changes!\n")
899 self.ui.warn("abort: unsynced remote changes!\n")
903 self.ui.status("(did you forget to sync? use push -f to force)\n")
900 self.ui.status("(did you forget to sync? use push -f to force)\n")
904 return 1
901 return 1
905
902
906 update = self.findoutgoing(remote, base)
903 update = self.findoutgoing(remote, base)
907 if not update:
904 if not update:
908 self.ui.status("no changes found\n")
905 self.ui.status("no changes found\n")
909 return 1
906 return 1
910 elif not force:
907 elif not force:
911 if len(heads) < len(self.changelog.heads()):
908 if len(heads) < len(self.changelog.heads()):
912 self.ui.warn("abort: push creates new remote branches!\n")
909 self.ui.warn("abort: push creates new remote branches!\n")
913 self.ui.status("(did you forget to merge?" +
910 self.ui.status("(did you forget to merge?" +
914 " use push -f to force)\n")
911 " use push -f to force)\n")
915 return 1
912 return 1
916
913
917 cg = self.changegroup(update)
914 cg = self.changegroup(update)
918 return remote.addchangegroup(cg)
915 return remote.addchangegroup(cg)
919
916
920 def changegroup(self, basenodes):
917 def changegroup(self, basenodes):
921 genread = util.chunkbuffer
918 genread = util.chunkbuffer
922
919
923 def gengroup():
920 def gengroup():
924 nodes = self.newer(basenodes)
921 nodes = self.newer(basenodes)
925
922
926 # construct the link map
923 # construct the link map
927 linkmap = {}
924 linkmap = {}
928 for n in nodes:
925 for n in nodes:
929 linkmap[self.changelog.rev(n)] = n
926 linkmap[self.changelog.rev(n)] = n
930
927
931 # construct a list of all changed files
928 # construct a list of all changed files
932 changed = {}
929 changed = {}
933 for n in nodes:
930 for n in nodes:
934 c = self.changelog.read(n)
931 c = self.changelog.read(n)
935 for f in c[3]:
932 for f in c[3]:
936 changed[f] = 1
933 changed[f] = 1
937 changed = changed.keys()
934 changed = changed.keys()
938 changed.sort()
935 changed.sort()
939
936
940 # the changegroup is changesets + manifests + all file revs
937 # the changegroup is changesets + manifests + all file revs
941 revs = [ self.changelog.rev(n) for n in nodes ]
938 revs = [ self.changelog.rev(n) for n in nodes ]
942
939
943 for y in self.changelog.group(linkmap): yield y
940 for y in self.changelog.group(linkmap): yield y
944 for y in self.manifest.group(linkmap): yield y
941 for y in self.manifest.group(linkmap): yield y
945 for f in changed:
942 for f in changed:
946 yield struct.pack(">l", len(f) + 4) + f
943 yield struct.pack(">l", len(f) + 4) + f
947 g = self.file(f).group(linkmap)
944 g = self.file(f).group(linkmap)
948 for y in g:
945 for y in g:
949 yield y
946 yield y
950
947
951 yield struct.pack(">l", 0)
948 yield struct.pack(">l", 0)
952
949
953 return genread(gengroup())
950 return genread(gengroup())
954
951
955 def addchangegroup(self, source):
952 def addchangegroup(self, source):
956
953
957 def getchunk():
954 def getchunk():
958 d = source.read(4)
955 d = source.read(4)
959 if not d: return ""
956 if not d: return ""
960 l = struct.unpack(">l", d)[0]
957 l = struct.unpack(">l", d)[0]
961 if l <= 4: return ""
958 if l <= 4: return ""
962 return source.read(l - 4)
959 return source.read(l - 4)
963
960
964 def getgroup():
961 def getgroup():
965 while 1:
962 while 1:
966 c = getchunk()
963 c = getchunk()
967 if not c: break
964 if not c: break
968 yield c
965 yield c
969
966
970 def csmap(x):
967 def csmap(x):
971 self.ui.debug("add changeset %s\n" % short(x))
968 self.ui.debug("add changeset %s\n" % short(x))
972 return self.changelog.count()
969 return self.changelog.count()
973
970
974 def revmap(x):
971 def revmap(x):
975 return self.changelog.rev(x)
972 return self.changelog.rev(x)
976
973
977 if not source: return
974 if not source: return
978 changesets = files = revisions = 0
975 changesets = files = revisions = 0
979
976
980 tr = self.transaction()
977 tr = self.transaction()
981
978
982 oldheads = len(self.changelog.heads())
979 oldheads = len(self.changelog.heads())
983
980
984 # pull off the changeset group
981 # pull off the changeset group
985 self.ui.status("adding changesets\n")
982 self.ui.status("adding changesets\n")
986 co = self.changelog.tip()
983 co = self.changelog.tip()
987 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
984 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
988 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
985 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
989
986
990 # pull off the manifest group
987 # pull off the manifest group
991 self.ui.status("adding manifests\n")
988 self.ui.status("adding manifests\n")
992 mm = self.manifest.tip()
989 mm = self.manifest.tip()
993 mo = self.manifest.addgroup(getgroup(), revmap, tr)
990 mo = self.manifest.addgroup(getgroup(), revmap, tr)
994
991
995 # process the files
992 # process the files
996 self.ui.status("adding file changes\n")
993 self.ui.status("adding file changes\n")
997 while 1:
994 while 1:
998 f = getchunk()
995 f = getchunk()
999 if not f: break
996 if not f: break
1000 self.ui.debug("adding %s revisions\n" % f)
997 self.ui.debug("adding %s revisions\n" % f)
1001 fl = self.file(f)
998 fl = self.file(f)
1002 o = fl.count()
999 o = fl.count()
1003 n = fl.addgroup(getgroup(), revmap, tr)
1000 n = fl.addgroup(getgroup(), revmap, tr)
1004 revisions += fl.count() - o
1001 revisions += fl.count() - o
1005 files += 1
1002 files += 1
1006
1003
1007 newheads = len(self.changelog.heads())
1004 newheads = len(self.changelog.heads())
1008 heads = ""
1005 heads = ""
1009 if oldheads and newheads > oldheads:
1006 if oldheads and newheads > oldheads:
1010 heads = " (+%d heads)" % (newheads - oldheads)
1007 heads = " (+%d heads)" % (newheads - oldheads)
1011
1008
1012 self.ui.status(("added %d changesets" +
1009 self.ui.status(("added %d changesets" +
1013 " with %d changes to %d files%s\n")
1010 " with %d changes to %d files%s\n")
1014 % (changesets, revisions, files, heads))
1011 % (changesets, revisions, files, heads))
1015
1012
1016 tr.close()
1013 tr.close()
1017
1014
1018 if not self.hook("changegroup"):
1015 if not self.hook("changegroup"):
1019 return 1
1016 return 1
1020
1017
1021 return
1018 return
1022
1019
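
Both changegroup() above and addchangegroup() rely on the same framing: every chunk is a 4-byte big-endian length that counts the length field itself, followed by the payload, and a zero length (anything <= 4 in getchunk) ends the current group. A self-contained Python 2 sketch of that framing, mirroring the struct.pack(">l", ...) calls above:

import struct
from StringIO import StringIO

def putchunk(data):
    return struct.pack(">l", len(data) + 4) + data

def getchunks(source):
    while 1:
        d = source.read(4)
        if not d:
            break
        l = struct.unpack(">l", d)[0]
        if l <= 4:              # empty chunk terminates the group
            break
        yield source.read(l - 4)

stream = StringIO(putchunk("foo") + putchunk("bar") + struct.pack(">l", 0))
print list(getchunks(stream))   # ['foo', 'bar']
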
1023 def update(self, node, allow=False, force=False, choose=None,
1020 def update(self, node, allow=False, force=False, choose=None,
1024 moddirstate=True):
1021 moddirstate=True):
1025 pl = self.dirstate.parents()
1022 pl = self.dirstate.parents()
1026 if not force and pl[1] != nullid:
1023 if not force and pl[1] != nullid:
1027 self.ui.warn("aborting: outstanding uncommitted merges\n")
1024 self.ui.warn("aborting: outstanding uncommitted merges\n")
1028 return 1
1025 return 1
1029
1026
1030 p1, p2 = pl[0], node
1027 p1, p2 = pl[0], node
1031 pa = self.changelog.ancestor(p1, p2)
1028 pa = self.changelog.ancestor(p1, p2)
1032 m1n = self.changelog.read(p1)[0]
1029 m1n = self.changelog.read(p1)[0]
1033 m2n = self.changelog.read(p2)[0]
1030 m2n = self.changelog.read(p2)[0]
1034 man = self.manifest.ancestor(m1n, m2n)
1031 man = self.manifest.ancestor(m1n, m2n)
1035 m1 = self.manifest.read(m1n)
1032 m1 = self.manifest.read(m1n)
1036 mf1 = self.manifest.readflags(m1n)
1033 mf1 = self.manifest.readflags(m1n)
1037 m2 = self.manifest.read(m2n)
1034 m2 = self.manifest.read(m2n)
1038 mf2 = self.manifest.readflags(m2n)
1035 mf2 = self.manifest.readflags(m2n)
1039 ma = self.manifest.read(man)
1036 ma = self.manifest.read(man)
1040 mfa = self.manifest.readflags(man)
1037 mfa = self.manifest.readflags(man)
1041
1038
1042 (c, a, d, u) = self.changes()
1039 (c, a, d, u) = self.changes()
1043
1040
1044 # is this a jump, or a merge? i.e. is there a linear path
1041 # is this a jump, or a merge? i.e. is there a linear path
1045 # from p1 to p2?
1042 # from p1 to p2?
1046 linear_path = (pa == p1 or pa == p2)
1043 linear_path = (pa == p1 or pa == p2)
1047
1044
1048 # resolve the manifest to determine which files
1045 # resolve the manifest to determine which files
1049 # we care about merging
1046 # we care about merging
1050 self.ui.note("resolving manifests\n")
1047 self.ui.note("resolving manifests\n")
1051 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1048 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1052 (force, allow, moddirstate, linear_path))
1049 (force, allow, moddirstate, linear_path))
1053 self.ui.debug(" ancestor %s local %s remote %s\n" %
1050 self.ui.debug(" ancestor %s local %s remote %s\n" %
1054 (short(man), short(m1n), short(m2n)))
1051 (short(man), short(m1n), short(m2n)))
1055
1052
1056 merge = {}
1053 merge = {}
1057 get = {}
1054 get = {}
1058 remove = []
1055 remove = []
1059
1056
1060 # construct a working dir manifest
1057 # construct a working dir manifest
1061 mw = m1.copy()
1058 mw = m1.copy()
1062 mfw = mf1.copy()
1059 mfw = mf1.copy()
1063 umap = dict.fromkeys(u)
1060 umap = dict.fromkeys(u)
1064
1061
1065 for f in a + c + u:
1062 for f in a + c + u:
1066 mw[f] = ""
1063 mw[f] = ""
1067 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1064 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1068
1065
1069 for f in d:
1066 for f in d:
1070 if f in mw: del mw[f]
1067 if f in mw: del mw[f]
1071
1068
1072 # If we're jumping between revisions (as opposed to merging),
1069 # If we're jumping between revisions (as opposed to merging),
1073 # and if neither the working directory nor the target rev has
1070 # and if neither the working directory nor the target rev has
1074 # the file, then we need to remove it from the dirstate, to
1071 # the file, then we need to remove it from the dirstate, to
1075 # prevent the dirstate from listing the file when it is no
1072 # prevent the dirstate from listing the file when it is no
1076 # longer in the manifest.
1073 # longer in the manifest.
1077 if moddirstate and linear_path and f not in m2:
1074 if moddirstate and linear_path and f not in m2:
1078 self.dirstate.forget((f,))
1075 self.dirstate.forget((f,))
1079
1076
1080 # Compare manifests
1077 # Compare manifests
1081 for f, n in mw.iteritems():
1078 for f, n in mw.iteritems():
1082 if choose and not choose(f): continue
1079 if choose and not choose(f): continue
1083 if f in m2:
1080 if f in m2:
1084 s = 0
1081 s = 0
1085
1082
1086 # is the wfile new since m1, and match m2?
1083 # is the wfile new since m1, and match m2?
1087 if f not in m1:
1084 if f not in m1:
1088 t1 = self.wread(f)
1085 t1 = self.wread(f)
1089 t2 = self.file(f).read(m2[f])
1086 t2 = self.file(f).read(m2[f])
1090 if cmp(t1, t2) == 0:
1087 if cmp(t1, t2) == 0:
1091 n = m2[f]
1088 n = m2[f]
1092 del t1, t2
1089 del t1, t2
1093
1090
1094 # are files different?
1091 # are files different?
1095 if n != m2[f]:
1092 if n != m2[f]:
1096 a = ma.get(f, nullid)
1093 a = ma.get(f, nullid)
1097 # are both different from the ancestor?
1094 # are both different from the ancestor?
1098 if n != a and m2[f] != a:
1095 if n != a and m2[f] != a:
1099 self.ui.debug(" %s versions differ, resolve\n" % f)
1096 self.ui.debug(" %s versions differ, resolve\n" % f)
1100 # merge executable bits
1097 # merge executable bits
1101 # "if we changed or they changed, change in merge"
1098 # "if we changed or they changed, change in merge"
1102 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1099 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1103 mode = ((a^b) | (a^c)) ^ a
1100 mode = ((a^b) | (a^c)) ^ a
1104 merge[f] = (m1.get(f, nullid), m2[f], mode)
1101 merge[f] = (m1.get(f, nullid), m2[f], mode)
1105 s = 1
1102 s = 1
1106 # are we clobbering?
1103 # are we clobbering?
1107 # is remote's version newer?
1104 # is remote's version newer?
1108 # or are we going back in time?
1105 # or are we going back in time?
1109 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1106 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1110 self.ui.debug(" remote %s is newer, get\n" % f)
1107 self.ui.debug(" remote %s is newer, get\n" % f)
1111 get[f] = m2[f]
1108 get[f] = m2[f]
1112 s = 1
1109 s = 1
1113 elif f in umap:
1110 elif f in umap:
1114 # this unknown file is the same as the checkout
1111 # this unknown file is the same as the checkout
1115 get[f] = m2[f]
1112 get[f] = m2[f]
1116
1113
1117 if not s and mfw[f] != mf2[f]:
1114 if not s and mfw[f] != mf2[f]:
1118 if force:
1115 if force:
1119 self.ui.debug(" updating permissions for %s\n" % f)
1116 self.ui.debug(" updating permissions for %s\n" % f)
1120 util.set_exec(self.wjoin(f), mf2[f])
1117 util.set_exec(self.wjoin(f), mf2[f])
1121 else:
1118 else:
1122 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1119 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1123 mode = ((a^b) | (a^c)) ^ a
1120 mode = ((a^b) | (a^c)) ^ a
1124 if mode != b:
1121 if mode != b:
1125 self.ui.debug(" updating permissions for %s\n" % f)
1122 self.ui.debug(" updating permissions for %s\n" % f)
1126 util.set_exec(self.wjoin(f), mode)
1123 util.set_exec(self.wjoin(f), mode)
1127 del m2[f]
1124 del m2[f]
1128 elif f in ma:
1125 elif f in ma:
1129 if n != ma[f]:
1126 if n != ma[f]:
1130 r = "d"
1127 r = "d"
1131 if not force and (linear_path or allow):
1128 if not force and (linear_path or allow):
1132 r = self.ui.prompt(
1129 r = self.ui.prompt(
1133 (" local changed %s which remote deleted\n" % f) +
1130 (" local changed %s which remote deleted\n" % f) +
1134 "(k)eep or (d)elete?", "[kd]", "k")
1131 "(k)eep or (d)elete?", "[kd]", "k")
1135 if r == "d":
1132 if r == "d":
1136 remove.append(f)
1133 remove.append(f)
1137 else:
1134 else:
1138 self.ui.debug("other deleted %s\n" % f)
1135 self.ui.debug("other deleted %s\n" % f)
1139 remove.append(f) # other deleted it
1136 remove.append(f) # other deleted it
1140 else:
1137 else:
1141 # file is created on branch or in working directory
1138 # file is created on branch or in working directory
1142 if force and f not in umap:
1139 if force and f not in umap:
1143 self.ui.debug("remote deleted %s, clobbering\n" % f)
1140 self.ui.debug("remote deleted %s, clobbering\n" % f)
1144 remove.append(f)
1141 remove.append(f)
1145 elif n == m1.get(f, nullid): # same as parent
1142 elif n == m1.get(f, nullid): # same as parent
1146 if p2 == pa: # going backwards?
1143 if p2 == pa: # going backwards?
1147 self.ui.debug("remote deleted %s\n" % f)
1144 self.ui.debug("remote deleted %s\n" % f)
1148 remove.append(f)
1145 remove.append(f)
1149 else:
1146 else:
1150 self.ui.debug("local modified %s, keeping\n" % f)
1147 self.ui.debug("local modified %s, keeping\n" % f)
1151 else:
1148 else:
1152 self.ui.debug("working dir created %s, keeping\n" % f)
1149 self.ui.debug("working dir created %s, keeping\n" % f)
1153
1150
1154 for f, n in m2.iteritems():
1151 for f, n in m2.iteritems():
1155 if choose and not choose(f): continue
1152 if choose and not choose(f): continue
1156 if f[0] == "/": continue
1153 if f[0] == "/": continue
1157 if f in ma and n != ma[f]:
1154 if f in ma and n != ma[f]:
1158 r = "k"
1155 r = "k"
1159 if not force and (linear_path or allow):
1156 if not force and (linear_path or allow):
1160 r = self.ui.prompt(
1157 r = self.ui.prompt(
1161 ("remote changed %s which local deleted\n" % f) +
1158 ("remote changed %s which local deleted\n" % f) +
1162 "(k)eep or (d)elete?", "[kd]", "k")
1159 "(k)eep or (d)elete?", "[kd]", "k")
1163 if r == "k": get[f] = n
1160 if r == "k": get[f] = n
1164 elif f not in ma:
1161 elif f not in ma:
1165 self.ui.debug("remote created %s\n" % f)
1162 self.ui.debug("remote created %s\n" % f)
1166 get[f] = n
1163 get[f] = n
1167 else:
1164 else:
1168 if force or p2 == pa: # going backwards?
1165 if force or p2 == pa: # going backwards?
1169 self.ui.debug("local deleted %s, recreating\n" % f)
1166 self.ui.debug("local deleted %s, recreating\n" % f)
1170 get[f] = n
1167 get[f] = n
1171 else:
1168 else:
1172 self.ui.debug("local deleted %s\n" % f)
1169 self.ui.debug("local deleted %s\n" % f)
1173
1170
1174 del mw, m1, m2, ma
1171 del mw, m1, m2, ma
1175
1172
1176 if force:
1173 if force:
1177 for f in merge:
1174 for f in merge:
1178 get[f] = merge[f][1]
1175 get[f] = merge[f][1]
1179 merge = {}
1176 merge = {}
1180
1177
1181 if linear_path or force:
1178 if linear_path or force:
1182 # we don't need to do any magic, just jump to the new rev
1179 # we don't need to do any magic, just jump to the new rev
1183 branch_merge = False
1180 branch_merge = False
1184 p1, p2 = p2, nullid
1181 p1, p2 = p2, nullid
1185 else:
1182 else:
1186 if not allow:
1183 if not allow:
1187 self.ui.status("this update spans a branch" +
1184 self.ui.status("this update spans a branch" +
1188 " affecting the following files:\n")
1185 " affecting the following files:\n")
1189 fl = merge.keys() + get.keys()
1186 fl = merge.keys() + get.keys()
1190 fl.sort()
1187 fl.sort()
1191 for f in fl:
1188 for f in fl:
1192 cf = ""
1189 cf = ""
1193 if f in merge: cf = " (resolve)"
1190 if f in merge: cf = " (resolve)"
1194 self.ui.status(" %s%s\n" % (f, cf))
1191 self.ui.status(" %s%s\n" % (f, cf))
1195 self.ui.warn("aborting update spanning branches!\n")
1192 self.ui.warn("aborting update spanning branches!\n")
1196 self.ui.status("(use update -m to merge across branches" +
1193 self.ui.status("(use update -m to merge across branches" +
1197 " or -C to lose changes)\n")
1194 " or -C to lose changes)\n")
1198 return 1
1195 return 1
1199 branch_merge = True
1196 branch_merge = True
1200
1197
1201 if moddirstate:
1198 if moddirstate:
1202 self.dirstate.setparents(p1, p2)
1199 self.dirstate.setparents(p1, p2)
1203
1200
1204 # get the files we don't need to change
1201 # get the files we don't need to change
1205 files = get.keys()
1202 files = get.keys()
1206 files.sort()
1203 files.sort()
1207 for f in files:
1204 for f in files:
1208 if f[0] == "/": continue
1205 if f[0] == "/": continue
1209 self.ui.note("getting %s\n" % f)
1206 self.ui.note("getting %s\n" % f)
1210 t = self.file(f).read(get[f])
1207 t = self.file(f).read(get[f])
1211 try:
1208 try:
1212 self.wwrite(f, t)
1209 self.wwrite(f, t)
1213 except IOError:
1210 except IOError:
1214 os.makedirs(os.path.dirname(self.wjoin(f)))
1211 os.makedirs(os.path.dirname(self.wjoin(f)))
1215 self.wwrite(f, t)
1212 self.wwrite(f, t)
1216 util.set_exec(self.wjoin(f), mf2[f])
1213 util.set_exec(self.wjoin(f), mf2[f])
1217 if moddirstate:
1214 if moddirstate:
1218 if branch_merge:
1215 if branch_merge:
1219 self.dirstate.update([f], 'n', st_mtime=-1)
1216 self.dirstate.update([f], 'n', st_mtime=-1)
1220 else:
1217 else:
1221 self.dirstate.update([f], 'n')
1218 self.dirstate.update([f], 'n')
1222
1219
1223 # merge the tricky bits
1220 # merge the tricky bits
1224 files = merge.keys()
1221 files = merge.keys()
1225 files.sort()
1222 files.sort()
1226 for f in files:
1223 for f in files:
1227 self.ui.status("merging %s\n" % f)
1224 self.ui.status("merging %s\n" % f)
1228 my, other, flag = merge[f]
1225 my, other, flag = merge[f]
1229 self.merge3(f, my, other)
1226 self.merge3(f, my, other)
1230 util.set_exec(self.wjoin(f), flag)
1227 util.set_exec(self.wjoin(f), flag)
1231 if moddirstate:
1228 if moddirstate:
1232 if branch_merge:
1229 if branch_merge:
1233 # We've done a branch merge, mark this file as merged
1230 # We've done a branch merge, mark this file as merged
1234 # so that we properly record the merger later
1231 # so that we properly record the merger later
1235 self.dirstate.update([f], 'm')
1232 self.dirstate.update([f], 'm')
1236 else:
1233 else:
1237 # We've update-merged a locally modified file, so
1234 # We've update-merged a locally modified file, so
1238 # we set the dirstate to emulate a normal checkout
1235 # we set the dirstate to emulate a normal checkout
1239 # of that file some time in the past. Thus our
1236 # of that file some time in the past. Thus our
1240 # merge will appear as a normal local file
1237 # merge will appear as a normal local file
1241 # modification.
1238 # modification.
1242 f_len = len(self.file(f).read(other))
1239 f_len = len(self.file(f).read(other))
1243 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1240 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1244
1241
1245 remove.sort()
1242 remove.sort()
1246 for f in remove:
1243 for f in remove:
1247 self.ui.note("removing %s\n" % f)
1244 self.ui.note("removing %s\n" % f)
1248 try:
1245 try:
1249 os.unlink(self.wjoin(f))
1246 os.unlink(self.wjoin(f))
1250 except OSError, inst:
1247 except OSError, inst:
1251 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1248 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1252 # try removing directories that might now be empty
1249 # try removing directories that might now be empty
1253 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1250 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1254 except: pass
1251 except: pass
1255 if moddirstate:
1252 if moddirstate:
1256 if branch_merge:
1253 if branch_merge:
1257 self.dirstate.update(remove, 'r')
1254 self.dirstate.update(remove, 'r')
1258 else:
1255 else:
1259 self.dirstate.forget(remove)
1256 self.dirstate.forget(remove)
1260
1257
1261 def merge3(self, fn, my, other):
1258 def merge3(self, fn, my, other):
1262 """perform a 3-way merge in the working directory"""
1259 """perform a 3-way merge in the working directory"""
1263
1260
1264 def temp(prefix, node):
1261 def temp(prefix, node):
1265 pre = "%s~%s." % (os.path.basename(fn), prefix)
1262 pre = "%s~%s." % (os.path.basename(fn), prefix)
1266 (fd, name) = tempfile.mkstemp("", pre)
1263 (fd, name) = tempfile.mkstemp("", pre)
1267 f = os.fdopen(fd, "wb")
1264 f = os.fdopen(fd, "wb")
1268 self.wwrite(fn, fl.read(node), f)
1265 self.wwrite(fn, fl.read(node), f)
1269 f.close()
1266 f.close()
1270 return name
1267 return name
1271
1268
1272 fl = self.file(fn)
1269 fl = self.file(fn)
1273 base = fl.ancestor(my, other)
1270 base = fl.ancestor(my, other)
1274 a = self.wjoin(fn)
1271 a = self.wjoin(fn)
1275 b = temp("base", base)
1272 b = temp("base", base)
1276 c = temp("other", other)
1273 c = temp("other", other)
1277
1274
1278 self.ui.note("resolving %s\n" % fn)
1275 self.ui.note("resolving %s\n" % fn)
1279 self.ui.debug("file %s: other %s ancestor %s\n" %
1276 self.ui.debug("file %s: other %s ancestor %s\n" %
1280 (fn, short(other), short(base)))
1277 (fn, short(other), short(base)))
1281
1278
1282 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1279 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1283 or "hgmerge")
1280 or "hgmerge")
1284 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1281 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1285 if r:
1282 if r:
1286 self.ui.warn("merging %s failed!\n" % fn)
1283 self.ui.warn("merging %s failed!\n" % fn)
1287
1284
1288 os.unlink(b)
1285 os.unlink(b)
1289 os.unlink(c)
1286 os.unlink(c)
1290
1287
1291 def verify(self):
1288 def verify(self):
1292 filelinkrevs = {}
1289 filelinkrevs = {}
1293 filenodes = {}
1290 filenodes = {}
1294 changesets = revisions = files = 0
1291 changesets = revisions = files = 0
1295 errors = 0
1292 errors = 0
1296
1293
1297 seen = {}
1294 seen = {}
1298 self.ui.status("checking changesets\n")
1295 self.ui.status("checking changesets\n")
1299 for i in range(self.changelog.count()):
1296 for i in range(self.changelog.count()):
1300 changesets += 1
1297 changesets += 1
1301 n = self.changelog.node(i)
1298 n = self.changelog.node(i)
1302 if n in seen:
1299 if n in seen:
1303 self.ui.warn("duplicate changeset at revision %d\n" % i)
1300 self.ui.warn("duplicate changeset at revision %d\n" % i)
1304 errors += 1
1301 errors += 1
1305 seen[n] = 1
1302 seen[n] = 1
1306
1303
1307 for p in self.changelog.parents(n):
1304 for p in self.changelog.parents(n):
1308 if p not in self.changelog.nodemap:
1305 if p not in self.changelog.nodemap:
1309 self.ui.warn("changeset %s has unknown parent %s\n" %
1306 self.ui.warn("changeset %s has unknown parent %s\n" %
1310 (short(n), short(p)))
1307 (short(n), short(p)))
1311 errors += 1
1308 errors += 1
1312 try:
1309 try:
1313 changes = self.changelog.read(n)
1310 changes = self.changelog.read(n)
1314 except Exception, inst:
1311 except Exception, inst:
1315 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1312 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1316 errors += 1
1313 errors += 1
1317
1314
1318 for f in changes[3]:
1315 for f in changes[3]:
1319 filelinkrevs.setdefault(f, []).append(i)
1316 filelinkrevs.setdefault(f, []).append(i)
1320
1317
1321 seen = {}
1318 seen = {}
1322 self.ui.status("checking manifests\n")
1319 self.ui.status("checking manifests\n")
1323 for i in range(self.manifest.count()):
1320 for i in range(self.manifest.count()):
1324 n = self.manifest.node(i)
1321 n = self.manifest.node(i)
1325 if n in seen:
1322 if n in seen:
1326 self.ui.warn("duplicate manifest at revision %d\n" % i)
1323 self.ui.warn("duplicate manifest at revision %d\n" % i)
1327 errors += 1
1324 errors += 1
1328 seen[n] = 1
1325 seen[n] = 1
1329
1326
1330 for p in self.manifest.parents(n):
1327 for p in self.manifest.parents(n):
1331 if p not in self.manifest.nodemap:
1328 if p not in self.manifest.nodemap:
1332 self.ui.warn("manifest %s has unknown parent %s\n" %
1329 self.ui.warn("manifest %s has unknown parent %s\n" %
1333 (short(n), short(p)))
1330 (short(n), short(p)))
1334 errors += 1
1331 errors += 1
1335
1332
1336 try:
1333 try:
1337 delta = mdiff.patchtext(self.manifest.delta(n))
1334 delta = mdiff.patchtext(self.manifest.delta(n))
1338 except KeyboardInterrupt:
1335 except KeyboardInterrupt:
1339 self.ui.warn("interrupted")
1336 self.ui.warn("interrupted")
1340 raise
1337 raise
1341 except Exception, inst:
1338 except Exception, inst:
1342 self.ui.warn("unpacking manifest %s: %s\n"
1339 self.ui.warn("unpacking manifest %s: %s\n"
1343 % (short(n), inst))
1340 % (short(n), inst))
1344 errors += 1
1341 errors += 1
1345
1342
1346 ff = [ l.split('\0') for l in delta.splitlines() ]
1343 ff = [ l.split('\0') for l in delta.splitlines() ]
1347 for f, fn in ff:
1344 for f, fn in ff:
1348 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1345 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1349
1346
1350 self.ui.status("crosschecking files in changesets and manifests\n")
1347 self.ui.status("crosschecking files in changesets and manifests\n")
1351 for f in filenodes:
1348 for f in filenodes:
1352 if f not in filelinkrevs:
1349 if f not in filelinkrevs:
1353 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1350 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1354 errors += 1
1351 errors += 1
1355
1352
1356 for f in filelinkrevs:
1353 for f in filelinkrevs:
1357 if f not in filenodes:
1354 if f not in filenodes:
1358 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1355 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1359 errors += 1
1356 errors += 1
1360
1357
1361 self.ui.status("checking files\n")
1358 self.ui.status("checking files\n")
1362 ff = filenodes.keys()
1359 ff = filenodes.keys()
1363 ff.sort()
1360 ff.sort()
1364 for f in ff:
1361 for f in ff:
1365 if f == "/dev/null": continue
1362 if f == "/dev/null": continue
1366 files += 1
1363 files += 1
1367 fl = self.file(f)
1364 fl = self.file(f)
1368 nodes = { nullid: 1 }
1365 nodes = { nullid: 1 }
1369 seen = {}
1366 seen = {}
1370 for i in range(fl.count()):
1367 for i in range(fl.count()):
1371 revisions += 1
1368 revisions += 1
1372 n = fl.node(i)
1369 n = fl.node(i)
1373
1370
1374 if n in seen:
1371 if n in seen:
1375 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1372 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1376 errors += 1
1373 errors += 1
1377
1374
1378 if n not in filenodes[f]:
1375 if n not in filenodes[f]:
1379 self.ui.warn("%s: %d:%s not in manifests\n"
1376 self.ui.warn("%s: %d:%s not in manifests\n"
1380 % (f, i, short(n)))
1377 % (f, i, short(n)))
1381 errors += 1
1378 errors += 1
1382 else:
1379 else:
1383 del filenodes[f][n]
1380 del filenodes[f][n]
1384
1381
1385 flr = fl.linkrev(n)
1382 flr = fl.linkrev(n)
1386 if flr not in filelinkrevs[f]:
1383 if flr not in filelinkrevs[f]:
1387 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1384 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1388 % (f, short(n), fl.linkrev(n)))
1385 % (f, short(n), fl.linkrev(n)))
1389 errors += 1
1386 errors += 1
1390 else:
1387 else:
1391 filelinkrevs[f].remove(flr)
1388 filelinkrevs[f].remove(flr)
1392
1389
1393 # verify contents
1390 # verify contents
1394 try:
1391 try:
1395 t = fl.read(n)
1392 t = fl.read(n)
1396 except Exception, inst:
1393 except Exception, inst:
1397 self.ui.warn("unpacking file %s %s: %s\n"
1394 self.ui.warn("unpacking file %s %s: %s\n"
1398 % (f, short(n), inst))
1395 % (f, short(n), inst))
1399 errors += 1
1396 errors += 1
1400
1397
1401 # verify parents
1398 # verify parents
1402 (p1, p2) = fl.parents(n)
1399 (p1, p2) = fl.parents(n)
1403 if p1 not in nodes:
1400 if p1 not in nodes:
1404 self.ui.warn("file %s:%s unknown parent 1 %s" %
1401 self.ui.warn("file %s:%s unknown parent 1 %s" %
1405 (f, short(n), short(p1)))
1402 (f, short(n), short(p1)))
1406 errors += 1
1403 errors += 1
1407 if p2 not in nodes:
1404 if p2 not in nodes:
1408 self.ui.warn("file %s:%s unknown parent 2 %s" %
1405 self.ui.warn("file %s:%s unknown parent 2 %s" %
1409 (f, short(n), short(p1)))
1406 (f, short(n), short(p1)))
1410 errors += 1
1407 errors += 1
1411 nodes[n] = 1
1408 nodes[n] = 1
1412
1409
1413 # cross-check
1410 # cross-check
1414 for node in filenodes[f]:
1411 for node in filenodes[f]:
1415 self.ui.warn("node %s in manifests not in %s\n"
1412 self.ui.warn("node %s in manifests not in %s\n"
1416 % (hex(node), f))
1413 % (hex(node), f))
1417 errors += 1
1414 errors += 1
1418
1415
1419 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1416 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1420 (files, changesets, revisions))
1417 (files, changesets, revisions))
1421
1418
1422 if errors:
1419 if errors:
1423 self.ui.warn("%d integrity errors encountered!\n" % errors)
1420 self.ui.warn("%d integrity errors encountered!\n" % errors)
1424 return 1
1421 return 1
@@ -1,37 +1,38 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 mkdir test
3 mkdir test
4 cd test
4 cd test
5 hg init
5 hg init
6 echo foo>foo
6 echo foo>foo
7 hg addremove
7 hg addremove
8 hg commit -m 1
8 hg commit -m 1
9 echo bar>bar
9 echo bar>bar
10 hg addremove
10 hg addremove
11 hg commit -m 2
11 hg commit -m 2
12 mkdir baz
12 mkdir baz
13 echo bletch>baz/bletch
13 echo bletch>baz/bletch
14 hg addremove
14 hg addremove
15 hg commit -m 3
15 hg commit -m 3
16 echo "[web]" >> .hg/hgrc
16 echo "name = test-archive" >> .hg/hgrc
17 echo "name = test-archive" >> .hg/hgrc
17 echo "allowzip = true" >> .hg/hgrc
18 echo "allowzip = true" >> .hg/hgrc
18 echo "allowgz = true" >> .hg/hgrc
19 echo "allowgz = true" >> .hg/hgrc
19 echo "allowbz2 = true" >> .hg/hgrc
20 echo "allowbz2 = true" >> .hg/hgrc
20 hg serve -p 20059 > /dev/null &
21 hg serve -p 20059 > /dev/null &
21 sleep 1 # wait for server to be started
22 sleep 1 # wait for server to be started
22
23
23 TIP=`hg id -v | cut -f1 -d' '`
24 TIP=`hg id -v | cut -f1 -d' '`
24 QTIP=`hg id -q`
25 QTIP=`hg id -q`
25 cat > getarchive.py <<EOF
26 cat > getarchive.py <<EOF
26 import sys, urllib2
27 import sys, urllib2
27 node, archive = sys.argv[1:]
28 node, archive = sys.argv[1:]
28 f = urllib2.urlopen('http://127.0.0.1:20059/?cmd=archive;node=%s;type=%s'
29 f = urllib2.urlopen('http://127.0.0.1:20059/?cmd=archive;node=%s;type=%s'
29 % (node, archive))
30 % (node, archive))
30 sys.stdout.write(f.read())
31 sys.stdout.write(f.read())
31 EOF
32 EOF
32 http_proxy= python getarchive.py "$TIP" gz | tar tzf - | sed "s/$QTIP/TIP/"
33 http_proxy= python getarchive.py "$TIP" gz | tar tzf - | sed "s/$QTIP/TIP/"
33 http_proxy= python getarchive.py "$TIP" bz2 | tar tjf - | sed "s/$QTIP/TIP/"
34 http_proxy= python getarchive.py "$TIP" bz2 | tar tjf - | sed "s/$QTIP/TIP/"
34 http_proxy= python getarchive.py "$TIP" zip > archive.zip
35 http_proxy= python getarchive.py "$TIP" zip > archive.zip
35 unzip -t archive.zip | sed "s/$QTIP/TIP/"
36 unzip -t archive.zip | sed "s/$QTIP/TIP/"
36
37
37 kill $!
38 kill $!