##// END OF EJS Templates
make readconfig take a filename instead of a file pointer as argument...
Benoit Boissinot -
r1473:7d66ce98 default
parent child Browse files
Show More
@@ -0,0 +1,7
1 #!/bin/sh
2
3 mkdir t
4 cd t
5 hg init
6 echo "invalid" > .hg/hgrc
7 hg status 2>&1 |sed -e "s:/.*\(/t/.*\):...\1:"
@@ -1,1023 +1,1023
1 # hgweb.py - web interface to a mercurial repository
1 # hgweb.py - web interface to a mercurial repository
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, cgi, sys
9 import os, cgi, sys
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
13 demandload(globals(), "mimetypes")
13 demandload(globals(), "mimetypes")
14 from node import *
14 from node import *
15 from i18n import gettext as _
15 from i18n import gettext as _
16
16
def templatepath():
    """Return the directory holding the HTML templates, searching
    relative to this module; None if no candidate directory exists."""
    here = os.path.dirname(__file__)
    for candidate in ("templates", "../templates"):
        full = os.path.join(here, candidate)
        if os.path.isdir(full):
            return full
22
22
23 def age(x):
23 def age(x):
24 def plural(t, c):
24 def plural(t, c):
25 if c == 1:
25 if c == 1:
26 return t
26 return t
27 return t + "s"
27 return t + "s"
28 def fmt(t, c):
28 def fmt(t, c):
29 return "%d %s" % (c, plural(t, c))
29 return "%d %s" % (c, plural(t, c))
30
30
31 now = time.time()
31 now = time.time()
32 then = x[0]
32 then = x[0]
33 delta = max(1, int(now - then))
33 delta = max(1, int(now - then))
34
34
35 scales = [["second", 1],
35 scales = [["second", 1],
36 ["minute", 60],
36 ["minute", 60],
37 ["hour", 3600],
37 ["hour", 3600],
38 ["day", 3600 * 24],
38 ["day", 3600 * 24],
39 ["week", 3600 * 24 * 7],
39 ["week", 3600 * 24 * 7],
40 ["month", 3600 * 24 * 30],
40 ["month", 3600 * 24 * 30],
41 ["year", 3600 * 24 * 365]]
41 ["year", 3600 * 24 * 365]]
42
42
43 scales.reverse()
43 scales.reverse()
44
44
45 for t, s in scales:
45 for t, s in scales:
46 n = delta / s
46 n = delta / s
47 if n >= 2 or s == 1:
47 if n >= 2 or s == 1:
48 return fmt(t, n)
48 return fmt(t, n)
49
49
def nl2br(text):
    """Insert an HTML <br/> tag before every newline in text."""
    return '<br/>\n'.join(text.split('\n'))
52
52
def obfuscate(text):
    """Encode every character of text as an HTML numeric entity,
    hiding e-mail addresses from naive harvesters."""
    entities = []
    for ch in text:
        entities.append('&#%d;' % ord(ch))
    return ''.join(entities)
55
55
def up(p):
    """Return the parent of path p as a "/"-terminated string,
    or "/" when p is already at the root."""
    # normalize: leading slash on, trailing slash off
    if p[0] != "/":
        p = "/" + p
    if p[-1] == "/":
        p = p[:-1]
    parent = os.path.dirname(p)
    return "/" if parent == "/" else parent + "/"
65
65
def get_mtime(repo_path):
    """Return the modification time of the repository's changelog
    (or of the .hg directory if no changelog exists yet), used to
    detect when the repository changed on disk."""
    hg_path = os.path.join(repo_path, ".hg")
    cl_path = os.path.join(hg_path, "00changelog.i")
    if not os.path.exists(cl_path):
        # empty repo: no changelog has been written yet
        return os.stat(hg_path).st_mtime
    return os.stat(cl_path).st_mtime
73
73
class hgrequest:
    """A CGI-style HTTP request: wraps the input/output streams, the
    environment, and the parsed form/query parameters."""
    def __init__(self, inp=None, out=None, env=None):
        # default to process stdio/environ so the class works both under
        # plain CGI and when driven by the built-in HTTP server
        self.inp = inp or sys.stdin
        self.out = out or sys.stdout
        self.env = env or os.environ
        # parsed query string / POST body; blank values are kept so
        # parameters like "?style=" still appear in the dict
        self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)

    def write(self, *things):
        """Write each argument to the client, recursively flattening
        iterables (templates yield nested generators of strings)."""
        for thing in things:
            if hasattr(thing, "__iter__"):
                for part in thing:
                    self.write(part)
            else:
                try:
                    self.out.write(str(thing))
                except socket.error, inst:
                    # a client disconnecting mid-response is routine and
                    # silently ignored; anything else is a real error
                    if inst[0] != errno.ECONNRESET:
                        raise

    def header(self, headers=[('Content-type','text/html')]):
        """Emit HTTP response headers followed by the blank separator
        line.  NOTE: the default list is mutable but only ever read."""
        for header in headers:
            self.out.write("%s: %s\r\n" % header)
        self.out.write("\r\n")

    def httphdr(self, type, file="", size=0):
        """Emit headers for a download: content type, plus an attachment
        disposition when a filename is given and a length when known."""

        headers = [('Content-type', type)]
        if file:
            headers.append(('Content-disposition', 'attachment; filename=%s' % file))
        if size > 0:
            headers.append(('Content-length', str(size)))
        self.header(headers)
106
106
class templater:
    """Tiny template engine.

    A map file associates template names either with inline template
    strings (name = "text") or with file names (name = path) resolved
    relative to the map file's directory.  Templates substitute
    "#key#" markers, optionally expanding sub-templates per item
    ("#key%subtemplate#") or piping through filters ("#key|filter#").
    Rendering is lazy: calls yield string fragments.
    """
    def __init__(self, mapfile, filters={}, defaults={}):
        # name -> template text (inline entries land here directly;
        # file-backed templates are read on first use)
        self.cache = {}
        # name -> template file path
        self.map = {}
        self.base = os.path.dirname(mapfile)
        self.filters = filters
        self.defaults = defaults

        for l in file(mapfile):
            # quoted value: inline template text
            m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
            if m:
                self.cache[m.group(1)] = m.group(2)
            else:
                # bare value: file name relative to the map file
                m = re.match(r'(\S+)\s*=\s*(\S+)', l)
                if m:
                    self.map[m.group(1)] = os.path.join(self.base, m.group(2))
                else:
                    raise LookupError(_("unknown map entry '%s'") % l)

    def __call__(self, t, **map):
        """Render template named t with the given variables (layered on
        top of the defaults); returns a generator of fragments."""
        m = self.defaults.copy()
        m.update(map)
        try:
            tmpl = self.cache[t]
        except KeyError:
            # first use of a file-backed template: load and memoize
            tmpl = self.cache[t] = file(self.map[t]).read()
        return self.template(tmpl, self.filters, **m)

    def template(self, tmpl, filters={}, **map):
        """Yield the expansion of template text tmpl: literal spans
        interleaved with substituted "#key[%sub][|filter...]#" markers."""
        while tmpl:
            m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
            if m:
                yield tmpl[:m.start(0)]
                v = map.get(m.group(1), "")
                # callables are lazy values; invoke with current vars
                v = callable(v) and v(**map) or v

                format = m.group(2)   # "%subtemplate" part, if any
                fl = m.group(4)       # "|filter|filter" part, if any

                if format:
                    # expand the sub-template once per item of v,
                    # with the item's keys layered over current vars
                    q = v.__iter__
                    for i in q():
                        lm = map.copy()
                        lm.update(i)
                        yield self(format[1:], **lm)

                    v = ""

                elif fl:
                    # apply filters left to right
                    for f in fl.split("|")[1:]:
                        v = filters[f](v)

                yield v
                tmpl = tmpl[m.end(0):]
            else:
                # no more markers: emit the rest verbatim
                yield tmpl
                return
164
164
# Filters available to every template via the "#name|filter#" syntax.
# Keys are the filter names as written in templates; values are
# one-argument callables applied to the substituted value.
common_filters = {
    "escape": cgi.escape,
    "strip": lambda x: x.strip(),
    "rstrip": lambda x: x.rstrip(),
    "age": age,
    "date": lambda x: util.datestr(x),
    "addbreaks": nl2br,
    "obfuscate": obfuscate,
    # abbreviate a hex node id to the conventional 12 characters
    "short": (lambda x: x[:12]),
    "firstline": (lambda x: x.splitlines(1)[0]),
    # render an executable flag as a faux ls -l permission string
    "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
    "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
    }
178
178
179 class hgweb:
179 class hgweb:
180 def __init__(self, repo, name=None):
180 def __init__(self, repo, name=None):
181 if type(repo) == type(""):
181 if type(repo) == type(""):
182 self.repo = hg.repository(ui.ui(), repo)
182 self.repo = hg.repository(ui.ui(), repo)
183 else:
183 else:
184 self.repo = repo
184 self.repo = repo
185
185
186 self.mtime = -1
186 self.mtime = -1
187 self.reponame = name
187 self.reponame = name
188 self.archives = 'zip', 'gz', 'bz2'
188 self.archives = 'zip', 'gz', 'bz2'
189
189
    def refresh(self):
        """Re-open the repository and re-read its web configuration if
        the changelog on disk is newer than the cached copy."""
        mtime = get_mtime(self.repo.root)
        if mtime != self.mtime:
            self.mtime = mtime
            # re-open so newly pushed changesets become visible
            self.repo = hg.repository(self.repo.ui, self.repo.root)
            self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
            self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
            self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
198
198
199 def listfiles(self, files, mf):
199 def listfiles(self, files, mf):
200 for f in files[:self.maxfiles]:
200 for f in files[:self.maxfiles]:
201 yield self.t("filenodelink", node=hex(mf[f]), file=f)
201 yield self.t("filenodelink", node=hex(mf[f]), file=f)
202 if len(files) > self.maxfiles:
202 if len(files) > self.maxfiles:
203 yield self.t("fileellipses")
203 yield self.t("fileellipses")
204
204
205 def listfilediffs(self, files, changeset):
205 def listfilediffs(self, files, changeset):
206 for f in files[:self.maxfiles]:
206 for f in files[:self.maxfiles]:
207 yield self.t("filedifflink", node=hex(changeset), file=f)
207 yield self.t("filedifflink", node=hex(changeset), file=f)
208 if len(files) > self.maxfiles:
208 if len(files) > self.maxfiles:
209 yield self.t("fileellipses")
209 yield self.t("fileellipses")
210
210
211 def parents(self, node, parents=[], rev=None, hide=False, **args):
211 def parents(self, node, parents=[], rev=None, hide=False, **args):
212 if not rev:
212 if not rev:
213 rev = lambda x: ""
213 rev = lambda x: ""
214 parents = [p for p in parents if p != nullid]
214 parents = [p for p in parents if p != nullid]
215 if hide and len(parents) == 1 and rev(parents[0]) == rev(node) - 1:
215 if hide and len(parents) == 1 and rev(parents[0]) == rev(node) - 1:
216 return
216 return
217 for p in parents:
217 for p in parents:
218 yield dict(node=hex(p), rev=rev(p), **args)
218 yield dict(node=hex(p), rev=rev(p), **args)
219
219
220 def showtag(self, t1, node=nullid, **args):
220 def showtag(self, t1, node=nullid, **args):
221 for t in self.repo.nodetags(node):
221 for t in self.repo.nodetags(node):
222 yield self.t(t1, tag=t, **args)
222 yield self.t(t1, tag=t, **args)
223
223
    def diff(self, node1, node2, files):
        """Yield rendered unified-diff blocks between two changesets,
        optionally restricted to the given files/directories."""
        def filterfiles(list, files):
            # keep exact matches plus everything under a listed directory
            l = [x for x in list if x in files]

            for f in files:
                if f[-1] != os.sep:
                    f += os.sep
                l += [x for x in list if x.startswith(f)]
            return l

        # one-element list so the nested generator can mutate the
        # alternating row parity shared across blocks
        parity = [0]
        def diffblock(diff, f, fn):
            yield self.t("diffblock",
                         lines=prettyprintlines(diff),
                         parity=parity[0],
                         file=f,
                         filenode=hex(fn or nullid))
            parity[0] = 1 - parity[0]

        def prettyprintlines(diff):
            # tag each diff line for styling: +, -, @@ hunk, context
            for l in diff.splitlines(1):
                if l.startswith('+'):
                    yield self.t("difflineplus", line=l)
                elif l.startswith('-'):
                    yield self.t("difflineminus", line=l)
                elif l.startswith('@'):
                    yield self.t("difflineat", line=l)
                else:
                    yield self.t("diffline", line=l)

        r = self.repo
        cl = r.changelog
        mf = r.manifest
        change1 = cl.read(node1)
        change2 = cl.read(node2)
        mmap1 = mf.read(change1[0])
        mmap2 = mf.read(change2[0])
        date1 = util.datestr(change1[2])
        date2 = util.datestr(change2[2])

        # c: changed, a: added, d: deleted, u: unknown (unused here)
        c, a, d, u = r.changes(node1, node2)
        if files:
            c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))

        for f in c:
            to = r.file(f).read(mmap1[f])
            tn = r.file(f).read(mmap2[f])
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
        for f in a:
            # added: no old content
            to = None
            tn = r.file(f).read(mmap2[f])
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
        for f in d:
            # deleted: no new content
            to = r.file(f).read(mmap1[f])
            tn = None
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
280
280
    def changelog(self, pos):
        """Render a page of the changelog ending at revision pos,
        showing up to maxchanges entries, newest first."""
        def changenav(**map):
            # navigation offsets: 1, 3, 10, 30, 100, ... around pos
            def seq(factor=1):
                yield 1 * factor
                yield 3 * factor
                #yield 5 * factor
                for f in seq(factor * 10):
                    yield f

            l = []
            for f in seq():
                if f < self.maxchanges / 2:
                    continue
                if f > count:
                    break
                r = "%d" % f
                if pos + f < count:
                    l.append(("+" + r, pos + f))
                if pos - f >= 0:
                    # prepend so negative offsets come first, descending
                    l.insert(0, ("-" + r, pos - f))

            yield {"rev": 0, "label": "(0)"}

            for label, rev in l:
                yield {"label": label, "rev": rev}

            yield {"label": "tip", "rev": "tip"}

        def changelist(**map):
            # seed parity so stripes stay consistent across pages
            parity = (start - end) & 1
            cl = self.repo.changelog
            l = [] # build a list in forward order for efficiency
            for i in range(start, end):
                n = cl.node(i)
                changes = cl.read(n)
                hn = hex(n)

                # prepend: page is displayed newest-first
                l.insert(0, {"parity": parity,
                             "author": changes[1],
                             "parent": self.parents(n, cl.parents(n), cl.rev,
                                                    hide=True),
                             "changelogtag": self.showtag("changelogtag",n),
                             "manifest": hex(changes[0]),
                             "desc": changes[4],
                             "date": changes[2],
                             "files": self.listfilediffs(changes[3], n),
                             "rev": i,
                             "node": hn})
                parity = 1 - parity

            for e in l:
                yield e

        cl = self.repo.changelog
        mf = cl.read(cl.tip())[0]
        count = cl.count()
        # clamp the [start, end) window to the repository bounds
        start = max(0, pos - self.maxchanges + 1)
        end = min(count, start + self.maxchanges)
        pos = end - 1

        yield self.t('changelog',
                     changenav=changenav,
                     manifest=hex(mf),
                     rev=pos, changesets=count, entries=changelist)
345
345
    def search(self, query):
        """Render search results: changesets whose author, description,
        or touched files contain every whitespace-separated word of
        query (case-insensitive), newest first, up to maxchanges."""

        def changelist(**map):
            cl = self.repo.changelog
            count = 0
            qw = query.lower().split()

            def revgen():
                # walk revisions newest-first in batches of 100 to
                # avoid materializing the whole history at once
                for i in range(cl.count() - 1, 0, -100):
                    l = []
                    for j in range(max(0, i - 100), i):
                        n = cl.node(j)
                        changes = cl.read(n)
                        l.append((n, j, changes))
                    l.reverse()
                    for e in l:
                        yield e

            for n, i, changes in revgen():
                # every query word must match author, description,
                # or one of the first 20 file names
                miss = 0
                for q in qw:
                    if not (q in changes[1].lower() or
                            q in changes[4].lower() or
                            q in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

                count += 1
                hn = hex(n)

                yield self.t('searchentry',
                             parity=count & 1,
                             author=changes[1],
                             parent=self.parents(n, cl.parents(n), cl.rev),
                             changelogtag=self.showtag("changelogtag",n),
                             manifest=hex(changes[0]),
                             desc=changes[4],
                             date=changes[2],
                             files=self.listfilediffs(changes[3], n),
                             rev=i,
                             node=hn)

                if count >= self.maxchanges:
                    break

        cl = self.repo.changelog
        mf = cl.read(cl.tip())[0]

        yield self.t('search',
                     query=query,
                     manifest=hex(mf),
                     entries=changelist)
400
400
    def changeset(self, nodeid):
        """Render the changeset page for nodeid (any form accepted by
        repo.lookup): metadata, file links, diff against first parent,
        and download links for the enabled archive formats."""
        cl = self.repo.changelog
        n = self.repo.lookup(nodeid)
        nodeid = hex(n)
        changes = cl.read(n)
        p1 = cl.parents(n)[0]

        files = []
        mf = self.repo.manifest.read(changes[0])
        for f in changes[3]:
            # files removed by this changeset are not in the manifest;
            # fall back to the null node for their link
            files.append(self.t("filenodelink",
                                filenode=hex(mf.get(f, nullid)), file=f))

        def diff(**map):
            # lazily diff against the first parent only
            yield self.diff(p1, n, None)

        def archivelist():
            # offer only formats enabled in the web configuration
            for i in self.archives:
                if self.repo.ui.configbool("web", "allow" + i, False):
                    yield {"type" : i, "node" : nodeid}

        yield self.t('changeset',
                     diff=diff,
                     rev=cl.rev(n),
                     node=nodeid,
                     parent=self.parents(n, cl.parents(n), cl.rev),
                     changesettag=self.showtag("changesettag",n),
                     manifest=hex(changes[0]),
                     author=changes[1],
                     desc=changes[4],
                     date=changes[2],
                     files=files,
                     archives=archivelist())
434
434
    def filelog(self, f, filenode):
        """Render the revision history page of file f, newest first,
        starting display context from the given file node."""
        cl = self.repo.changelog
        fl = self.repo.file(f)
        filenode = hex(fl.lookup(filenode))
        count = fl.count()

        def entries(**map):
            l = []
            # seed parity from the count so stripes are stable
            parity = (count - 1) & 1

            for i in range(count):
                n = fl.node(i)
                # linkrev maps the file revision to its changeset
                lr = fl.linkrev(n)
                cn = cl.node(lr)
                cs = cl.read(cl.node(lr))

                # prepend: displayed newest-first
                l.insert(0, {"parity": parity,
                             "filenode": hex(n),
                             "filerev": i,
                             "file": f,
                             "node": hex(cn),
                             "author": cs[1],
                             "date": cs[2],
                             "parent": self.parents(n, fl.parents(n),
                                                    fl.rev, file=f),
                             "desc": cs[4]})
                parity = 1 - parity

            for e in l:
                yield e

        yield self.t("filelog", file=f, filenode=filenode, entries=entries)
467
467
    def filerevision(self, f, node):
        """Render the contents page of file f at the given file node:
        numbered source lines plus the changeset metadata."""
        fl = self.repo.file(f)
        n = fl.lookup(node)
        node = hex(n)
        text = fl.read(n)
        changerev = fl.linkrev(n)
        cl = self.repo.changelog
        cn = cl.node(changerev)
        cs = cl.read(cn)
        mfn = cs[0]

        mt = mimetypes.guess_type(f)[0]
        # keep the raw bytes for download; replace the displayed text
        # with a placeholder when the file looks binary
        rawtext = text
        if util.binary(text):
            text = "(binary:%s)" % mt

        def lines():
            # number lines from 1, alternating row parity
            for l, t in enumerate(text.splitlines(1)):
                yield {"line": t,
                       "linenumber": "% 6d" % (l + 1),
                       "parity": l & 1}

        yield self.t("filerevision",
                     file=f,
                     filenode=node,
                     path=up(f),
                     text=lines(),
                     raw=rawtext,
                     mimetype=mt,
                     rev=changerev,
                     node=hex(cn),
                     manifest=hex(mfn),
                     author=cs[1],
                     date=cs[2],
                     parent=self.parents(n, fl.parents(n), fl.rev, file=f),
                     permissions=self.repo.manifest.readflags(mfn)[f])
504
504
    def fileannotate(self, f, node):
        """Render the annotate (blame) page for file f at the given
        file node: each line tagged with its originating revision."""
        # per-call memo caches: revision -> short author name,
        # revision -> changeset node
        bcache = {}
        ncache = {}
        fl = self.repo.file(f)
        n = fl.lookup(node)
        node = hex(n)
        changerev = fl.linkrev(n)

        cl = self.repo.changelog
        cn = cl.node(changerev)
        cs = cl.read(cn)
        mfn = cs[0]

        def annotate(**map):
            parity = 1
            last = None
            for r, l in fl.annotate(n):
                try:
                    cnode = ncache[r]
                except KeyError:
                    cnode = ncache[r] = self.repo.changelog.node(r)

                try:
                    name = bcache[r]
                except KeyError:
                    cl = self.repo.changelog.read(cnode)
                    bcache[r] = name = self.repo.ui.shortuser(cl[1])

                # flip the stripe color whenever the originating
                # changeset changes, not per line
                if last != cnode:
                    parity = 1 - parity
                    last = cnode

                yield {"parity": parity,
                       "node": hex(cnode),
                       "rev": r,
                       "author": name,
                       "file": f,
                       "line": l}

        yield self.t("fileannotate",
                     file=f,
                     filenode=node,
                     annotate=annotate,
                     path=up(f),
                     rev=changerev,
                     node=hex(cn),
                     manifest=hex(mfn),
                     author=cs[1],
                     date=cs[2],
                     parent=self.parents(n, fl.parents(n), fl.rev, file=f),
                     permissions=self.repo.manifest.readflags(mfn)[f])
556
556
    def manifest(self, mnode, path):
        """Render a directory listing of the manifest mnode at the
        given path ("/"-prefixed): subdirectories first grouping,
        then files, each linked to their nodes."""
        man = self.repo.manifest
        mn = man.lookup(mnode)
        mnode = hex(mn)
        mf = man.read(mn)
        rev = man.rev(mn)
        node = self.repo.changelog.node(rev)
        mff = man.readflags(mn)

        # short name -> (full path, file node or None for a directory)
        files = {}

        # strip the leading "/" so manifest paths compare directly
        p = path[1:]
        l = len(p)

        for f,n in mf.items():
            if f[:l] != p:
                continue
            remain = f[l:]
            if "/" in remain:
                # entry lives in a subdirectory: record the first path
                # component (with trailing "/") as a directory marker
                short = remain[:remain.find("/") + 1] # bleah
                files[short] = (f, None)
            else:
                short = os.path.basename(remain)
                files[short] = (f, n)

        def filelist(**map):
            parity = 0
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                # skip directory markers
                if not fnode:
                    continue

                yield {"file": full,
                       "manifest": mnode,
                       "filenode": hex(fnode),
                       "parity": parity,
                       "basename": f,
                       "permissions": mff[full]}
                parity = 1 - parity

        def dirlist(**map):
            parity = 0
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                # skip plain files
                if fnode:
                    continue

                yield {"parity": parity,
                       "path": os.path.join(path, f),
                       "manifest": mnode,
                       "basename": f[:-1]}
                parity = 1 - parity

        yield self.t("manifest",
                     manifest=mnode,
                     rev=rev,
                     node=hex(node),
                     path=path,
                     up=up(path),
                     fentries=filelist,
                     dentries=dirlist)
622
622
623 def tags(self):
623 def tags(self):
624 cl = self.repo.changelog
624 cl = self.repo.changelog
625 mf = cl.read(cl.tip())[0]
625 mf = cl.read(cl.tip())[0]
626
626
627 i = self.repo.tagslist()
627 i = self.repo.tagslist()
628 i.reverse()
628 i.reverse()
629
629
630 def entries(**map):
630 def entries(**map):
631 parity = 0
631 parity = 0
632 for k,n in i:
632 for k,n in i:
633 yield {"parity": parity,
633 yield {"parity": parity,
634 "tag": k,
634 "tag": k,
635 "node": hex(n)}
635 "node": hex(n)}
636 parity = 1 - parity
636 parity = 1 - parity
637
637
638 yield self.t("tags",
638 yield self.t("tags",
639 manifest=hex(mf),
639 manifest=hex(mf),
640 entries=entries)
640 entries=entries)
641
641
    def filediff(self, file, changeset):
        """Render the diff of a single file in changeset against the
        changeset's first parent."""
        cl = self.repo.changelog
        n = self.repo.lookup(changeset)
        changeset = hex(n)
        p1 = cl.parents(n)[0]
        cs = cl.read(n)
        mf = self.repo.manifest.read(cs[0])

        def diff(**map):
            # lazily diff only the requested file
            yield self.diff(p1, n, file)

        yield self.t("filediff",
                     file=file,
                     # removed files are absent from the manifest;
                     # fall back to the null node
                     filenode=hex(mf.get(file, nullid)),
                     node=changeset,
                     rev=self.repo.changelog.rev(n),
                     parent=self.parents(n, cl.parents(n), cl.rev),
                     diff=diff)
660
660
661 def archive(self, req, cnode, type):
661 def archive(self, req, cnode, type):
662 cs = self.repo.changelog.read(cnode)
662 cs = self.repo.changelog.read(cnode)
663 mnode = cs[0]
663 mnode = cs[0]
664 mf = self.repo.manifest.read(mnode)
664 mf = self.repo.manifest.read(mnode)
665 rev = self.repo.manifest.rev(mnode)
665 rev = self.repo.manifest.rev(mnode)
666 reponame = re.sub(r"\W+", "-", self.reponame)
666 reponame = re.sub(r"\W+", "-", self.reponame)
667 name = "%s-%s/" % (reponame, short(cnode))
667 name = "%s-%s/" % (reponame, short(cnode))
668
668
669 files = mf.keys()
669 files = mf.keys()
670 files.sort()
670 files.sort()
671
671
672 if type == 'zip':
672 if type == 'zip':
673 tmp = tempfile.mkstemp()[1]
673 tmp = tempfile.mkstemp()[1]
674 try:
674 try:
675 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
675 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
676
676
677 for f in files:
677 for f in files:
678 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
678 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
679 zf.close()
679 zf.close()
680
680
681 f = open(tmp, 'r')
681 f = open(tmp, 'r')
682 req.httphdr('application/zip', name[:-1] + '.zip',
682 req.httphdr('application/zip', name[:-1] + '.zip',
683 os.path.getsize(tmp))
683 os.path.getsize(tmp))
684 req.write(f.read())
684 req.write(f.read())
685 f.close()
685 f.close()
686 finally:
686 finally:
687 os.unlink(tmp)
687 os.unlink(tmp)
688
688
689 else:
689 else:
690 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
690 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
691 mff = self.repo.manifest.readflags(mnode)
691 mff = self.repo.manifest.readflags(mnode)
692 mtime = int(time.time())
692 mtime = int(time.time())
693
693
694 if type == "gz":
694 if type == "gz":
695 encoding = "gzip"
695 encoding = "gzip"
696 else:
696 else:
697 encoding = "x-bzip2"
697 encoding = "x-bzip2"
698 req.header([('Content-type', 'application/x-tar'),
698 req.header([('Content-type', 'application/x-tar'),
699 ('Content-disposition', 'attachment; filename=%s%s%s' %
699 ('Content-disposition', 'attachment; filename=%s%s%s' %
700 (name[:-1], '.tar.', type)),
700 (name[:-1], '.tar.', type)),
701 ('Content-encoding', encoding)])
701 ('Content-encoding', encoding)])
702 for fname in files:
702 for fname in files:
703 rcont = self.repo.file(fname).read(mf[fname])
703 rcont = self.repo.file(fname).read(mf[fname])
704 finfo = tarfile.TarInfo(name + fname)
704 finfo = tarfile.TarInfo(name + fname)
705 finfo.mtime = mtime
705 finfo.mtime = mtime
706 finfo.size = len(rcont)
706 finfo.size = len(rcont)
707 finfo.mode = mff[fname] and 0755 or 0644
707 finfo.mode = mff[fname] and 0755 or 0644
708 tf.addfile(finfo, StringIO.StringIO(rcont))
708 tf.addfile(finfo, StringIO.StringIO(rcont))
709 tf.close()
709 tf.close()
710
710
711 # add tags to things
711 # add tags to things
712 # tags -> list of changesets corresponding to tags
712 # tags -> list of changesets corresponding to tags
713 # find tag, changeset, file
713 # find tag, changeset, file
714
714
715 def run(self, req=hgrequest()):
715 def run(self, req=hgrequest()):
716 def header(**map):
716 def header(**map):
717 yield self.t("header", **map)
717 yield self.t("header", **map)
718
718
719 def footer(**map):
719 def footer(**map):
720 yield self.t("footer", **map)
720 yield self.t("footer", **map)
721
721
722 def expand_form(form):
722 def expand_form(form):
723 shortcuts = {
723 shortcuts = {
724 'cl': [('cmd', ['changelog']), ('rev', None)],
724 'cl': [('cmd', ['changelog']), ('rev', None)],
725 'cs': [('cmd', ['changeset']), ('node', None)],
725 'cs': [('cmd', ['changeset']), ('node', None)],
726 'f': [('cmd', ['file']), ('filenode', None)],
726 'f': [('cmd', ['file']), ('filenode', None)],
727 'fl': [('cmd', ['filelog']), ('filenode', None)],
727 'fl': [('cmd', ['filelog']), ('filenode', None)],
728 'fd': [('cmd', ['filediff']), ('node', None)],
728 'fd': [('cmd', ['filediff']), ('node', None)],
729 'fa': [('cmd', ['annotate']), ('filenode', None)],
729 'fa': [('cmd', ['annotate']), ('filenode', None)],
730 'mf': [('cmd', ['manifest']), ('manifest', None)],
730 'mf': [('cmd', ['manifest']), ('manifest', None)],
731 'ca': [('cmd', ['archive']), ('node', None)],
731 'ca': [('cmd', ['archive']), ('node', None)],
732 'tags': [('cmd', ['tags'])],
732 'tags': [('cmd', ['tags'])],
733 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
733 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
734 }
734 }
735
735
736 for k in shortcuts.iterkeys():
736 for k in shortcuts.iterkeys():
737 if form.has_key(k):
737 if form.has_key(k):
738 for name, value in shortcuts[k]:
738 for name, value in shortcuts[k]:
739 if value is None:
739 if value is None:
740 value = form[k]
740 value = form[k]
741 form[name] = value
741 form[name] = value
742 del form[k]
742 del form[k]
743
743
744 self.refresh()
744 self.refresh()
745
745
746 expand_form(req.form)
746 expand_form(req.form)
747
747
748 t = self.repo.ui.config("web", "templates", templatepath())
748 t = self.repo.ui.config("web", "templates", templatepath())
749 m = os.path.join(t, "map")
749 m = os.path.join(t, "map")
750 style = self.repo.ui.config("web", "style", "")
750 style = self.repo.ui.config("web", "style", "")
751 if req.form.has_key('style'):
751 if req.form.has_key('style'):
752 style = req.form['style'][0]
752 style = req.form['style'][0]
753 if style:
753 if style:
754 b = os.path.basename("map-" + style)
754 b = os.path.basename("map-" + style)
755 p = os.path.join(t, b)
755 p = os.path.join(t, b)
756 if os.path.isfile(p):
756 if os.path.isfile(p):
757 m = p
757 m = p
758
758
759 port = req.env["SERVER_PORT"]
759 port = req.env["SERVER_PORT"]
760 port = port != "80" and (":" + port) or ""
760 port = port != "80" and (":" + port) or ""
761 uri = req.env["REQUEST_URI"]
761 uri = req.env["REQUEST_URI"]
762 if "?" in uri:
762 if "?" in uri:
763 uri = uri.split("?")[0]
763 uri = uri.split("?")[0]
764 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
764 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
765 if not self.reponame:
765 if not self.reponame:
766 self.reponame = (self.repo.ui.config("web", "name")
766 self.reponame = (self.repo.ui.config("web", "name")
767 or uri.strip('/') or self.repo.root)
767 or uri.strip('/') or self.repo.root)
768
768
769 self.t = templater(m, common_filters,
769 self.t = templater(m, common_filters,
770 {"url": url,
770 {"url": url,
771 "repo": self.reponame,
771 "repo": self.reponame,
772 "header": header,
772 "header": header,
773 "footer": footer,
773 "footer": footer,
774 })
774 })
775
775
776 if not req.form.has_key('cmd'):
776 if not req.form.has_key('cmd'):
777 req.form['cmd'] = [self.t.cache['default'],]
777 req.form['cmd'] = [self.t.cache['default'],]
778
778
779 if req.form['cmd'][0] == 'changelog':
779 if req.form['cmd'][0] == 'changelog':
780 c = self.repo.changelog.count() - 1
780 c = self.repo.changelog.count() - 1
781 hi = c
781 hi = c
782 if req.form.has_key('rev'):
782 if req.form.has_key('rev'):
783 hi = req.form['rev'][0]
783 hi = req.form['rev'][0]
784 try:
784 try:
785 hi = self.repo.changelog.rev(self.repo.lookup(hi))
785 hi = self.repo.changelog.rev(self.repo.lookup(hi))
786 except hg.RepoError:
786 except hg.RepoError:
787 req.write(self.search(hi))
787 req.write(self.search(hi))
788 return
788 return
789
789
790 req.write(self.changelog(hi))
790 req.write(self.changelog(hi))
791
791
792 elif req.form['cmd'][0] == 'changeset':
792 elif req.form['cmd'][0] == 'changeset':
793 req.write(self.changeset(req.form['node'][0]))
793 req.write(self.changeset(req.form['node'][0]))
794
794
795 elif req.form['cmd'][0] == 'manifest':
795 elif req.form['cmd'][0] == 'manifest':
796 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
796 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
797
797
798 elif req.form['cmd'][0] == 'tags':
798 elif req.form['cmd'][0] == 'tags':
799 req.write(self.tags())
799 req.write(self.tags())
800
800
801 elif req.form['cmd'][0] == 'filediff':
801 elif req.form['cmd'][0] == 'filediff':
802 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
802 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
803
803
804 elif req.form['cmd'][0] == 'file':
804 elif req.form['cmd'][0] == 'file':
805 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
805 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
806
806
807 elif req.form['cmd'][0] == 'annotate':
807 elif req.form['cmd'][0] == 'annotate':
808 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
808 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
809
809
810 elif req.form['cmd'][0] == 'filelog':
810 elif req.form['cmd'][0] == 'filelog':
811 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
811 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
812
812
813 elif req.form['cmd'][0] == 'heads':
813 elif req.form['cmd'][0] == 'heads':
814 req.httphdr("application/mercurial-0.1")
814 req.httphdr("application/mercurial-0.1")
815 h = self.repo.heads()
815 h = self.repo.heads()
816 req.write(" ".join(map(hex, h)) + "\n")
816 req.write(" ".join(map(hex, h)) + "\n")
817
817
818 elif req.form['cmd'][0] == 'branches':
818 elif req.form['cmd'][0] == 'branches':
819 req.httphdr("application/mercurial-0.1")
819 req.httphdr("application/mercurial-0.1")
820 nodes = []
820 nodes = []
821 if req.form.has_key('nodes'):
821 if req.form.has_key('nodes'):
822 nodes = map(bin, req.form['nodes'][0].split(" "))
822 nodes = map(bin, req.form['nodes'][0].split(" "))
823 for b in self.repo.branches(nodes):
823 for b in self.repo.branches(nodes):
824 req.write(" ".join(map(hex, b)) + "\n")
824 req.write(" ".join(map(hex, b)) + "\n")
825
825
826 elif req.form['cmd'][0] == 'between':
826 elif req.form['cmd'][0] == 'between':
827 req.httphdr("application/mercurial-0.1")
827 req.httphdr("application/mercurial-0.1")
828 nodes = []
828 nodes = []
829 if req.form.has_key('pairs'):
829 if req.form.has_key('pairs'):
830 pairs = [map(bin, p.split("-"))
830 pairs = [map(bin, p.split("-"))
831 for p in req.form['pairs'][0].split(" ")]
831 for p in req.form['pairs'][0].split(" ")]
832 for b in self.repo.between(pairs):
832 for b in self.repo.between(pairs):
833 req.write(" ".join(map(hex, b)) + "\n")
833 req.write(" ".join(map(hex, b)) + "\n")
834
834
835 elif req.form['cmd'][0] == 'changegroup':
835 elif req.form['cmd'][0] == 'changegroup':
836 req.httphdr("application/mercurial-0.1")
836 req.httphdr("application/mercurial-0.1")
837 nodes = []
837 nodes = []
838 if not self.allowpull:
838 if not self.allowpull:
839 return
839 return
840
840
841 if req.form.has_key('roots'):
841 if req.form.has_key('roots'):
842 nodes = map(bin, req.form['roots'][0].split(" "))
842 nodes = map(bin, req.form['roots'][0].split(" "))
843
843
844 z = zlib.compressobj()
844 z = zlib.compressobj()
845 f = self.repo.changegroup(nodes)
845 f = self.repo.changegroup(nodes)
846 while 1:
846 while 1:
847 chunk = f.read(4096)
847 chunk = f.read(4096)
848 if not chunk:
848 if not chunk:
849 break
849 break
850 req.write(z.compress(chunk))
850 req.write(z.compress(chunk))
851
851
852 req.write(z.flush())
852 req.write(z.flush())
853
853
854 elif req.form['cmd'][0] == 'archive':
854 elif req.form['cmd'][0] == 'archive':
855 changeset = self.repo.lookup(req.form['node'][0])
855 changeset = self.repo.lookup(req.form['node'][0])
856 type = req.form['type'][0]
856 type = req.form['type'][0]
857 if (type in self.archives and
857 if (type in self.archives and
858 self.repo.ui.configbool("web", "allow" + type, False)):
858 self.repo.ui.configbool("web", "allow" + type, False)):
859 self.archive(req, changeset, type)
859 self.archive(req, changeset, type)
860 return
860 return
861
861
862 req.write(self.t("error"))
862 req.write(self.t("error"))
863
863
864 else:
864 else:
865 req.write(self.t("error"))
865 req.write(self.t("error"))
866
866
867 def create_server(repo):
867 def create_server(repo):
868
868
869 def openlog(opt, default):
869 def openlog(opt, default):
870 if opt and opt != '-':
870 if opt and opt != '-':
871 return open(opt, 'w')
871 return open(opt, 'w')
872 return default
872 return default
873
873
874 address = repo.ui.config("web", "address", "")
874 address = repo.ui.config("web", "address", "")
875 port = int(repo.ui.config("web", "port", 8000))
875 port = int(repo.ui.config("web", "port", 8000))
876 use_ipv6 = repo.ui.configbool("web", "ipv6")
876 use_ipv6 = repo.ui.configbool("web", "ipv6")
877 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
877 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
878 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
878 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
879
879
880 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
880 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
881 address_family = getattr(socket, 'AF_INET6', None)
881 address_family = getattr(socket, 'AF_INET6', None)
882
882
883 def __init__(self, *args, **kwargs):
883 def __init__(self, *args, **kwargs):
884 if self.address_family is None:
884 if self.address_family is None:
885 raise hg.RepoError(_('IPv6 not available on this system'))
885 raise hg.RepoError(_('IPv6 not available on this system'))
886 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
886 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
887
887
888 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
888 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
889 def log_error(self, format, *args):
889 def log_error(self, format, *args):
890 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
890 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
891 self.log_date_time_string(),
891 self.log_date_time_string(),
892 format % args))
892 format % args))
893
893
894 def log_message(self, format, *args):
894 def log_message(self, format, *args):
895 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
895 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
896 self.log_date_time_string(),
896 self.log_date_time_string(),
897 format % args))
897 format % args))
898
898
899 def do_POST(self):
899 def do_POST(self):
900 try:
900 try:
901 self.do_hgweb()
901 self.do_hgweb()
902 except socket.error, inst:
902 except socket.error, inst:
903 if inst[0] != errno.EPIPE:
903 if inst[0] != errno.EPIPE:
904 raise
904 raise
905
905
906 def do_GET(self):
906 def do_GET(self):
907 self.do_POST()
907 self.do_POST()
908
908
909 def do_hgweb(self):
909 def do_hgweb(self):
910 query = ""
910 query = ""
911 p = self.path.find("?")
911 p = self.path.find("?")
912 if p:
912 if p:
913 query = self.path[p + 1:]
913 query = self.path[p + 1:]
914 query = query.replace('+', ' ')
914 query = query.replace('+', ' ')
915
915
916 env = {}
916 env = {}
917 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
917 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
918 env['REQUEST_METHOD'] = self.command
918 env['REQUEST_METHOD'] = self.command
919 env['SERVER_NAME'] = self.server.server_name
919 env['SERVER_NAME'] = self.server.server_name
920 env['SERVER_PORT'] = str(self.server.server_port)
920 env['SERVER_PORT'] = str(self.server.server_port)
921 env['REQUEST_URI'] = "/"
921 env['REQUEST_URI'] = "/"
922 if query:
922 if query:
923 env['QUERY_STRING'] = query
923 env['QUERY_STRING'] = query
924 host = self.address_string()
924 host = self.address_string()
925 if host != self.client_address[0]:
925 if host != self.client_address[0]:
926 env['REMOTE_HOST'] = host
926 env['REMOTE_HOST'] = host
927 env['REMOTE_ADDR'] = self.client_address[0]
927 env['REMOTE_ADDR'] = self.client_address[0]
928
928
929 if self.headers.typeheader is None:
929 if self.headers.typeheader is None:
930 env['CONTENT_TYPE'] = self.headers.type
930 env['CONTENT_TYPE'] = self.headers.type
931 else:
931 else:
932 env['CONTENT_TYPE'] = self.headers.typeheader
932 env['CONTENT_TYPE'] = self.headers.typeheader
933 length = self.headers.getheader('content-length')
933 length = self.headers.getheader('content-length')
934 if length:
934 if length:
935 env['CONTENT_LENGTH'] = length
935 env['CONTENT_LENGTH'] = length
936 accept = []
936 accept = []
937 for line in self.headers.getallmatchingheaders('accept'):
937 for line in self.headers.getallmatchingheaders('accept'):
938 if line[:1] in "\t\n\r ":
938 if line[:1] in "\t\n\r ":
939 accept.append(line.strip())
939 accept.append(line.strip())
940 else:
940 else:
941 accept = accept + line[7:].split(',')
941 accept = accept + line[7:].split(',')
942 env['HTTP_ACCEPT'] = ','.join(accept)
942 env['HTTP_ACCEPT'] = ','.join(accept)
943
943
944 req = hgrequest(self.rfile, self.wfile, env)
944 req = hgrequest(self.rfile, self.wfile, env)
945 self.send_response(200, "Script output follows")
945 self.send_response(200, "Script output follows")
946 hg.run(req)
946 hg.run(req)
947
947
948 hg = hgweb(repo)
948 hg = hgweb(repo)
949 if use_ipv6:
949 if use_ipv6:
950 return IPv6HTTPServer((address, port), hgwebhandler)
950 return IPv6HTTPServer((address, port), hgwebhandler)
951 else:
951 else:
952 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
952 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
953
953
954 def server(path, name, templates, address, port, use_ipv6=False,
954 def server(path, name, templates, address, port, use_ipv6=False,
955 accesslog=sys.stdout, errorlog=sys.stderr):
955 accesslog=sys.stdout, errorlog=sys.stderr):
956 httpd = create_server(path, name, templates, address, port, use_ipv6,
956 httpd = create_server(path, name, templates, address, port, use_ipv6,
957 accesslog, errorlog)
957 accesslog, errorlog)
958 httpd.serve_forever()
958 httpd.serve_forever()
959
959
960 # This is a stopgap
960 # This is a stopgap
961 class hgwebdir:
961 class hgwebdir:
962 def __init__(self, config):
962 def __init__(self, config):
963 def cleannames(items):
963 def cleannames(items):
964 return [(name.strip('/'), path) for name, path in items]
964 return [(name.strip('/'), path) for name, path in items]
965
965
966 if type(config) == type([]):
966 if type(config) == type([]):
967 self.repos = cleannames(config)
967 self.repos = cleannames(config)
968 elif type(config) == type({}):
968 elif type(config) == type({}):
969 self.repos = cleannames(config.items())
969 self.repos = cleannames(config.items())
970 self.repos.sort()
970 self.repos.sort()
971 else:
971 else:
972 cp = ConfigParser.SafeConfigParser()
972 cp = ConfigParser.SafeConfigParser()
973 cp.read(config)
973 cp.read(config)
974 self.repos = cleannames(cp.items("paths"))
974 self.repos = cleannames(cp.items("paths"))
975 self.repos.sort()
975 self.repos.sort()
976
976
977 def run(self, req=hgrequest()):
977 def run(self, req=hgrequest()):
978 def header(**map):
978 def header(**map):
979 yield tmpl("header", **map)
979 yield tmpl("header", **map)
980
980
981 def footer(**map):
981 def footer(**map):
982 yield tmpl("footer", **map)
982 yield tmpl("footer", **map)
983
983
984 m = os.path.join(templatepath(), "map")
984 m = os.path.join(templatepath(), "map")
985 tmpl = templater(m, common_filters,
985 tmpl = templater(m, common_filters,
986 {"header": header, "footer": footer})
986 {"header": header, "footer": footer})
987
987
988 def entries(**map):
988 def entries(**map):
989 parity = 0
989 parity = 0
990 for name, path in self.repos:
990 for name, path in self.repos:
991 u = ui.ui()
991 u = ui.ui()
992 try:
992 try:
993 u.readconfig(file(os.path.join(path, '.hg', 'hgrc')))
993 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
994 except IOError:
994 except IOError:
995 pass
995 pass
996 get = u.config
996 get = u.config
997
997
998 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
998 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
999 .replace("//", "/"))
999 .replace("//", "/"))
1000
1000
1001 # update time with local timezone
1001 # update time with local timezone
1002 d = (get_mtime(path), util.makedate()[1])
1002 d = (get_mtime(path), util.makedate()[1])
1003
1003
1004 yield dict(contact=(get("ui", "username") or # preferred
1004 yield dict(contact=(get("ui", "username") or # preferred
1005 get("web", "contact") or # deprecated
1005 get("web", "contact") or # deprecated
1006 get("web", "author", "unknown")), # also
1006 get("web", "author", "unknown")), # also
1007 name=get("web", "name", name),
1007 name=get("web", "name", name),
1008 url=url,
1008 url=url,
1009 parity=parity,
1009 parity=parity,
1010 shortdesc=get("web", "description", "unknown"),
1010 shortdesc=get("web", "description", "unknown"),
1011 lastupdate=d)
1011 lastupdate=d)
1012
1012
1013 parity = 1 - parity
1013 parity = 1 - parity
1014
1014
1015 virtual = req.env.get("PATH_INFO", "").strip('/')
1015 virtual = req.env.get("PATH_INFO", "").strip('/')
1016 if virtual:
1016 if virtual:
1017 real = dict(self.repos).get(virtual)
1017 real = dict(self.repos).get(virtual)
1018 if real:
1018 if real:
1019 hgweb(real).run(req)
1019 hgweb(real).run(req)
1020 else:
1020 else:
1021 req.write(tmpl("notfound", repo=virtual))
1021 req.write(tmpl("notfound", repo=virtual))
1022 else:
1022 else:
1023 req.write(tmpl("index", entries=entries))
1023 req.write(tmpl("index", entries=entries))
@@ -1,1736 +1,1736
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository:
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError(_("no repo found"))
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError(_("repository %s not found") % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.opener("hgrc"))
46 self.ui.readconfig(os.path.join(self.path, "hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 s = self.ui.config("hooks", name)
50 s = self.ui.config("hooks", name)
51 if s:
51 if s:
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 old = {}
53 old = {}
54 for k, v in args.items():
54 for k, v in args.items():
55 k = k.upper()
55 k = k.upper()
56 old[k] = os.environ.get(k, None)
56 old[k] = os.environ.get(k, None)
57 os.environ[k] = v
57 os.environ[k] = v
58
58
59 # Hooks run in the repository root
59 # Hooks run in the repository root
60 olddir = os.getcwd()
60 olddir = os.getcwd()
61 os.chdir(self.root)
61 os.chdir(self.root)
62 r = os.system(s)
62 r = os.system(s)
63 os.chdir(olddir)
63 os.chdir(olddir)
64
64
65 for k, v in old.items():
65 for k, v in old.items():
66 if v != None:
66 if v != None:
67 os.environ[k] = v
67 os.environ[k] = v
68 else:
68 else:
69 del os.environ[k]
69 del os.environ[k]
70
70
71 if r:
71 if r:
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 (name, r))
73 (name, r))
74 return False
74 return False
75 return True
75 return True
76
76
77 def tags(self):
77 def tags(self):
78 '''return a mapping of tag to node'''
78 '''return a mapping of tag to node'''
79 if not self.tagscache:
79 if not self.tagscache:
80 self.tagscache = {}
80 self.tagscache = {}
81 def addtag(self, k, n):
81 def addtag(self, k, n):
82 try:
82 try:
83 bin_n = bin(n)
83 bin_n = bin(n)
84 except TypeError:
84 except TypeError:
85 bin_n = ''
85 bin_n = ''
86 self.tagscache[k.strip()] = bin_n
86 self.tagscache[k.strip()] = bin_n
87
87
88 try:
88 try:
89 # read each head of the tags file, ending with the tip
89 # read each head of the tags file, ending with the tip
90 # and add each tag found to the map, with "newer" ones
90 # and add each tag found to the map, with "newer" ones
91 # taking precedence
91 # taking precedence
92 fl = self.file(".hgtags")
92 fl = self.file(".hgtags")
93 h = fl.heads()
93 h = fl.heads()
94 h.reverse()
94 h.reverse()
95 for r in h:
95 for r in h:
96 for l in fl.read(r).splitlines():
96 for l in fl.read(r).splitlines():
97 if l:
97 if l:
98 n, k = l.split(" ", 1)
98 n, k = l.split(" ", 1)
99 addtag(self, k, n)
99 addtag(self, k, n)
100 except KeyError:
100 except KeyError:
101 pass
101 pass
102
102
103 try:
103 try:
104 f = self.opener("localtags")
104 f = self.opener("localtags")
105 for l in f:
105 for l in f:
106 n, k = l.split(" ", 1)
106 n, k = l.split(" ", 1)
107 addtag(self, k, n)
107 addtag(self, k, n)
108 except IOError:
108 except IOError:
109 pass
109 pass
110
110
111 self.tagscache['tip'] = self.changelog.tip()
111 self.tagscache['tip'] = self.changelog.tip()
112
112
113 return self.tagscache
113 return self.tagscache
114
114
115 def tagslist(self):
115 def tagslist(self):
116 '''return a list of tags ordered by revision'''
116 '''return a list of tags ordered by revision'''
117 l = []
117 l = []
118 for t, n in self.tags().items():
118 for t, n in self.tags().items():
119 try:
119 try:
120 r = self.changelog.rev(n)
120 r = self.changelog.rev(n)
121 except:
121 except:
122 r = -2 # sort to the beginning of the list if unknown
122 r = -2 # sort to the beginning of the list if unknown
123 l.append((r,t,n))
123 l.append((r,t,n))
124 l.sort()
124 l.sort()
125 return [(t,n) for r,t,n in l]
125 return [(t,n) for r,t,n in l]
126
126
127 def nodetags(self, node):
127 def nodetags(self, node):
128 '''return the tags associated with a node'''
128 '''return the tags associated with a node'''
129 if not self.nodetagscache:
129 if not self.nodetagscache:
130 self.nodetagscache = {}
130 self.nodetagscache = {}
131 for t,n in self.tags().items():
131 for t,n in self.tags().items():
132 self.nodetagscache.setdefault(n,[]).append(t)
132 self.nodetagscache.setdefault(n,[]).append(t)
133 return self.nodetagscache.get(node, [])
133 return self.nodetagscache.get(node, [])
134
134
135 def lookup(self, key):
135 def lookup(self, key):
136 try:
136 try:
137 return self.tags()[key]
137 return self.tags()[key]
138 except KeyError:
138 except KeyError:
139 try:
139 try:
140 return self.changelog.lookup(key)
140 return self.changelog.lookup(key)
141 except:
141 except:
142 raise repo.RepoError(_("unknown revision '%s'") % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
143
143
144 def dev(self):
144 def dev(self):
145 return os.stat(self.path).st_dev
145 return os.stat(self.path).st_dev
146
146
147 def local(self):
147 def local(self):
148 return True
148 return True
149
149
150 def join(self, f):
150 def join(self, f):
151 return os.path.join(self.path, f)
151 return os.path.join(self.path, f)
152
152
153 def wjoin(self, f):
153 def wjoin(self, f):
154 return os.path.join(self.root, f)
154 return os.path.join(self.root, f)
155
155
156 def file(self, f):
156 def file(self, f):
157 if f[0] == '/': f = f[1:]
157 if f[0] == '/': f = f[1:]
158 return filelog.filelog(self.opener, f)
158 return filelog.filelog(self.opener, f)
159
159
160 def getcwd(self):
160 def getcwd(self):
161 return self.dirstate.getcwd()
161 return self.dirstate.getcwd()
162
162
163 def wfile(self, f, mode='r'):
163 def wfile(self, f, mode='r'):
164 return self.wopener(f, mode)
164 return self.wopener(f, mode)
165
165
166 def wread(self, filename):
166 def wread(self, filename):
167 if self.encodepats == None:
167 if self.encodepats == None:
168 l = []
168 l = []
169 for pat, cmd in self.ui.configitems("encode"):
169 for pat, cmd in self.ui.configitems("encode"):
170 mf = util.matcher("", "/", [pat], [], [])[1]
170 mf = util.matcher("", "/", [pat], [], [])[1]
171 l.append((mf, cmd))
171 l.append((mf, cmd))
172 self.encodepats = l
172 self.encodepats = l
173
173
174 data = self.wopener(filename, 'r').read()
174 data = self.wopener(filename, 'r').read()
175
175
176 for mf, cmd in self.encodepats:
176 for mf, cmd in self.encodepats:
177 if mf(filename):
177 if mf(filename):
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 data = util.filter(data, cmd)
179 data = util.filter(data, cmd)
180 break
180 break
181
181
182 return data
182 return data
183
183
184 def wwrite(self, filename, data, fd=None):
184 def wwrite(self, filename, data, fd=None):
185 if self.decodepats == None:
185 if self.decodepats == None:
186 l = []
186 l = []
187 for pat, cmd in self.ui.configitems("decode"):
187 for pat, cmd in self.ui.configitems("decode"):
188 mf = util.matcher("", "/", [pat], [], [])[1]
188 mf = util.matcher("", "/", [pat], [], [])[1]
189 l.append((mf, cmd))
189 l.append((mf, cmd))
190 self.decodepats = l
190 self.decodepats = l
191
191
192 for mf, cmd in self.decodepats:
192 for mf, cmd in self.decodepats:
193 if mf(filename):
193 if mf(filename):
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 data = util.filter(data, cmd)
195 data = util.filter(data, cmd)
196 break
196 break
197
197
198 if fd:
198 if fd:
199 return fd.write(data)
199 return fd.write(data)
200 return self.wopener(filename, 'w').write(data)
200 return self.wopener(filename, 'w').write(data)
201
201
202 def transaction(self):
202 def transaction(self):
203 # save dirstate for undo
203 # save dirstate for undo
204 try:
204 try:
205 ds = self.opener("dirstate").read()
205 ds = self.opener("dirstate").read()
206 except IOError:
206 except IOError:
207 ds = ""
207 ds = ""
208 self.opener("journal.dirstate", "w").write(ds)
208 self.opener("journal.dirstate", "w").write(ds)
209
209
210 def after():
210 def after():
211 util.rename(self.join("journal"), self.join("undo"))
211 util.rename(self.join("journal"), self.join("undo"))
212 util.rename(self.join("journal.dirstate"),
212 util.rename(self.join("journal.dirstate"),
213 self.join("undo.dirstate"))
213 self.join("undo.dirstate"))
214
214
215 return transaction.transaction(self.ui.warn, self.opener,
215 return transaction.transaction(self.ui.warn, self.opener,
216 self.join("journal"), after)
216 self.join("journal"), after)
217
217
218 def recover(self):
218 def recover(self):
219 lock = self.lock()
219 lock = self.lock()
220 if os.path.exists(self.join("journal")):
220 if os.path.exists(self.join("journal")):
221 self.ui.status(_("rolling back interrupted transaction\n"))
221 self.ui.status(_("rolling back interrupted transaction\n"))
222 return transaction.rollback(self.opener, self.join("journal"))
222 return transaction.rollback(self.opener, self.join("journal"))
223 else:
223 else:
224 self.ui.warn(_("no interrupted transaction available\n"))
224 self.ui.warn(_("no interrupted transaction available\n"))
225
225
226 def undo(self):
226 def undo(self):
227 lock = self.lock()
227 lock = self.lock()
228 if os.path.exists(self.join("undo")):
228 if os.path.exists(self.join("undo")):
229 self.ui.status(_("rolling back last transaction\n"))
229 self.ui.status(_("rolling back last transaction\n"))
230 transaction.rollback(self.opener, self.join("undo"))
230 transaction.rollback(self.opener, self.join("undo"))
231 self.dirstate = None
231 self.dirstate = None
232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
234 else:
234 else:
235 self.ui.warn(_("no undo information available\n"))
235 self.ui.warn(_("no undo information available\n"))
236
236
237 def lock(self, wait=1):
237 def lock(self, wait=1):
238 try:
238 try:
239 return lock.lock(self.join("lock"), 0)
239 return lock.lock(self.join("lock"), 0)
240 except lock.LockHeld, inst:
240 except lock.LockHeld, inst:
241 if wait:
241 if wait:
242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
243 return lock.lock(self.join("lock"), wait)
243 return lock.lock(self.join("lock"), wait)
244 raise inst
244 raise inst
245
245
246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
247 orig_parent = self.dirstate.parents()[0] or nullid
247 orig_parent = self.dirstate.parents()[0] or nullid
248 p1 = p1 or self.dirstate.parents()[0] or nullid
248 p1 = p1 or self.dirstate.parents()[0] or nullid
249 p2 = p2 or self.dirstate.parents()[1] or nullid
249 p2 = p2 or self.dirstate.parents()[1] or nullid
250 c1 = self.changelog.read(p1)
250 c1 = self.changelog.read(p1)
251 c2 = self.changelog.read(p2)
251 c2 = self.changelog.read(p2)
252 m1 = self.manifest.read(c1[0])
252 m1 = self.manifest.read(c1[0])
253 mf1 = self.manifest.readflags(c1[0])
253 mf1 = self.manifest.readflags(c1[0])
254 m2 = self.manifest.read(c2[0])
254 m2 = self.manifest.read(c2[0])
255 changed = []
255 changed = []
256
256
257 if orig_parent == p1:
257 if orig_parent == p1:
258 update_dirstate = 1
258 update_dirstate = 1
259 else:
259 else:
260 update_dirstate = 0
260 update_dirstate = 0
261
261
262 tr = self.transaction()
262 tr = self.transaction()
263 mm = m1.copy()
263 mm = m1.copy()
264 mfm = mf1.copy()
264 mfm = mf1.copy()
265 linkrev = self.changelog.count()
265 linkrev = self.changelog.count()
266 for f in files:
266 for f in files:
267 try:
267 try:
268 t = self.wread(f)
268 t = self.wread(f)
269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
270 r = self.file(f)
270 r = self.file(f)
271 mfm[f] = tm
271 mfm[f] = tm
272
272
273 fp1 = m1.get(f, nullid)
273 fp1 = m1.get(f, nullid)
274 fp2 = m2.get(f, nullid)
274 fp2 = m2.get(f, nullid)
275
275
276 # is the same revision on two branches of a merge?
276 # is the same revision on two branches of a merge?
277 if fp2 == fp1:
277 if fp2 == fp1:
278 fp2 = nullid
278 fp2 = nullid
279
279
280 if fp2 != nullid:
280 if fp2 != nullid:
281 # is one parent an ancestor of the other?
281 # is one parent an ancestor of the other?
282 fpa = r.ancestor(fp1, fp2)
282 fpa = r.ancestor(fp1, fp2)
283 if fpa == fp1:
283 if fpa == fp1:
284 fp1, fp2 = fp2, nullid
284 fp1, fp2 = fp2, nullid
285 elif fpa == fp2:
285 elif fpa == fp2:
286 fp2 = nullid
286 fp2 = nullid
287
287
288 # is the file unmodified from the parent?
288 # is the file unmodified from the parent?
289 if t == r.read(fp1):
289 if t == r.read(fp1):
290 # record the proper existing parent in manifest
290 # record the proper existing parent in manifest
291 # no need to add a revision
291 # no need to add a revision
292 mm[f] = fp1
292 mm[f] = fp1
293 continue
293 continue
294
294
295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
296 changed.append(f)
296 changed.append(f)
297 if update_dirstate:
297 if update_dirstate:
298 self.dirstate.update([f], "n")
298 self.dirstate.update([f], "n")
299 except IOError:
299 except IOError:
300 try:
300 try:
301 del mm[f]
301 del mm[f]
302 del mfm[f]
302 del mfm[f]
303 if update_dirstate:
303 if update_dirstate:
304 self.dirstate.forget([f])
304 self.dirstate.forget([f])
305 except:
305 except:
306 # deleted from p2?
306 # deleted from p2?
307 pass
307 pass
308
308
309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
310 user = user or self.ui.username()
310 user = user or self.ui.username()
311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
312 tr.close()
312 tr.close()
313 if update_dirstate:
313 if update_dirstate:
314 self.dirstate.setparents(n, nullid)
314 self.dirstate.setparents(n, nullid)
315
315
316 def commit(self, files = None, text = "", user = None, date = None,
316 def commit(self, files = None, text = "", user = None, date = None,
317 match = util.always, force=False):
317 match = util.always, force=False):
318 commit = []
318 commit = []
319 remove = []
319 remove = []
320 changed = []
320 changed = []
321
321
322 if files:
322 if files:
323 for f in files:
323 for f in files:
324 s = self.dirstate.state(f)
324 s = self.dirstate.state(f)
325 if s in 'nmai':
325 if s in 'nmai':
326 commit.append(f)
326 commit.append(f)
327 elif s == 'r':
327 elif s == 'r':
328 remove.append(f)
328 remove.append(f)
329 else:
329 else:
330 self.ui.warn(_("%s not tracked!\n") % f)
330 self.ui.warn(_("%s not tracked!\n") % f)
331 else:
331 else:
332 (c, a, d, u) = self.changes(match=match)
332 (c, a, d, u) = self.changes(match=match)
333 commit = c + a
333 commit = c + a
334 remove = d
334 remove = d
335
335
336 p1, p2 = self.dirstate.parents()
336 p1, p2 = self.dirstate.parents()
337 c1 = self.changelog.read(p1)
337 c1 = self.changelog.read(p1)
338 c2 = self.changelog.read(p2)
338 c2 = self.changelog.read(p2)
339 m1 = self.manifest.read(c1[0])
339 m1 = self.manifest.read(c1[0])
340 mf1 = self.manifest.readflags(c1[0])
340 mf1 = self.manifest.readflags(c1[0])
341 m2 = self.manifest.read(c2[0])
341 m2 = self.manifest.read(c2[0])
342
342
343 if not commit and not remove and not force and p2 == nullid:
343 if not commit and not remove and not force and p2 == nullid:
344 self.ui.status(_("nothing changed\n"))
344 self.ui.status(_("nothing changed\n"))
345 return None
345 return None
346
346
347 if not self.hook("precommit"):
347 if not self.hook("precommit"):
348 return None
348 return None
349
349
350 lock = self.lock()
350 lock = self.lock()
351 tr = self.transaction()
351 tr = self.transaction()
352
352
353 # check in files
353 # check in files
354 new = {}
354 new = {}
355 linkrev = self.changelog.count()
355 linkrev = self.changelog.count()
356 commit.sort()
356 commit.sort()
357 for f in commit:
357 for f in commit:
358 self.ui.note(f + "\n")
358 self.ui.note(f + "\n")
359 try:
359 try:
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
361 t = self.wread(f)
361 t = self.wread(f)
362 except IOError:
362 except IOError:
363 self.ui.warn(_("trouble committing %s!\n") % f)
363 self.ui.warn(_("trouble committing %s!\n") % f)
364 raise
364 raise
365
365
366 r = self.file(f)
366 r = self.file(f)
367
367
368 meta = {}
368 meta = {}
369 cp = self.dirstate.copied(f)
369 cp = self.dirstate.copied(f)
370 if cp:
370 if cp:
371 meta["copy"] = cp
371 meta["copy"] = cp
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
374 fp1, fp2 = nullid, nullid
374 fp1, fp2 = nullid, nullid
375 else:
375 else:
376 fp1 = m1.get(f, nullid)
376 fp1 = m1.get(f, nullid)
377 fp2 = m2.get(f, nullid)
377 fp2 = m2.get(f, nullid)
378
378
379 # is the same revision on two branches of a merge?
379 # is the same revision on two branches of a merge?
380 if fp2 == fp1:
380 if fp2 == fp1:
381 fp2 = nullid
381 fp2 = nullid
382
382
383 if fp2 != nullid:
383 if fp2 != nullid:
384 # is one parent an ancestor of the other?
384 # is one parent an ancestor of the other?
385 fpa = r.ancestor(fp1, fp2)
385 fpa = r.ancestor(fp1, fp2)
386 if fpa == fp1:
386 if fpa == fp1:
387 fp1, fp2 = fp2, nullid
387 fp1, fp2 = fp2, nullid
388 elif fpa == fp2:
388 elif fpa == fp2:
389 fp2 = nullid
389 fp2 = nullid
390
390
391 # is the file unmodified from the parent?
391 # is the file unmodified from the parent?
392 if not meta and t == r.read(fp1):
392 if not meta and t == r.read(fp1):
393 # record the proper existing parent in manifest
393 # record the proper existing parent in manifest
394 # no need to add a revision
394 # no need to add a revision
395 new[f] = fp1
395 new[f] = fp1
396 continue
396 continue
397
397
398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
399 # remember what we've added so that we can later calculate
399 # remember what we've added so that we can later calculate
400 # the files to pull from a set of changesets
400 # the files to pull from a set of changesets
401 changed.append(f)
401 changed.append(f)
402
402
403 # update manifest
403 # update manifest
404 m1.update(new)
404 m1.update(new)
405 for f in remove:
405 for f in remove:
406 if f in m1:
406 if f in m1:
407 del m1[f]
407 del m1[f]
408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
409 (new, remove))
409 (new, remove))
410
410
411 # add changeset
411 # add changeset
412 new = new.keys()
412 new = new.keys()
413 new.sort()
413 new.sort()
414
414
415 if not text:
415 if not text:
416 edittext = ""
416 edittext = ""
417 if p2 != nullid:
417 if p2 != nullid:
418 edittext += "HG: branch merge\n"
418 edittext += "HG: branch merge\n"
419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
422 if not changed and not remove:
422 if not changed and not remove:
423 edittext += "HG: no files changed\n"
423 edittext += "HG: no files changed\n"
424 edittext = self.ui.edit(edittext)
424 edittext = self.ui.edit(edittext)
425 if not edittext.rstrip():
425 if not edittext.rstrip():
426 return None
426 return None
427 text = edittext
427 text = edittext
428
428
429 user = user or self.ui.username()
429 user = user or self.ui.username()
430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
431 tr.close()
431 tr.close()
432
432
433 self.dirstate.setparents(n)
433 self.dirstate.setparents(n)
434 self.dirstate.update(new, "n")
434 self.dirstate.update(new, "n")
435 self.dirstate.forget(remove)
435 self.dirstate.forget(remove)
436
436
437 if not self.hook("commit", node=hex(n)):
437 if not self.hook("commit", node=hex(n)):
438 return None
438 return None
439 return n
439 return n
440
440
441 def walk(self, node=None, files=[], match=util.always):
441 def walk(self, node=None, files=[], match=util.always):
442 if node:
442 if node:
443 for fn in self.manifest.read(self.changelog.read(node)[0]):
443 for fn in self.manifest.read(self.changelog.read(node)[0]):
444 if match(fn): yield 'm', fn
444 if match(fn): yield 'm', fn
445 else:
445 else:
446 for src, fn in self.dirstate.walk(files, match):
446 for src, fn in self.dirstate.walk(files, match):
447 yield src, fn
447 yield src, fn
448
448
449 def changes(self, node1 = None, node2 = None, files = [],
449 def changes(self, node1 = None, node2 = None, files = [],
450 match = util.always):
450 match = util.always):
451 mf2, u = None, []
451 mf2, u = None, []
452
452
453 def fcmp(fn, mf):
453 def fcmp(fn, mf):
454 t1 = self.wread(fn)
454 t1 = self.wread(fn)
455 t2 = self.file(fn).read(mf.get(fn, nullid))
455 t2 = self.file(fn).read(mf.get(fn, nullid))
456 return cmp(t1, t2)
456 return cmp(t1, t2)
457
457
458 def mfmatches(node):
458 def mfmatches(node):
459 mf = dict(self.manifest.read(node))
459 mf = dict(self.manifest.read(node))
460 for fn in mf.keys():
460 for fn in mf.keys():
461 if not match(fn):
461 if not match(fn):
462 del mf[fn]
462 del mf[fn]
463 return mf
463 return mf
464
464
465 # are we comparing the working directory?
465 # are we comparing the working directory?
466 if not node2:
466 if not node2:
467 l, c, a, d, u = self.dirstate.changes(files, match)
467 l, c, a, d, u = self.dirstate.changes(files, match)
468
468
469 # are we comparing working dir against its parent?
469 # are we comparing working dir against its parent?
470 if not node1:
470 if not node1:
471 if l:
471 if l:
472 # do a full compare of any files that might have changed
472 # do a full compare of any files that might have changed
473 change = self.changelog.read(self.dirstate.parents()[0])
473 change = self.changelog.read(self.dirstate.parents()[0])
474 mf2 = mfmatches(change[0])
474 mf2 = mfmatches(change[0])
475 for f in l:
475 for f in l:
476 if fcmp(f, mf2):
476 if fcmp(f, mf2):
477 c.append(f)
477 c.append(f)
478
478
479 for l in c, a, d, u:
479 for l in c, a, d, u:
480 l.sort()
480 l.sort()
481
481
482 return (c, a, d, u)
482 return (c, a, d, u)
483
483
484 # are we comparing working dir against non-tip?
484 # are we comparing working dir against non-tip?
485 # generate a pseudo-manifest for the working dir
485 # generate a pseudo-manifest for the working dir
486 if not node2:
486 if not node2:
487 if not mf2:
487 if not mf2:
488 change = self.changelog.read(self.dirstate.parents()[0])
488 change = self.changelog.read(self.dirstate.parents()[0])
489 mf2 = mfmatches(change[0])
489 mf2 = mfmatches(change[0])
490 for f in a + c + l:
490 for f in a + c + l:
491 mf2[f] = ""
491 mf2[f] = ""
492 for f in d:
492 for f in d:
493 if f in mf2: del mf2[f]
493 if f in mf2: del mf2[f]
494 else:
494 else:
495 change = self.changelog.read(node2)
495 change = self.changelog.read(node2)
496 mf2 = mfmatches(change[0])
496 mf2 = mfmatches(change[0])
497
497
498 # flush lists from dirstate before comparing manifests
498 # flush lists from dirstate before comparing manifests
499 c, a = [], []
499 c, a = [], []
500
500
501 change = self.changelog.read(node1)
501 change = self.changelog.read(node1)
502 mf1 = mfmatches(change[0])
502 mf1 = mfmatches(change[0])
503
503
504 for fn in mf2:
504 for fn in mf2:
505 if mf1.has_key(fn):
505 if mf1.has_key(fn):
506 if mf1[fn] != mf2[fn]:
506 if mf1[fn] != mf2[fn]:
507 if mf2[fn] != "" or fcmp(fn, mf1):
507 if mf2[fn] != "" or fcmp(fn, mf1):
508 c.append(fn)
508 c.append(fn)
509 del mf1[fn]
509 del mf1[fn]
510 else:
510 else:
511 a.append(fn)
511 a.append(fn)
512
512
513 d = mf1.keys()
513 d = mf1.keys()
514
514
515 for l in c, a, d, u:
515 for l in c, a, d, u:
516 l.sort()
516 l.sort()
517
517
518 return (c, a, d, u)
518 return (c, a, d, u)
519
519
520 def add(self, list):
520 def add(self, list):
521 for f in list:
521 for f in list:
522 p = self.wjoin(f)
522 p = self.wjoin(f)
523 if not os.path.exists(p):
523 if not os.path.exists(p):
524 self.ui.warn(_("%s does not exist!\n") % f)
524 self.ui.warn(_("%s does not exist!\n") % f)
525 elif not os.path.isfile(p):
525 elif not os.path.isfile(p):
526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
527 elif self.dirstate.state(f) in 'an':
527 elif self.dirstate.state(f) in 'an':
528 self.ui.warn(_("%s already tracked!\n") % f)
528 self.ui.warn(_("%s already tracked!\n") % f)
529 else:
529 else:
530 self.dirstate.update([f], "a")
530 self.dirstate.update([f], "a")
531
531
532 def forget(self, list):
532 def forget(self, list):
533 for f in list:
533 for f in list:
534 if self.dirstate.state(f) not in 'ai':
534 if self.dirstate.state(f) not in 'ai':
535 self.ui.warn(_("%s not added!\n") % f)
535 self.ui.warn(_("%s not added!\n") % f)
536 else:
536 else:
537 self.dirstate.forget([f])
537 self.dirstate.forget([f])
538
538
539 def remove(self, list, unlink=False):
539 def remove(self, list, unlink=False):
540 if unlink:
540 if unlink:
541 for f in list:
541 for f in list:
542 try:
542 try:
543 util.unlink(self.wjoin(f))
543 util.unlink(self.wjoin(f))
544 except OSError, inst:
544 except OSError, inst:
545 if inst.errno != errno.ENOENT: raise
545 if inst.errno != errno.ENOENT: raise
546 for f in list:
546 for f in list:
547 p = self.wjoin(f)
547 p = self.wjoin(f)
548 if os.path.exists(p):
548 if os.path.exists(p):
549 self.ui.warn(_("%s still exists!\n") % f)
549 self.ui.warn(_("%s still exists!\n") % f)
550 elif self.dirstate.state(f) == 'a':
550 elif self.dirstate.state(f) == 'a':
551 self.ui.warn(_("%s never committed!\n") % f)
551 self.ui.warn(_("%s never committed!\n") % f)
552 self.dirstate.forget([f])
552 self.dirstate.forget([f])
553 elif f not in self.dirstate:
553 elif f not in self.dirstate:
554 self.ui.warn(_("%s not tracked!\n") % f)
554 self.ui.warn(_("%s not tracked!\n") % f)
555 else:
555 else:
556 self.dirstate.update([f], "r")
556 self.dirstate.update([f], "r")
557
557
558 def undelete(self, list):
558 def undelete(self, list):
559 p = self.dirstate.parents()[0]
559 p = self.dirstate.parents()[0]
560 mn = self.changelog.read(p)[0]
560 mn = self.changelog.read(p)[0]
561 mf = self.manifest.readflags(mn)
561 mf = self.manifest.readflags(mn)
562 m = self.manifest.read(mn)
562 m = self.manifest.read(mn)
563 for f in list:
563 for f in list:
564 if self.dirstate.state(f) not in "r":
564 if self.dirstate.state(f) not in "r":
565 self.ui.warn("%s not removed!\n" % f)
565 self.ui.warn("%s not removed!\n" % f)
566 else:
566 else:
567 t = self.file(f).read(m[f])
567 t = self.file(f).read(m[f])
568 try:
568 try:
569 self.wwrite(f, t)
569 self.wwrite(f, t)
570 except IOError, e:
570 except IOError, e:
571 if e.errno != errno.ENOENT:
571 if e.errno != errno.ENOENT:
572 raise
572 raise
573 os.makedirs(os.path.dirname(self.wjoin(f)))
573 os.makedirs(os.path.dirname(self.wjoin(f)))
574 self.wwrite(f, t)
574 self.wwrite(f, t)
575 util.set_exec(self.wjoin(f), mf[f])
575 util.set_exec(self.wjoin(f), mf[f])
576 self.dirstate.update([f], "n")
576 self.dirstate.update([f], "n")
577
577
578 def copy(self, source, dest):
578 def copy(self, source, dest):
579 p = self.wjoin(dest)
579 p = self.wjoin(dest)
580 if not os.path.exists(p):
580 if not os.path.exists(p):
581 self.ui.warn(_("%s does not exist!\n") % dest)
581 self.ui.warn(_("%s does not exist!\n") % dest)
582 elif not os.path.isfile(p):
582 elif not os.path.isfile(p):
583 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
583 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
584 else:
584 else:
585 if self.dirstate.state(dest) == '?':
585 if self.dirstate.state(dest) == '?':
586 self.dirstate.update([dest], "a")
586 self.dirstate.update([dest], "a")
587 self.dirstate.copy(source, dest)
587 self.dirstate.copy(source, dest)
588
588
589 def heads(self):
589 def heads(self):
590 return self.changelog.heads()
590 return self.changelog.heads()
591
591
592 # branchlookup returns a dict giving a list of branches for
592 # branchlookup returns a dict giving a list of branches for
593 # each head. A branch is defined as the tag of a node or
593 # each head. A branch is defined as the tag of a node or
594 # the branch of the node's parents. If a node has multiple
594 # the branch of the node's parents. If a node has multiple
595 # branch tags, tags are eliminated if they are visible from other
595 # branch tags, tags are eliminated if they are visible from other
596 # branch tags.
596 # branch tags.
597 #
597 #
598 # So, for this graph: a->b->c->d->e
598 # So, for this graph: a->b->c->d->e
599 # \ /
599 # \ /
600 # aa -----/
600 # aa -----/
601 # a has tag 2.6.12
601 # a has tag 2.6.12
602 # d has tag 2.6.13
602 # d has tag 2.6.13
603 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
603 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
604 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
604 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
605 # from the list.
605 # from the list.
606 #
606 #
607 # It is possible that more than one head will have the same branch tag.
607 # It is possible that more than one head will have the same branch tag.
608 # callers need to check the result for multiple heads under the same
608 # callers need to check the result for multiple heads under the same
609 # branch tag if that is a problem for them (ie checkout of a specific
609 # branch tag if that is a problem for them (ie checkout of a specific
610 # branch).
610 # branch).
611 #
611 #
612 # passing in a specific branch will limit the depth of the search
612 # passing in a specific branch will limit the depth of the search
613 # through the parents. It won't limit the branches returned in the
613 # through the parents. It won't limit the branches returned in the
614 # result though.
614 # result though.
615 def branchlookup(self, heads=None, branch=None):
615 def branchlookup(self, heads=None, branch=None):
616 if not heads:
616 if not heads:
617 heads = self.heads()
617 heads = self.heads()
618 headt = [ h for h in heads ]
618 headt = [ h for h in heads ]
619 chlog = self.changelog
619 chlog = self.changelog
620 branches = {}
620 branches = {}
621 merges = []
621 merges = []
622 seenmerge = {}
622 seenmerge = {}
623
623
624 # traverse the tree once for each head, recording in the branches
624 # traverse the tree once for each head, recording in the branches
625 # dict which tags are visible from this head. The branches
625 # dict which tags are visible from this head. The branches
626 # dict also records which tags are visible from each tag
626 # dict also records which tags are visible from each tag
627 # while we traverse.
627 # while we traverse.
628 while headt or merges:
628 while headt or merges:
629 if merges:
629 if merges:
630 n, found = merges.pop()
630 n, found = merges.pop()
631 visit = [n]
631 visit = [n]
632 else:
632 else:
633 h = headt.pop()
633 h = headt.pop()
634 visit = [h]
634 visit = [h]
635 found = [h]
635 found = [h]
636 seen = {}
636 seen = {}
637 while visit:
637 while visit:
638 n = visit.pop()
638 n = visit.pop()
639 if n in seen:
639 if n in seen:
640 continue
640 continue
641 pp = chlog.parents(n)
641 pp = chlog.parents(n)
642 tags = self.nodetags(n)
642 tags = self.nodetags(n)
643 if tags:
643 if tags:
644 for x in tags:
644 for x in tags:
645 if x == 'tip':
645 if x == 'tip':
646 continue
646 continue
647 for f in found:
647 for f in found:
648 branches.setdefault(f, {})[n] = 1
648 branches.setdefault(f, {})[n] = 1
649 branches.setdefault(n, {})[n] = 1
649 branches.setdefault(n, {})[n] = 1
650 break
650 break
651 if n not in found:
651 if n not in found:
652 found.append(n)
652 found.append(n)
653 if branch in tags:
653 if branch in tags:
654 continue
654 continue
655 seen[n] = 1
655 seen[n] = 1
656 if pp[1] != nullid and n not in seenmerge:
656 if pp[1] != nullid and n not in seenmerge:
657 merges.append((pp[1], [x for x in found]))
657 merges.append((pp[1], [x for x in found]))
658 seenmerge[n] = 1
658 seenmerge[n] = 1
659 if pp[0] != nullid:
659 if pp[0] != nullid:
660 visit.append(pp[0])
660 visit.append(pp[0])
661 # traverse the branches dict, eliminating branch tags from each
661 # traverse the branches dict, eliminating branch tags from each
662 # head that are visible from another branch tag for that head.
662 # head that are visible from another branch tag for that head.
663 out = {}
663 out = {}
664 viscache = {}
664 viscache = {}
665 for h in heads:
665 for h in heads:
666 def visible(node):
666 def visible(node):
667 if node in viscache:
667 if node in viscache:
668 return viscache[node]
668 return viscache[node]
669 ret = {}
669 ret = {}
670 visit = [node]
670 visit = [node]
671 while visit:
671 while visit:
672 x = visit.pop()
672 x = visit.pop()
673 if x in viscache:
673 if x in viscache:
674 ret.update(viscache[x])
674 ret.update(viscache[x])
675 elif x not in ret:
675 elif x not in ret:
676 ret[x] = 1
676 ret[x] = 1
677 if x in branches:
677 if x in branches:
678 visit[len(visit):] = branches[x].keys()
678 visit[len(visit):] = branches[x].keys()
679 viscache[node] = ret
679 viscache[node] = ret
680 return ret
680 return ret
681 if h not in branches:
681 if h not in branches:
682 continue
682 continue
683 # O(n^2), but somewhat limited. This only searches the
683 # O(n^2), but somewhat limited. This only searches the
684 # tags visible from a specific head, not all the tags in the
684 # tags visible from a specific head, not all the tags in the
685 # whole repo.
685 # whole repo.
686 for b in branches[h]:
686 for b in branches[h]:
687 vis = False
687 vis = False
688 for bb in branches[h].keys():
688 for bb in branches[h].keys():
689 if b != bb:
689 if b != bb:
690 if b in visible(bb):
690 if b in visible(bb):
691 vis = True
691 vis = True
692 break
692 break
693 if not vis:
693 if not vis:
694 l = out.setdefault(h, [])
694 l = out.setdefault(h, [])
695 l[len(l):] = self.nodetags(b)
695 l[len(l):] = self.nodetags(b)
696 return out
696 return out
697
697
698 def branches(self, nodes):
698 def branches(self, nodes):
699 if not nodes: nodes = [self.changelog.tip()]
699 if not nodes: nodes = [self.changelog.tip()]
700 b = []
700 b = []
701 for n in nodes:
701 for n in nodes:
702 t = n
702 t = n
703 while n:
703 while n:
704 p = self.changelog.parents(n)
704 p = self.changelog.parents(n)
705 if p[1] != nullid or p[0] == nullid:
705 if p[1] != nullid or p[0] == nullid:
706 b.append((t, n, p[0], p[1]))
706 b.append((t, n, p[0], p[1]))
707 break
707 break
708 n = p[0]
708 n = p[0]
709 return b
709 return b
710
710
711 def between(self, pairs):
711 def between(self, pairs):
712 r = []
712 r = []
713
713
714 for top, bottom in pairs:
714 for top, bottom in pairs:
715 n, l, i = top, [], 0
715 n, l, i = top, [], 0
716 f = 1
716 f = 1
717
717
718 while n != bottom:
718 while n != bottom:
719 p = self.changelog.parents(n)[0]
719 p = self.changelog.parents(n)[0]
720 if i == f:
720 if i == f:
721 l.append(n)
721 l.append(n)
722 f = f * 2
722 f = f * 2
723 n = p
723 n = p
724 i += 1
724 i += 1
725
725
726 r.append(l)
726 r.append(l)
727
727
728 return r
728 return r
729
729
730 def findincoming(self, remote, base=None, heads=None):
730 def findincoming(self, remote, base=None, heads=None):
731 m = self.changelog.nodemap
731 m = self.changelog.nodemap
732 search = []
732 search = []
733 fetch = {}
733 fetch = {}
734 seen = {}
734 seen = {}
735 seenbranch = {}
735 seenbranch = {}
736 if base == None:
736 if base == None:
737 base = {}
737 base = {}
738
738
739 # assume we're closer to the tip than the root
739 # assume we're closer to the tip than the root
740 # and start by examining the heads
740 # and start by examining the heads
741 self.ui.status(_("searching for changes\n"))
741 self.ui.status(_("searching for changes\n"))
742
742
743 if not heads:
743 if not heads:
744 heads = remote.heads()
744 heads = remote.heads()
745
745
746 unknown = []
746 unknown = []
747 for h in heads:
747 for h in heads:
748 if h not in m:
748 if h not in m:
749 unknown.append(h)
749 unknown.append(h)
750 else:
750 else:
751 base[h] = 1
751 base[h] = 1
752
752
753 if not unknown:
753 if not unknown:
754 return None
754 return None
755
755
756 rep = {}
756 rep = {}
757 reqcnt = 0
757 reqcnt = 0
758
758
759 # search through remote branches
759 # search through remote branches
760 # a 'branch' here is a linear segment of history, with four parts:
760 # a 'branch' here is a linear segment of history, with four parts:
761 # head, root, first parent, second parent
761 # head, root, first parent, second parent
762 # (a branch always has two parents (or none) by definition)
762 # (a branch always has two parents (or none) by definition)
763 unknown = remote.branches(unknown)
763 unknown = remote.branches(unknown)
764 while unknown:
764 while unknown:
765 r = []
765 r = []
766 while unknown:
766 while unknown:
767 n = unknown.pop(0)
767 n = unknown.pop(0)
768 if n[0] in seen:
768 if n[0] in seen:
769 continue
769 continue
770
770
771 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
771 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
772 if n[0] == nullid:
772 if n[0] == nullid:
773 break
773 break
774 if n in seenbranch:
774 if n in seenbranch:
775 self.ui.debug(_("branch already found\n"))
775 self.ui.debug(_("branch already found\n"))
776 continue
776 continue
777 if n[1] and n[1] in m: # do we know the base?
777 if n[1] and n[1] in m: # do we know the base?
778 self.ui.debug(_("found incomplete branch %s:%s\n")
778 self.ui.debug(_("found incomplete branch %s:%s\n")
779 % (short(n[0]), short(n[1])))
779 % (short(n[0]), short(n[1])))
780 search.append(n) # schedule branch range for scanning
780 search.append(n) # schedule branch range for scanning
781 seenbranch[n] = 1
781 seenbranch[n] = 1
782 else:
782 else:
783 if n[1] not in seen and n[1] not in fetch:
783 if n[1] not in seen and n[1] not in fetch:
784 if n[2] in m and n[3] in m:
784 if n[2] in m and n[3] in m:
785 self.ui.debug(_("found new changeset %s\n") %
785 self.ui.debug(_("found new changeset %s\n") %
786 short(n[1]))
786 short(n[1]))
787 fetch[n[1]] = 1 # earliest unknown
787 fetch[n[1]] = 1 # earliest unknown
788 base[n[2]] = 1 # latest known
788 base[n[2]] = 1 # latest known
789 continue
789 continue
790
790
791 for a in n[2:4]:
791 for a in n[2:4]:
792 if a not in rep:
792 if a not in rep:
793 r.append(a)
793 r.append(a)
794 rep[a] = 1
794 rep[a] = 1
795
795
796 seen[n[0]] = 1
796 seen[n[0]] = 1
797
797
798 if r:
798 if r:
799 reqcnt += 1
799 reqcnt += 1
800 self.ui.debug(_("request %d: %s\n") %
800 self.ui.debug(_("request %d: %s\n") %
801 (reqcnt, " ".join(map(short, r))))
801 (reqcnt, " ".join(map(short, r))))
802 for p in range(0, len(r), 10):
802 for p in range(0, len(r), 10):
803 for b in remote.branches(r[p:p+10]):
803 for b in remote.branches(r[p:p+10]):
804 self.ui.debug(_("received %s:%s\n") %
804 self.ui.debug(_("received %s:%s\n") %
805 (short(b[0]), short(b[1])))
805 (short(b[0]), short(b[1])))
806 if b[0] in m:
806 if b[0] in m:
807 self.ui.debug(_("found base node %s\n") % short(b[0]))
807 self.ui.debug(_("found base node %s\n") % short(b[0]))
808 base[b[0]] = 1
808 base[b[0]] = 1
809 elif b[0] not in seen:
809 elif b[0] not in seen:
810 unknown.append(b)
810 unknown.append(b)
811
811
812 # do binary search on the branches we found
812 # do binary search on the branches we found
813 while search:
813 while search:
814 n = search.pop(0)
814 n = search.pop(0)
815 reqcnt += 1
815 reqcnt += 1
816 l = remote.between([(n[0], n[1])])[0]
816 l = remote.between([(n[0], n[1])])[0]
817 l.append(n[1])
817 l.append(n[1])
818 p = n[0]
818 p = n[0]
819 f = 1
819 f = 1
820 for i in l:
820 for i in l:
821 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
821 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
822 if i in m:
822 if i in m:
823 if f <= 2:
823 if f <= 2:
824 self.ui.debug(_("found new branch changeset %s\n") %
824 self.ui.debug(_("found new branch changeset %s\n") %
825 short(p))
825 short(p))
826 fetch[p] = 1
826 fetch[p] = 1
827 base[i] = 1
827 base[i] = 1
828 else:
828 else:
829 self.ui.debug(_("narrowed branch search to %s:%s\n")
829 self.ui.debug(_("narrowed branch search to %s:%s\n")
830 % (short(p), short(i)))
830 % (short(p), short(i)))
831 search.append((p, i))
831 search.append((p, i))
832 break
832 break
833 p, f = i, f * 2
833 p, f = i, f * 2
834
834
835 # sanity check our fetch list
835 # sanity check our fetch list
836 for f in fetch.keys():
836 for f in fetch.keys():
837 if f in m:
837 if f in m:
838 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
838 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
839
839
840 if base.keys() == [nullid]:
840 if base.keys() == [nullid]:
841 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
841 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
842
842
843 self.ui.note(_("found new changesets starting at ") +
843 self.ui.note(_("found new changesets starting at ") +
844 " ".join([short(f) for f in fetch]) + "\n")
844 " ".join([short(f) for f in fetch]) + "\n")
845
845
846 self.ui.debug(_("%d total queries\n") % reqcnt)
846 self.ui.debug(_("%d total queries\n") % reqcnt)
847
847
848 return fetch.keys()
848 return fetch.keys()
849
849
850 def findoutgoing(self, remote, base=None, heads=None):
850 def findoutgoing(self, remote, base=None, heads=None):
851 if base == None:
851 if base == None:
852 base = {}
852 base = {}
853 self.findincoming(remote, base, heads)
853 self.findincoming(remote, base, heads)
854
854
855 self.ui.debug(_("common changesets up to ")
855 self.ui.debug(_("common changesets up to ")
856 + " ".join(map(short, base.keys())) + "\n")
856 + " ".join(map(short, base.keys())) + "\n")
857
857
858 remain = dict.fromkeys(self.changelog.nodemap)
858 remain = dict.fromkeys(self.changelog.nodemap)
859
859
860 # prune everything remote has from the tree
860 # prune everything remote has from the tree
861 del remain[nullid]
861 del remain[nullid]
862 remove = base.keys()
862 remove = base.keys()
863 while remove:
863 while remove:
864 n = remove.pop(0)
864 n = remove.pop(0)
865 if n in remain:
865 if n in remain:
866 del remain[n]
866 del remain[n]
867 for p in self.changelog.parents(n):
867 for p in self.changelog.parents(n):
868 remove.append(p)
868 remove.append(p)
869
869
870 # find every node whose parents have been pruned
870 # find every node whose parents have been pruned
871 subset = []
871 subset = []
872 for n in remain:
872 for n in remain:
873 p1, p2 = self.changelog.parents(n)
873 p1, p2 = self.changelog.parents(n)
874 if p1 not in remain and p2 not in remain:
874 if p1 not in remain and p2 not in remain:
875 subset.append(n)
875 subset.append(n)
876
876
877 # this is the set of all roots we have to push
877 # this is the set of all roots we have to push
878 return subset
878 return subset
879
879
880 def pull(self, remote, heads = None):
880 def pull(self, remote, heads = None):
881 lock = self.lock()
881 lock = self.lock()
882
882
883 # if we have an empty repo, fetch everything
883 # if we have an empty repo, fetch everything
884 if self.changelog.tip() == nullid:
884 if self.changelog.tip() == nullid:
885 self.ui.status(_("requesting all changes\n"))
885 self.ui.status(_("requesting all changes\n"))
886 fetch = [nullid]
886 fetch = [nullid]
887 else:
887 else:
888 fetch = self.findincoming(remote)
888 fetch = self.findincoming(remote)
889
889
890 if not fetch:
890 if not fetch:
891 self.ui.status(_("no changes found\n"))
891 self.ui.status(_("no changes found\n"))
892 return 1
892 return 1
893
893
894 if heads is None:
894 if heads is None:
895 cg = remote.changegroup(fetch)
895 cg = remote.changegroup(fetch)
896 else:
896 else:
897 cg = remote.changegroupsubset(fetch, heads)
897 cg = remote.changegroupsubset(fetch, heads)
898 return self.addchangegroup(cg)
898 return self.addchangegroup(cg)
899
899
900 def push(self, remote, force=False):
900 def push(self, remote, force=False):
901 lock = remote.lock()
901 lock = remote.lock()
902
902
903 base = {}
903 base = {}
904 heads = remote.heads()
904 heads = remote.heads()
905 inc = self.findincoming(remote, base, heads)
905 inc = self.findincoming(remote, base, heads)
906 if not force and inc:
906 if not force and inc:
907 self.ui.warn(_("abort: unsynced remote changes!\n"))
907 self.ui.warn(_("abort: unsynced remote changes!\n"))
908 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
908 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
909 return 1
909 return 1
910
910
911 update = self.findoutgoing(remote, base)
911 update = self.findoutgoing(remote, base)
912 if not update:
912 if not update:
913 self.ui.status(_("no changes found\n"))
913 self.ui.status(_("no changes found\n"))
914 return 1
914 return 1
915 elif not force:
915 elif not force:
916 if len(heads) < len(self.changelog.heads()):
916 if len(heads) < len(self.changelog.heads()):
917 self.ui.warn(_("abort: push creates new remote branches!\n"))
917 self.ui.warn(_("abort: push creates new remote branches!\n"))
918 self.ui.status(_("(did you forget to merge?"
918 self.ui.status(_("(did you forget to merge?"
919 " use push -f to force)\n"))
919 " use push -f to force)\n"))
920 return 1
920 return 1
921
921
922 cg = self.changegroup(update)
922 cg = self.changegroup(update)
923 return remote.addchangegroup(cg)
923 return remote.addchangegroup(cg)
924
924
    def changegroupsubset(self, bases, heads):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns a util.chunkbuffer streaming the changegroup: the changelog
        group, then the manifest group, then one group per changed file."""

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        # Some bases may turn out to be superfluous, and some heads may be
        # too. nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known. The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function. Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents. This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            # walk in revision order so ancestors are handled consistently
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup. The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            # one-element list so the inner function can rebind it (no
            # 'nonlocal' in this Python version)
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                prune_filenodes(fname, filerevlog)
                msng_filenode_lst = msng_filenode_set[fname].keys()
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    # a chunk carrying the file name precedes each file group
                    yield struct.pack(">l", len(fname) + 4) + fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                # Don't need this anymore, toss it to free memory.
                del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield struct.pack(">l", 0)

        return util.chunkbuffer(gengroup())
1186
1186
    def changegroup(self, basenodes):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns a util.chunkbuffer streaming the changegroup in the same
        chunk layout as changegroupsubset: changelog group, manifest group,
        then one named group per changed file, ended by a zero chunk."""
        cl = self.changelog
        # every changeset descending from one of the basenodes goes out
        nodes = cl.nodesbetween(basenodes, None)[0]
        # ersatz set of outgoing changelog revision numbers; used below to
        # pick which manifest/file revisions belong in the group
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        # a changenode 'owns' itself, so its lookup function is identity
        def identity(x):
            return x

        # yield, in storage (revision) order, the nodes of revlog whose
        # linked changeset is among those being sent
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # build a callback that records into changedfileset every file
        # touched by each outgoing changeset as the changelog group streams
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # build a callback mapping a manifest/file node back to the
        # changeset node it was introduced by
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            # first the changelog group, collecting changed files on the way
            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            # then the manifest group
            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            # then one group per changed file, each preceded by a chunk
            # carrying the file name; files with no outgoing revisions are
            # skipped entirely
            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                # materialize so we can test for emptiness before emitting
                # the file-name chunk
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield struct.pack(">l", len(fname) + 4) + fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            # a zero-length chunk signals the end of the changegroup
            yield struct.pack(">l", 0)

        return util.chunkbuffer(gengroup())
1246
1246
def addchangegroup(self, source):
    """apply a changegroup read from source to the repository

    Reads the changeset, manifest and filelog chunk groups from the
    stream, adds them to their respective revlogs inside a single
    transaction, then fires the "changegroup" hook and one "commit"
    hook per added changeset.  Returns 1 if the changegroup hook
    reports failure, None otherwise.
    """

    def readchunk():
        # a chunk is a 4-byte big-endian length (which includes the
        # length field itself) followed by the payload; a missing or
        # <= 4 length terminates a group
        d = source.read(4)
        if not d:
            return ""
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            return ""
        d = source.read(l - 4)
        if len(d) < l - 4:
            raise repo.RepoError(_("premature EOF reading chunk"
                                   " (got %d bytes, expected %d)")
                                 % (len(d), l - 4))
        return d

    def chunkiter():
        # yield payload chunks until the empty terminator chunk
        while 1:
            piece = readchunk()
            if not piece:
                break
            yield piece

    def csmap(x):
        self.ui.debug(_("add changeset %s\n") % short(x))
        return self.changelog.count()

    def revmap(x):
        return self.changelog.rev(x)

    if not source:
        return
    changesets = files = revisions = 0

    tr = self.transaction()

    oldheads = len(self.changelog.heads())

    # pull off the changeset group
    self.ui.status(_("adding changesets\n"))
    co = self.changelog.tip()
    cn = self.changelog.addgroup(chunkiter(), csmap, tr, 1) # unique
    cnr, cor = map(self.changelog.rev, (cn, co))
    if cn == nullid:
        cnr = cor
    changesets = cnr - cor

    # pull off the manifest group
    self.ui.status(_("adding manifests\n"))
    mm = self.manifest.tip()
    mo = self.manifest.addgroup(chunkiter(), revmap, tr)

    # process the files
    self.ui.status(_("adding file changes\n"))
    while 1:
        f = readchunk()
        if not f:
            break
        self.ui.debug(_("adding %s revisions\n") % f)
        fl = self.file(f)
        o = fl.count()
        n = fl.addgroup(chunkiter(), revmap, tr)
        revisions += fl.count() - o
        files += 1

    newheads = len(self.changelog.heads())
    heads = ""
    if oldheads and newheads > oldheads:
        heads = _(" (+%d heads)") % (newheads - oldheads)

    self.ui.status(_("added %d changesets"
                     " with %d changes to %d files%s\n")
                   % (changesets, revisions, files, heads))

    tr.close()

    if changesets > 0:
        if not self.hook("changegroup",
                         node=hex(self.changelog.node(cor+1))):
            self.ui.warn(_("abort: changegroup hook returned failure!\n"))
            return 1

        for i in range(cor + 1, cnr + 1):
            self.hook("commit", node=hex(self.changelog.node(i)))

    return
1328
1328
1329 def update(self, node, allow=False, force=False, choose=None,
1329 def update(self, node, allow=False, force=False, choose=None,
1330 moddirstate=True):
1330 moddirstate=True):
1331 pl = self.dirstate.parents()
1331 pl = self.dirstate.parents()
1332 if not force and pl[1] != nullid:
1332 if not force and pl[1] != nullid:
1333 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1333 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1334 return 1
1334 return 1
1335
1335
1336 p1, p2 = pl[0], node
1336 p1, p2 = pl[0], node
1337 pa = self.changelog.ancestor(p1, p2)
1337 pa = self.changelog.ancestor(p1, p2)
1338 m1n = self.changelog.read(p1)[0]
1338 m1n = self.changelog.read(p1)[0]
1339 m2n = self.changelog.read(p2)[0]
1339 m2n = self.changelog.read(p2)[0]
1340 man = self.manifest.ancestor(m1n, m2n)
1340 man = self.manifest.ancestor(m1n, m2n)
1341 m1 = self.manifest.read(m1n)
1341 m1 = self.manifest.read(m1n)
1342 mf1 = self.manifest.readflags(m1n)
1342 mf1 = self.manifest.readflags(m1n)
1343 m2 = self.manifest.read(m2n)
1343 m2 = self.manifest.read(m2n)
1344 mf2 = self.manifest.readflags(m2n)
1344 mf2 = self.manifest.readflags(m2n)
1345 ma = self.manifest.read(man)
1345 ma = self.manifest.read(man)
1346 mfa = self.manifest.readflags(man)
1346 mfa = self.manifest.readflags(man)
1347
1347
1348 (c, a, d, u) = self.changes()
1348 (c, a, d, u) = self.changes()
1349
1349
1350 # is this a jump, or a merge? i.e. is there a linear path
1350 # is this a jump, or a merge? i.e. is there a linear path
1351 # from p1 to p2?
1351 # from p1 to p2?
1352 linear_path = (pa == p1 or pa == p2)
1352 linear_path = (pa == p1 or pa == p2)
1353
1353
1354 # resolve the manifest to determine which files
1354 # resolve the manifest to determine which files
1355 # we care about merging
1355 # we care about merging
1356 self.ui.note(_("resolving manifests\n"))
1356 self.ui.note(_("resolving manifests\n"))
1357 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1357 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1358 (force, allow, moddirstate, linear_path))
1358 (force, allow, moddirstate, linear_path))
1359 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1359 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1360 (short(man), short(m1n), short(m2n)))
1360 (short(man), short(m1n), short(m2n)))
1361
1361
1362 merge = {}
1362 merge = {}
1363 get = {}
1363 get = {}
1364 remove = []
1364 remove = []
1365
1365
1366 # construct a working dir manifest
1366 # construct a working dir manifest
1367 mw = m1.copy()
1367 mw = m1.copy()
1368 mfw = mf1.copy()
1368 mfw = mf1.copy()
1369 umap = dict.fromkeys(u)
1369 umap = dict.fromkeys(u)
1370
1370
1371 for f in a + c + u:
1371 for f in a + c + u:
1372 mw[f] = ""
1372 mw[f] = ""
1373 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1373 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1374
1374
1375 for f in d:
1375 for f in d:
1376 if f in mw: del mw[f]
1376 if f in mw: del mw[f]
1377
1377
1378 # If we're jumping between revisions (as opposed to merging),
1378 # If we're jumping between revisions (as opposed to merging),
1379 # and if neither the working directory nor the target rev has
1379 # and if neither the working directory nor the target rev has
1380 # the file, then we need to remove it from the dirstate, to
1380 # the file, then we need to remove it from the dirstate, to
1381 # prevent the dirstate from listing the file when it is no
1381 # prevent the dirstate from listing the file when it is no
1382 # longer in the manifest.
1382 # longer in the manifest.
1383 if moddirstate and linear_path and f not in m2:
1383 if moddirstate and linear_path and f not in m2:
1384 self.dirstate.forget((f,))
1384 self.dirstate.forget((f,))
1385
1385
1386 # Compare manifests
1386 # Compare manifests
1387 for f, n in mw.iteritems():
1387 for f, n in mw.iteritems():
1388 if choose and not choose(f): continue
1388 if choose and not choose(f): continue
1389 if f in m2:
1389 if f in m2:
1390 s = 0
1390 s = 0
1391
1391
1392 # is the wfile new since m1, and match m2?
1392 # is the wfile new since m1, and match m2?
1393 if f not in m1:
1393 if f not in m1:
1394 t1 = self.wread(f)
1394 t1 = self.wread(f)
1395 t2 = self.file(f).read(m2[f])
1395 t2 = self.file(f).read(m2[f])
1396 if cmp(t1, t2) == 0:
1396 if cmp(t1, t2) == 0:
1397 n = m2[f]
1397 n = m2[f]
1398 del t1, t2
1398 del t1, t2
1399
1399
1400 # are files different?
1400 # are files different?
1401 if n != m2[f]:
1401 if n != m2[f]:
1402 a = ma.get(f, nullid)
1402 a = ma.get(f, nullid)
1403 # are both different from the ancestor?
1403 # are both different from the ancestor?
1404 if n != a and m2[f] != a:
1404 if n != a and m2[f] != a:
1405 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1405 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1406 # merge executable bits
1406 # merge executable bits
1407 # "if we changed or they changed, change in merge"
1407 # "if we changed or they changed, change in merge"
1408 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1408 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1409 mode = ((a^b) | (a^c)) ^ a
1409 mode = ((a^b) | (a^c)) ^ a
1410 merge[f] = (m1.get(f, nullid), m2[f], mode)
1410 merge[f] = (m1.get(f, nullid), m2[f], mode)
1411 s = 1
1411 s = 1
1412 # are we clobbering?
1412 # are we clobbering?
1413 # is remote's version newer?
1413 # is remote's version newer?
1414 # or are we going back in time?
1414 # or are we going back in time?
1415 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1415 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1416 self.ui.debug(_(" remote %s is newer, get\n") % f)
1416 self.ui.debug(_(" remote %s is newer, get\n") % f)
1417 get[f] = m2[f]
1417 get[f] = m2[f]
1418 s = 1
1418 s = 1
1419 elif f in umap:
1419 elif f in umap:
1420 # this unknown file is the same as the checkout
1420 # this unknown file is the same as the checkout
1421 get[f] = m2[f]
1421 get[f] = m2[f]
1422
1422
1423 if not s and mfw[f] != mf2[f]:
1423 if not s and mfw[f] != mf2[f]:
1424 if force:
1424 if force:
1425 self.ui.debug(_(" updating permissions for %s\n") % f)
1425 self.ui.debug(_(" updating permissions for %s\n") % f)
1426 util.set_exec(self.wjoin(f), mf2[f])
1426 util.set_exec(self.wjoin(f), mf2[f])
1427 else:
1427 else:
1428 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1428 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1429 mode = ((a^b) | (a^c)) ^ a
1429 mode = ((a^b) | (a^c)) ^ a
1430 if mode != b:
1430 if mode != b:
1431 self.ui.debug(_(" updating permissions for %s\n") % f)
1431 self.ui.debug(_(" updating permissions for %s\n") % f)
1432 util.set_exec(self.wjoin(f), mode)
1432 util.set_exec(self.wjoin(f), mode)
1433 del m2[f]
1433 del m2[f]
1434 elif f in ma:
1434 elif f in ma:
1435 if n != ma[f]:
1435 if n != ma[f]:
1436 r = _("d")
1436 r = _("d")
1437 if not force and (linear_path or allow):
1437 if not force and (linear_path or allow):
1438 r = self.ui.prompt(
1438 r = self.ui.prompt(
1439 (_(" local changed %s which remote deleted\n") % f) +
1439 (_(" local changed %s which remote deleted\n") % f) +
1440 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1440 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1441 if r == _("d"):
1441 if r == _("d"):
1442 remove.append(f)
1442 remove.append(f)
1443 else:
1443 else:
1444 self.ui.debug(_("other deleted %s\n") % f)
1444 self.ui.debug(_("other deleted %s\n") % f)
1445 remove.append(f) # other deleted it
1445 remove.append(f) # other deleted it
1446 else:
1446 else:
1447 # file is created on branch or in working directory
1447 # file is created on branch or in working directory
1448 if force and f not in umap:
1448 if force and f not in umap:
1449 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1449 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1450 remove.append(f)
1450 remove.append(f)
1451 elif n == m1.get(f, nullid): # same as parent
1451 elif n == m1.get(f, nullid): # same as parent
1452 if p2 == pa: # going backwards?
1452 if p2 == pa: # going backwards?
1453 self.ui.debug(_("remote deleted %s\n") % f)
1453 self.ui.debug(_("remote deleted %s\n") % f)
1454 remove.append(f)
1454 remove.append(f)
1455 else:
1455 else:
1456 self.ui.debug(_("local modified %s, keeping\n") % f)
1456 self.ui.debug(_("local modified %s, keeping\n") % f)
1457 else:
1457 else:
1458 self.ui.debug(_("working dir created %s, keeping\n") % f)
1458 self.ui.debug(_("working dir created %s, keeping\n") % f)
1459
1459
1460 for f, n in m2.iteritems():
1460 for f, n in m2.iteritems():
1461 if choose and not choose(f): continue
1461 if choose and not choose(f): continue
1462 if f[0] == "/": continue
1462 if f[0] == "/": continue
1463 if f in ma and n != ma[f]:
1463 if f in ma and n != ma[f]:
1464 r = _("k")
1464 r = _("k")
1465 if not force and (linear_path or allow):
1465 if not force and (linear_path or allow):
1466 r = self.ui.prompt(
1466 r = self.ui.prompt(
1467 (_("remote changed %s which local deleted\n") % f) +
1467 (_("remote changed %s which local deleted\n") % f) +
1468 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1468 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1469 if r == _("k"): get[f] = n
1469 if r == _("k"): get[f] = n
1470 elif f not in ma:
1470 elif f not in ma:
1471 self.ui.debug(_("remote created %s\n") % f)
1471 self.ui.debug(_("remote created %s\n") % f)
1472 get[f] = n
1472 get[f] = n
1473 else:
1473 else:
1474 if force or p2 == pa: # going backwards?
1474 if force or p2 == pa: # going backwards?
1475 self.ui.debug(_("local deleted %s, recreating\n") % f)
1475 self.ui.debug(_("local deleted %s, recreating\n") % f)
1476 get[f] = n
1476 get[f] = n
1477 else:
1477 else:
1478 self.ui.debug(_("local deleted %s\n") % f)
1478 self.ui.debug(_("local deleted %s\n") % f)
1479
1479
1480 del mw, m1, m2, ma
1480 del mw, m1, m2, ma
1481
1481
1482 if force:
1482 if force:
1483 for f in merge:
1483 for f in merge:
1484 get[f] = merge[f][1]
1484 get[f] = merge[f][1]
1485 merge = {}
1485 merge = {}
1486
1486
1487 if linear_path or force:
1487 if linear_path or force:
1488 # we don't need to do any magic, just jump to the new rev
1488 # we don't need to do any magic, just jump to the new rev
1489 branch_merge = False
1489 branch_merge = False
1490 p1, p2 = p2, nullid
1490 p1, p2 = p2, nullid
1491 else:
1491 else:
1492 if not allow:
1492 if not allow:
1493 self.ui.status(_("this update spans a branch"
1493 self.ui.status(_("this update spans a branch"
1494 " affecting the following files:\n"))
1494 " affecting the following files:\n"))
1495 fl = merge.keys() + get.keys()
1495 fl = merge.keys() + get.keys()
1496 fl.sort()
1496 fl.sort()
1497 for f in fl:
1497 for f in fl:
1498 cf = ""
1498 cf = ""
1499 if f in merge: cf = _(" (resolve)")
1499 if f in merge: cf = _(" (resolve)")
1500 self.ui.status(" %s%s\n" % (f, cf))
1500 self.ui.status(" %s%s\n" % (f, cf))
1501 self.ui.warn(_("aborting update spanning branches!\n"))
1501 self.ui.warn(_("aborting update spanning branches!\n"))
1502 self.ui.status(_("(use update -m to merge across branches"
1502 self.ui.status(_("(use update -m to merge across branches"
1503 " or -C to lose changes)\n"))
1503 " or -C to lose changes)\n"))
1504 return 1
1504 return 1
1505 branch_merge = True
1505 branch_merge = True
1506
1506
1507 if moddirstate:
1507 if moddirstate:
1508 self.dirstate.setparents(p1, p2)
1508 self.dirstate.setparents(p1, p2)
1509
1509
1510 # get the files we don't need to change
1510 # get the files we don't need to change
1511 files = get.keys()
1511 files = get.keys()
1512 files.sort()
1512 files.sort()
1513 for f in files:
1513 for f in files:
1514 if f[0] == "/": continue
1514 if f[0] == "/": continue
1515 self.ui.note(_("getting %s\n") % f)
1515 self.ui.note(_("getting %s\n") % f)
1516 t = self.file(f).read(get[f])
1516 t = self.file(f).read(get[f])
1517 try:
1517 try:
1518 self.wwrite(f, t)
1518 self.wwrite(f, t)
1519 except IOError, e:
1519 except IOError, e:
1520 if e.errno != errno.ENOENT:
1520 if e.errno != errno.ENOENT:
1521 raise
1521 raise
1522 os.makedirs(os.path.dirname(self.wjoin(f)))
1522 os.makedirs(os.path.dirname(self.wjoin(f)))
1523 self.wwrite(f, t)
1523 self.wwrite(f, t)
1524 util.set_exec(self.wjoin(f), mf2[f])
1524 util.set_exec(self.wjoin(f), mf2[f])
1525 if moddirstate:
1525 if moddirstate:
1526 if branch_merge:
1526 if branch_merge:
1527 self.dirstate.update([f], 'n', st_mtime=-1)
1527 self.dirstate.update([f], 'n', st_mtime=-1)
1528 else:
1528 else:
1529 self.dirstate.update([f], 'n')
1529 self.dirstate.update([f], 'n')
1530
1530
1531 # merge the tricky bits
1531 # merge the tricky bits
1532 files = merge.keys()
1532 files = merge.keys()
1533 files.sort()
1533 files.sort()
1534 for f in files:
1534 for f in files:
1535 self.ui.status(_("merging %s\n") % f)
1535 self.ui.status(_("merging %s\n") % f)
1536 my, other, flag = merge[f]
1536 my, other, flag = merge[f]
1537 self.merge3(f, my, other)
1537 self.merge3(f, my, other)
1538 util.set_exec(self.wjoin(f), flag)
1538 util.set_exec(self.wjoin(f), flag)
1539 if moddirstate:
1539 if moddirstate:
1540 if branch_merge:
1540 if branch_merge:
1541 # We've done a branch merge, mark this file as merged
1541 # We've done a branch merge, mark this file as merged
1542 # so that we properly record the merger later
1542 # so that we properly record the merger later
1543 self.dirstate.update([f], 'm')
1543 self.dirstate.update([f], 'm')
1544 else:
1544 else:
1545 # We've update-merged a locally modified file, so
1545 # We've update-merged a locally modified file, so
1546 # we set the dirstate to emulate a normal checkout
1546 # we set the dirstate to emulate a normal checkout
1547 # of that file some time in the past. Thus our
1547 # of that file some time in the past. Thus our
1548 # merge will appear as a normal local file
1548 # merge will appear as a normal local file
1549 # modification.
1549 # modification.
1550 f_len = len(self.file(f).read(other))
1550 f_len = len(self.file(f).read(other))
1551 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1551 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1552
1552
1553 remove.sort()
1553 remove.sort()
1554 for f in remove:
1554 for f in remove:
1555 self.ui.note(_("removing %s\n") % f)
1555 self.ui.note(_("removing %s\n") % f)
1556 try:
1556 try:
1557 util.unlink(self.wjoin(f))
1557 util.unlink(self.wjoin(f))
1558 except OSError, inst:
1558 except OSError, inst:
1559 if inst.errno != errno.ENOENT:
1559 if inst.errno != errno.ENOENT:
1560 self.ui.warn(_("update failed to remove %s: %s!\n") %
1560 self.ui.warn(_("update failed to remove %s: %s!\n") %
1561 (f, inst.strerror))
1561 (f, inst.strerror))
1562 if moddirstate:
1562 if moddirstate:
1563 if branch_merge:
1563 if branch_merge:
1564 self.dirstate.update(remove, 'r')
1564 self.dirstate.update(remove, 'r')
1565 else:
1565 else:
1566 self.dirstate.forget(remove)
1566 self.dirstate.forget(remove)
1567
1567
def merge3(self, fn, my, other):
    """perform a 3-way merge in the working directory

    fn    - the working-directory file name
    my    - our filelog revision
    other - the other side's filelog revision

    The merge is delegated to an external tool invoked as
    `cmd local base other`; a non-zero exit only warns, it does
    not abort.
    """

    def temp(prefix, node):
        # materialize filelog revision `node` in a temp file and
        # return the temp file's name
        pre = "%s~%s." % (os.path.basename(fn), prefix)
        (fd, name) = tempfile.mkstemp("", pre)
        f = os.fdopen(fd, "wb")
        self.wwrite(fn, fl.read(node), f)
        f.close()
        return name

    fl = self.file(fn)
    base = fl.ancestor(my, other)
    a = self.wjoin(fn)
    b = temp("base", base)
    c = temp("other", other)

    self.ui.note(_("resolving %s\n") % fn)
    self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
                  (fn, short(my), short(other), short(base)))

    # pick the merge tool: HGMERGE env var, then the ui.merge config
    # entry, falling back to the hgmerge helper script
    cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
           or "hgmerge")
    r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
    if r:
        self.ui.warn(_("merging %s failed!\n") % fn)

    os.unlink(b)
    os.unlink(c)
1597
1597
1598 def verify(self):
1598 def verify(self):
1599 filelinkrevs = {}
1599 filelinkrevs = {}
1600 filenodes = {}
1600 filenodes = {}
1601 changesets = revisions = files = 0
1601 changesets = revisions = files = 0
1602 errors = [0]
1602 errors = [0]
1603 neededmanifests = {}
1603 neededmanifests = {}
1604
1604
1605 def err(msg):
1605 def err(msg):
1606 self.ui.warn(msg + "\n")
1606 self.ui.warn(msg + "\n")
1607 errors[0] += 1
1607 errors[0] += 1
1608
1608
1609 seen = {}
1609 seen = {}
1610 self.ui.status(_("checking changesets\n"))
1610 self.ui.status(_("checking changesets\n"))
1611 for i in range(self.changelog.count()):
1611 for i in range(self.changelog.count()):
1612 changesets += 1
1612 changesets += 1
1613 n = self.changelog.node(i)
1613 n = self.changelog.node(i)
1614 l = self.changelog.linkrev(n)
1614 l = self.changelog.linkrev(n)
1615 if l != i:
1615 if l != i:
1616 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1616 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1617 if n in seen:
1617 if n in seen:
1618 err(_("duplicate changeset at revision %d") % i)
1618 err(_("duplicate changeset at revision %d") % i)
1619 seen[n] = 1
1619 seen[n] = 1
1620
1620
1621 for p in self.changelog.parents(n):
1621 for p in self.changelog.parents(n):
1622 if p not in self.changelog.nodemap:
1622 if p not in self.changelog.nodemap:
1623 err(_("changeset %s has unknown parent %s") %
1623 err(_("changeset %s has unknown parent %s") %
1624 (short(n), short(p)))
1624 (short(n), short(p)))
1625 try:
1625 try:
1626 changes = self.changelog.read(n)
1626 changes = self.changelog.read(n)
1627 except Exception, inst:
1627 except Exception, inst:
1628 err(_("unpacking changeset %s: %s") % (short(n), inst))
1628 err(_("unpacking changeset %s: %s") % (short(n), inst))
1629
1629
1630 neededmanifests[changes[0]] = n
1630 neededmanifests[changes[0]] = n
1631
1631
1632 for f in changes[3]:
1632 for f in changes[3]:
1633 filelinkrevs.setdefault(f, []).append(i)
1633 filelinkrevs.setdefault(f, []).append(i)
1634
1634
1635 seen = {}
1635 seen = {}
1636 self.ui.status(_("checking manifests\n"))
1636 self.ui.status(_("checking manifests\n"))
1637 for i in range(self.manifest.count()):
1637 for i in range(self.manifest.count()):
1638 n = self.manifest.node(i)
1638 n = self.manifest.node(i)
1639 l = self.manifest.linkrev(n)
1639 l = self.manifest.linkrev(n)
1640
1640
1641 if l < 0 or l >= self.changelog.count():
1641 if l < 0 or l >= self.changelog.count():
1642 err(_("bad manifest link (%d) at revision %d") % (l, i))
1642 err(_("bad manifest link (%d) at revision %d") % (l, i))
1643
1643
1644 if n in neededmanifests:
1644 if n in neededmanifests:
1645 del neededmanifests[n]
1645 del neededmanifests[n]
1646
1646
1647 if n in seen:
1647 if n in seen:
1648 err(_("duplicate manifest at revision %d") % i)
1648 err(_("duplicate manifest at revision %d") % i)
1649
1649
1650 seen[n] = 1
1650 seen[n] = 1
1651
1651
1652 for p in self.manifest.parents(n):
1652 for p in self.manifest.parents(n):
1653 if p not in self.manifest.nodemap:
1653 if p not in self.manifest.nodemap:
1654 err(_("manifest %s has unknown parent %s") %
1654 err(_("manifest %s has unknown parent %s") %
1655 (short(n), short(p)))
1655 (short(n), short(p)))
1656
1656
1657 try:
1657 try:
1658 delta = mdiff.patchtext(self.manifest.delta(n))
1658 delta = mdiff.patchtext(self.manifest.delta(n))
1659 except KeyboardInterrupt:
1659 except KeyboardInterrupt:
1660 self.ui.warn(_("interrupted"))
1660 self.ui.warn(_("interrupted"))
1661 raise
1661 raise
1662 except Exception, inst:
1662 except Exception, inst:
1663 err(_("unpacking manifest %s: %s") % (short(n), inst))
1663 err(_("unpacking manifest %s: %s") % (short(n), inst))
1664
1664
1665 ff = [ l.split('\0') for l in delta.splitlines() ]
1665 ff = [ l.split('\0') for l in delta.splitlines() ]
1666 for f, fn in ff:
1666 for f, fn in ff:
1667 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1667 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1668
1668
1669 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1669 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1670
1670
1671 for m,c in neededmanifests.items():
1671 for m,c in neededmanifests.items():
1672 err(_("Changeset %s refers to unknown manifest %s") %
1672 err(_("Changeset %s refers to unknown manifest %s") %
1673 (short(m), short(c)))
1673 (short(m), short(c)))
1674 del neededmanifests
1674 del neededmanifests
1675
1675
1676 for f in filenodes:
1676 for f in filenodes:
1677 if f not in filelinkrevs:
1677 if f not in filelinkrevs:
1678 err(_("file %s in manifest but not in changesets") % f)
1678 err(_("file %s in manifest but not in changesets") % f)
1679
1679
1680 for f in filelinkrevs:
1680 for f in filelinkrevs:
1681 if f not in filenodes:
1681 if f not in filenodes:
1682 err(_("file %s in changeset but not in manifest") % f)
1682 err(_("file %s in changeset but not in manifest") % f)
1683
1683
1684 self.ui.status(_("checking files\n"))
1684 self.ui.status(_("checking files\n"))
1685 ff = filenodes.keys()
1685 ff = filenodes.keys()
1686 ff.sort()
1686 ff.sort()
1687 for f in ff:
1687 for f in ff:
1688 if f == "/dev/null": continue
1688 if f == "/dev/null": continue
1689 files += 1
1689 files += 1
1690 fl = self.file(f)
1690 fl = self.file(f)
1691 nodes = { nullid: 1 }
1691 nodes = { nullid: 1 }
1692 seen = {}
1692 seen = {}
1693 for i in range(fl.count()):
1693 for i in range(fl.count()):
1694 revisions += 1
1694 revisions += 1
1695 n = fl.node(i)
1695 n = fl.node(i)
1696
1696
1697 if n in seen:
1697 if n in seen:
1698 err(_("%s: duplicate revision %d") % (f, i))
1698 err(_("%s: duplicate revision %d") % (f, i))
1699 if n not in filenodes[f]:
1699 if n not in filenodes[f]:
1700 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1700 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1701 else:
1701 else:
1702 del filenodes[f][n]
1702 del filenodes[f][n]
1703
1703
1704 flr = fl.linkrev(n)
1704 flr = fl.linkrev(n)
1705 if flr not in filelinkrevs[f]:
1705 if flr not in filelinkrevs[f]:
1706 err(_("%s:%s points to unexpected changeset %d")
1706 err(_("%s:%s points to unexpected changeset %d")
1707 % (f, short(n), flr))
1707 % (f, short(n), flr))
1708 else:
1708 else:
1709 filelinkrevs[f].remove(flr)
1709 filelinkrevs[f].remove(flr)
1710
1710
1711 # verify contents
1711 # verify contents
1712 try:
1712 try:
1713 t = fl.read(n)
1713 t = fl.read(n)
1714 except Exception, inst:
1714 except Exception, inst:
1715 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1715 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1716
1716
1717 # verify parents
1717 # verify parents
1718 (p1, p2) = fl.parents(n)
1718 (p1, p2) = fl.parents(n)
1719 if p1 not in nodes:
1719 if p1 not in nodes:
1720 err(_("file %s:%s unknown parent 1 %s") %
1720 err(_("file %s:%s unknown parent 1 %s") %
1721 (f, short(n), short(p1)))
1721 (f, short(n), short(p1)))
1722 if p2 not in nodes:
1722 if p2 not in nodes:
1723 err(_("file %s:%s unknown parent 2 %s") %
1723 err(_("file %s:%s unknown parent 2 %s") %
1724 (f, short(n), short(p1)))
1724 (f, short(n), short(p1)))
1725 nodes[n] = 1
1725 nodes[n] = 1
1726
1726
1727 # cross-check
1727 # cross-check
1728 for node in filenodes[f]:
1728 for node in filenodes[f]:
1729 err(_("node %s in manifests not in %s") % (hex(node), f))
1729 err(_("node %s in manifests not in %s") % (hex(node), f))
1730
1730
1731 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1731 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1732 (files, changesets, revisions))
1732 (files, changesets, revisions))
1733
1733
1734 if errors[0]:
1734 if errors[0]:
1735 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1735 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1736 return 1
1736 return 1
@@ -1,149 +1,152
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, ConfigParser
8 import os, ConfigParser
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "re socket sys util")
11 demandload(globals(), "re socket sys util")
12
12
13 class ui:
13 class ui:
14 def __init__(self, verbose=False, debug=False, quiet=False,
14 def __init__(self, verbose=False, debug=False, quiet=False,
15 interactive=True):
15 interactive=True):
16 self.overlay = {}
16 self.overlay = {}
17 self.cdata = ConfigParser.SafeConfigParser()
17 self.cdata = ConfigParser.SafeConfigParser()
18 self.cdata.read(util.rcpath)
18 self.readconfig(util.rcpath)
19
19
20 self.quiet = self.configbool("ui", "quiet")
20 self.quiet = self.configbool("ui", "quiet")
21 self.verbose = self.configbool("ui", "verbose")
21 self.verbose = self.configbool("ui", "verbose")
22 self.debugflag = self.configbool("ui", "debug")
22 self.debugflag = self.configbool("ui", "debug")
23 self.interactive = self.configbool("ui", "interactive", True)
23 self.interactive = self.configbool("ui", "interactive", True)
24
24
25 self.updateopts(verbose, debug, quiet, interactive)
25 self.updateopts(verbose, debug, quiet, interactive)
26
26
27 def updateopts(self, verbose=False, debug=False, quiet=False,
27 def updateopts(self, verbose=False, debug=False, quiet=False,
28 interactive=True):
28 interactive=True):
29 self.quiet = (self.quiet or quiet) and not verbose and not debug
29 self.quiet = (self.quiet or quiet) and not verbose and not debug
30 self.verbose = (self.verbose or verbose) or debug
30 self.verbose = (self.verbose or verbose) or debug
31 self.debugflag = (self.debugflag or debug)
31 self.debugflag = (self.debugflag or debug)
32 self.interactive = (self.interactive and interactive)
32 self.interactive = (self.interactive and interactive)
33
33
34 def readconfig(self, fp):
34 def readconfig(self, fn):
35 self.cdata.readfp(fp)
35 try:
36 self.cdata.read(fn)
37 except ConfigParser.ParsingError, inst:
38 raise util.Abort(_("Failed to parse %s\n%s") % (fn, inst))
36
39
37 def setconfig(self, section, name, val):
40 def setconfig(self, section, name, val):
38 self.overlay[(section, name)] = val
41 self.overlay[(section, name)] = val
39
42
40 def config(self, section, name, default=None):
43 def config(self, section, name, default=None):
41 if self.overlay.has_key((section, name)):
44 if self.overlay.has_key((section, name)):
42 return self.overlay[(section, name)]
45 return self.overlay[(section, name)]
43 if self.cdata.has_option(section, name):
46 if self.cdata.has_option(section, name):
44 return self.cdata.get(section, name)
47 return self.cdata.get(section, name)
45 return default
48 return default
46
49
47 def configbool(self, section, name, default=False):
50 def configbool(self, section, name, default=False):
48 if self.overlay.has_key((section, name)):
51 if self.overlay.has_key((section, name)):
49 return self.overlay[(section, name)]
52 return self.overlay[(section, name)]
50 if self.cdata.has_option(section, name):
53 if self.cdata.has_option(section, name):
51 return self.cdata.getboolean(section, name)
54 return self.cdata.getboolean(section, name)
52 return default
55 return default
53
56
54 def configitems(self, section):
57 def configitems(self, section):
55 if self.cdata.has_section(section):
58 if self.cdata.has_section(section):
56 return self.cdata.items(section)
59 return self.cdata.items(section)
57 return []
60 return []
58
61
59 def walkconfig(self):
62 def walkconfig(self):
60 seen = {}
63 seen = {}
61 for (section, name), value in self.overlay.iteritems():
64 for (section, name), value in self.overlay.iteritems():
62 yield section, name, value
65 yield section, name, value
63 seen[section, name] = 1
66 seen[section, name] = 1
64 for section in self.cdata.sections():
67 for section in self.cdata.sections():
65 for name, value in self.cdata.items(section):
68 for name, value in self.cdata.items(section):
66 if (section, name) in seen: continue
69 if (section, name) in seen: continue
67 yield section, name, value.replace('\n', '\\n')
70 yield section, name, value.replace('\n', '\\n')
68 seen[section, name] = 1
71 seen[section, name] = 1
69
72
70 def extensions(self):
73 def extensions(self):
71 return self.configitems("extensions")
74 return self.configitems("extensions")
72
75
73 def username(self):
76 def username(self):
74 return (os.environ.get("HGUSER") or
77 return (os.environ.get("HGUSER") or
75 self.config("ui", "username") or
78 self.config("ui", "username") or
76 os.environ.get("EMAIL") or
79 os.environ.get("EMAIL") or
77 (os.environ.get("LOGNAME",
80 (os.environ.get("LOGNAME",
78 os.environ.get("USERNAME", "unknown"))
81 os.environ.get("USERNAME", "unknown"))
79 + '@' + socket.getfqdn()))
82 + '@' + socket.getfqdn()))
80
83
81 def shortuser(self, user):
84 def shortuser(self, user):
82 """Return a short representation of a user name or email address."""
85 """Return a short representation of a user name or email address."""
83 if not self.verbose:
86 if not self.verbose:
84 f = user.find('@')
87 f = user.find('@')
85 if f >= 0:
88 if f >= 0:
86 user = user[:f]
89 user = user[:f]
87 f = user.find('<')
90 f = user.find('<')
88 if f >= 0:
91 if f >= 0:
89 user = user[f+1:]
92 user = user[f+1:]
90 return user
93 return user
91
94
92 def expandpath(self, loc, root=""):
95 def expandpath(self, loc, root=""):
93 paths = {}
96 paths = {}
94 for name, path in self.configitems("paths"):
97 for name, path in self.configitems("paths"):
95 m = path.find("://")
98 m = path.find("://")
96 if m == -1:
99 if m == -1:
97 path = os.path.join(root, path)
100 path = os.path.join(root, path)
98 paths[name] = path
101 paths[name] = path
99
102
100 return paths.get(loc, loc)
103 return paths.get(loc, loc)
101
104
102 def write(self, *args):
105 def write(self, *args):
103 for a in args:
106 for a in args:
104 sys.stdout.write(str(a))
107 sys.stdout.write(str(a))
105
108
106 def write_err(self, *args):
109 def write_err(self, *args):
107 sys.stdout.flush()
110 sys.stdout.flush()
108 for a in args:
111 for a in args:
109 sys.stderr.write(str(a))
112 sys.stderr.write(str(a))
110
113
111 def readline(self):
114 def readline(self):
112 return sys.stdin.readline()[:-1]
115 return sys.stdin.readline()[:-1]
113 def prompt(self, msg, pat, default="y"):
116 def prompt(self, msg, pat, default="y"):
114 if not self.interactive: return default
117 if not self.interactive: return default
115 while 1:
118 while 1:
116 self.write(msg, " ")
119 self.write(msg, " ")
117 r = self.readline()
120 r = self.readline()
118 if re.match(pat, r):
121 if re.match(pat, r):
119 return r
122 return r
120 else:
123 else:
121 self.write(_("unrecognized response\n"))
124 self.write(_("unrecognized response\n"))
122 def status(self, *msg):
125 def status(self, *msg):
123 if not self.quiet: self.write(*msg)
126 if not self.quiet: self.write(*msg)
124 def warn(self, *msg):
127 def warn(self, *msg):
125 self.write_err(*msg)
128 self.write_err(*msg)
126 def note(self, *msg):
129 def note(self, *msg):
127 if self.verbose: self.write(*msg)
130 if self.verbose: self.write(*msg)
128 def debug(self, *msg):
131 def debug(self, *msg):
129 if self.debugflag: self.write(*msg)
132 if self.debugflag: self.write(*msg)
130 def edit(self, text):
133 def edit(self, text):
131 import tempfile
134 import tempfile
132 (fd, name) = tempfile.mkstemp("hg")
135 (fd, name) = tempfile.mkstemp("hg")
133 f = os.fdopen(fd, "w")
136 f = os.fdopen(fd, "w")
134 f.write(text)
137 f.write(text)
135 f.close()
138 f.close()
136
139
137 editor = (os.environ.get("HGEDITOR") or
140 editor = (os.environ.get("HGEDITOR") or
138 self.config("ui", "editor") or
141 self.config("ui", "editor") or
139 os.environ.get("EDITOR", "vi"))
142 os.environ.get("EDITOR", "vi"))
140
143
141 os.environ["HGUSER"] = self.username()
144 os.environ["HGUSER"] = self.username()
142 util.system("%s %s" % (editor, name), errprefix=_("edit failed"))
145 util.system("%s %s" % (editor, name), errprefix=_("edit failed"))
143
146
144 t = open(name).read()
147 t = open(name).read()
145 t = re.sub("(?m)^HG:.*\n", "", t)
148 t = re.sub("(?m)^HG:.*\n", "", t)
146
149
147 os.unlink(name)
150 os.unlink(name)
148
151
149 return t
152 return t
General Comments 0
You need to be logged in to leave comments. Login now