Simplify content type checking
mpm@selenic.com
r752:c693eafd default
@@ -1,1948 +1,1941 b''
1 # hg.py - repository classes for mercurial
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 import sys, struct, os
9 import util
10 from revlog import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff")
14 demandload(globals(), "bisect select")
15
16 class filelog(revlog):
17     def __init__(self, opener, path):
18         revlog.__init__(self, opener,
19                         os.path.join("data", path + ".i"),
20                         os.path.join("data", path + ".d"))
21
22     def read(self, node):
23         t = self.revision(node)
24         if not t.startswith('\1\n'):
25             return t
26         s = t.find('\1\n', 2)
27         return t[s+2:]
28
29     def readmeta(self, node):
30         t = self.revision(node)
31         if not t.startswith('\1\n'):
32             return t
33         s = t.find('\1\n', 2)
34         mt, m = t[2:s], {}
35         for l in mt.splitlines():
36             k, v = l.split(": ", 1)
37             m[k] = v
38         return m
39
40     def add(self, text, meta, transaction, link, p1=None, p2=None):
41         if meta or text.startswith('\1\n'):
42             mt = ""
43             if meta:
44                 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
45             text = "\1\n" + "".join(mt) + "\1\n" + text
46         return self.addrevision(text, transaction, link, p1, p2)
47
47
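# Illustrative sketch (not part of the original file): how the filelog
# metadata framing used by read()/readmeta()/add() above fits together.
# A revision that carries metadata is stored as
#     "\1\n" + "key: value\n"... + "\1\n" + file text
# The helpers below are hypothetical stand-ins that mirror that framing.

def _pack_filelog_text(meta, text):
    # mirror filelog.add(): only frame the text when needed
    if meta or text.startswith('\1\n'):
        mt = ["%s: %s\n" % (k, v) for k, v in meta.items()]
        return "\1\n" + "".join(mt) + "\1\n" + text
    return text

def _unpack_filelog_text(t):
    # mirror filelog.read()/readmeta(): split the frame back apart
    if not t.startswith('\1\n'):
        return {}, t
    s = t.find('\1\n', 2)
    meta = {}
    for l in t[2:s].splitlines():
        k, v = l.split(": ", 1)
        meta[k] = v
    return meta, t[s + 2:]

# round trip: a copy record survives packing and unpacking
assert _unpack_filelog_text(_pack_filelog_text({"copy": "a"}, "data\n")) == \
       ({"copy": "a"}, "data\n")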
48 def annotate(self, node):
48 def annotate(self, node):
49
49
50 def decorate(text, rev):
50 def decorate(text, rev):
51 return ([rev] * len(text.splitlines()), text)
51 return ([rev] * len(text.splitlines()), text)
52
52
53 def pair(parent, child):
53 def pair(parent, child):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
55 child[0][b1:b2] = parent[0][a1:a2]
55 child[0][b1:b2] = parent[0][a1:a2]
56 return child
56 return child
57
57
58 # find all ancestors
58 # find all ancestors
59 needed = {node:1}
59 needed = {node:1}
60 visit = [node]
60 visit = [node]
61 while visit:
61 while visit:
62 n = visit.pop(0)
62 n = visit.pop(0)
63 for p in self.parents(n):
63 for p in self.parents(n):
64 if p not in needed:
64 if p not in needed:
65 needed[p] = 1
65 needed[p] = 1
66 visit.append(p)
66 visit.append(p)
67 else:
67 else:
68 # count how many times we'll use this
68 # count how many times we'll use this
69 needed[p] += 1
69 needed[p] += 1
70
70
71 # sort by revision which is a topological order
71 # sort by revision which is a topological order
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
73 visit.sort()
73 visit.sort()
74 hist = {}
74 hist = {}
75
75
76 for r,n in visit:
76 for r,n in visit:
77 curr = decorate(self.read(n), self.linkrev(n))
77 curr = decorate(self.read(n), self.linkrev(n))
78 for p in self.parents(n):
78 for p in self.parents(n):
79 if p != nullid:
79 if p != nullid:
80 curr = pair(hist[p], curr)
80 curr = pair(hist[p], curr)
81 # trim the history of unneeded revs
81 # trim the history of unneeded revs
82 needed[p] -= 1
82 needed[p] -= 1
83 if not needed[p]:
83 if not needed[p]:
84 del hist[p]
84 del hist[p]
85 hist[n] = curr
85 hist[n] = curr
86
86
87 return zip(hist[n][0], hist[n][1].splitlines(1))
87 return zip(hist[n][0], hist[n][1].splitlines(1))
88
88
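# Illustrative sketch (not part of the original file): the core idea behind
# annotate() above.  Each line is tagged with the revision that introduced
# it; matching blocks between a parent and a child copy the parent's tags
# forward, so only genuinely new lines keep the child's revision.  difflib
# is used here as a stand-in for bdiff.blocks(), which is assumed to return
# (a1, a2, b1, b2) ranges of matching lines.

import difflib

def _decorate(text, rev):
    return ([rev] * len(text.splitlines()), text)

def _pair(parent, child):
    a = parent[1].splitlines()
    b = child[1].splitlines()
    for i, j, n in difflib.SequenceMatcher(None, a, b).get_matching_blocks():
        # same mapping as: child[0][b1:b2] = parent[0][a1:a2]
        child[0][j:j + n] = parent[0][i:i + n]
    return child

parent = _decorate("one\ntwo\n", 0)
child = _decorate("one\ntwo\nthree\n", 1)
annotated = _pair(parent, child)
# lines "one" and "two" keep revision 0, the new line "three" gets revision 1
assert annotated[0] == [0, 0, 1]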
89 class manifest(revlog):
89 class manifest(revlog):
90 def __init__(self, opener):
90 def __init__(self, opener):
91 self.mapcache = None
91 self.mapcache = None
92 self.listcache = None
92 self.listcache = None
93 self.addlist = None
93 self.addlist = None
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
95
95
96 def read(self, node):
96 def read(self, node):
97 if node == nullid: return {} # don't upset local cache
97 if node == nullid: return {} # don't upset local cache
98 if self.mapcache and self.mapcache[0] == node:
98 if self.mapcache and self.mapcache[0] == node:
99 return self.mapcache[1]
99 return self.mapcache[1]
100 text = self.revision(node)
100 text = self.revision(node)
101 map = {}
101 map = {}
102 flag = {}
102 flag = {}
103 self.listcache = (text, text.splitlines(1))
103 self.listcache = (text, text.splitlines(1))
104 for l in self.listcache[1]:
104 for l in self.listcache[1]:
105 (f, n) = l.split('\0')
105 (f, n) = l.split('\0')
106 map[f] = bin(n[:40])
106 map[f] = bin(n[:40])
107 flag[f] = (n[40:-1] == "x")
107 flag[f] = (n[40:-1] == "x")
108 self.mapcache = (node, map, flag)
108 self.mapcache = (node, map, flag)
109 return map
109 return map
110
110
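# Illustrative sketch (not part of the original file): the manifest line
# format parsed by read()/readflags() above.  Each line is
#     "<path>\0<40-digit hex node>[x]\n"
# where a trailing "x" before the newline marks the file as executable.
# binascii is used as a stand-in for the hex()/bin() helpers that the real
# code imports via revlog; the path and node below are made up.

import binascii

def _parse_manifest_line(l):
    f, n = l.split('\0')
    node = binascii.unhexlify(n[:40])      # 20-byte binary node
    is_exec = (n[40:-1] == "x")            # flag sits between hex and "\n"
    return f, node, is_exec

def _format_manifest_line(f, node, is_exec):
    return "%s\0%s%s\n" % (f, binascii.hexlify(node).decode(),
                           is_exec and "x" or "")

node = b"\x12" * 20
line = _format_manifest_line("bin/script", node, True)
assert _parse_manifest_line(line) == ("bin/script", node, True)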
111 def readflags(self, node):
111 def readflags(self, node):
112 if node == nullid: return {} # don't upset local cache
112 if node == nullid: return {} # don't upset local cache
113 if not self.mapcache or self.mapcache[0] != node:
113 if not self.mapcache or self.mapcache[0] != node:
114 self.read(node)
114 self.read(node)
115 return self.mapcache[2]
115 return self.mapcache[2]
116
116
117 def diff(self, a, b):
117 def diff(self, a, b):
118 # this is sneaky, as we're not actually using a and b
118 # this is sneaky, as we're not actually using a and b
119 if self.listcache and self.addlist and self.listcache[0] == a:
119 if self.listcache and self.addlist and self.listcache[0] == a:
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
121 if mdiff.patch(a, d) != b:
121 if mdiff.patch(a, d) != b:
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
123 return mdiff.textdiff(a, b)
123 return mdiff.textdiff(a, b)
124 return d
124 return d
125 else:
125 else:
126 return mdiff.textdiff(a, b)
126 return mdiff.textdiff(a, b)
127
127
128 def add(self, map, flags, transaction, link, p1=None, p2=None,
128 def add(self, map, flags, transaction, link, p1=None, p2=None,
129 changed=None):
129 changed=None):
130 # directly generate the mdiff delta from the data collected during
130 # directly generate the mdiff delta from the data collected during
131 # the bisect loop below
131 # the bisect loop below
132 def gendelta(delta):
132 def gendelta(delta):
133 i = 0
133 i = 0
134 result = []
134 result = []
135 while i < len(delta):
135 while i < len(delta):
136 start = delta[i][2]
136 start = delta[i][2]
137 end = delta[i][3]
137 end = delta[i][3]
138 l = delta[i][4]
138 l = delta[i][4]
139 if l == None:
139 if l == None:
140 l = ""
140 l = ""
141 while i < len(delta) - 1 and start <= delta[i+1][2] \
141 while i < len(delta) - 1 and start <= delta[i+1][2] \
142 and end >= delta[i+1][2]:
142 and end >= delta[i+1][2]:
143 if delta[i+1][3] > end:
143 if delta[i+1][3] > end:
144 end = delta[i+1][3]
144 end = delta[i+1][3]
145 if delta[i+1][4]:
145 if delta[i+1][4]:
146 l += delta[i+1][4]
146 l += delta[i+1][4]
147 i += 1
147 i += 1
148 result.append(struct.pack(">lll", start, end, len(l)) + l)
148 result.append(struct.pack(">lll", start, end, len(l)) + l)
149 i += 1
149 i += 1
150 return result
150 return result
151
151
152 # apply the changes collected during the bisect loop to our addlist
152 # apply the changes collected during the bisect loop to our addlist
153 def addlistdelta(addlist, delta):
153 def addlistdelta(addlist, delta):
154 # apply the deltas to the addlist. start from the bottom up
154 # apply the deltas to the addlist. start from the bottom up
155 # so changes to the offsets don't mess things up.
155 # so changes to the offsets don't mess things up.
156 i = len(delta)
156 i = len(delta)
157 while i > 0:
157 while i > 0:
158 i -= 1
158 i -= 1
159 start = delta[i][0]
159 start = delta[i][0]
160 end = delta[i][1]
160 end = delta[i][1]
161 if delta[i][4]:
161 if delta[i][4]:
162 addlist[start:end] = [delta[i][4]]
162 addlist[start:end] = [delta[i][4]]
163 else:
163 else:
164 del addlist[start:end]
164 del addlist[start:end]
165 return addlist
165 return addlist
166
166
167 # calculate the byte offset of the start of each line in the
167 # calculate the byte offset of the start of each line in the
168 # manifest
168 # manifest
169 def calcoffsets(addlist):
169 def calcoffsets(addlist):
170 offsets = [0] * (len(addlist) + 1)
170 offsets = [0] * (len(addlist) + 1)
171 offset = 0
171 offset = 0
172 i = 0
172 i = 0
173 while i < len(addlist):
173 while i < len(addlist):
174 offsets[i] = offset
174 offsets[i] = offset
175 offset += len(addlist[i])
175 offset += len(addlist[i])
176 i += 1
176 i += 1
177 offsets[i] = offset
177 offsets[i] = offset
178 return offsets
178 return offsets
179
179
180 # if we're using the listcache, make sure it is valid and
180 # if we're using the listcache, make sure it is valid and
181 # parented by the same node we're diffing against
181 # parented by the same node we're diffing against
182 if not changed or not self.listcache or not p1 or \
182 if not changed or not self.listcache or not p1 or \
183 self.mapcache[0] != p1:
183 self.mapcache[0] != p1:
184 files = map.keys()
184 files = map.keys()
185 files.sort()
185 files.sort()
186
186
187 self.addlist = ["%s\000%s%s\n" %
187 self.addlist = ["%s\000%s%s\n" %
188 (f, hex(map[f]), flags[f] and "x" or '')
188 (f, hex(map[f]), flags[f] and "x" or '')
189 for f in files]
189 for f in files]
190 cachedelta = None
190 cachedelta = None
191 else:
191 else:
192 addlist = self.listcache[1]
192 addlist = self.listcache[1]
193
193
194 # find the starting offset for each line in the add list
194 # find the starting offset for each line in the add list
195 offsets = calcoffsets(addlist)
195 offsets = calcoffsets(addlist)
196
196
197 # combine the changed lists into one list for sorting
197 # combine the changed lists into one list for sorting
198 work = [[x, 0] for x in changed[0]]
198 work = [[x, 0] for x in changed[0]]
199 work[len(work):] = [[x, 1] for x in changed[1]]
199 work[len(work):] = [[x, 1] for x in changed[1]]
200 work.sort()
200 work.sort()
201
201
202 delta = []
202 delta = []
203 bs = 0
203 bs = 0
204
204
205 for w in work:
205 for w in work:
206 f = w[0]
206 f = w[0]
207 # bs will either be the index of the item or the insert point
207 # bs will either be the index of the item or the insert point
208 bs = bisect.bisect(addlist, f, bs)
208 bs = bisect.bisect(addlist, f, bs)
209 if bs < len(addlist):
209 if bs < len(addlist):
210 fn = addlist[bs][:addlist[bs].index('\0')]
210 fn = addlist[bs][:addlist[bs].index('\0')]
211 else:
211 else:
212 fn = None
212 fn = None
213 if w[1] == 0:
213 if w[1] == 0:
214 l = "%s\000%s%s\n" % (f, hex(map[f]),
214 l = "%s\000%s%s\n" % (f, hex(map[f]),
215 flags[f] and "x" or '')
215 flags[f] and "x" or '')
216 else:
216 else:
217 l = None
217 l = None
218 start = bs
218 start = bs
219 if fn != f:
219 if fn != f:
220 # item not found, insert a new one
220 # item not found, insert a new one
221 end = bs
221 end = bs
222 if w[1] == 1:
222 if w[1] == 1:
223 sys.stderr.write("failed to remove %s from manifest\n"
223 sys.stderr.write("failed to remove %s from manifest\n"
224 % f)
224 % f)
225 sys.exit(1)
225 sys.exit(1)
226 else:
226 else:
227 # item is found, replace/delete the existing line
227 # item is found, replace/delete the existing line
228 end = bs + 1
228 end = bs + 1
229 delta.append([start, end, offsets[start], offsets[end], l])
229 delta.append([start, end, offsets[start], offsets[end], l])
230
230
231 self.addlist = addlistdelta(addlist, delta)
231 self.addlist = addlistdelta(addlist, delta)
232 if self.mapcache[0] == self.tip():
232 if self.mapcache[0] == self.tip():
233 cachedelta = "".join(gendelta(delta))
233 cachedelta = "".join(gendelta(delta))
234 else:
234 else:
235 cachedelta = None
235 cachedelta = None
236
236
237 text = "".join(self.addlist)
237 text = "".join(self.addlist)
238 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
238 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
239 sys.stderr.write("manifest delta failure\n")
239 sys.stderr.write("manifest delta failure\n")
240 sys.exit(1)
240 sys.exit(1)
241 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
241 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
242 self.mapcache = (n, map, flags)
242 self.mapcache = (n, map, flags)
243 self.listcache = (text, self.addlist)
243 self.listcache = (text, self.addlist)
244 self.addlist = None
244 self.addlist = None
245
245
246 return n
246 return n
247
247
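# Illustrative sketch (not part of the original file): the delta format that
# gendelta() above produces and that mdiff.patch() is assumed to consume.
# Each chunk is a big-endian ">lll" header = (start, end, length) followed by
# `length` bytes of replacement text; start and end are byte offsets into the
# previous manifest text.  _apply_delta is a hypothetical reference applier.

import struct

def _apply_delta(base, delta):
    out = []
    pos = 0           # how far into `base` we have copied so far
    offset = 0        # read position inside the delta stream
    while offset < len(delta):
        start, end, l = struct.unpack(">lll", delta[offset:offset + 12])
        offset += 12
        out.append(base[pos:start])           # unchanged prefix
        out.append(delta[offset:offset + l])  # replacement text
        offset += l
        pos = end                             # skip the replaced range
    out.append(base[pos:])
    return b"".join(out)

base = b"a\0aa\nb\0bb\nd\0dd\n"
# replace the second line ("b\0bb\n", bytes 5..10) with two new lines
repl = b"b\0b2\nc\0cc\n"
delta = struct.pack(">lll", 5, 10, len(repl)) + repl
assert _apply_delta(base, delta) == b"a\0aa\n" + repl + b"d\0dd\n"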
248 class changelog(revlog):
248 class changelog(revlog):
249 def __init__(self, opener):
249 def __init__(self, opener):
250 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
250 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
251
251
252 def extract(self, text):
252 def extract(self, text):
253 if not text:
253 if not text:
254 return (nullid, "", "0", [], "")
254 return (nullid, "", "0", [], "")
255 last = text.index("\n\n")
255 last = text.index("\n\n")
256 desc = text[last + 2:]
256 desc = text[last + 2:]
257 l = text[:last].splitlines()
257 l = text[:last].splitlines()
258 manifest = bin(l[0])
258 manifest = bin(l[0])
259 user = l[1]
259 user = l[1]
260 date = l[2]
260 date = l[2]
261 files = l[3:]
261 files = l[3:]
262 return (manifest, user, date, files, desc)
262 return (manifest, user, date, files, desc)
263
263
264 def read(self, node):
264 def read(self, node):
265 return self.extract(self.revision(node))
265 return self.extract(self.revision(node))
266
266
267 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
267 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
268 user=None, date=None):
268 user=None, date=None):
269 date = date or "%d %d" % (time.time(), time.timezone)
269 date = date or "%d %d" % (time.time(), time.timezone)
270 list.sort()
270 list.sort()
271 l = [hex(manifest), user, date] + list + ["", desc]
271 l = [hex(manifest), user, date] + list + ["", desc]
272 text = "\n".join(l)
272 text = "\n".join(l)
273 return self.addrevision(text, transaction, self.count(), p1, p2)
273 return self.addrevision(text, transaction, self.count(), p1, p2)
274
274
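# Illustrative sketch (not part of the original file): the changelog entry
# layout built by add() and taken apart by extract() above.  An entry is
#     <hex manifest node>\n<user>\n<date>\n<file>...\n\n<description>
# i.e. a header block, a blank line, then the free-form description; the
# date is "unixtime timezone-offset".  All example values are made up.

def _build_changelog_text(manifest_hex, user, date, files, desc):
    files = sorted(files)
    return "\n".join([manifest_hex, user, date] + files + ["", desc])

def _parse_changelog_text(text):
    last = text.index("\n\n")
    desc = text[last + 2:]
    l = text[:last].splitlines()
    return (l[0], l[1], l[2], l[3:], desc)

entry = _build_changelog_text("ab" * 20, "mpm@selenic.com",
                              "1120000000 25200", ["hg.py"], "fix a bug\n")
assert _parse_changelog_text(entry) == \
       ("ab" * 20, "mpm@selenic.com", "1120000000 25200", ["hg.py"],
        "fix a bug\n")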
275 class dirstate:
275 class dirstate:
276 def __init__(self, opener, ui, root):
276 def __init__(self, opener, ui, root):
277 self.opener = opener
277 self.opener = opener
278 self.root = root
278 self.root = root
279 self.dirty = 0
279 self.dirty = 0
280 self.ui = ui
280 self.ui = ui
281 self.map = None
281 self.map = None
282 self.pl = None
282 self.pl = None
283 self.copies = {}
283 self.copies = {}
284 self.ignorefunc = None
284 self.ignorefunc = None
285
285
286 def wjoin(self, f):
286 def wjoin(self, f):
287 return os.path.join(self.root, f)
287 return os.path.join(self.root, f)
288
288
289 def ignore(self, f):
289 def ignore(self, f):
290 if not self.ignorefunc:
290 if not self.ignorefunc:
291 bigpat = []
291 bigpat = []
292 try:
292 try:
293 l = file(self.wjoin(".hgignore"))
293 l = file(self.wjoin(".hgignore"))
294 for pat in l:
294 for pat in l:
295 if pat != "\n":
295 if pat != "\n":
296 p = util.pconvert(pat[:-1])
296 p = util.pconvert(pat[:-1])
297 try:
297 try:
298 r = re.compile(p)
298 r = re.compile(p)
299 except:
299 except:
300 self.ui.warn("ignoring invalid ignore"
300 self.ui.warn("ignoring invalid ignore"
301 + " regular expression '%s'\n" % p)
301 + " regular expression '%s'\n" % p)
302 else:
302 else:
303 bigpat.append(util.pconvert(pat[:-1]))
303 bigpat.append(util.pconvert(pat[:-1]))
304 except IOError: pass
304 except IOError: pass
305
305
306 if bigpat:
306 if bigpat:
307 s = "(?:%s)" % (")|(?:".join(bigpat))
307 s = "(?:%s)" % (")|(?:".join(bigpat))
308 r = re.compile(s)
308 r = re.compile(s)
309 self.ignorefunc = r.search
309 self.ignorefunc = r.search
310 else:
310 else:
311 self.ignorefunc = util.never
311 self.ignorefunc = util.never
312
312
313 return self.ignorefunc(f)
313 return self.ignorefunc(f)
314
314
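# Illustrative sketch (not part of the original file): how ignore() above
# collapses the .hgignore lines into a single matcher.  Every non-blank line
# is treated as a raw regular expression, the valid ones are OR-ed together
# as (?:pat)|(?:pat)|..., and a match anywhere in the path ignores the file.
# The pattern list below is made up for the example.

import re

def _build_ignore(patterns):
    valid = []
    for p in patterns:
        try:
            re.compile(p)
        except re.error:
            continue                      # the real code warns and skips
        valid.append(p)
    if not valid:
        return lambda path: False         # stands in for util.never
    return re.compile("(?:%s)" % (")|(?:".join(valid))).search

ignore = _build_ignore([r"\.orig$", r"^build/", r"*.pyc"])  # last is invalid
assert ignore("hg.py.orig")
assert ignore("build/lib/hg.py")
assert not ignore("hg.py")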
315 def __del__(self):
315 def __del__(self):
316 if self.dirty:
316 if self.dirty:
317 self.write()
317 self.write()
318
318
319 def __getitem__(self, key):
319 def __getitem__(self, key):
320 try:
320 try:
321 return self.map[key]
321 return self.map[key]
322 except TypeError:
322 except TypeError:
323 self.read()
323 self.read()
324 return self[key]
324 return self[key]
325
325
326 def __contains__(self, key):
326 def __contains__(self, key):
327 if not self.map: self.read()
327 if not self.map: self.read()
328 return key in self.map
328 return key in self.map
329
329
330 def parents(self):
330 def parents(self):
331 if not self.pl:
331 if not self.pl:
332 self.read()
332 self.read()
333 return self.pl
333 return self.pl
334
334
335 def markdirty(self):
335 def markdirty(self):
336 if not self.dirty:
336 if not self.dirty:
337 self.dirty = 1
337 self.dirty = 1
338
338
339 def setparents(self, p1, p2 = nullid):
339 def setparents(self, p1, p2 = nullid):
340 self.markdirty()
340 self.markdirty()
341 self.pl = p1, p2
341 self.pl = p1, p2
342
342
343 def state(self, key):
343 def state(self, key):
344 try:
344 try:
345 return self[key][0]
345 return self[key][0]
346 except KeyError:
346 except KeyError:
347 return "?"
347 return "?"
348
348
349 def read(self):
349 def read(self):
350 if self.map is not None: return self.map
350 if self.map is not None: return self.map
351
351
352 self.map = {}
352 self.map = {}
353 self.pl = [nullid, nullid]
353 self.pl = [nullid, nullid]
354 try:
354 try:
355 st = self.opener("dirstate").read()
355 st = self.opener("dirstate").read()
356 if not st: return
356 if not st: return
357 except: return
357 except: return
358
358
359 self.pl = [st[:20], st[20: 40]]
359 self.pl = [st[:20], st[20: 40]]
360
360
361 pos = 40
361 pos = 40
362 while pos < len(st):
362 while pos < len(st):
363 e = struct.unpack(">cllll", st[pos:pos+17])
363 e = struct.unpack(">cllll", st[pos:pos+17])
364 l = e[4]
364 l = e[4]
365 pos += 17
365 pos += 17
366 f = st[pos:pos + l]
366 f = st[pos:pos + l]
367 if '\0' in f:
367 if '\0' in f:
368 f, c = f.split('\0')
368 f, c = f.split('\0')
369 self.copies[f] = c
369 self.copies[f] = c
370 self.map[f] = e[:4]
370 self.map[f] = e[:4]
371 pos += l
371 pos += l
372
372
373 def copy(self, source, dest):
373 def copy(self, source, dest):
374 self.read()
374 self.read()
375 self.markdirty()
375 self.markdirty()
376 self.copies[dest] = source
376 self.copies[dest] = source
377
377
378 def copied(self, file):
378 def copied(self, file):
379 return self.copies.get(file, None)
379 return self.copies.get(file, None)
380
380
381 def update(self, files, state):
381 def update(self, files, state):
382 ''' current states:
382 ''' current states:
383 n normal
383 n normal
384 m needs merging
384 m needs merging
385 r marked for removal
385 r marked for removal
386 a marked for addition'''
386 a marked for addition'''
387
387
388 if not files: return
388 if not files: return
389 self.read()
389 self.read()
390 self.markdirty()
390 self.markdirty()
391 for f in files:
391 for f in files:
392 if state == "r":
392 if state == "r":
393 self.map[f] = ('r', 0, 0, 0)
393 self.map[f] = ('r', 0, 0, 0)
394 else:
394 else:
395 s = os.stat(os.path.join(self.root, f))
395 s = os.stat(os.path.join(self.root, f))
396 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
396 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
397
397
398 def forget(self, files):
398 def forget(self, files):
399 if not files: return
399 if not files: return
400 self.read()
400 self.read()
401 self.markdirty()
401 self.markdirty()
402 for f in files:
402 for f in files:
403 try:
403 try:
404 del self.map[f]
404 del self.map[f]
405 except KeyError:
405 except KeyError:
406 self.ui.warn("not in dirstate: %s!\n" % f)
406 self.ui.warn("not in dirstate: %s!\n" % f)
407 pass
407 pass
408
408
409 def clear(self):
409 def clear(self):
410 self.map = {}
410 self.map = {}
411 self.markdirty()
411 self.markdirty()
412
412
413 def write(self):
413 def write(self):
414 st = self.opener("dirstate", "w")
414 st = self.opener("dirstate", "w")
415 st.write("".join(self.pl))
415 st.write("".join(self.pl))
416 for f, e in self.map.items():
416 for f, e in self.map.items():
417 c = self.copied(f)
417 c = self.copied(f)
418 if c:
418 if c:
419 f = f + "\0" + c
419 f = f + "\0" + c
420 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
420 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
421 st.write(e + f)
421 st.write(e + f)
422 self.dirty = 0
422 self.dirty = 0
423
423
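# Illustrative sketch (not part of the original file): the on-disk dirstate
# entry that write() packs and read() unpacks above.  After the two 20-byte
# parents, each entry is a 17-byte header ">cllll" =
# (state, mode, size, mtime, namelen) followed by the file name; a copy
# source, when present, is appended to the name after a "\0".
# Example field values are made up.

import struct

def _pack_entry(state, mode, size, mtime, name, copysource=None):
    if copysource:
        name = name + "\0" + copysource
    name = name.encode()
    return struct.pack(">cllll", state.encode(), mode, size, mtime,
                       len(name)) + name

def _unpack_entry(data, pos):
    state, mode, size, mtime, l = struct.unpack(">cllll", data[pos:pos + 17])
    pos += 17
    name = data[pos:pos + l].decode()
    copysource = None
    if '\0' in name:
        name, copysource = name.split('\0')
    return (state.decode(), mode, size, mtime, name, copysource), pos + l

entry = _pack_entry("n", 0o100644, 1948, 1120000000, "hg.py")
fields, end = _unpack_entry(entry, 0)
assert fields == ("n", 0o100644, 1948, 1120000000, "hg.py", None)
assert end == len(entry)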
424 def walk(self, files = None, match = util.always):
424 def walk(self, files = None, match = util.always):
425 self.read()
425 self.read()
426 dc = self.map.copy()
426 dc = self.map.copy()
427 # walk all files by default
427 # walk all files by default
428 if not files: files = [self.root]
428 if not files: files = [self.root]
429 def traverse():
429 def traverse():
430 for f in util.unique(files):
430 for f in util.unique(files):
431 f = os.path.join(self.root, f)
431 f = os.path.join(self.root, f)
432 if os.path.isdir(f):
432 if os.path.isdir(f):
433 for dir, subdirs, fl in os.walk(f):
433 for dir, subdirs, fl in os.walk(f):
434 d = dir[len(self.root) + 1:]
434 d = dir[len(self.root) + 1:]
435 if d == '.hg':
435 if d == '.hg':
436 subdirs[:] = []
436 subdirs[:] = []
437 continue
437 continue
438 for sd in subdirs:
438 for sd in subdirs:
439 ds = os.path.join(d, sd +'/')
439 ds = os.path.join(d, sd +'/')
440 if self.ignore(ds) or not match(ds):
440 if self.ignore(ds) or not match(ds):
441 subdirs.remove(sd)
441 subdirs.remove(sd)
442 for fn in fl:
442 for fn in fl:
443 fn = util.pconvert(os.path.join(d, fn))
443 fn = util.pconvert(os.path.join(d, fn))
444 yield 'f', fn
444 yield 'f', fn
445 else:
445 else:
446 yield 'f', f[len(self.root) + 1:]
446 yield 'f', f[len(self.root) + 1:]
447
447
448 for k in dc.keys():
448 for k in dc.keys():
449 yield 'm', k
449 yield 'm', k
450
450
451 # yield only files that match: all in dirstate, others only if
451 # yield only files that match: all in dirstate, others only if
452 # not in .hgignore
452 # not in .hgignore
453
453
454 for src, fn in util.unique(traverse()):
454 for src, fn in util.unique(traverse()):
455 if fn in dc:
455 if fn in dc:
456 del dc[fn]
456 del dc[fn]
457 elif self.ignore(fn):
457 elif self.ignore(fn):
458 continue
458 continue
459 if match(fn):
459 if match(fn):
460 yield src, fn
460 yield src, fn
461
461
462 def changes(self, files = None, match = util.always):
462 def changes(self, files = None, match = util.always):
463 self.read()
463 self.read()
464 dc = self.map.copy()
464 dc = self.map.copy()
465 lookup, changed, added, unknown = [], [], [], []
465 lookup, changed, added, unknown = [], [], [], []
466
466
467 for src, fn in self.walk(files, match):
467 for src, fn in self.walk(files, match):
468 try: s = os.stat(os.path.join(self.root, fn))
468 try: s = os.stat(os.path.join(self.root, fn))
469 except: continue
469 except: continue
470
470
471 if fn in dc:
471 if fn in dc:
472 c = dc[fn]
472 c = dc[fn]
473 del dc[fn]
473 del dc[fn]
474
474
475 if c[0] == 'm':
475 if c[0] == 'm':
476 changed.append(fn)
476 changed.append(fn)
477 elif c[0] == 'a':
477 elif c[0] == 'a':
478 added.append(fn)
478 added.append(fn)
479 elif c[0] == 'r':
479 elif c[0] == 'r':
480 unknown.append(fn)
480 unknown.append(fn)
481 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
481 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
482 changed.append(fn)
482 changed.append(fn)
483 elif c[1] != s.st_mode or c[3] != s.st_mtime:
483 elif c[1] != s.st_mode or c[3] != s.st_mtime:
484 lookup.append(fn)
484 lookup.append(fn)
485 else:
485 else:
486 if match(fn): unknown.append(fn)
486 if match(fn): unknown.append(fn)
487
487
488 return (lookup, changed, added, filter(match, dc.keys()), unknown)
488 return (lookup, changed, added, filter(match, dc.keys()), unknown)
489
489
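# Illustrative sketch (not part of the original file): the mode test used in
# changes() above.  (old_mode ^ new_mode) & 0o100 is non-zero exactly when
# the owner-execute bit flipped, so a chmod +x/-x is reported as a change
# even if size and contents are identical.  Example modes are made up.

def _exec_bit_changed(old_mode, new_mode):
    return bool((old_mode ^ new_mode) & 0o100)

assert not _exec_bit_changed(0o100644, 0o100644)   # unchanged
assert _exec_bit_changed(0o100644, 0o100755)       # chmod +x
assert not _exec_bit_changed(0o100644, 0o100664)   # group-write only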
490 # used to avoid circular references so destructors work
490 # used to avoid circular references so destructors work
491 def opener(base):
491 def opener(base):
492 p = base
492 p = base
493 def o(path, mode="r"):
493 def o(path, mode="r"):
494 if p.startswith("http://"):
494 if p.startswith("http://"):
495 f = os.path.join(p, urllib.quote(path))
495 f = os.path.join(p, urllib.quote(path))
496 return httprangereader.httprangereader(f)
496 return httprangereader.httprangereader(f)
497
497
498 f = os.path.join(p, path)
498 f = os.path.join(p, path)
499
499
500 mode += "b" # for that other OS
500 mode += "b" # for that other OS
501
501
502 if mode[0] != "r":
502 if mode[0] != "r":
503 try:
503 try:
504 s = os.stat(f)
504 s = os.stat(f)
505 except OSError:
505 except OSError:
506 d = os.path.dirname(f)
506 d = os.path.dirname(f)
507 if not os.path.isdir(d):
507 if not os.path.isdir(d):
508 os.makedirs(d)
508 os.makedirs(d)
509 else:
509 else:
510 if s.st_nlink > 1:
510 if s.st_nlink > 1:
511 file(f + ".tmp", "wb").write(file(f, "rb").read())
511 file(f + ".tmp", "wb").write(file(f, "rb").read())
512 util.rename(f+".tmp", f)
512 util.rename(f+".tmp", f)
513
513
514 return file(f, mode)
514 return file(f, mode)
515
515
516 return o
516 return o
517
517
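# Illustrative sketch (not part of the original file): the copy-on-write step
# in opener() above.  Before writing to a file whose link count is greater
# than one, the file is copied to <name>.tmp and renamed over the original,
# so hardlinked clones that share revlog files never see each other's
# writes.  util.rename is assumed to behave like os.rename with a fallback;
# the path in the usage comment is hypothetical.

import os

def _break_hardlinks(path):
    try:
        st = os.stat(path)
    except OSError:
        return                           # new file, nothing to break
    if st.st_nlink > 1:
        with open(path, "rb") as src, open(path + ".tmp", "wb") as dst:
            dst.write(src.read())
        os.rename(path + ".tmp", path)   # stand-in for util.rename

# usage: _break_hardlinks(".hg/data/foo.i"); then open(..., "ab") is safe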
518 class RepoError(Exception): pass
518 class RepoError(Exception): pass
519
519
520 class localrepository:
520 class localrepository:
521 def __init__(self, ui, path=None, create=0):
521 def __init__(self, ui, path=None, create=0):
522 self.remote = 0
522 self.remote = 0
523 if path and path.startswith("http://"):
523 if path and path.startswith("http://"):
524 self.remote = 1
524 self.remote = 1
525 self.path = path
525 self.path = path
526 else:
526 else:
527 if not path:
527 if not path:
528 p = os.getcwd()
528 p = os.getcwd()
529 while not os.path.isdir(os.path.join(p, ".hg")):
529 while not os.path.isdir(os.path.join(p, ".hg")):
530 oldp = p
530 oldp = p
531 p = os.path.dirname(p)
531 p = os.path.dirname(p)
532 if p == oldp: raise RepoError("no repo found")
532 if p == oldp: raise RepoError("no repo found")
533 path = p
533 path = p
534 self.path = os.path.join(path, ".hg")
534 self.path = os.path.join(path, ".hg")
535
535
536 if not create and not os.path.isdir(self.path):
536 if not create and not os.path.isdir(self.path):
537 raise RepoError("repository %s not found" % self.path)
537 raise RepoError("repository %s not found" % self.path)
538
538
539 self.root = path
539 self.root = path
540 self.ui = ui
540 self.ui = ui
541
541
542 if create:
542 if create:
543 os.mkdir(self.path)
543 os.mkdir(self.path)
544 os.mkdir(self.join("data"))
544 os.mkdir(self.join("data"))
545
545
546 self.opener = opener(self.path)
546 self.opener = opener(self.path)
547 self.wopener = opener(self.root)
547 self.wopener = opener(self.root)
548 self.manifest = manifest(self.opener)
548 self.manifest = manifest(self.opener)
549 self.changelog = changelog(self.opener)
549 self.changelog = changelog(self.opener)
550 self.tagscache = None
550 self.tagscache = None
551 self.nodetagscache = None
551 self.nodetagscache = None
552
552
553 if not self.remote:
553 if not self.remote:
554 self.dirstate = dirstate(self.opener, ui, self.root)
554 self.dirstate = dirstate(self.opener, ui, self.root)
555 try:
555 try:
556 self.ui.readconfig(self.opener("hgrc"))
556 self.ui.readconfig(self.opener("hgrc"))
557 except IOError: pass
557 except IOError: pass
558
558
559 def hook(self, name, **args):
559 def hook(self, name, **args):
560 s = self.ui.config("hooks", name)
560 s = self.ui.config("hooks", name)
561 if s:
561 if s:
562 self.ui.note("running hook %s: %s\n" % (name, s))
562 self.ui.note("running hook %s: %s\n" % (name, s))
563 old = {}
563 old = {}
564 for k, v in args.items():
564 for k, v in args.items():
565 k = k.upper()
565 k = k.upper()
566 old[k] = os.environ.get(k, None)
566 old[k] = os.environ.get(k, None)
567 os.environ[k] = v
567 os.environ[k] = v
568
568
569 r = os.system(s)
569 r = os.system(s)
570
570
571 for k, v in old.items():
571 for k, v in old.items():
572 if v != None:
572 if v != None:
573 os.environ[k] = v
573 os.environ[k] = v
574 else:
574 else:
575 del os.environ[k]
575 del os.environ[k]
576
576
577 if r:
577 if r:
578 self.ui.warn("abort: %s hook failed with status %d!\n" %
578 self.ui.warn("abort: %s hook failed with status %d!\n" %
579 (name, r))
579 (name, r))
580 return False
580 return False
581 return True
581 return True
582
582
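# Illustrative sketch (not part of the original file): the environment
# handling in hook() above.  Keyword arguments are exported as upper-case
# environment variables (e.g. node=... becomes $NODE), the configured shell
# command runs via os.system, and the previous environment is restored
# afterwards.  The echo command below is only an example hook.

import os

def _run_hook(cmd, **args):
    old = {}
    for k, v in args.items():
        k = k.upper()
        old[k] = os.environ.get(k)
        os.environ[k] = v
    try:
        return os.system(cmd) == 0       # the real code warns on failure
    finally:
        for k, v in old.items():
            if v is not None:
                os.environ[k] = v
            else:
                del os.environ[k]

# a precommit-style hook that just echoes the changeset id it was given
_run_hook('echo precommit hook saw $NODE', node="c693eafd")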
583 def tags(self):
583 def tags(self):
584 '''return a mapping of tag to node'''
584 '''return a mapping of tag to node'''
585 if not self.tagscache:
585 if not self.tagscache:
586 self.tagscache = {}
586 self.tagscache = {}
587 def addtag(self, k, n):
587 def addtag(self, k, n):
588 try:
588 try:
589 bin_n = bin(n)
589 bin_n = bin(n)
590 except TypeError:
590 except TypeError:
591 bin_n = ''
591 bin_n = ''
592 self.tagscache[k.strip()] = bin_n
592 self.tagscache[k.strip()] = bin_n
593
593
594 try:
594 try:
595 # read each head of the tags file, ending with the tip
595 # read each head of the tags file, ending with the tip
596 # and add each tag found to the map, with "newer" ones
596 # and add each tag found to the map, with "newer" ones
597 # taking precedence
597 # taking precedence
598 fl = self.file(".hgtags")
598 fl = self.file(".hgtags")
599 h = fl.heads()
599 h = fl.heads()
600 h.reverse()
600 h.reverse()
601 for r in h:
601 for r in h:
602 for l in fl.revision(r).splitlines():
602 for l in fl.revision(r).splitlines():
603 if l:
603 if l:
604 n, k = l.split(" ", 1)
604 n, k = l.split(" ", 1)
605 addtag(self, k, n)
605 addtag(self, k, n)
606 except KeyError:
606 except KeyError:
607 pass
607 pass
608
608
609 try:
609 try:
610 f = self.opener("localtags")
610 f = self.opener("localtags")
611 for l in f:
611 for l in f:
612 n, k = l.split(" ", 1)
612 n, k = l.split(" ", 1)
613 addtag(self, k, n)
613 addtag(self, k, n)
614 except IOError:
614 except IOError:
615 pass
615 pass
616
616
617 self.tagscache['tip'] = self.changelog.tip()
617 self.tagscache['tip'] = self.changelog.tip()
618
618
619 return self.tagscache
619 return self.tagscache
620
620
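# Illustrative sketch (not part of the original file): the .hgtags format
# consumed by tags() above.  Each line is "<40-digit hex node> <tag name>";
# heads are read oldest first so that later entries override earlier ones,
# and entries whose node does not parse are kept with an empty value, as in
# addtag().  binascii stands in for the bin() helper from revlog; the tag
# lines below are made up.

import binascii

def _parse_tag_lines(lines):
    tags = {}
    for l in lines:
        if not l.strip():
            continue
        n, k = l.split(" ", 1)
        try:
            bin_n = binascii.unhexlify(n)
        except (TypeError, ValueError, binascii.Error):
            bin_n = b''
        tags[k.strip()] = bin_n
    return tags

lines = [
    "11" * 20 + " v0.5",
    "22" * 20 + " v0.6",
    "33" * 20 + " v0.5",      # a later head re-targets v0.5 and wins
]
tags = _parse_tag_lines(lines)
assert tags["v0.5"] == binascii.unhexlify("33" * 20)
assert sorted(tags) == ["v0.5", "v0.6"]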
621 def tagslist(self):
621 def tagslist(self):
622 '''return a list of tags ordered by revision'''
622 '''return a list of tags ordered by revision'''
623 l = []
623 l = []
624 for t, n in self.tags().items():
624 for t, n in self.tags().items():
625 try:
625 try:
626 r = self.changelog.rev(n)
626 r = self.changelog.rev(n)
627 except:
627 except:
628 r = -2 # sort to the beginning of the list if unknown
628 r = -2 # sort to the beginning of the list if unknown
629 l.append((r,t,n))
629 l.append((r,t,n))
630 l.sort()
630 l.sort()
631 return [(t,n) for r,t,n in l]
631 return [(t,n) for r,t,n in l]
632
632
633 def nodetags(self, node):
633 def nodetags(self, node):
634 '''return the tags associated with a node'''
634 '''return the tags associated with a node'''
635 if not self.nodetagscache:
635 if not self.nodetagscache:
636 self.nodetagscache = {}
636 self.nodetagscache = {}
637 for t,n in self.tags().items():
637 for t,n in self.tags().items():
638 self.nodetagscache.setdefault(n,[]).append(t)
638 self.nodetagscache.setdefault(n,[]).append(t)
639 return self.nodetagscache.get(node, [])
639 return self.nodetagscache.get(node, [])
640
640
641 def lookup(self, key):
641 def lookup(self, key):
642 try:
642 try:
643 return self.tags()[key]
643 return self.tags()[key]
644 except KeyError:
644 except KeyError:
645 try:
645 try:
646 return self.changelog.lookup(key)
646 return self.changelog.lookup(key)
647 except:
647 except:
648 raise RepoError("unknown revision '%s'" % key)
648 raise RepoError("unknown revision '%s'" % key)
649
649
650 def dev(self):
650 def dev(self):
651 if self.remote: return -1
651 if self.remote: return -1
652 return os.stat(self.path).st_dev
652 return os.stat(self.path).st_dev
653
653
654 def join(self, f):
654 def join(self, f):
655 return os.path.join(self.path, f)
655 return os.path.join(self.path, f)
656
656
657 def wjoin(self, f):
657 def wjoin(self, f):
658 return os.path.join(self.root, f)
658 return os.path.join(self.root, f)
659
659
660 def file(self, f):
660 def file(self, f):
661 if f[0] == '/': f = f[1:]
661 if f[0] == '/': f = f[1:]
662 return filelog(self.opener, f)
662 return filelog(self.opener, f)
663
663
664 def getcwd(self):
664 def getcwd(self):
665 cwd = os.getcwd()
665 cwd = os.getcwd()
666 if cwd == self.root: return ''
666 if cwd == self.root: return ''
667 return cwd[len(self.root) + 1:]
667 return cwd[len(self.root) + 1:]
668
668
669 def wfile(self, f, mode='r'):
669 def wfile(self, f, mode='r'):
670 return self.wopener(f, mode)
670 return self.wopener(f, mode)
671
671
672 def transaction(self):
672 def transaction(self):
673 # save dirstate for undo
673 # save dirstate for undo
674 try:
674 try:
675 ds = self.opener("dirstate").read()
675 ds = self.opener("dirstate").read()
676 except IOError:
676 except IOError:
677 ds = ""
677 ds = ""
678 self.opener("undo.dirstate", "w").write(ds)
678 self.opener("undo.dirstate", "w").write(ds)
679
679
680 return transaction.transaction(self.ui.warn,
680 return transaction.transaction(self.ui.warn,
681 self.opener, self.join("journal"),
681 self.opener, self.join("journal"),
682 self.join("undo"))
682 self.join("undo"))
683
683
684 def recover(self):
684 def recover(self):
685 lock = self.lock()
685 lock = self.lock()
686 if os.path.exists(self.join("journal")):
686 if os.path.exists(self.join("journal")):
687 self.ui.status("rolling back interrupted transaction\n")
687 self.ui.status("rolling back interrupted transaction\n")
688 return transaction.rollback(self.opener, self.join("journal"))
688 return transaction.rollback(self.opener, self.join("journal"))
689 else:
689 else:
690 self.ui.warn("no interrupted transaction available\n")
690 self.ui.warn("no interrupted transaction available\n")
691
691
692 def undo(self):
692 def undo(self):
693 lock = self.lock()
693 lock = self.lock()
694 if os.path.exists(self.join("undo")):
694 if os.path.exists(self.join("undo")):
695 self.ui.status("rolling back last transaction\n")
695 self.ui.status("rolling back last transaction\n")
696 transaction.rollback(self.opener, self.join("undo"))
696 transaction.rollback(self.opener, self.join("undo"))
697 self.dirstate = None
697 self.dirstate = None
698 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
698 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
699 self.dirstate = dirstate(self.opener, self.ui, self.root)
699 self.dirstate = dirstate(self.opener, self.ui, self.root)
700 else:
700 else:
701 self.ui.warn("no undo information available\n")
701 self.ui.warn("no undo information available\n")
702
702
703 def lock(self, wait = 1):
703 def lock(self, wait = 1):
704 try:
704 try:
705 return lock.lock(self.join("lock"), 0)
705 return lock.lock(self.join("lock"), 0)
706 except lock.LockHeld, inst:
706 except lock.LockHeld, inst:
707 if wait:
707 if wait:
708 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
708 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
709 return lock.lock(self.join("lock"), wait)
709 return lock.lock(self.join("lock"), wait)
710 raise inst
710 raise inst
711
711
712 def rawcommit(self, files, text, user, date, p1=None, p2=None):
712 def rawcommit(self, files, text, user, date, p1=None, p2=None):
713 orig_parent = self.dirstate.parents()[0] or nullid
713 orig_parent = self.dirstate.parents()[0] or nullid
714 p1 = p1 or self.dirstate.parents()[0] or nullid
714 p1 = p1 or self.dirstate.parents()[0] or nullid
715 p2 = p2 or self.dirstate.parents()[1] or nullid
715 p2 = p2 or self.dirstate.parents()[1] or nullid
716 c1 = self.changelog.read(p1)
716 c1 = self.changelog.read(p1)
717 c2 = self.changelog.read(p2)
717 c2 = self.changelog.read(p2)
718 m1 = self.manifest.read(c1[0])
718 m1 = self.manifest.read(c1[0])
719 mf1 = self.manifest.readflags(c1[0])
719 mf1 = self.manifest.readflags(c1[0])
720 m2 = self.manifest.read(c2[0])
720 m2 = self.manifest.read(c2[0])
721
721
722 if orig_parent == p1:
722 if orig_parent == p1:
723 update_dirstate = 1
723 update_dirstate = 1
724 else:
724 else:
725 update_dirstate = 0
725 update_dirstate = 0
726
726
727 tr = self.transaction()
727 tr = self.transaction()
728 mm = m1.copy()
728 mm = m1.copy()
729 mfm = mf1.copy()
729 mfm = mf1.copy()
730 linkrev = self.changelog.count()
730 linkrev = self.changelog.count()
731 for f in files:
731 for f in files:
732 try:
732 try:
733 t = self.wfile(f).read()
733 t = self.wfile(f).read()
734 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
734 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
735 r = self.file(f)
735 r = self.file(f)
736 mfm[f] = tm
736 mfm[f] = tm
737 mm[f] = r.add(t, {}, tr, linkrev,
737 mm[f] = r.add(t, {}, tr, linkrev,
738 m1.get(f, nullid), m2.get(f, nullid))
738 m1.get(f, nullid), m2.get(f, nullid))
739 if update_dirstate:
739 if update_dirstate:
740 self.dirstate.update([f], "n")
740 self.dirstate.update([f], "n")
741 except IOError:
741 except IOError:
742 try:
742 try:
743 del mm[f]
743 del mm[f]
744 del mfm[f]
744 del mfm[f]
745 if update_dirstate:
745 if update_dirstate:
746 self.dirstate.forget([f])
746 self.dirstate.forget([f])
747 except:
747 except:
748 # deleted from p2?
748 # deleted from p2?
749 pass
749 pass
750
750
751 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
751 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
752 user = user or self.ui.username()
752 user = user or self.ui.username()
753 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
753 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
754 tr.close()
754 tr.close()
755 if update_dirstate:
755 if update_dirstate:
756 self.dirstate.setparents(n, nullid)
756 self.dirstate.setparents(n, nullid)
757
757
758 def commit(self, files = None, text = "", user = None, date = None):
758 def commit(self, files = None, text = "", user = None, date = None):
759 commit = []
759 commit = []
760 remove = []
760 remove = []
761 if files:
761 if files:
762 for f in files:
762 for f in files:
763 s = self.dirstate.state(f)
763 s = self.dirstate.state(f)
764 if s in 'nmai':
764 if s in 'nmai':
765 commit.append(f)
765 commit.append(f)
766 elif s == 'r':
766 elif s == 'r':
767 remove.append(f)
767 remove.append(f)
768 else:
768 else:
769 self.ui.warn("%s not tracked!\n" % f)
769 self.ui.warn("%s not tracked!\n" % f)
770 else:
770 else:
771 (c, a, d, u) = self.changes()
771 (c, a, d, u) = self.changes()
772 commit = c + a
772 commit = c + a
773 remove = d
773 remove = d
774
774
775 if not commit and not remove:
775 if not commit and not remove:
776 self.ui.status("nothing changed\n")
776 self.ui.status("nothing changed\n")
777 return
777 return
778
778
779 if not self.hook("precommit"):
779 if not self.hook("precommit"):
780 return 1
780 return 1
781
781
782 p1, p2 = self.dirstate.parents()
782 p1, p2 = self.dirstate.parents()
783 c1 = self.changelog.read(p1)
783 c1 = self.changelog.read(p1)
784 c2 = self.changelog.read(p2)
784 c2 = self.changelog.read(p2)
785 m1 = self.manifest.read(c1[0])
785 m1 = self.manifest.read(c1[0])
786 mf1 = self.manifest.readflags(c1[0])
786 mf1 = self.manifest.readflags(c1[0])
787 m2 = self.manifest.read(c2[0])
787 m2 = self.manifest.read(c2[0])
788 lock = self.lock()
788 lock = self.lock()
789 tr = self.transaction()
789 tr = self.transaction()
790
790
791 # check in files
791 # check in files
792 new = {}
792 new = {}
793 linkrev = self.changelog.count()
793 linkrev = self.changelog.count()
794 commit.sort()
794 commit.sort()
795 for f in commit:
795 for f in commit:
796 self.ui.note(f + "\n")
796 self.ui.note(f + "\n")
797 try:
797 try:
798 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
798 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
799 t = self.wfile(f).read()
799 t = self.wfile(f).read()
800 except IOError:
800 except IOError:
801 self.ui.warn("trouble committing %s!\n" % f)
801 self.ui.warn("trouble committing %s!\n" % f)
802 raise
802 raise
803
803
804 meta = {}
804 meta = {}
805 cp = self.dirstate.copied(f)
805 cp = self.dirstate.copied(f)
806 if cp:
806 if cp:
807 meta["copy"] = cp
807 meta["copy"] = cp
808 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
808 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
809 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
809 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
810
810
811 r = self.file(f)
811 r = self.file(f)
812 fp1 = m1.get(f, nullid)
812 fp1 = m1.get(f, nullid)
813 fp2 = m2.get(f, nullid)
813 fp2 = m2.get(f, nullid)
814 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
814 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
815
815
816 # update manifest
816 # update manifest
817 m1.update(new)
817 m1.update(new)
818 for f in remove:
818 for f in remove:
819 if f in m1:
819 if f in m1:
820 del m1[f]
820 del m1[f]
821 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
821 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
822 (new, remove))
822 (new, remove))
823
823
824 # add changeset
824 # add changeset
825 new = new.keys()
825 new = new.keys()
826 new.sort()
826 new.sort()
827
827
828 if not text:
828 if not text:
829 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
829 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
830 edittext += "".join(["HG: changed %s\n" % f for f in new])
830 edittext += "".join(["HG: changed %s\n" % f for f in new])
831 edittext += "".join(["HG: removed %s\n" % f for f in remove])
831 edittext += "".join(["HG: removed %s\n" % f for f in remove])
832 edittext = self.ui.edit(edittext)
832 edittext = self.ui.edit(edittext)
833 if not edittext.rstrip():
833 if not edittext.rstrip():
834 return 1
834 return 1
835 text = edittext
835 text = edittext
836
836
837 user = user or self.ui.username()
837 user = user or self.ui.username()
838 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
838 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
839
839
840 tr.close()
840 tr.close()
841
841
842 self.dirstate.setparents(n)
842 self.dirstate.setparents(n)
843 self.dirstate.update(new, "n")
843 self.dirstate.update(new, "n")
844 self.dirstate.forget(remove)
844 self.dirstate.forget(remove)
845
845
846 if not self.hook("commit", node=hex(n)):
846 if not self.hook("commit", node=hex(n)):
847 return 1
847 return 1
848
848
849 def walk(self, node = None, files = [], match = util.always):
849 def walk(self, node = None, files = [], match = util.always):
850 if node:
850 if node:
851 for fn in self.manifest.read(self.changelog.read(node)[0]):
851 for fn in self.manifest.read(self.changelog.read(node)[0]):
852 yield 'm', fn
852 yield 'm', fn
853 else:
853 else:
854 for src, fn in self.dirstate.walk(files, match):
854 for src, fn in self.dirstate.walk(files, match):
855 yield src, fn
855 yield src, fn
856
856
857 def changes(self, node1 = None, node2 = None, files = [],
857 def changes(self, node1 = None, node2 = None, files = [],
858 match = util.always):
858 match = util.always):
859 mf2, u = None, []
859 mf2, u = None, []
860
860
861 def fcmp(fn, mf):
861 def fcmp(fn, mf):
862 t1 = self.wfile(fn).read()
862 t1 = self.wfile(fn).read()
863 t2 = self.file(fn).revision(mf[fn])
863 t2 = self.file(fn).revision(mf[fn])
864 return cmp(t1, t2)
864 return cmp(t1, t2)
865
865
866 def mfmatches(node):
866 def mfmatches(node):
867 mf = dict(self.manifest.read(node))
867 mf = dict(self.manifest.read(node))
868 for fn in mf.keys():
868 for fn in mf.keys():
869 if not match(fn):
869 if not match(fn):
870 del mf[fn]
870 del mf[fn]
871 return mf
871 return mf
872
872
873 # are we comparing the working directory?
873 # are we comparing the working directory?
874 if not node2:
874 if not node2:
875 l, c, a, d, u = self.dirstate.changes(files, match)
875 l, c, a, d, u = self.dirstate.changes(files, match)
876
876
877 # are we comparing working dir against its parent?
877 # are we comparing working dir against its parent?
878 if not node1:
878 if not node1:
879 if l:
879 if l:
880 # do a full compare of any files that might have changed
880 # do a full compare of any files that might have changed
881 change = self.changelog.read(self.dirstate.parents()[0])
881 change = self.changelog.read(self.dirstate.parents()[0])
882 mf2 = mfmatches(change[0])
882 mf2 = mfmatches(change[0])
883 for f in l:
883 for f in l:
884 if fcmp(f, mf2):
884 if fcmp(f, mf2):
885 c.append(f)
885 c.append(f)
886
886
887 for l in c, a, d, u:
887 for l in c, a, d, u:
888 l.sort()
888 l.sort()
889
889
890 return (c, a, d, u)
890 return (c, a, d, u)
891
891
892 # are we comparing working dir against non-tip?
892 # are we comparing working dir against non-tip?
893 # generate a pseudo-manifest for the working dir
893 # generate a pseudo-manifest for the working dir
894 if not node2:
894 if not node2:
895 if not mf2:
895 if not mf2:
896 change = self.changelog.read(self.dirstate.parents()[0])
896 change = self.changelog.read(self.dirstate.parents()[0])
897 mf2 = mfmatches(change[0])
897 mf2 = mfmatches(change[0])
898 for f in a + c + l:
898 for f in a + c + l:
899 mf2[f] = ""
899 mf2[f] = ""
900 for f in d:
900 for f in d:
901 if f in mf2: del mf2[f]
901 if f in mf2: del mf2[f]
902 else:
902 else:
903 change = self.changelog.read(node2)
903 change = self.changelog.read(node2)
904 mf2 = mfmatches(change[0])
904 mf2 = mfmatches(change[0])
905
905
906 # flush lists from dirstate before comparing manifests
906 # flush lists from dirstate before comparing manifests
907 c, a = [], []
907 c, a = [], []
908
908
909 change = self.changelog.read(node1)
909 change = self.changelog.read(node1)
910 mf1 = mfmatches(change[0])
910 mf1 = mfmatches(change[0])
911
911
912 for fn in mf2:
912 for fn in mf2:
913 if mf1.has_key(fn):
913 if mf1.has_key(fn):
914 if mf1[fn] != mf2[fn]:
914 if mf1[fn] != mf2[fn]:
915 if mf2[fn] != "" or fcmp(fn, mf1):
915 if mf2[fn] != "" or fcmp(fn, mf1):
916 c.append(fn)
916 c.append(fn)
917 del mf1[fn]
917 del mf1[fn]
918 else:
918 else:
919 a.append(fn)
919 a.append(fn)
920
920
921 d = mf1.keys()
921 d = mf1.keys()
922
922
923 for l in c, a, d, u:
923 for l in c, a, d, u:
924 l.sort()
924 l.sort()
925
925
926 return (c, a, d, u)
926 return (c, a, d, u)
927
927
928 def add(self, list):
928 def add(self, list):
929 for f in list:
929 for f in list:
930 p = self.wjoin(f)
930 p = self.wjoin(f)
931 if not os.path.exists(p):
931 if not os.path.exists(p):
932 self.ui.warn("%s does not exist!\n" % f)
932 self.ui.warn("%s does not exist!\n" % f)
933 elif not os.path.isfile(p):
933 elif not os.path.isfile(p):
934 self.ui.warn("%s not added: only files supported currently\n" % f)
934 self.ui.warn("%s not added: only files supported currently\n" % f)
935 elif self.dirstate.state(f) in 'an':
935 elif self.dirstate.state(f) in 'an':
936 self.ui.warn("%s already tracked!\n" % f)
936 self.ui.warn("%s already tracked!\n" % f)
937 else:
937 else:
938 self.dirstate.update([f], "a")
938 self.dirstate.update([f], "a")
939
939
940 def forget(self, list):
940 def forget(self, list):
941 for f in list:
941 for f in list:
942 if self.dirstate.state(f) not in 'ai':
942 if self.dirstate.state(f) not in 'ai':
943 self.ui.warn("%s not added!\n" % f)
943 self.ui.warn("%s not added!\n" % f)
944 else:
944 else:
945 self.dirstate.forget([f])
945 self.dirstate.forget([f])
946
946
947 def remove(self, list):
947 def remove(self, list):
948 for f in list:
948 for f in list:
949 p = self.wjoin(f)
949 p = self.wjoin(f)
950 if os.path.exists(p):
950 if os.path.exists(p):
951 self.ui.warn("%s still exists!\n" % f)
951 self.ui.warn("%s still exists!\n" % f)
952 elif self.dirstate.state(f) == 'a':
952 elif self.dirstate.state(f) == 'a':
953 self.ui.warn("%s never committed!\n" % f)
953 self.ui.warn("%s never committed!\n" % f)
954 self.dirstate.forget([f])
954 self.dirstate.forget([f])
955 elif f not in self.dirstate:
955 elif f not in self.dirstate:
956 self.ui.warn("%s not tracked!\n" % f)
956 self.ui.warn("%s not tracked!\n" % f)
957 else:
957 else:
958 self.dirstate.update([f], "r")
958 self.dirstate.update([f], "r")
959
959
960 def copy(self, source, dest):
960 def copy(self, source, dest):
961 p = self.wjoin(dest)
961 p = self.wjoin(dest)
962 if not os.path.exists(dest):
962 if not os.path.exists(dest):
963 self.ui.warn("%s does not exist!\n" % dest)
963 self.ui.warn("%s does not exist!\n" % dest)
964 elif not os.path.isfile(dest):
964 elif not os.path.isfile(dest):
965 self.ui.warn("copy failed: %s is not a file\n" % dest)
965 self.ui.warn("copy failed: %s is not a file\n" % dest)
966 else:
966 else:
967 if self.dirstate.state(dest) == '?':
967 if self.dirstate.state(dest) == '?':
968 self.dirstate.update([dest], "a")
968 self.dirstate.update([dest], "a")
969 self.dirstate.copy(source, dest)
969 self.dirstate.copy(source, dest)
970
970
971 def heads(self):
971 def heads(self):
972 return self.changelog.heads()
972 return self.changelog.heads()
973
973
974 def branches(self, nodes):
974 def branches(self, nodes):
975 if not nodes: nodes = [self.changelog.tip()]
975 if not nodes: nodes = [self.changelog.tip()]
976 b = []
976 b = []
977 for n in nodes:
977 for n in nodes:
978 t = n
978 t = n
979 while n:
979 while n:
980 p = self.changelog.parents(n)
980 p = self.changelog.parents(n)
981 if p[1] != nullid or p[0] == nullid:
981 if p[1] != nullid or p[0] == nullid:
982 b.append((t, n, p[0], p[1]))
982 b.append((t, n, p[0], p[1]))
983 break
983 break
984 n = p[0]
984 n = p[0]
985 return b
985 return b
986
986
987 def between(self, pairs):
987 def between(self, pairs):
988 r = []
988 r = []
989
989
990 for top, bottom in pairs:
990 for top, bottom in pairs:
991 n, l, i = top, [], 0
991 n, l, i = top, [], 0
992 f = 1
992 f = 1
993
993
994 while n != bottom:
994 while n != bottom:
995 p = self.changelog.parents(n)[0]
995 p = self.changelog.parents(n)[0]
996 if i == f:
996 if i == f:
997 l.append(n)
997 l.append(n)
998 f = f * 2
998 f = f * 2
999 n = p
999 n = p
1000 i += 1
1000 i += 1
1001
1001
1002 r.append(l)
1002 r.append(l)
1003
1003
1004 return r
1004 return r
1005
1005
1006 def newer(self, nodes):
1006 def newer(self, nodes):
1007 m = {}
1007 m = {}
1008 nl = []
1008 nl = []
1009 pm = {}
1009 pm = {}
1010 cl = self.changelog
1010 cl = self.changelog
1011 t = l = cl.count()
1011 t = l = cl.count()
1012
1012
1013 # find the lowest numbered node
1013 # find the lowest numbered node
1014 for n in nodes:
1014 for n in nodes:
1015 l = min(l, cl.rev(n))
1015 l = min(l, cl.rev(n))
1016 m[n] = 1
1016 m[n] = 1
1017
1017
1018 for i in xrange(l, t):
1018 for i in xrange(l, t):
1019 n = cl.node(i)
1019 n = cl.node(i)
1020 if n in m: # explicitly listed
1020 if n in m: # explicitly listed
1021 pm[n] = 1
1021 pm[n] = 1
1022 nl.append(n)
1022 nl.append(n)
1023 continue
1023 continue
1024 for p in cl.parents(n):
1024 for p in cl.parents(n):
1025 if p in pm: # parent listed
1025 if p in pm: # parent listed
1026 pm[n] = 1
1026 pm[n] = 1
1027 nl.append(n)
1027 nl.append(n)
1028 break
1028 break
1029
1029
1030 return nl
1030 return nl
1031
1031
1032 def findincoming(self, remote, base={}):
1032 def findincoming(self, remote, base={}):
1033 m = self.changelog.nodemap
1033 m = self.changelog.nodemap
1034 search = []
1034 search = []
1035 fetch = []
1035 fetch = []
1036 seen = {}
1036 seen = {}
1037 seenbranch = {}
1037 seenbranch = {}
1038
1038
1039 # assume we're closer to the tip than the root
1039 # assume we're closer to the tip than the root
1040 # and start by examining the heads
1040 # and start by examining the heads
1041 self.ui.status("searching for changes\n")
1041 self.ui.status("searching for changes\n")
1042 heads = remote.heads()
1042 heads = remote.heads()
1043 unknown = []
1043 unknown = []
1044 for h in heads:
1044 for h in heads:
1045 if h not in m:
1045 if h not in m:
1046 unknown.append(h)
1046 unknown.append(h)
1047 else:
1047 else:
1048 base[h] = 1
1048 base[h] = 1
1049
1049
1050 if not unknown:
1050 if not unknown:
1051 return None
1051 return None
1052
1052
1053 rep = {}
1053 rep = {}
1054 reqcnt = 0
1054 reqcnt = 0
1055
1055
1056 # search through remote branches
1056 # search through remote branches
1057 # a 'branch' here is a linear segment of history, with four parts:
1057 # a 'branch' here is a linear segment of history, with four parts:
1058 # head, root, first parent, second parent
1058 # head, root, first parent, second parent
1059 # (a branch always has two parents (or none) by definition)
1059 # (a branch always has two parents (or none) by definition)
1060 unknown = remote.branches(unknown)
1060 unknown = remote.branches(unknown)
1061 while unknown:
1061 while unknown:
1062 r = []
1062 r = []
1063 while unknown:
1063 while unknown:
1064 n = unknown.pop(0)
1064 n = unknown.pop(0)
1065 if n[0] in seen:
1065 if n[0] in seen:
1066 continue
1066 continue
1067
1067
1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1069 if n[0] == nullid:
1069 if n[0] == nullid:
1070 break
1070 break
1071 if n in seenbranch:
1071 if n in seenbranch:
1072 self.ui.debug("branch already found\n")
1072 self.ui.debug("branch already found\n")
1073 continue
1073 continue
1074 if n[1] and n[1] in m: # do we know the base?
1074 if n[1] and n[1] in m: # do we know the base?
1075 self.ui.debug("found incomplete branch %s:%s\n"
1075 self.ui.debug("found incomplete branch %s:%s\n"
1076 % (short(n[0]), short(n[1])))
1076 % (short(n[0]), short(n[1])))
1077 search.append(n) # schedule branch range for scanning
1077 search.append(n) # schedule branch range for scanning
1078 seenbranch[n] = 1
1078 seenbranch[n] = 1
1079 else:
1079 else:
1080 if n[1] not in seen and n[1] not in fetch:
1080 if n[1] not in seen and n[1] not in fetch:
1081 if n[2] in m and n[3] in m:
1081 if n[2] in m and n[3] in m:
1082 self.ui.debug("found new changeset %s\n" %
1082 self.ui.debug("found new changeset %s\n" %
1083 short(n[1]))
1083 short(n[1]))
1084 fetch.append(n[1]) # earliest unknown
1084 fetch.append(n[1]) # earliest unknown
1085 base[n[2]] = 1 # latest known
1085 base[n[2]] = 1 # latest known
1086 continue
1086 continue
1087
1087
1088 for a in n[2:4]:
1088 for a in n[2:4]:
1089 if a not in rep:
1089 if a not in rep:
1090 r.append(a)
1090 r.append(a)
1091 rep[a] = 1
1091 rep[a] = 1
1092
1092
1093 seen[n[0]] = 1
1093 seen[n[0]] = 1
1094
1094
1095 if r:
1095 if r:
1096 reqcnt += 1
1096 reqcnt += 1
1097 self.ui.debug("request %d: %s\n" %
1097 self.ui.debug("request %d: %s\n" %
1098 (reqcnt, " ".join(map(short, r))))
1098 (reqcnt, " ".join(map(short, r))))
1099 for p in range(0, len(r), 10):
1099 for p in range(0, len(r), 10):
1100 for b in remote.branches(r[p:p+10]):
1100 for b in remote.branches(r[p:p+10]):
1101 self.ui.debug("received %s:%s\n" %
1101 self.ui.debug("received %s:%s\n" %
1102 (short(b[0]), short(b[1])))
1102 (short(b[0]), short(b[1])))
1103 if b[0] not in m and b[0] not in seen:
1103 if b[0] not in m and b[0] not in seen:
1104 unknown.append(b)
1104 unknown.append(b)
1105
1105
1106 # do binary search on the branches we found
1106 # do binary search on the branches we found
1107 while search:
1107 while search:
1108 n = search.pop(0)
1108 n = search.pop(0)
1109 reqcnt += 1
1109 reqcnt += 1
1110 l = remote.between([(n[0], n[1])])[0]
1110 l = remote.between([(n[0], n[1])])[0]
1111 l.append(n[1])
1111 l.append(n[1])
1112 p = n[0]
1112 p = n[0]
1113 f = 1
1113 f = 1
1114 for i in l:
1114 for i in l:
1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1116 if i in m:
1116 if i in m:
1117 if f <= 2:
1117 if f <= 2:
1118 self.ui.debug("found new branch changeset %s\n" %
1118 self.ui.debug("found new branch changeset %s\n" %
1119 short(p))
1119 short(p))
1120 fetch.append(p)
1120 fetch.append(p)
1121 base[i] = 1
1121 base[i] = 1
1122 else:
1122 else:
1123 self.ui.debug("narrowed branch search to %s:%s\n"
1123 self.ui.debug("narrowed branch search to %s:%s\n"
1124 % (short(p), short(i)))
1124 % (short(p), short(i)))
1125 search.append((p, i))
1125 search.append((p, i))
1126 break
1126 break
1127 p, f = i, f * 2
1127 p, f = i, f * 2
1128
1128
1129 # sanity check our fetch list
1129 # sanity check our fetch list
1130 for f in fetch:
1130 for f in fetch:
1131 if f in m:
1131 if f in m:
1132 raise RepoError("already have changeset " + short(f))
1132 raise RepoError("already have changeset " + short(f))
1133
1133
1134 if base.keys() == [nullid]:
1134 if base.keys() == [nullid]:
1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1136
1136
1137 self.ui.note("adding new changesets starting at " +
1137 self.ui.note("adding new changesets starting at " +
1138 " ".join([short(f) for f in fetch]) + "\n")
1138 " ".join([short(f) for f in fetch]) + "\n")
1139
1139
1140 self.ui.debug("%d total queries\n" % reqcnt)
1140 self.ui.debug("%d total queries\n" % reqcnt)
1141
1141
1142 return fetch
1142 return fetch
1143
1143
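# A minimal standalone sketch (not Mercurial's wire API) of the boundary
# search the narrowing loop above performs: along a linear branch whose
# head-side revisions are unknown and whose root-side revisions are already
# known locally, the first known revision can be located with O(log n)
# membership tests.

def _first_known_index(chain, known):
    """Return the index of the first locally-known revision in `chain`
    (ordered head first), or len(chain) if none is known."""
    lo, hi = 0, len(chain)
    while lo < hi:
        mid = (lo + hi) // 2
        if known(chain[mid]):
            hi = mid            # the boundary is at mid or earlier
        else:
            lo = mid + 1        # everything up to mid is still unknown
    return lo

# example: revisions 0..9 with 6..9 already known locally; the earliest
# unknown revision (where fetching would start) is chain[boundary - 1]
_chain = list(range(10))
assert _first_known_index(_chain, lambda r: r >= 6) == 6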
1144 def findoutgoing(self, remote):
1144 def findoutgoing(self, remote):
1145 base = {}
1145 base = {}
1146 self.findincoming(remote, base)
1146 self.findincoming(remote, base)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1148
1148
1149 # prune everything remote has from the tree
1149 # prune everything remote has from the tree
1150 del remain[nullid]
1150 del remain[nullid]
1151 remove = base.keys()
1151 remove = base.keys()
1152 while remove:
1152 while remove:
1153 n = remove.pop(0)
1153 n = remove.pop(0)
1154 if n in remain:
1154 if n in remain:
1155 del remain[n]
1155 del remain[n]
1156 for p in self.changelog.parents(n):
1156 for p in self.changelog.parents(n):
1157 remove.append(p)
1157 remove.append(p)
1158
1158
1159 # find every node whose parents have been pruned
1159 # find every node whose parents have been pruned
1160 subset = []
1160 subset = []
1161 for n in remain:
1161 for n in remain:
1162 p1, p2 = self.changelog.parents(n)
1162 p1, p2 = self.changelog.parents(n)
1163 if p1 not in remain and p2 not in remain:
1163 if p1 not in remain and p2 not in remain:
1164 subset.append(n)
1164 subset.append(n)
1165
1165
1166 # this is the set of all roots we have to push
1166 # this is the set of all roots we have to push
1167 return subset
1167 return subset
1168
1168
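# A small self-contained sketch (toy dict-based DAG, not the changelog API)
# of the pruning above: remove everything the remote already has together
# with its ancestors, then report the roots of what remains -- exactly the
# nodes a push would have to start from.

def _outgoing_roots(parents, base):
    """`parents` maps node -> (p1, p2) with None for a missing parent;
    `base` lists nodes the remote side is known to have."""
    remain = set(parents)
    queue = list(base)
    while queue:                        # prune remote nodes and their ancestors
        n = queue.pop()
        if n in remain:
            remain.discard(n)
            queue.extend(p for p in parents[n] if p is not None)
    return [n for n in remain           # roots: nodes whose parents were pruned
            if all(p is None or p not in remain for p in parents[n])]

# a -> b -> c, with d also branching off b; the remote already has a and b
_dag = {"a": (None, None), "b": ("a", None), "c": ("b", None), "d": ("b", None)}
assert sorted(_outgoing_roots(_dag, base=["b"])) == ["c", "d"]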
1169 def pull(self, remote):
1169 def pull(self, remote):
1170 lock = self.lock()
1170 lock = self.lock()
1171
1171
1172 # if we have an empty repo, fetch everything
1172 # if we have an empty repo, fetch everything
1173 if self.changelog.tip() == nullid:
1173 if self.changelog.tip() == nullid:
1174 self.ui.status("requesting all changes\n")
1174 self.ui.status("requesting all changes\n")
1175 fetch = [nullid]
1175 fetch = [nullid]
1176 else:
1176 else:
1177 fetch = self.findincoming(remote)
1177 fetch = self.findincoming(remote)
1178
1178
1179 if not fetch:
1179 if not fetch:
1180 self.ui.status("no changes found\n")
1180 self.ui.status("no changes found\n")
1181 return 1
1181 return 1
1182
1182
1183 cg = remote.changegroup(fetch)
1183 cg = remote.changegroup(fetch)
1184 return self.addchangegroup(cg)
1184 return self.addchangegroup(cg)
1185
1185
1186 def push(self, remote):
1186 def push(self, remote):
1187 lock = remote.lock()
1187 lock = remote.lock()
1188 update = self.findoutgoing(remote)
1188 update = self.findoutgoing(remote)
1189 if not update:
1189 if not update:
1190 self.ui.status("no changes found\n")
1190 self.ui.status("no changes found\n")
1191 return 1
1191 return 1
1192
1192
1193 cg = self.changegroup(update)
1193 cg = self.changegroup(update)
1194 return remote.addchangegroup(cg)
1194 return remote.addchangegroup(cg)
1195
1195
1196 def changegroup(self, basenodes):
1196 def changegroup(self, basenodes):
1197 class genread:
1197 class genread:
1198 def __init__(self, generator):
1198 def __init__(self, generator):
1199 self.g = generator
1199 self.g = generator
1200 self.buf = ""
1200 self.buf = ""
1201 def read(self, l):
1201 def read(self, l):
1202 while l > len(self.buf):
1202 while l > len(self.buf):
1203 try:
1203 try:
1204 self.buf += self.g.next()
1204 self.buf += self.g.next()
1205 except StopIteration:
1205 except StopIteration:
1206 break
1206 break
1207 d, self.buf = self.buf[:l], self.buf[l:]
1207 d, self.buf = self.buf[:l], self.buf[l:]
1208 return d
1208 return d
1209
1209
1210 def gengroup():
1210 def gengroup():
1211 nodes = self.newer(basenodes)
1211 nodes = self.newer(basenodes)
1212
1212
1213 # construct the link map
1213 # construct the link map
1214 linkmap = {}
1214 linkmap = {}
1215 for n in nodes:
1215 for n in nodes:
1216 linkmap[self.changelog.rev(n)] = n
1216 linkmap[self.changelog.rev(n)] = n
1217
1217
1218 # construct a list of all changed files
1218 # construct a list of all changed files
1219 changed = {}
1219 changed = {}
1220 for n in nodes:
1220 for n in nodes:
1221 c = self.changelog.read(n)
1221 c = self.changelog.read(n)
1222 for f in c[3]:
1222 for f in c[3]:
1223 changed[f] = 1
1223 changed[f] = 1
1224 changed = changed.keys()
1224 changed = changed.keys()
1225 changed.sort()
1225 changed.sort()
1226
1226
1227 # the changegroup is changesets + manifests + all file revs
1227 # the changegroup is changesets + manifests + all file revs
1228 revs = [ self.changelog.rev(n) for n in nodes ]
1228 revs = [ self.changelog.rev(n) for n in nodes ]
1229
1229
1230 for y in self.changelog.group(linkmap): yield y
1230 for y in self.changelog.group(linkmap): yield y
1231 for y in self.manifest.group(linkmap): yield y
1231 for y in self.manifest.group(linkmap): yield y
1232 for f in changed:
1232 for f in changed:
1233 yield struct.pack(">l", len(f) + 4) + f
1233 yield struct.pack(">l", len(f) + 4) + f
1234 g = self.file(f).group(linkmap)
1234 g = self.file(f).group(linkmap)
1235 for y in g:
1235 for y in g:
1236 yield y
1236 yield y
1237
1237
1238 yield struct.pack(">l", 0)
1238 yield struct.pack(">l", 0)
1239
1239
1240 return genread(gengroup())
1240 return genread(gengroup())
1241
1241
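# Sketch of the chunk framing used by gengroup() above and getchunk() below:
# each chunk carries a big-endian 4-byte length that includes the four
# length bytes themselves, and a length <= 4 terminates a group.
# Illustration only, written against io.BytesIO instead of the real streams.

import struct
from io import BytesIO

def _write_chunk(out, data):
    out.write(struct.pack(">l", len(data) + 4) + data)

def _close_group(out):
    out.write(struct.pack(">l", 0))

def _read_chunks(src):
    while True:
        hdr = src.read(4)
        if not hdr:
            return
        l = struct.unpack(">l", hdr)[0]
        if l <= 4:                      # empty chunk ends the group
            return
        yield src.read(l - 4)

_buf = BytesIO()
for _payload in (b"foo", b"bar"):
    _write_chunk(_buf, _payload)
_close_group(_buf)
_buf.seek(0)
assert list(_read_chunks(_buf)) == [b"foo", b"bar"]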
1242 def addchangegroup(self, source):
1242 def addchangegroup(self, source):
1243
1243
1244 def getchunk():
1244 def getchunk():
1245 d = source.read(4)
1245 d = source.read(4)
1246 if not d: return ""
1246 if not d: return ""
1247 l = struct.unpack(">l", d)[0]
1247 l = struct.unpack(">l", d)[0]
1248 if l <= 4: return ""
1248 if l <= 4: return ""
1249 return source.read(l - 4)
1249 return source.read(l - 4)
1250
1250
1251 def getgroup():
1251 def getgroup():
1252 while 1:
1252 while 1:
1253 c = getchunk()
1253 c = getchunk()
1254 if not c: break
1254 if not c: break
1255 yield c
1255 yield c
1256
1256
1257 def csmap(x):
1257 def csmap(x):
1258 self.ui.debug("add changeset %s\n" % short(x))
1258 self.ui.debug("add changeset %s\n" % short(x))
1259 return self.changelog.count()
1259 return self.changelog.count()
1260
1260
1261 def revmap(x):
1261 def revmap(x):
1262 return self.changelog.rev(x)
1262 return self.changelog.rev(x)
1263
1263
1264 if not source: return
1264 if not source: return
1265 changesets = files = revisions = 0
1265 changesets = files = revisions = 0
1266
1266
1267 tr = self.transaction()
1267 tr = self.transaction()
1268
1268
1269 # pull off the changeset group
1269 # pull off the changeset group
1270 self.ui.status("adding changesets\n")
1270 self.ui.status("adding changesets\n")
1271 co = self.changelog.tip()
1271 co = self.changelog.tip()
1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1274
1274
1275 # pull off the manifest group
1275 # pull off the manifest group
1276 self.ui.status("adding manifests\n")
1276 self.ui.status("adding manifests\n")
1277 mm = self.manifest.tip()
1277 mm = self.manifest.tip()
1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1279
1279
1280 # process the files
1280 # process the files
1281 self.ui.status("adding file revisions\n")
1281 self.ui.status("adding file revisions\n")
1282 while 1:
1282 while 1:
1283 f = getchunk()
1283 f = getchunk()
1284 if not f: break
1284 if not f: break
1285 self.ui.debug("adding %s revisions\n" % f)
1285 self.ui.debug("adding %s revisions\n" % f)
1286 fl = self.file(f)
1286 fl = self.file(f)
1287 o = fl.count()
1287 o = fl.count()
1288 n = fl.addgroup(getgroup(), revmap, tr)
1288 n = fl.addgroup(getgroup(), revmap, tr)
1289 revisions += fl.count() - o
1289 revisions += fl.count() - o
1290 files += 1
1290 files += 1
1291
1291
1292 self.ui.status(("modified %d files, added %d changesets" +
1292 self.ui.status(("modified %d files, added %d changesets" +
1293 " and %d new revisions\n")
1293 " and %d new revisions\n")
1294 % (files, changesets, revisions))
1294 % (files, changesets, revisions))
1295
1295
1296 tr.close()
1296 tr.close()
1297 return
1297 return
1298
1298
1299 def update(self, node, allow=False, force=False, choose=None,
1299 def update(self, node, allow=False, force=False, choose=None,
1300 moddirstate=True):
1300 moddirstate=True):
1301 pl = self.dirstate.parents()
1301 pl = self.dirstate.parents()
1302 if not force and pl[1] != nullid:
1302 if not force and pl[1] != nullid:
1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1304 return 1
1304 return 1
1305
1305
1306 p1, p2 = pl[0], node
1306 p1, p2 = pl[0], node
1307 pa = self.changelog.ancestor(p1, p2)
1307 pa = self.changelog.ancestor(p1, p2)
1308 m1n = self.changelog.read(p1)[0]
1308 m1n = self.changelog.read(p1)[0]
1309 m2n = self.changelog.read(p2)[0]
1309 m2n = self.changelog.read(p2)[0]
1310 man = self.manifest.ancestor(m1n, m2n)
1310 man = self.manifest.ancestor(m1n, m2n)
1311 m1 = self.manifest.read(m1n)
1311 m1 = self.manifest.read(m1n)
1312 mf1 = self.manifest.readflags(m1n)
1312 mf1 = self.manifest.readflags(m1n)
1313 m2 = self.manifest.read(m2n)
1313 m2 = self.manifest.read(m2n)
1314 mf2 = self.manifest.readflags(m2n)
1314 mf2 = self.manifest.readflags(m2n)
1315 ma = self.manifest.read(man)
1315 ma = self.manifest.read(man)
1316 mfa = self.manifest.readflags(man)
1316 mfa = self.manifest.readflags(man)
1317
1317
1318 (c, a, d, u) = self.changes()
1318 (c, a, d, u) = self.changes()
1319
1319
1320 # is this a jump, or a merge? i.e. is there a linear path
1320 # is this a jump, or a merge? i.e. is there a linear path
1321 # from p1 to p2?
1321 # from p1 to p2?
1322 linear_path = (pa == p1 or pa == p2)
1322 linear_path = (pa == p1 or pa == p2)
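# (pa == p2 means the target is an ancestor of the working parent, i.e. we
# are stepping backwards; pa == p1 means the target is a descendant, i.e. a
# simple fast-forward. In either case no real merge is required.)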
1323
1323
1324 # resolve the manifest to determine which files
1324 # resolve the manifest to determine which files
1325 # we care about merging
1325 # we care about merging
1326 self.ui.note("resolving manifests\n")
1326 self.ui.note("resolving manifests\n")
1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1328 (force, allow, moddirstate, linear_path))
1328 (force, allow, moddirstate, linear_path))
1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1330 (short(man), short(m1n), short(m2n)))
1330 (short(man), short(m1n), short(m2n)))
1331
1331
1332 merge = {}
1332 merge = {}
1333 get = {}
1333 get = {}
1334 remove = []
1334 remove = []
1335 mark = {}
1335 mark = {}
1336
1336
1337 # construct a working dir manifest
1337 # construct a working dir manifest
1338 mw = m1.copy()
1338 mw = m1.copy()
1339 mfw = mf1.copy()
1339 mfw = mf1.copy()
1340 umap = dict.fromkeys(u)
1340 umap = dict.fromkeys(u)
1341
1341
1342 for f in a + c + u:
1342 for f in a + c + u:
1343 mw[f] = ""
1343 mw[f] = ""
1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1345
1345
1346 for f in d:
1346 for f in d:
1347 if f in mw: del mw[f]
1347 if f in mw: del mw[f]
1348
1348
1349 # If we're jumping between revisions (as opposed to merging),
1349 # If we're jumping between revisions (as opposed to merging),
1350 # and if neither the working directory nor the target rev has
1350 # and if neither the working directory nor the target rev has
1351 # the file, then we need to remove it from the dirstate, to
1351 # the file, then we need to remove it from the dirstate, to
1352 # prevent the dirstate from listing the file when it is no
1352 # prevent the dirstate from listing the file when it is no
1353 # longer in the manifest.
1353 # longer in the manifest.
1354 if moddirstate and linear_path and f not in m2:
1354 if moddirstate and linear_path and f not in m2:
1355 self.dirstate.forget((f,))
1355 self.dirstate.forget((f,))
1356
1356
1357 # Compare manifests
1357 # Compare manifests
1358 for f, n in mw.iteritems():
1358 for f, n in mw.iteritems():
1359 if choose and not choose(f): continue
1359 if choose and not choose(f): continue
1360 if f in m2:
1360 if f in m2:
1361 s = 0
1361 s = 0
1362
1362
1363 # is the wfile new since m1, and match m2?
1363 # is the wfile new since m1, and match m2?
1364 if f not in m1:
1364 if f not in m1:
1365 t1 = self.wfile(f).read()
1365 t1 = self.wfile(f).read()
1366 t2 = self.file(f).revision(m2[f])
1366 t2 = self.file(f).revision(m2[f])
1367 if cmp(t1, t2) == 0:
1367 if cmp(t1, t2) == 0:
1368 mark[f] = 1
1368 mark[f] = 1
1369 n = m2[f]
1369 n = m2[f]
1370 del t1, t2
1370 del t1, t2
1371
1371
1372 # are files different?
1372 # are files different?
1373 if n != m2[f]:
1373 if n != m2[f]:
1374 a = ma.get(f, nullid)
1374 a = ma.get(f, nullid)
1375 # are both different from the ancestor?
1375 # are both different from the ancestor?
1376 if n != a and m2[f] != a:
1376 if n != a and m2[f] != a:
1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1378 # merge executable bits
1378 # merge executable bits
1379 # "if we changed or they changed, change in merge"
1379 # "if we changed or they changed, change in merge"
1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1381 mode = ((a^b) | (a^c)) ^ a
1381 mode = ((a^b) | (a^c)) ^ a
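# worked case for the xor rule above: ancestor a=0, working copy b=1,
# remote c=0 gives ((0^1)|(0^0))^0 == 1, keeping the side that changed;
# if b == c == a nothing changed and the result is just a.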
1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1383 s = 1
1383 s = 1
1384 # are we clobbering?
1384 # are we clobbering?
1385 # is remote's version newer?
1385 # is remote's version newer?
1386 # or are we going back in time?
1386 # or are we going back in time?
1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1388 self.ui.debug(" remote %s is newer, get\n" % f)
1388 self.ui.debug(" remote %s is newer, get\n" % f)
1389 get[f] = m2[f]
1389 get[f] = m2[f]
1390 s = 1
1390 s = 1
1391 else:
1391 else:
1392 mark[f] = 1
1392 mark[f] = 1
1393 elif f in umap:
1393 elif f in umap:
1394 # this unknown file is the same as the checkout
1394 # this unknown file is the same as the checkout
1395 get[f] = m2[f]
1395 get[f] = m2[f]
1396
1396
1397 if not s and mfw[f] != mf2[f]:
1397 if not s and mfw[f] != mf2[f]:
1398 if force:
1398 if force:
1399 self.ui.debug(" updating permissions for %s\n" % f)
1399 self.ui.debug(" updating permissions for %s\n" % f)
1400 util.set_exec(self.wjoin(f), mf2[f])
1400 util.set_exec(self.wjoin(f), mf2[f])
1401 else:
1401 else:
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1403 mode = ((a^b) | (a^c)) ^ a
1403 mode = ((a^b) | (a^c)) ^ a
1404 if mode != b:
1404 if mode != b:
1405 self.ui.debug(" updating permissions for %s\n" % f)
1405 self.ui.debug(" updating permissions for %s\n" % f)
1406 util.set_exec(self.wjoin(f), mode)
1406 util.set_exec(self.wjoin(f), mode)
1407 mark[f] = 1
1407 mark[f] = 1
1408 del m2[f]
1408 del m2[f]
1409 elif f in ma:
1409 elif f in ma:
1410 if n != ma[f]:
1410 if n != ma[f]:
1411 r = "d"
1411 r = "d"
1412 if not force and (linear_path or allow):
1412 if not force and (linear_path or allow):
1413 r = self.ui.prompt(
1413 r = self.ui.prompt(
1414 (" local changed %s which remote deleted\n" % f) +
1414 (" local changed %s which remote deleted\n" % f) +
1415 "(k)eep or (d)elete?", "[kd]", "k")
1415 "(k)eep or (d)elete?", "[kd]", "k")
1416 if r == "d":
1416 if r == "d":
1417 remove.append(f)
1417 remove.append(f)
1418 else:
1418 else:
1419 self.ui.debug("other deleted %s\n" % f)
1419 self.ui.debug("other deleted %s\n" % f)
1420 remove.append(f) # other deleted it
1420 remove.append(f) # other deleted it
1421 else:
1421 else:
1422 if n == m1.get(f, nullid): # same as parent
1422 if n == m1.get(f, nullid): # same as parent
1423 if p2 == pa: # going backwards?
1423 if p2 == pa: # going backwards?
1424 self.ui.debug("remote deleted %s\n" % f)
1424 self.ui.debug("remote deleted %s\n" % f)
1425 remove.append(f)
1425 remove.append(f)
1426 else:
1426 else:
1427 self.ui.debug("local created %s, keeping\n" % f)
1427 self.ui.debug("local created %s, keeping\n" % f)
1428 else:
1428 else:
1429 self.ui.debug("working dir created %s, keeping\n" % f)
1429 self.ui.debug("working dir created %s, keeping\n" % f)
1430
1430
1431 for f, n in m2.iteritems():
1431 for f, n in m2.iteritems():
1432 if choose and not choose(f): continue
1432 if choose and not choose(f): continue
1433 if f[0] == "/": continue
1433 if f[0] == "/": continue
1434 if f in ma and n != ma[f]:
1434 if f in ma and n != ma[f]:
1435 r = "k"
1435 r = "k"
1436 if not force and (linear_path or allow):
1436 if not force and (linear_path or allow):
1437 r = self.ui.prompt(
1437 r = self.ui.prompt(
1438 ("remote changed %s which local deleted\n" % f) +
1438 ("remote changed %s which local deleted\n" % f) +
1439 "(k)eep or (d)elete?", "[kd]", "k")
1439 "(k)eep or (d)elete?", "[kd]", "k")
1440 if r == "k": get[f] = n
1440 if r == "k": get[f] = n
1441 elif f not in ma:
1441 elif f not in ma:
1442 self.ui.debug("remote created %s\n" % f)
1442 self.ui.debug("remote created %s\n" % f)
1443 get[f] = n
1443 get[f] = n
1444 else:
1444 else:
1445 if force or p2 == pa: # going backwards?
1445 if force or p2 == pa: # going backwards?
1446 self.ui.debug("local deleted %s, recreating\n" % f)
1446 self.ui.debug("local deleted %s, recreating\n" % f)
1447 get[f] = n
1447 get[f] = n
1448 else:
1448 else:
1449 self.ui.debug("local deleted %s\n" % f)
1449 self.ui.debug("local deleted %s\n" % f)
1450
1450
1451 del mw, m1, m2, ma
1451 del mw, m1, m2, ma
1452
1452
1453 if force:
1453 if force:
1454 for f in merge:
1454 for f in merge:
1455 get[f] = merge[f][1]
1455 get[f] = merge[f][1]
1456 merge = {}
1456 merge = {}
1457
1457
1458 if linear_path or force:
1458 if linear_path or force:
1459 # we don't need to do any magic, just jump to the new rev
1459 # we don't need to do any magic, just jump to the new rev
1460 mode = 'n'
1460 mode = 'n'
1461 p1, p2 = p2, nullid
1461 p1, p2 = p2, nullid
1462 else:
1462 else:
1463 if not allow:
1463 if not allow:
1464 self.ui.status("this update spans a branch" +
1464 self.ui.status("this update spans a branch" +
1465 " affecting the following files:\n")
1465 " affecting the following files:\n")
1466 fl = merge.keys() + get.keys()
1466 fl = merge.keys() + get.keys()
1467 fl.sort()
1467 fl.sort()
1468 for f in fl:
1468 for f in fl:
1469 cf = ""
1469 cf = ""
1470 if f in merge: cf = " (resolve)"
1470 if f in merge: cf = " (resolve)"
1471 self.ui.status(" %s%s\n" % (f, cf))
1471 self.ui.status(" %s%s\n" % (f, cf))
1472 self.ui.warn("aborting update spanning branches!\n")
1472 self.ui.warn("aborting update spanning branches!\n")
1473 self.ui.status("(use update -m to perform a branch merge)\n")
1473 self.ui.status("(use update -m to perform a branch merge)\n")
1474 return 1
1474 return 1
1475 # we have to remember what files we needed to get/change
1475 # we have to remember what files we needed to get/change
1476 # because any file that's different from either one of its
1476 # because any file that's different from either one of its
1477 # parents must be in the changeset
1477 # parents must be in the changeset
1478 mode = 'm'
1478 mode = 'm'
1479 if moddirstate:
1479 if moddirstate:
1480 self.dirstate.update(mark.keys(), "m")
1480 self.dirstate.update(mark.keys(), "m")
1481
1481
1482 if moddirstate:
1482 if moddirstate:
1483 self.dirstate.setparents(p1, p2)
1483 self.dirstate.setparents(p1, p2)
1484
1484
1485 # get the files we don't need to change
1485 # get the files we don't need to change
1486 files = get.keys()
1486 files = get.keys()
1487 files.sort()
1487 files.sort()
1488 for f in files:
1488 for f in files:
1489 if f[0] == "/": continue
1489 if f[0] == "/": continue
1490 self.ui.note("getting %s\n" % f)
1490 self.ui.note("getting %s\n" % f)
1491 t = self.file(f).read(get[f])
1491 t = self.file(f).read(get[f])
1492 try:
1492 try:
1493 self.wfile(f, "w").write(t)
1493 self.wfile(f, "w").write(t)
1494 except IOError:
1494 except IOError:
1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1496 self.wfile(f, "w").write(t)
1496 self.wfile(f, "w").write(t)
1497 util.set_exec(self.wjoin(f), mf2[f])
1497 util.set_exec(self.wjoin(f), mf2[f])
1498 if moddirstate:
1498 if moddirstate:
1499 self.dirstate.update([f], mode)
1499 self.dirstate.update([f], mode)
1500
1500
1501 # merge the tricky bits
1501 # merge the tricky bits
1502 files = merge.keys()
1502 files = merge.keys()
1503 files.sort()
1503 files.sort()
1504 for f in files:
1504 for f in files:
1505 self.ui.status("merging %s\n" % f)
1505 self.ui.status("merging %s\n" % f)
1506 m, o, flag = merge[f]
1506 m, o, flag = merge[f]
1507 self.merge3(f, m, o)
1507 self.merge3(f, m, o)
1508 util.set_exec(self.wjoin(f), flag)
1508 util.set_exec(self.wjoin(f), flag)
1509 if moddirstate:
1509 if moddirstate:
1510 self.dirstate.update([f], 'm')
1510 self.dirstate.update([f], 'm')
1511
1511
1512 remove.sort()
1512 remove.sort()
1513 for f in remove:
1513 for f in remove:
1514 self.ui.note("removing %s\n" % f)
1514 self.ui.note("removing %s\n" % f)
1515 try:
1515 try:
1516 os.unlink(self.wjoin(f))
1516 os.unlink(self.wjoin(f))
1517 except OSError, inst:
1517 except OSError, inst:
1518 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1518 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1519 # try removing directories that might now be empty
1519 # try removing directories that might now be empty
1520 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1520 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1521 except: pass
1521 except: pass
1522 if moddirstate:
1522 if moddirstate:
1523 if mode == 'n':
1523 if mode == 'n':
1524 self.dirstate.forget(remove)
1524 self.dirstate.forget(remove)
1525 else:
1525 else:
1526 self.dirstate.update(remove, 'r')
1526 self.dirstate.update(remove, 'r')
1527
1527
1528 def merge3(self, fn, my, other):
1528 def merge3(self, fn, my, other):
1529 """perform a 3-way merge in the working directory"""
1529 """perform a 3-way merge in the working directory"""
1530
1530
1531 def temp(prefix, node):
1531 def temp(prefix, node):
1532 pre = "%s~%s." % (os.path.basename(fn), prefix)
1532 pre = "%s~%s." % (os.path.basename(fn), prefix)
1533 (fd, name) = tempfile.mkstemp("", pre)
1533 (fd, name) = tempfile.mkstemp("", pre)
1534 f = os.fdopen(fd, "wb")
1534 f = os.fdopen(fd, "wb")
1535 f.write(fl.revision(node))
1535 f.write(fl.revision(node))
1536 f.close()
1536 f.close()
1537 return name
1537 return name
1538
1538
1539 fl = self.file(fn)
1539 fl = self.file(fn)
1540 base = fl.ancestor(my, other)
1540 base = fl.ancestor(my, other)
1541 a = self.wjoin(fn)
1541 a = self.wjoin(fn)
1542 b = temp("base", base)
1542 b = temp("base", base)
1543 c = temp("other", other)
1543 c = temp("other", other)
1544
1544
1545 self.ui.note("resolving %s\n" % fn)
1545 self.ui.note("resolving %s\n" % fn)
1546 self.ui.debug("file %s: other %s ancestor %s\n" %
1546 self.ui.debug("file %s: other %s ancestor %s\n" %
1547 (fn, short(other), short(base)))
1547 (fn, short(other), short(base)))
1548
1548
1549 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1549 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1550 or "hgmerge")
1550 or "hgmerge")
1551 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1551 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1552 if r:
1552 if r:
1553 self.ui.warn("merging %s failed!\n" % fn)
1553 self.ui.warn("merging %s failed!\n" % fn)
1554
1554
1555 os.unlink(b)
1555 os.unlink(b)
1556 os.unlink(c)
1556 os.unlink(c)
1557
1557
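# A hedged sketch of driving an external three-way merge the way merge3()
# does, but hard-wired to GNU diff3 instead of $HGMERGE/hgmerge (assumes
# diff3 is installed; the file naming is illustrative only).

import subprocess

def _merge3_files(local, base, other):
    """Merge `other` into `local` using `base` as the common ancestor.
    Returns 0 for a clean merge, 1 if conflict markers were written,
    2 if diff3 itself failed."""
    with open(local + ".merged", "wb") as out:
        return subprocess.call(["diff3", "-m", local, base, other], stdout=out)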
1558 def verify(self):
1558 def verify(self):
1559 filelinkrevs = {}
1559 filelinkrevs = {}
1560 filenodes = {}
1560 filenodes = {}
1561 changesets = revisions = files = 0
1561 changesets = revisions = files = 0
1562 errors = 0
1562 errors = 0
1563
1563
1564 seen = {}
1564 seen = {}
1565 self.ui.status("checking changesets\n")
1565 self.ui.status("checking changesets\n")
1566 for i in range(self.changelog.count()):
1566 for i in range(self.changelog.count()):
1567 changesets += 1
1567 changesets += 1
1568 n = self.changelog.node(i)
1568 n = self.changelog.node(i)
1569 if n in seen:
1569 if n in seen:
1570 self.ui.warn("duplicate changeset at revision %d\n" % i)
1570 self.ui.warn("duplicate changeset at revision %d\n" % i)
1571 errors += 1
1571 errors += 1
1572 seen[n] = 1
1572 seen[n] = 1
1573
1573
1574 for p in self.changelog.parents(n):
1574 for p in self.changelog.parents(n):
1575 if p not in self.changelog.nodemap:
1575 if p not in self.changelog.nodemap:
1576 self.ui.warn("changeset %s has unknown parent %s\n" %
1576 self.ui.warn("changeset %s has unknown parent %s\n" %
1577 (short(n), short(p)))
1577 (short(n), short(p)))
1578 errors += 1
1578 errors += 1
1579 try:
1579 try:
1580 changes = self.changelog.read(n)
1580 changes = self.changelog.read(n)
1581 except Exception, inst:
1581 except Exception, inst:
1582 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1582 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1583 errors += 1
1583 errors += 1
1584
1584
1585 for f in changes[3]:
1585 for f in changes[3]:
1586 filelinkrevs.setdefault(f, []).append(i)
1586 filelinkrevs.setdefault(f, []).append(i)
1587
1587
1588 seen = {}
1588 seen = {}
1589 self.ui.status("checking manifests\n")
1589 self.ui.status("checking manifests\n")
1590 for i in range(self.manifest.count()):
1590 for i in range(self.manifest.count()):
1591 n = self.manifest.node(i)
1591 n = self.manifest.node(i)
1592 if n in seen:
1592 if n in seen:
1593 self.ui.warn("duplicate manifest at revision %d\n" % i)
1593 self.ui.warn("duplicate manifest at revision %d\n" % i)
1594 errors += 1
1594 errors += 1
1595 seen[n] = 1
1595 seen[n] = 1
1596
1596
1597 for p in self.manifest.parents(n):
1597 for p in self.manifest.parents(n):
1598 if p not in self.manifest.nodemap:
1598 if p not in self.manifest.nodemap:
1599 self.ui.warn("manifest %s has unknown parent %s\n" %
1599 self.ui.warn("manifest %s has unknown parent %s\n" %
1600 (short(n), short(p)))
1600 (short(n), short(p)))
1601 errors += 1
1601 errors += 1
1602
1602
1603 try:
1603 try:
1604 delta = mdiff.patchtext(self.manifest.delta(n))
1604 delta = mdiff.patchtext(self.manifest.delta(n))
1605 except KeyboardInterrupt:
1605 except KeyboardInterrupt:
1606 self.ui.warn("aborted\n")
1606 self.ui.warn("aborted\n")
1607 sys.exit(0)
1607 sys.exit(0)
1608 except Exception, inst:
1608 except Exception, inst:
1609 self.ui.warn("unpacking manifest %s: %s\n"
1609 self.ui.warn("unpacking manifest %s: %s\n"
1610 % (short(n), inst))
1610 % (short(n), inst))
1611 errors += 1
1611 errors += 1
1612
1612
1613 ff = [ l.split('\0') for l in delta.splitlines() ]
1613 ff = [ l.split('\0') for l in delta.splitlines() ]
1614 for f, fn in ff:
1614 for f, fn in ff:
1615 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1615 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1616
1616
1617 self.ui.status("crosschecking files in changesets and manifests\n")
1617 self.ui.status("crosschecking files in changesets and manifests\n")
1618 for f in filenodes:
1618 for f in filenodes:
1619 if f not in filelinkrevs:
1619 if f not in filelinkrevs:
1620 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1620 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1621 errors += 1
1621 errors += 1
1622
1622
1623 for f in filelinkrevs:
1623 for f in filelinkrevs:
1624 if f not in filenodes:
1624 if f not in filenodes:
1625 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1625 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1626 errors += 1
1626 errors += 1
1627
1627
1628 self.ui.status("checking files\n")
1628 self.ui.status("checking files\n")
1629 ff = filenodes.keys()
1629 ff = filenodes.keys()
1630 ff.sort()
1630 ff.sort()
1631 for f in ff:
1631 for f in ff:
1632 if f == "/dev/null": continue
1632 if f == "/dev/null": continue
1633 files += 1
1633 files += 1
1634 fl = self.file(f)
1634 fl = self.file(f)
1635 nodes = { nullid: 1 }
1635 nodes = { nullid: 1 }
1636 seen = {}
1636 seen = {}
1637 for i in range(fl.count()):
1637 for i in range(fl.count()):
1638 revisions += 1
1638 revisions += 1
1639 n = fl.node(i)
1639 n = fl.node(i)
1640
1640
1641 if n in seen:
1641 if n in seen:
1642 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1642 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1643 errors += 1
1643 errors += 1
1644
1644
1645 if n not in filenodes[f]:
1645 if n not in filenodes[f]:
1646 self.ui.warn("%s: %d:%s not in manifests\n"
1646 self.ui.warn("%s: %d:%s not in manifests\n"
1647 % (f, i, short(n)))
1647 % (f, i, short(n)))
1648 errors += 1
1648 errors += 1
1649 else:
1649 else:
1650 del filenodes[f][n]
1650 del filenodes[f][n]
1651
1651
1652 flr = fl.linkrev(n)
1652 flr = fl.linkrev(n)
1653 if flr not in filelinkrevs[f]:
1653 if flr not in filelinkrevs[f]:
1654 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1654 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1655 % (f, short(n), fl.linkrev(n)))
1655 % (f, short(n), fl.linkrev(n)))
1656 errors += 1
1656 errors += 1
1657 else:
1657 else:
1658 filelinkrevs[f].remove(flr)
1658 filelinkrevs[f].remove(flr)
1659
1659
1660 # verify contents
1660 # verify contents
1661 try:
1661 try:
1662 t = fl.read(n)
1662 t = fl.read(n)
1663 except Exception, inst:
1663 except Exception, inst:
1664 self.ui.warn("unpacking file %s %s: %s\n"
1664 self.ui.warn("unpacking file %s %s: %s\n"
1665 % (f, short(n), inst))
1665 % (f, short(n), inst))
1666 errors += 1
1666 errors += 1
1667
1667
1668 # verify parents
1668 # verify parents
1669 (p1, p2) = fl.parents(n)
1669 (p1, p2) = fl.parents(n)
1670 if p1 not in nodes:
1670 if p1 not in nodes:
1671 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1671 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1672 (f, short(n), short(p1)))
1672 (f, short(n), short(p1)))
1673 errors += 1
1673 errors += 1
1674 if p2 not in nodes:
1674 if p2 not in nodes:
1675 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1675 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1676 (f, short(n), short(p2)))
1676 (f, short(n), short(p2)))
1677 errors += 1
1677 errors += 1
1678 nodes[n] = 1
1678 nodes[n] = 1
1679
1679
1680 # cross-check
1680 # cross-check
1681 for node in filenodes[f]:
1681 for node in filenodes[f]:
1682 self.ui.warn("node %s in manifests not in %s\n"
1682 self.ui.warn("node %s in manifests not in %s\n"
1683 % (hex(node), f))
1683 % (hex(node), f))
1684 errors += 1
1684 errors += 1
1685
1685
1686 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1686 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1687 (files, changesets, revisions))
1687 (files, changesets, revisions))
1688
1688
1689 if errors:
1689 if errors:
1690 self.ui.warn("%d integrity errors encountered!\n" % errors)
1690 self.ui.warn("%d integrity errors encountered!\n" % errors)
1691 return 1
1691 return 1
1692
1692
1693 class httprepository:
1693 class httprepository:
1694 def __init__(self, ui, path):
1694 def __init__(self, ui, path):
1695 self.url = path
1695 self.url = path
1696 self.ui = ui
1696 self.ui = ui
1697 no_list = [ "localhost", "127.0.0.1" ]
1697 no_list = [ "localhost", "127.0.0.1" ]
1698 host = ui.config("http_proxy", "host")
1698 host = ui.config("http_proxy", "host")
1699 if host is None:
1699 if host is None:
1700 host = os.environ.get("http_proxy")
1700 host = os.environ.get("http_proxy")
1701 if host and host.startswith('http://'):
1701 if host and host.startswith('http://'):
1702 host = host[7:]
1702 host = host[7:]
1703 user = ui.config("http_proxy", "user")
1703 user = ui.config("http_proxy", "user")
1704 passwd = ui.config("http_proxy", "passwd")
1704 passwd = ui.config("http_proxy", "passwd")
1705 no = ui.config("http_proxy", "no")
1705 no = ui.config("http_proxy", "no")
1706 if no is None:
1706 if no is None:
1707 no = os.environ.get("no_proxy")
1707 no = os.environ.get("no_proxy")
1708 if no:
1708 if no:
1709 no_list = no_list + no.split(",")
1709 no_list = no_list + no.split(",")
1710
1710
1711 no_proxy = 0
1711 no_proxy = 0
1712 for h in no_list:
1712 for h in no_list:
1713 if (path.startswith("http://" + h + "/") or
1713 if (path.startswith("http://" + h + "/") or
1714 path.startswith("http://" + h + ":") or
1714 path.startswith("http://" + h + ":") or
1715 path == "http://" + h):
1715 path == "http://" + h):
1716 no_proxy = 1
1716 no_proxy = 1
1717
1717
1718 # Note: urllib2 takes proxy values from the environment and those will
1718 # Note: urllib2 takes proxy values from the environment and those will
1719 # take precedence
1719 # take precedence
1720 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1720 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1721 if os.environ.has_key(env):
1721 if os.environ.has_key(env):
1722 del os.environ[env]
1722 del os.environ[env]
1723
1723
1724 proxy_handler = urllib2.BaseHandler()
1724 proxy_handler = urllib2.BaseHandler()
1725 if host and not no_proxy:
1725 if host and not no_proxy:
1726 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1726 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1727
1727
1728 authinfo = None
1728 authinfo = None
1729 if user and passwd:
1729 if user and passwd:
1730 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1730 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1731 passmgr.add_password(None, host, user, passwd)
1731 passmgr.add_password(None, host, user, passwd)
1732 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1732 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1733
1733
1734 opener = urllib2.build_opener(proxy_handler, authinfo)
1734 opener = urllib2.build_opener(proxy_handler, authinfo)
1735 urllib2.install_opener(opener)
1735 urllib2.install_opener(opener)
1736
1736
1737 def dev(self):
1737 def dev(self):
1738 return -1
1738 return -1
1739
1739
1740 def do_cmd(self, cmd, **args):
1740 def do_cmd(self, cmd, **args):
1741 self.ui.debug("sending %s command\n" % cmd)
1741 self.ui.debug("sending %s command\n" % cmd)
1742 q = {"cmd": cmd}
1742 q = {"cmd": cmd}
1743 q.update(args)
1743 q.update(args)
1744 qs = urllib.urlencode(q)
1744 qs = urllib.urlencode(q)
1745 cu = "%s?%s" % (self.url, qs)
1745 cu = "%s?%s" % (self.url, qs)
1746 return urllib2.urlopen(cu)
1746 resp = urllib2.urlopen(cu)
1747
1748 if not resp.headers['content-type'].startswith('application/hg'):
1749 raise RepoError("'%s' does not appear to be an hg repository"
1750 % self.url)
1751
1752 return resp
1747
1753
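# (The startswith() test above deliberately accepts any application/hg-*
# media type, e.g. "application/hg-0.1".startswith("application/hg") is
# True, which is what allows this commit to drop the stricter exact-match
# verify_hg_repo() helper removed further down.)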
1748 def heads(self):
1754 def heads(self):
1749 d = self.do_cmd("heads").read()
1755 d = self.do_cmd("heads").read()
1750 try:
1756 try:
1751 return map(bin, d[:-1].split(" "))
1757 return map(bin, d[:-1].split(" "))
1752 except:
1758 except:
1753 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1759 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1754 raise
1760 raise
1755
1761
1756 def verify_hg_repo(self, resp):
1757 if (resp.headers['content-type'] == 'application/hg-0.1'):
1758 pass
1759 else:
1760 msg = """'%s' does not appear to be a valid hg repository -
1761 missing a 'Content-type: application/hg-0.1' HTTP header""" % (self.url,)
1762 raise RepoError(msg)
1763
1764 def branches(self, nodes):
1762 def branches(self, nodes):
1765 n = " ".join(map(hex, nodes))
1763 n = " ".join(map(hex, nodes))
1766 resp = self.do_cmd("branches", nodes=n);
1764 d = self.do_cmd("branches", nodes=n).read()
1767 self.verify_hg_repo(resp);
1768 try:
1765 try:
1769 d = resp.read()
1770 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1766 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1771 return br
1767 return br
1772 except:
1768 except:
1773 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1769 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1774 raise
1770 raise
1775
1771
1776 def between(self, pairs):
1772 def between(self, pairs):
1777 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1773 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1778 resp = self.do_cmd("between", pairs=n)
1774 d = self.do_cmd("between", pairs=n).read()
1779 self.verify_hg_repo(resp)
1780 try:
1775 try:
1781 d = resp.read()
1782 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1776 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1783 return p
1777 return p
1784 except:
1778 except:
1785 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1779 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1786 raise
1780 raise
1787
1781
1788 def changegroup(self, nodes):
1782 def changegroup(self, nodes):
1789 n = " ".join(map(hex, nodes))
1783 n = " ".join(map(hex, nodes))
1790 resp = self.do_cmd("changegroup", roots=n)
1784 f = self.do_cmd("changegroup", roots=n)
1791 self.verify_hg_repo(resp)
1792 bytes = 0
1785 bytes = 0
1793
1786
1794 class zread:
1787 class zread:
1795 def __init__(self, f):
1788 def __init__(self, f):
1796 self.zd = zlib.decompressobj()
1789 self.zd = zlib.decompressobj()
1797 self.f = f
1790 self.f = f
1798 self.buf = ""
1791 self.buf = ""
1799 def read(self, l):
1792 def read(self, l):
1800 while l > len(self.buf):
1793 while l > len(self.buf):
1801 r = self.f.read(4096)
1794 r = self.f.read(4096)
1802 if r:
1795 if r:
1803 self.buf += self.zd.decompress(r)
1796 self.buf += self.zd.decompress(r)
1804 else:
1797 else:
1805 self.buf += self.zd.flush()
1798 self.buf += self.zd.flush()
1806 break
1799 break
1807 d, self.buf = self.buf[:l], self.buf[l:]
1800 d, self.buf = self.buf[:l], self.buf[l:]
1808 return d
1801 return d
1809
1802
1810 return zread(resp)
1803 return zread(f)
1811
1804
1812 class remotelock:
1805 class remotelock:
1813 def __init__(self, repo):
1806 def __init__(self, repo):
1814 self.repo = repo
1807 self.repo = repo
1815 def release(self):
1808 def release(self):
1816 self.repo.unlock()
1809 self.repo.unlock()
1817 self.repo = None
1810 self.repo = None
1818 def __del__(self):
1811 def __del__(self):
1819 if self.repo:
1812 if self.repo:
1820 self.release()
1813 self.release()
1821
1814
1822 class sshrepository:
1815 class sshrepository:
1823 def __init__(self, ui, path):
1816 def __init__(self, ui, path):
1824 self.url = path
1817 self.url = path
1825 self.ui = ui
1818 self.ui = ui
1826
1819
1827 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1820 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1828 if not m:
1821 if not m:
1829 raise RepoError("couldn't parse destination %s\n" % path)
1822 raise RepoError("couldn't parse destination %s\n" % path)
1830
1823
1831 self.user = m.group(2)
1824 self.user = m.group(2)
1832 self.host = m.group(3)
1825 self.host = m.group(3)
1833 self.port = m.group(5)
1826 self.port = m.group(5)
1834 self.path = m.group(7)
1827 self.path = m.group(7)
1835
1828
1836 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1829 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1837 args = self.port and ("%s -p %s") % (args, self.port) or args
1830 args = self.port and ("%s -p %s") % (args, self.port) or args
1838 path = self.path or ""
1831 path = self.path or ""
1839
1832
1840 cmd = "ssh %s 'hg -R %s serve --stdio'"
1833 cmd = "ssh %s 'hg -R %s serve --stdio'"
1841 cmd = cmd % (args, path)
1834 cmd = cmd % (args, path)
1842
1835
1843 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1836 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1844
1837
1845 def readerr(self):
1838 def readerr(self):
1846 while 1:
1839 while 1:
1847 r,w,x = select.select([self.pipee], [], [], 0)
1840 r,w,x = select.select([self.pipee], [], [], 0)
1848 if not r: break
1841 if not r: break
1849 l = self.pipee.readline()
1842 l = self.pipee.readline()
1850 if not l: break
1843 if not l: break
1851 self.ui.status("remote: ", l)
1844 self.ui.status("remote: ", l)
1852
1845
1853 def __del__(self):
1846 def __del__(self):
1854 self.pipeo.close()
1847 self.pipeo.close()
1855 self.pipei.close()
1848 self.pipei.close()
1856 for l in self.pipee:
1849 for l in self.pipee:
1857 self.ui.status("remote: ", l)
1850 self.ui.status("remote: ", l)
1858 self.pipee.close()
1851 self.pipee.close()
1859
1852
1860 def dev(self):
1853 def dev(self):
1861 return -1
1854 return -1
1862
1855
1863 def do_cmd(self, cmd, **args):
1856 def do_cmd(self, cmd, **args):
1864 self.ui.debug("sending %s command\n" % cmd)
1857 self.ui.debug("sending %s command\n" % cmd)
1865 self.pipeo.write("%s\n" % cmd)
1858 self.pipeo.write("%s\n" % cmd)
1866 for k, v in args.items():
1859 for k, v in args.items():
1867 self.pipeo.write("%s %d\n" % (k, len(v)))
1860 self.pipeo.write("%s %d\n" % (k, len(v)))
1868 self.pipeo.write(v)
1861 self.pipeo.write(v)
1869 self.pipeo.flush()
1862 self.pipeo.flush()
1870
1863
1871 return self.pipei
1864 return self.pipei
1872
1865
1873 def call(self, cmd, **args):
1866 def call(self, cmd, **args):
1874 r = self.do_cmd(cmd, **args)
1867 r = self.do_cmd(cmd, **args)
1875 l = r.readline()
1868 l = r.readline()
1876 self.readerr()
1869 self.readerr()
1877 try:
1870 try:
1878 l = int(l)
1871 l = int(l)
1879 except:
1872 except:
1880 raise RepoError("unexpected response '%s'" % l)
1873 raise RepoError("unexpected response '%s'" % l)
1881 return r.read(l)
1874 return r.read(l)
1882
1875
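# Rough sketch of the framing do_cmd()/call() speak over the ssh pipes: the
# command name on its own line, then "key <length>\n" followed by the raw
# value for each argument; replies are "<length>\n" plus exactly that many
# bytes. io.BytesIO stands in for the subprocess pipes here.

from io import BytesIO

def _encode_cmd(cmd, **args):
    out = [("%s\n" % cmd).encode()]
    for k, v in args.items():
        v = v if isinstance(v, bytes) else v.encode()
        out.append(("%s %d\n" % (k, len(v))).encode())
        out.append(v)
    return b"".join(out)

def _decode_reply(pipe):
    l = int(pipe.readline())            # length header ...
    return pipe.read(l)                 # ... then the payload itself

assert _encode_cmd("branches", nodes="abc") == b"branches\nnodes 3\nabc"
assert _decode_reply(BytesIO(b"5\nhello")) == b"hello"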
1883 def lock(self):
1876 def lock(self):
1884 self.call("lock")
1877 self.call("lock")
1885 return remotelock(self)
1878 return remotelock(self)
1886
1879
1887 def unlock(self):
1880 def unlock(self):
1888 self.call("unlock")
1881 self.call("unlock")
1889
1882
1890 def heads(self):
1883 def heads(self):
1891 d = self.call("heads")
1884 d = self.call("heads")
1892 try:
1885 try:
1893 return map(bin, d[:-1].split(" "))
1886 return map(bin, d[:-1].split(" "))
1894 except:
1887 except:
1895 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1888 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1896
1889
1897 def branches(self, nodes):
1890 def branches(self, nodes):
1898 n = " ".join(map(hex, nodes))
1891 n = " ".join(map(hex, nodes))
1899 d = self.call("branches", nodes=n)
1892 d = self.call("branches", nodes=n)
1900 try:
1893 try:
1901 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1894 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1902 return br
1895 return br
1903 except:
1896 except:
1904 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1897 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1905
1898
1906 def between(self, pairs):
1899 def between(self, pairs):
1907 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1900 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1908 d = self.call("between", pairs=n)
1901 d = self.call("between", pairs=n)
1909 try:
1902 try:
1910 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1903 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1911 return p
1904 return p
1912 except:
1905 except:
1913 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1906 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1914
1907
1915 def changegroup(self, nodes):
1908 def changegroup(self, nodes):
1916 n = " ".join(map(hex, nodes))
1909 n = " ".join(map(hex, nodes))
1917 f = self.do_cmd("changegroup", roots=n)
1910 f = self.do_cmd("changegroup", roots=n)
1918 return self.pipei
1911 return self.pipei
1919
1912
1920 def addchangegroup(self, cg):
1913 def addchangegroup(self, cg):
1921 d = self.call("addchangegroup")
1914 d = self.call("addchangegroup")
1922 if d:
1915 if d:
1923 raise RepoError("push refused: %s" % d)
1916 raise RepoError("push refused: %s" % d)
1924
1917
1925 while 1:
1918 while 1:
1926 d = cg.read(4096)
1919 d = cg.read(4096)
1927 if not d: break
1920 if not d: break
1928 self.pipeo.write(d)
1921 self.pipeo.write(d)
1929 self.readerr()
1922 self.readerr()
1930
1923
1931 self.pipeo.flush()
1924 self.pipeo.flush()
1932
1925
1933 self.readerr()
1926 self.readerr()
1934 l = int(self.pipei.readline())
1927 l = int(self.pipei.readline())
1935 return self.pipei.read(l) != ""
1928 return self.pipei.read(l) != ""
1936
1929
1937 def repository(ui, path=None, create=0):
1930 def repository(ui, path=None, create=0):
1938 if path:
1931 if path:
1939 if path.startswith("http://"):
1932 if path.startswith("http://"):
1940 return httprepository(ui, path)
1933 return httprepository(ui, path)
1941 if path.startswith("hg://"):
1934 if path.startswith("hg://"):
1942 return httprepository(ui, path.replace("hg://", "http://"))
1935 return httprepository(ui, path.replace("hg://", "http://"))
1943 if path.startswith("old-http://"):
1936 if path.startswith("old-http://"):
1944 return localrepository(ui, path.replace("old-http://", "http://"))
1937 return localrepository(ui, path.replace("old-http://", "http://"))
1945 if path.startswith("ssh://"):
1938 if path.startswith("ssh://"):
1946 return sshrepository(ui, path)
1939 return sshrepository(ui, path)
1947
1940
1948 return localrepository(ui, path, create)
1941 return localrepository(ui, path, create)
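# Usage of the dispatcher above (paths are placeholders):
#   repository(ui, "http://host/repo")             -> httprepository
#   repository(ui, "hg://host/repo")               -> httprepository (scheme rewritten to http://)
#   repository(ui, "old-http://host/repo")         -> localrepository (URL rewritten to http://)
#   repository(ui, "ssh://user@host/path/to/repo") -> sshrepository
#   repository(ui, "/local/path", create=1)        -> localrepository, creating it on demand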