dirstate.forget() takes a list...
Matt Mackall
r657:22bc6fb9 default
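
The change in this revision is a single call site in localrepository.remove()
(file line 900 below), which still passed a bare filename to dirstate.forget().
A minimal sketch, not Mercurial code, of why the list form matters: forget()
iterates whatever it is given, so a bare string would be walked character by
character.

# sketch only -- illustrates the calling convention, not the real dirstate class
def forget(files):
    # dirstate.forget() loops over its argument and drops each entry
    for f in files:
        print("forgetting %s" % f)

forget("foo.txt")    # wrong: iterates the characters f, o, o, ., t, x, t
forget(["foo.txt"])  # right: one path, matching self.dirstate.forget([f])
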
@@ -1,1875 +1,1875 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff")
13 demandload(globals(), "tempfile httprangereader bdiff")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", path + ".i"),
19 os.path.join("data", path + ".i"),
20 os.path.join("data", path + ".d"))
20 os.path.join("data", path + ".d"))
21
21
22 def read(self, node):
22 def read(self, node):
23 t = self.revision(node)
23 t = self.revision(node)
24 if t[:2] != '\1\n':
24 if t[:2] != '\1\n':
25 return t
25 return t
26 s = t.find('\1\n', 2)
26 s = t.find('\1\n', 2)
27 return t[s+2:]
27 return t[s+2:]
28
28
29 def readmeta(self, node):
29 def readmeta(self, node):
30 t = self.revision(node)
30 t = self.revision(node)
31 if t[:2] != '\1\n':
31 if t[:2] != '\1\n':
32 return t
32 return t
33 s = t.find('\1\n', 2)
33 s = t.find('\1\n', 2)
34 mt = t[2:s]
34 mt = t[2:s]
35 for l in mt.splitlines():
35 for l in mt.splitlines():
36 k, v = l.split(": ", 1)
36 k, v = l.split(": ", 1)
37 m[k] = v
37 m[k] = v
38 return m
38 return m
39
39
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
41 if meta or text[:2] == '\1\n':
41 if meta or text[:2] == '\1\n':
42 mt = ""
42 mt = ""
43 if meta:
43 if meta:
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
45 text = "\1\n" + "".join(mt) + "\1\n" + text
45 text = "\1\n" + "".join(mt) + "\1\n" + text
46 return self.addrevision(text, transaction, link, p1, p2)
46 return self.addrevision(text, transaction, link, p1, p2)
47
47
48 def annotate(self, node):
48 def annotate(self, node):
49
49
50 def decorate(text, rev):
50 def decorate(text, rev):
51 return ([rev] * len(text.splitlines()), text)
51 return ([rev] * len(text.splitlines()), text)
52
52
53 def pair(parent, child):
53 def pair(parent, child):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
55 child[0][b1:b2] = parent[0][a1:a2]
55 child[0][b1:b2] = parent[0][a1:a2]
56 return child
56 return child
57
57
58 # find all ancestors
58 # find all ancestors
59 needed = {node:1}
59 needed = {node:1}
60 visit = [node]
60 visit = [node]
61 while visit:
61 while visit:
62 n = visit.pop(0)
62 n = visit.pop(0)
63 for p in self.parents(n):
63 for p in self.parents(n):
64 if p not in needed:
64 if p not in needed:
65 needed[p] = 1
65 needed[p] = 1
66 visit.append(p)
66 visit.append(p)
67 else:
67 else:
68 # count how many times we'll use this
68 # count how many times we'll use this
69 needed[p] += 1
69 needed[p] += 1
70
70
71 # sort by revision which is a topological order
71 # sort by revision which is a topological order
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
73 visit.sort()
73 visit.sort()
74 hist = {}
74 hist = {}
75
75
76 for r,n in visit:
76 for r,n in visit:
77 curr = decorate(self.read(n), self.linkrev(n))
77 curr = decorate(self.read(n), self.linkrev(n))
78 for p in self.parents(n):
78 for p in self.parents(n):
79 if p != nullid:
79 if p != nullid:
80 curr = pair(hist[p], curr)
80 curr = pair(hist[p], curr)
81 # trim the history of unneeded revs
81 # trim the history of unneeded revs
82 needed[p] -= 1
82 needed[p] -= 1
83 if not needed[p]:
83 if not needed[p]:
84 del hist[p]
84 del hist[p]
85 hist[n] = curr
85 hist[n] = curr
86
86
87 return zip(hist[n][0], hist[n][1].splitlines(1))
87 return zip(hist[n][0], hist[n][1].splitlines(1))
88
88
89 class manifest(revlog):
89 class manifest(revlog):
90 def __init__(self, opener):
90 def __init__(self, opener):
91 self.mapcache = None
91 self.mapcache = None
92 self.listcache = None
92 self.listcache = None
93 self.addlist = None
93 self.addlist = None
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
95
95
96 def read(self, node):
96 def read(self, node):
97 if node == nullid: return {} # don't upset local cache
97 if node == nullid: return {} # don't upset local cache
98 if self.mapcache and self.mapcache[0] == node:
98 if self.mapcache and self.mapcache[0] == node:
99 return self.mapcache[1]
99 return self.mapcache[1]
100 text = self.revision(node)
100 text = self.revision(node)
101 map = {}
101 map = {}
102 flag = {}
102 flag = {}
103 self.listcache = (text, text.splitlines(1))
103 self.listcache = (text, text.splitlines(1))
104 for l in self.listcache[1]:
104 for l in self.listcache[1]:
105 (f, n) = l.split('\0')
105 (f, n) = l.split('\0')
106 map[f] = bin(n[:40])
106 map[f] = bin(n[:40])
107 flag[f] = (n[40:-1] == "x")
107 flag[f] = (n[40:-1] == "x")
108 self.mapcache = (node, map, flag)
108 self.mapcache = (node, map, flag)
109 return map
109 return map
110
110
111 def readflags(self, node):
111 def readflags(self, node):
112 if node == nullid: return {} # don't upset local cache
112 if node == nullid: return {} # don't upset local cache
113 if not self.mapcache or self.mapcache[0] != node:
113 if not self.mapcache or self.mapcache[0] != node:
114 self.read(node)
114 self.read(node)
115 return self.mapcache[2]
115 return self.mapcache[2]
116
116
117 def diff(self, a, b):
117 def diff(self, a, b):
118 # this is sneaky, as we're not actually using a and b
118 # this is sneaky, as we're not actually using a and b
119 if self.listcache and self.addlist and self.listcache[0] == a:
119 if self.listcache and self.addlist and self.listcache[0] == a:
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
121 if mdiff.patch(a, d) != b:
121 if mdiff.patch(a, d) != b:
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
123 return mdiff.textdiff(a, b)
123 return mdiff.textdiff(a, b)
124 return d
124 return d
125 else:
125 else:
126 return mdiff.textdiff(a, b)
126 return mdiff.textdiff(a, b)
127
127
128 def add(self, map, flags, transaction, link, p1=None, p2=None,changed=None):
128 def add(self, map, flags, transaction, link, p1=None, p2=None,changed=None):
129 # directly generate the mdiff delta from the data collected during
129 # directly generate the mdiff delta from the data collected during
130 # the bisect loop below
130 # the bisect loop below
131 def gendelta(delta):
131 def gendelta(delta):
132 i = 0
132 i = 0
133 result = []
133 result = []
134 while i < len(delta):
134 while i < len(delta):
135 start = delta[i][2]
135 start = delta[i][2]
136 end = delta[i][3]
136 end = delta[i][3]
137 l = delta[i][4]
137 l = delta[i][4]
138 if l == None:
138 if l == None:
139 l = ""
139 l = ""
140 while i < len(delta) - 1 and start <= delta[i+1][2] and end >= delta[i+1][2]:
140 while i < len(delta) - 1 and start <= delta[i+1][2] and end >= delta[i+1][2]:
141 if delta[i+1][3] > end:
141 if delta[i+1][3] > end:
142 end = delta[i+1][3]
142 end = delta[i+1][3]
143 if delta[i+1][4]:
143 if delta[i+1][4]:
144 l += delta[i+1][4]
144 l += delta[i+1][4]
145 i += 1
145 i += 1
146 result.append(struct.pack(">lll", start, end, len(l)) + l)
146 result.append(struct.pack(">lll", start, end, len(l)) + l)
147 i += 1
147 i += 1
148 return result
148 return result
149
149
150 # apply the changes collected during the bisect loop to our addlist
150 # apply the changes collected during the bisect loop to our addlist
151 def addlistdelta(addlist, delta):
151 def addlistdelta(addlist, delta):
152 # apply the deltas to the addlist. start from the bottom up
152 # apply the deltas to the addlist. start from the bottom up
153 # so changes to the offsets don't mess things up.
153 # so changes to the offsets don't mess things up.
154 i = len(delta)
154 i = len(delta)
155 while i > 0:
155 while i > 0:
156 i -= 1
156 i -= 1
157 start = delta[i][0]
157 start = delta[i][0]
158 end = delta[i][1]
158 end = delta[i][1]
159 if delta[i][4]:
159 if delta[i][4]:
160 addlist[start:end] = [delta[i][4]]
160 addlist[start:end] = [delta[i][4]]
161 else:
161 else:
162 del addlist[start:end]
162 del addlist[start:end]
163 return addlist
163 return addlist
164
164
165 # calculate the byte offset of the start of each line in the
165 # calculate the byte offset of the start of each line in the
166 # manifest
166 # manifest
167 def calcoffsets(addlist):
167 def calcoffsets(addlist):
168 offsets = [0] * (len(addlist) + 1)
168 offsets = [0] * (len(addlist) + 1)
169 offset = 0
169 offset = 0
170 i = 0
170 i = 0
171 while i < len(addlist):
171 while i < len(addlist):
172 offsets[i] = offset
172 offsets[i] = offset
173 offset += len(addlist[i])
173 offset += len(addlist[i])
174 i += 1
174 i += 1
175 offsets[i] = offset
175 offsets[i] = offset
176 return offsets
176 return offsets
177
177
178 # if we're using the listcache, make sure it is valid and
178 # if we're using the listcache, make sure it is valid and
179 # parented by the same node we're diffing against
179 # parented by the same node we're diffing against
180 if not changed or not self.listcache or not p1 or self.mapcache[0] != p1:
180 if not changed or not self.listcache or not p1 or self.mapcache[0] != p1:
181 files = map.keys()
181 files = map.keys()
182 files.sort()
182 files.sort()
183
183
184 self.addlist = ["%s\000%s%s\n" %
184 self.addlist = ["%s\000%s%s\n" %
185 (f, hex(map[f]), flags[f] and "x" or '')
185 (f, hex(map[f]), flags[f] and "x" or '')
186 for f in files]
186 for f in files]
187 cachedelta = None
187 cachedelta = None
188 else:
188 else:
189 addlist = self.listcache[1]
189 addlist = self.listcache[1]
190
190
191 # find the starting offset for each line in the add list
191 # find the starting offset for each line in the add list
192 offsets = calcoffsets(addlist)
192 offsets = calcoffsets(addlist)
193
193
194 # combine the changed lists into one list for sorting
194 # combine the changed lists into one list for sorting
195 work = [[x, 0] for x in changed[0]]
195 work = [[x, 0] for x in changed[0]]
196 work[len(work):] = [[x, 1] for x in changed[1]]
196 work[len(work):] = [[x, 1] for x in changed[1]]
197 work.sort()
197 work.sort()
198
198
199 delta = []
199 delta = []
200 bs = 0
200 bs = 0
201
201
202 for w in work:
202 for w in work:
203 f = w[0]
203 f = w[0]
204 # bs will either be the index of the item or the insertion point
204 # bs will either be the index of the item or the insertion point
205 bs = bisect.bisect(addlist, f, bs)
205 bs = bisect.bisect(addlist, f, bs)
206 if bs < len(addlist):
206 if bs < len(addlist):
207 fn = addlist[bs][:addlist[bs].index('\0')]
207 fn = addlist[bs][:addlist[bs].index('\0')]
208 else:
208 else:
209 fn = None
209 fn = None
210 if w[1] == 0:
210 if w[1] == 0:
211 l = "%s\000%s%s\n" % (f, hex(map[f]), flags[f] and "x" or '')
211 l = "%s\000%s%s\n" % (f, hex(map[f]), flags[f] and "x" or '')
212 else:
212 else:
213 l = None
213 l = None
214 start = bs
214 start = bs
215 if fn != f:
215 if fn != f:
216 # item not found, insert a new one
216 # item not found, insert a new one
217 end = bs
217 end = bs
218 if w[1] == 1:
218 if w[1] == 1:
219 sys.stderr.write("failed to remove %s from manifest" % f)
219 sys.stderr.write("failed to remove %s from manifest" % f)
220 sys.exit(1)
220 sys.exit(1)
221 else:
221 else:
222 # item is found, replace/delete the existing line
222 # item is found, replace/delete the existing line
223 end = bs + 1
223 end = bs + 1
224 delta.append([start, end, offsets[start], offsets[end], l])
224 delta.append([start, end, offsets[start], offsets[end], l])
225
225
226 self.addlist = addlistdelta(addlist, delta)
226 self.addlist = addlistdelta(addlist, delta)
227 if self.mapcache[0] == self.tip():
227 if self.mapcache[0] == self.tip():
228 cachedelta = "".join(gendelta(delta))
228 cachedelta = "".join(gendelta(delta))
229 else:
229 else:
230 cachedelta = None
230 cachedelta = None
231
231
232 text = "".join(self.addlist)
232 text = "".join(self.addlist)
233 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
233 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
234 sys.stderr.write("manifest delta failure")
234 sys.stderr.write("manifest delta failure")
235 sys.exit(1)
235 sys.exit(1)
236 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
236 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
237 self.mapcache = (n, map, flags)
237 self.mapcache = (n, map, flags)
238 self.listcache = (text, self.addlist)
238 self.listcache = (text, self.addlist)
239 self.addlist = None
239 self.addlist = None
240
240
241 return n
241 return n
242
242
243 class changelog(revlog):
243 class changelog(revlog):
244 def __init__(self, opener):
244 def __init__(self, opener):
245 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
245 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
246
246
247 def extract(self, text):
247 def extract(self, text):
248 if not text:
248 if not text:
249 return (nullid, "", "0", [], "")
249 return (nullid, "", "0", [], "")
250 last = text.index("\n\n")
250 last = text.index("\n\n")
251 desc = text[last + 2:]
251 desc = text[last + 2:]
252 l = text[:last].splitlines()
252 l = text[:last].splitlines()
253 manifest = bin(l[0])
253 manifest = bin(l[0])
254 user = l[1]
254 user = l[1]
255 date = l[2]
255 date = l[2]
256 files = l[3:]
256 files = l[3:]
257 return (manifest, user, date, files, desc)
257 return (manifest, user, date, files, desc)
258
258
259 def read(self, node):
259 def read(self, node):
260 return self.extract(self.revision(node))
260 return self.extract(self.revision(node))
261
261
262 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
262 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
263 user=None, date=None):
263 user=None, date=None):
264 date = date or "%d %d" % (time.time(), time.timezone)
264 date = date or "%d %d" % (time.time(), time.timezone)
265 list.sort()
265 list.sort()
266 l = [hex(manifest), user, date] + list + ["", desc]
266 l = [hex(manifest), user, date] + list + ["", desc]
267 text = "\n".join(l)
267 text = "\n".join(l)
268 return self.addrevision(text, transaction, self.count(), p1, p2)
268 return self.addrevision(text, transaction, self.count(), p1, p2)
269
269
270 class dirstate:
270 class dirstate:
271 def __init__(self, opener, ui, root):
271 def __init__(self, opener, ui, root):
272 self.opener = opener
272 self.opener = opener
273 self.root = root
273 self.root = root
274 self.dirty = 0
274 self.dirty = 0
275 self.ui = ui
275 self.ui = ui
276 self.map = None
276 self.map = None
277 self.pl = None
277 self.pl = None
278 self.copies = {}
278 self.copies = {}
279
279
280 def __del__(self):
280 def __del__(self):
281 if self.dirty:
281 if self.dirty:
282 self.write()
282 self.write()
283
283
284 def __getitem__(self, key):
284 def __getitem__(self, key):
285 try:
285 try:
286 return self.map[key]
286 return self.map[key]
287 except TypeError:
287 except TypeError:
288 self.read()
288 self.read()
289 return self[key]
289 return self[key]
290
290
291 def __contains__(self, key):
291 def __contains__(self, key):
292 if not self.map: self.read()
292 if not self.map: self.read()
293 return key in self.map
293 return key in self.map
294
294
295 def parents(self):
295 def parents(self):
296 if not self.pl:
296 if not self.pl:
297 self.read()
297 self.read()
298 return self.pl
298 return self.pl
299
299
300 def setparents(self, p1, p2 = nullid):
300 def setparents(self, p1, p2 = nullid):
301 self.dirty = 1
301 self.dirty = 1
302 self.pl = p1, p2
302 self.pl = p1, p2
303
303
304 def state(self, key):
304 def state(self, key):
305 try:
305 try:
306 return self[key][0]
306 return self[key][0]
307 except KeyError:
307 except KeyError:
308 return "?"
308 return "?"
309
309
310 def read(self):
310 def read(self):
311 if self.map is not None: return self.map
311 if self.map is not None: return self.map
312
312
313 self.map = {}
313 self.map = {}
314 self.pl = [nullid, nullid]
314 self.pl = [nullid, nullid]
315 try:
315 try:
316 st = self.opener("dirstate").read()
316 st = self.opener("dirstate").read()
317 if not st: return
317 if not st: return
318 except: return
318 except: return
319
319
320 self.pl = [st[:20], st[20: 40]]
320 self.pl = [st[:20], st[20: 40]]
321
321
322 pos = 40
322 pos = 40
323 while pos < len(st):
323 while pos < len(st):
324 e = struct.unpack(">cllll", st[pos:pos+17])
324 e = struct.unpack(">cllll", st[pos:pos+17])
325 l = e[4]
325 l = e[4]
326 pos += 17
326 pos += 17
327 f = st[pos:pos + l]
327 f = st[pos:pos + l]
328 if '\0' in f:
328 if '\0' in f:
329 f, c = f.split('\0')
329 f, c = f.split('\0')
330 self.copies[f] = c
330 self.copies[f] = c
331 self.map[f] = e[:4]
331 self.map[f] = e[:4]
332 pos += l
332 pos += l
333
333
334 def copy(self, source, dest):
334 def copy(self, source, dest):
335 self.read()
335 self.read()
336 self.dirty = 1
336 self.dirty = 1
337 self.copies[dest] = source
337 self.copies[dest] = source
338
338
339 def copied(self, file):
339 def copied(self, file):
340 return self.copies.get(file, None)
340 return self.copies.get(file, None)
341
341
342 def update(self, files, state):
342 def update(self, files, state):
343 ''' current states:
343 ''' current states:
344 n normal
344 n normal
345 m needs merging
345 m needs merging
346 r marked for removal
346 r marked for removal
347 a marked for addition'''
347 a marked for addition'''
348
348
349 if not files: return
349 if not files: return
350 self.read()
350 self.read()
351 self.dirty = 1
351 self.dirty = 1
352 for f in files:
352 for f in files:
353 if state == "r":
353 if state == "r":
354 self.map[f] = ('r', 0, 0, 0)
354 self.map[f] = ('r', 0, 0, 0)
355 else:
355 else:
356 s = os.stat(os.path.join(self.root, f))
356 s = os.stat(os.path.join(self.root, f))
357 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
357 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
358
358
359 def forget(self, files):
359 def forget(self, files):
360 if not files: return
360 if not files: return
361 self.read()
361 self.read()
362 self.dirty = 1
362 self.dirty = 1
363 for f in files:
363 for f in files:
364 try:
364 try:
365 del self.map[f]
365 del self.map[f]
366 except KeyError:
366 except KeyError:
367 self.ui.warn("not in dirstate: %s!\n" % f)
367 self.ui.warn("not in dirstate: %s!\n" % f)
368 pass
368 pass
369
369
370 def clear(self):
370 def clear(self):
371 self.map = {}
371 self.map = {}
372 self.dirty = 1
372 self.dirty = 1
373
373
374 def write(self):
374 def write(self):
375 st = self.opener("dirstate", "w")
375 st = self.opener("dirstate", "w")
376 st.write("".join(self.pl))
376 st.write("".join(self.pl))
377 for f, e in self.map.items():
377 for f, e in self.map.items():
378 c = self.copied(f)
378 c = self.copied(f)
379 if c:
379 if c:
380 f = f + "\0" + c
380 f = f + "\0" + c
381 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
381 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
382 st.write(e + f)
382 st.write(e + f)
383 self.dirty = 0
383 self.dirty = 0
384
384
385 def changes(self, files, ignore):
385 def changes(self, files, ignore):
386 self.read()
386 self.read()
387 dc = self.map.copy()
387 dc = self.map.copy()
388 lookup, changed, added, unknown = [], [], [], []
388 lookup, changed, added, unknown = [], [], [], []
389
389
390 # compare all files by default
390 # compare all files by default
391 if not files: files = [self.root]
391 if not files: files = [self.root]
392
392
393 # recursive generator of all files listed
393 # recursive generator of all files listed
394 def walk(files):
394 def walk(files):
395 for f in util.unique(files):
395 for f in util.unique(files):
396 f = os.path.join(self.root, f)
396 f = os.path.join(self.root, f)
397 if os.path.isdir(f):
397 if os.path.isdir(f):
398 for dir, subdirs, fl in os.walk(f):
398 for dir, subdirs, fl in os.walk(f):
399 d = dir[len(self.root) + 1:]
399 d = dir[len(self.root) + 1:]
400 if ".hg" in subdirs: subdirs.remove(".hg")
400 if ".hg" in subdirs: subdirs.remove(".hg")
401 for fn in fl:
401 for fn in fl:
402 fn = util.pconvert(os.path.join(d, fn))
402 fn = util.pconvert(os.path.join(d, fn))
403 yield fn
403 yield fn
404 else:
404 else:
405 yield f[len(self.root) + 1:]
405 yield f[len(self.root) + 1:]
406
406
407 for fn in util.unique(walk(files)):
407 for fn in util.unique(walk(files)):
408 try: s = os.stat(os.path.join(self.root, fn))
408 try: s = os.stat(os.path.join(self.root, fn))
409 except: continue
409 except: continue
410
410
411 if fn in dc:
411 if fn in dc:
412 c = dc[fn]
412 c = dc[fn]
413 del dc[fn]
413 del dc[fn]
414
414
415 if c[0] == 'm':
415 if c[0] == 'm':
416 changed.append(fn)
416 changed.append(fn)
417 elif c[0] == 'a':
417 elif c[0] == 'a':
418 added.append(fn)
418 added.append(fn)
419 elif c[0] == 'r':
419 elif c[0] == 'r':
420 unknown.append(fn)
420 unknown.append(fn)
421 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
421 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
422 changed.append(fn)
422 changed.append(fn)
423 elif c[1] != s.st_mode or c[3] != s.st_mtime:
423 elif c[1] != s.st_mode or c[3] != s.st_mtime:
424 lookup.append(fn)
424 lookup.append(fn)
425 else:
425 else:
426 if not ignore(fn): unknown.append(fn)
426 if not ignore(fn): unknown.append(fn)
427
427
428 return (lookup, changed, added, dc.keys(), unknown)
428 return (lookup, changed, added, dc.keys(), unknown)
429
429
430 # used to avoid circular references so destructors work
430 # used to avoid circular references so destructors work
431 def opener(base):
431 def opener(base):
432 p = base
432 p = base
433 def o(path, mode="r"):
433 def o(path, mode="r"):
434 if p[:7] == "http://":
434 if p[:7] == "http://":
435 f = os.path.join(p, urllib.quote(path))
435 f = os.path.join(p, urllib.quote(path))
436 return httprangereader.httprangereader(f)
436 return httprangereader.httprangereader(f)
437
437
438 f = os.path.join(p, path)
438 f = os.path.join(p, path)
439
439
440 mode += "b" # for that other OS
440 mode += "b" # for that other OS
441
441
442 if mode[0] != "r":
442 if mode[0] != "r":
443 try:
443 try:
444 s = os.stat(f)
444 s = os.stat(f)
445 except OSError:
445 except OSError:
446 d = os.path.dirname(f)
446 d = os.path.dirname(f)
447 if not os.path.isdir(d):
447 if not os.path.isdir(d):
448 os.makedirs(d)
448 os.makedirs(d)
449 else:
449 else:
450 if s.st_nlink > 1:
450 if s.st_nlink > 1:
451 file(f + ".tmp", "wb").write(file(f, "rb").read())
451 file(f + ".tmp", "wb").write(file(f, "rb").read())
452 util.rename(f+".tmp", f)
452 util.rename(f+".tmp", f)
453
453
454 return file(f, mode)
454 return file(f, mode)
455
455
456 return o
456 return o
457
457
458 class RepoError(Exception): pass
458 class RepoError(Exception): pass
459
459
460 class localrepository:
460 class localrepository:
461 def __init__(self, ui, path=None, create=0):
461 def __init__(self, ui, path=None, create=0):
462 self.remote = 0
462 self.remote = 0
463 if path and path[:7] == "http://":
463 if path and path[:7] == "http://":
464 self.remote = 1
464 self.remote = 1
465 self.path = path
465 self.path = path
466 else:
466 else:
467 if not path:
467 if not path:
468 p = os.getcwd()
468 p = os.getcwd()
469 while not os.path.isdir(os.path.join(p, ".hg")):
469 while not os.path.isdir(os.path.join(p, ".hg")):
470 oldp = p
470 oldp = p
471 p = os.path.dirname(p)
471 p = os.path.dirname(p)
472 if p == oldp: raise RepoError("no repo found")
472 if p == oldp: raise RepoError("no repo found")
473 path = p
473 path = p
474 self.path = os.path.join(path, ".hg")
474 self.path = os.path.join(path, ".hg")
475
475
476 if not create and not os.path.isdir(self.path):
476 if not create and not os.path.isdir(self.path):
477 raise RepoError("repository %s not found" % self.path)
477 raise RepoError("repository %s not found" % self.path)
478
478
479 self.root = path
479 self.root = path
480 self.ui = ui
480 self.ui = ui
481
481
482 if create:
482 if create:
483 os.mkdir(self.path)
483 os.mkdir(self.path)
484 os.mkdir(self.join("data"))
484 os.mkdir(self.join("data"))
485
485
486 self.opener = opener(self.path)
486 self.opener = opener(self.path)
487 self.wopener = opener(self.root)
487 self.wopener = opener(self.root)
488 self.manifest = manifest(self.opener)
488 self.manifest = manifest(self.opener)
489 self.changelog = changelog(self.opener)
489 self.changelog = changelog(self.opener)
490 self.ignorefunc = None
490 self.ignorefunc = None
491 self.tagscache = None
491 self.tagscache = None
492 self.nodetagscache = None
492 self.nodetagscache = None
493
493
494 if not self.remote:
494 if not self.remote:
495 self.dirstate = dirstate(self.opener, ui, self.root)
495 self.dirstate = dirstate(self.opener, ui, self.root)
496 try:
496 try:
497 self.ui.readconfig(self.opener("hgrc"))
497 self.ui.readconfig(self.opener("hgrc"))
498 except IOError: pass
498 except IOError: pass
499
499
500 def ignore(self, f):
500 def ignore(self, f):
501 if not self.ignorefunc:
501 if not self.ignorefunc:
502 bigpat = []
502 bigpat = []
503 try:
503 try:
504 l = file(self.wjoin(".hgignore"))
504 l = file(self.wjoin(".hgignore"))
505 for pat in l:
505 for pat in l:
506 if pat != "\n":
506 if pat != "\n":
507 p = util.pconvert(pat[:-1])
507 p = util.pconvert(pat[:-1])
508 try:
508 try:
509 r = re.compile(p)
509 r = re.compile(p)
510 except:
510 except:
511 self.ui.warn("ignoring invalid ignore"
511 self.ui.warn("ignoring invalid ignore"
512 + " regular expression '%s'\n" % p)
512 + " regular expression '%s'\n" % p)
513 else:
513 else:
514 bigpat.append(util.pconvert(pat[:-1]))
514 bigpat.append(util.pconvert(pat[:-1]))
515 except IOError: pass
515 except IOError: pass
516 if bigpat:
516 if bigpat:
517 s = "(?:%s)" % (")|(?:".join(bigpat))
517 s = "(?:%s)" % (")|(?:".join(bigpat))
518 r = re.compile(s)
518 r = re.compile(s)
519 self.ignorefunc = r.search
519 self.ignorefunc = r.search
520 else:
520 else:
521 self.ignorefunc = lambda x: False
521 self.ignorefunc = lambda x: False
522
522
523 return self.ignorefunc(f)
523 return self.ignorefunc(f)
524
524
525 def hook(self, name, **args):
525 def hook(self, name, **args):
526 s = self.ui.config("hooks", name)
526 s = self.ui.config("hooks", name)
527 if s:
527 if s:
528 self.ui.note("running hook %s: %s\n" % (name, s))
528 self.ui.note("running hook %s: %s\n" % (name, s))
529 old = {}
529 old = {}
530 for k, v in args.items():
530 for k, v in args.items():
531 k = k.upper()
531 k = k.upper()
532 old[k] = os.environ.get(k, None)
532 old[k] = os.environ.get(k, None)
533 os.environ[k] = v
533 os.environ[k] = v
534
534
535 r = os.system(s)
535 r = os.system(s)
536
536
537 for k, v in old.items():
537 for k, v in old.items():
538 if v != None:
538 if v != None:
539 os.environ[k] = v
539 os.environ[k] = v
540 else:
540 else:
541 del os.environ[k]
541 del os.environ[k]
542
542
543 if r:
543 if r:
544 self.ui.warn("abort: %s hook failed with status %d!\n" %
544 self.ui.warn("abort: %s hook failed with status %d!\n" %
545 (name, r))
545 (name, r))
546 return False
546 return False
547 return True
547 return True
548
548
549 def tags(self):
549 def tags(self):
550 '''return a mapping of tag to node'''
550 '''return a mapping of tag to node'''
551 if not self.tagscache:
551 if not self.tagscache:
552 self.tagscache = {}
552 self.tagscache = {}
553 def addtag(self, k, n):
553 def addtag(self, k, n):
554 try:
554 try:
555 bin_n = bin(n)
555 bin_n = bin(n)
556 except TypeError:
556 except TypeError:
557 bin_n = ''
557 bin_n = ''
558 self.tagscache[k.strip()] = bin_n
558 self.tagscache[k.strip()] = bin_n
559
559
560 try:
560 try:
561 # read each head of the tags file, ending with the tip
561 # read each head of the tags file, ending with the tip
562 # and add each tag found to the map, with "newer" ones
562 # and add each tag found to the map, with "newer" ones
563 # taking precedence
563 # taking precedence
564 fl = self.file(".hgtags")
564 fl = self.file(".hgtags")
565 h = fl.heads()
565 h = fl.heads()
566 h.reverse()
566 h.reverse()
567 for r in h:
567 for r in h:
568 for l in fl.revision(r).splitlines():
568 for l in fl.revision(r).splitlines():
569 if l:
569 if l:
570 n, k = l.split(" ", 1)
570 n, k = l.split(" ", 1)
571 addtag(self, k, n)
571 addtag(self, k, n)
572 except KeyError:
572 except KeyError:
573 pass
573 pass
574
574
575 try:
575 try:
576 f = self.opener("localtags")
576 f = self.opener("localtags")
577 for l in f:
577 for l in f:
578 n, k = l.split(" ", 1)
578 n, k = l.split(" ", 1)
579 addtag(self, k, n)
579 addtag(self, k, n)
580 except IOError:
580 except IOError:
581 pass
581 pass
582
582
583 self.tagscache['tip'] = self.changelog.tip()
583 self.tagscache['tip'] = self.changelog.tip()
584
584
585 return self.tagscache
585 return self.tagscache
586
586
587 def tagslist(self):
587 def tagslist(self):
588 '''return a list of tags ordered by revision'''
588 '''return a list of tags ordered by revision'''
589 l = []
589 l = []
590 for t, n in self.tags().items():
590 for t, n in self.tags().items():
591 try:
591 try:
592 r = self.changelog.rev(n)
592 r = self.changelog.rev(n)
593 except:
593 except:
594 r = -2 # sort to the beginning of the list if unknown
594 r = -2 # sort to the beginning of the list if unknown
595 l.append((r,t,n))
595 l.append((r,t,n))
596 l.sort()
596 l.sort()
597 return [(t,n) for r,t,n in l]
597 return [(t,n) for r,t,n in l]
598
598
599 def nodetags(self, node):
599 def nodetags(self, node):
600 '''return the tags associated with a node'''
600 '''return the tags associated with a node'''
601 if not self.nodetagscache:
601 if not self.nodetagscache:
602 self.nodetagscache = {}
602 self.nodetagscache = {}
603 for t,n in self.tags().items():
603 for t,n in self.tags().items():
604 self.nodetagscache.setdefault(n,[]).append(t)
604 self.nodetagscache.setdefault(n,[]).append(t)
605 return self.nodetagscache.get(node, [])
605 return self.nodetagscache.get(node, [])
606
606
607 def lookup(self, key):
607 def lookup(self, key):
608 try:
608 try:
609 return self.tags()[key]
609 return self.tags()[key]
610 except KeyError:
610 except KeyError:
611 return self.changelog.lookup(key)
611 return self.changelog.lookup(key)
612
612
613 def dev(self):
613 def dev(self):
614 if self.remote: return -1
614 if self.remote: return -1
615 return os.stat(self.path).st_dev
615 return os.stat(self.path).st_dev
616
616
617 def join(self, f):
617 def join(self, f):
618 return os.path.join(self.path, f)
618 return os.path.join(self.path, f)
619
619
620 def wjoin(self, f):
620 def wjoin(self, f):
621 return os.path.join(self.root, f)
621 return os.path.join(self.root, f)
622
622
623 def file(self, f):
623 def file(self, f):
624 if f[0] == '/': f = f[1:]
624 if f[0] == '/': f = f[1:]
625 return filelog(self.opener, f)
625 return filelog(self.opener, f)
626
626
627 def getcwd(self):
627 def getcwd(self):
628 cwd = os.getcwd()
628 cwd = os.getcwd()
629 if cwd == self.root: return ''
629 if cwd == self.root: return ''
630 return cwd[len(self.root) + 1:]
630 return cwd[len(self.root) + 1:]
631
631
632 def wfile(self, f, mode='r'):
632 def wfile(self, f, mode='r'):
633 return self.wopener(f, mode)
633 return self.wopener(f, mode)
634
634
635 def transaction(self):
635 def transaction(self):
636 # save dirstate for undo
636 # save dirstate for undo
637 try:
637 try:
638 ds = self.opener("dirstate").read()
638 ds = self.opener("dirstate").read()
639 except IOError:
639 except IOError:
640 ds = ""
640 ds = ""
641 self.opener("undo.dirstate", "w").write(ds)
641 self.opener("undo.dirstate", "w").write(ds)
642
642
643 return transaction.transaction(self.ui.warn,
643 return transaction.transaction(self.ui.warn,
644 self.opener, self.join("journal"),
644 self.opener, self.join("journal"),
645 self.join("undo"))
645 self.join("undo"))
646
646
647 def recover(self):
647 def recover(self):
648 lock = self.lock()
648 lock = self.lock()
649 if os.path.exists(self.join("journal")):
649 if os.path.exists(self.join("journal")):
650 self.ui.status("rolling back interrupted transaction\n")
650 self.ui.status("rolling back interrupted transaction\n")
651 return transaction.rollback(self.opener, self.join("journal"))
651 return transaction.rollback(self.opener, self.join("journal"))
652 else:
652 else:
653 self.ui.warn("no interrupted transaction available\n")
653 self.ui.warn("no interrupted transaction available\n")
654
654
655 def undo(self):
655 def undo(self):
656 lock = self.lock()
656 lock = self.lock()
657 if os.path.exists(self.join("undo")):
657 if os.path.exists(self.join("undo")):
658 self.ui.status("rolling back last transaction\n")
658 self.ui.status("rolling back last transaction\n")
659 transaction.rollback(self.opener, self.join("undo"))
659 transaction.rollback(self.opener, self.join("undo"))
660 self.dirstate = None
660 self.dirstate = None
661 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
661 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
662 self.dirstate = dirstate(self.opener, self.ui, self.root)
662 self.dirstate = dirstate(self.opener, self.ui, self.root)
663 else:
663 else:
664 self.ui.warn("no undo information available\n")
664 self.ui.warn("no undo information available\n")
665
665
666 def lock(self, wait = 1):
666 def lock(self, wait = 1):
667 try:
667 try:
668 return lock.lock(self.join("lock"), 0)
668 return lock.lock(self.join("lock"), 0)
669 except lock.LockHeld, inst:
669 except lock.LockHeld, inst:
670 if wait:
670 if wait:
671 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
671 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
672 return lock.lock(self.join("lock"), wait)
672 return lock.lock(self.join("lock"), wait)
673 raise inst
673 raise inst
674
674
675 def rawcommit(self, files, text, user, date, p1=None, p2=None):
675 def rawcommit(self, files, text, user, date, p1=None, p2=None):
676 orig_parent = self.dirstate.parents()[0] or nullid
676 orig_parent = self.dirstate.parents()[0] or nullid
677 p1 = p1 or self.dirstate.parents()[0] or nullid
677 p1 = p1 or self.dirstate.parents()[0] or nullid
678 p2 = p2 or self.dirstate.parents()[1] or nullid
678 p2 = p2 or self.dirstate.parents()[1] or nullid
679 c1 = self.changelog.read(p1)
679 c1 = self.changelog.read(p1)
680 c2 = self.changelog.read(p2)
680 c2 = self.changelog.read(p2)
681 m1 = self.manifest.read(c1[0])
681 m1 = self.manifest.read(c1[0])
682 mf1 = self.manifest.readflags(c1[0])
682 mf1 = self.manifest.readflags(c1[0])
683 m2 = self.manifest.read(c2[0])
683 m2 = self.manifest.read(c2[0])
684
684
685 if orig_parent == p1:
685 if orig_parent == p1:
686 update_dirstate = 1
686 update_dirstate = 1
687 else:
687 else:
688 update_dirstate = 0
688 update_dirstate = 0
689
689
690 tr = self.transaction()
690 tr = self.transaction()
691 mm = m1.copy()
691 mm = m1.copy()
692 mfm = mf1.copy()
692 mfm = mf1.copy()
693 linkrev = self.changelog.count()
693 linkrev = self.changelog.count()
694 for f in files:
694 for f in files:
695 try:
695 try:
696 t = self.wfile(f).read()
696 t = self.wfile(f).read()
697 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
697 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
698 r = self.file(f)
698 r = self.file(f)
699 mfm[f] = tm
699 mfm[f] = tm
700 mm[f] = r.add(t, {}, tr, linkrev,
700 mm[f] = r.add(t, {}, tr, linkrev,
701 m1.get(f, nullid), m2.get(f, nullid))
701 m1.get(f, nullid), m2.get(f, nullid))
702 if update_dirstate:
702 if update_dirstate:
703 self.dirstate.update([f], "n")
703 self.dirstate.update([f], "n")
704 except IOError:
704 except IOError:
705 try:
705 try:
706 del mm[f]
706 del mm[f]
707 del mfm[f]
707 del mfm[f]
708 if update_dirstate:
708 if update_dirstate:
709 self.dirstate.forget([f])
709 self.dirstate.forget([f])
710 except:
710 except:
711 # deleted from p2?
711 # deleted from p2?
712 pass
712 pass
713
713
714 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
714 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
715 user = user or self.ui.username()
715 user = user or self.ui.username()
716 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
716 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
717 tr.close()
717 tr.close()
718 if update_dirstate:
718 if update_dirstate:
719 self.dirstate.setparents(n, nullid)
719 self.dirstate.setparents(n, nullid)
720
720
721 def commit(self, files = None, text = "", user = None, date = None):
721 def commit(self, files = None, text = "", user = None, date = None):
722 commit = []
722 commit = []
723 remove = []
723 remove = []
724 if files:
724 if files:
725 for f in files:
725 for f in files:
726 s = self.dirstate.state(f)
726 s = self.dirstate.state(f)
727 if s in 'nmai':
727 if s in 'nmai':
728 commit.append(f)
728 commit.append(f)
729 elif s == 'r':
729 elif s == 'r':
730 remove.append(f)
730 remove.append(f)
731 else:
731 else:
732 self.ui.warn("%s not tracked!\n" % f)
732 self.ui.warn("%s not tracked!\n" % f)
733 else:
733 else:
734 (c, a, d, u) = self.changes(None, None)
734 (c, a, d, u) = self.changes(None, None)
735 commit = c + a
735 commit = c + a
736 remove = d
736 remove = d
737
737
738 if not commit and not remove:
738 if not commit and not remove:
739 self.ui.status("nothing changed\n")
739 self.ui.status("nothing changed\n")
740 return
740 return
741
741
742 if not self.hook("precommit"):
742 if not self.hook("precommit"):
743 return 1
743 return 1
744
744
745 p1, p2 = self.dirstate.parents()
745 p1, p2 = self.dirstate.parents()
746 c1 = self.changelog.read(p1)
746 c1 = self.changelog.read(p1)
747 c2 = self.changelog.read(p2)
747 c2 = self.changelog.read(p2)
748 m1 = self.manifest.read(c1[0])
748 m1 = self.manifest.read(c1[0])
749 mf1 = self.manifest.readflags(c1[0])
749 mf1 = self.manifest.readflags(c1[0])
750 m2 = self.manifest.read(c2[0])
750 m2 = self.manifest.read(c2[0])
751 lock = self.lock()
751 lock = self.lock()
752 tr = self.transaction()
752 tr = self.transaction()
753
753
754 # check in files
754 # check in files
755 new = {}
755 new = {}
756 linkrev = self.changelog.count()
756 linkrev = self.changelog.count()
757 commit.sort()
757 commit.sort()
758 for f in commit:
758 for f in commit:
759 self.ui.note(f + "\n")
759 self.ui.note(f + "\n")
760 try:
760 try:
761 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
761 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
762 t = self.wfile(f).read()
762 t = self.wfile(f).read()
763 except IOError:
763 except IOError:
764 self.warn("trouble committing %s!\n" % f)
764 self.warn("trouble committing %s!\n" % f)
765 raise
765 raise
766
766
767 meta = {}
767 meta = {}
768 cp = self.dirstate.copied(f)
768 cp = self.dirstate.copied(f)
769 if cp:
769 if cp:
770 meta["copy"] = cp
770 meta["copy"] = cp
771 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
771 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
772 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
772 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
773
773
774 r = self.file(f)
774 r = self.file(f)
775 fp1 = m1.get(f, nullid)
775 fp1 = m1.get(f, nullid)
776 fp2 = m2.get(f, nullid)
776 fp2 = m2.get(f, nullid)
777 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
777 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
778
778
779 # update manifest
779 # update manifest
780 m1.update(new)
780 m1.update(new)
781 for f in remove:
781 for f in remove:
782 if f in m1:
782 if f in m1:
783 del m1[f]
783 del m1[f]
784 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], (new,remove))
784 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], (new,remove))
785
785
786 # add changeset
786 # add changeset
787 new = new.keys()
787 new = new.keys()
788 new.sort()
788 new.sort()
789
789
790 if not text:
790 if not text:
791 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
791 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
792 edittext += "".join(["HG: changed %s\n" % f for f in new])
792 edittext += "".join(["HG: changed %s\n" % f for f in new])
793 edittext += "".join(["HG: removed %s\n" % f for f in remove])
793 edittext += "".join(["HG: removed %s\n" % f for f in remove])
794 edittext = self.ui.edit(edittext)
794 edittext = self.ui.edit(edittext)
795 if not edittext.rstrip():
795 if not edittext.rstrip():
796 return 1
796 return 1
797 text = edittext
797 text = edittext
798
798
799 user = user or self.ui.username()
799 user = user or self.ui.username()
800 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
800 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
801
801
802 if not self.hook("commit", node=hex(n)):
802 if not self.hook("commit", node=hex(n)):
803 return 1
803 return 1
804
804
805 tr.close()
805 tr.close()
806
806
807 self.dirstate.setparents(n)
807 self.dirstate.setparents(n)
808 self.dirstate.update(new, "n")
808 self.dirstate.update(new, "n")
809 self.dirstate.forget(remove)
809 self.dirstate.forget(remove)
810
810
811 def changes(self, node1, node2, files=None):
811 def changes(self, node1, node2, files=None):
812 mf2, u = None, []
812 mf2, u = None, []
813
813
814 def fcmp(fn, mf):
814 def fcmp(fn, mf):
815 t1 = self.wfile(fn).read()
815 t1 = self.wfile(fn).read()
816 t2 = self.file(fn).revision(mf[fn])
816 t2 = self.file(fn).revision(mf[fn])
817 return cmp(t1, t2)
817 return cmp(t1, t2)
818
818
819 # are we comparing the working directory?
819 # are we comparing the working directory?
820 if not node2:
820 if not node2:
821 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
821 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
822
822
823 # are we comparing working dir against its parent?
823 # are we comparing working dir against its parent?
824 if not node1:
824 if not node1:
825 if l:
825 if l:
826 # do a full compare of any files that might have changed
826 # do a full compare of any files that might have changed
827 change = self.changelog.read(self.dirstate.parents()[0])
827 change = self.changelog.read(self.dirstate.parents()[0])
828 mf2 = self.manifest.read(change[0])
828 mf2 = self.manifest.read(change[0])
829 for f in l:
829 for f in l:
830 if fcmp(f, mf2):
830 if fcmp(f, mf2):
831 c.append(f)
831 c.append(f)
832
832
833 for l in c, a, d, u:
833 for l in c, a, d, u:
834 l.sort()
834 l.sort()
835
835
836 return (c, a, d, u)
836 return (c, a, d, u)
837
837
838 # are we comparing working dir against non-tip?
838 # are we comparing working dir against non-tip?
839 # generate a pseudo-manifest for the working dir
839 # generate a pseudo-manifest for the working dir
840 if not node2:
840 if not node2:
841 if not mf2:
841 if not mf2:
842 change = self.changelog.read(self.dirstate.parents()[0])
842 change = self.changelog.read(self.dirstate.parents()[0])
843 mf2 = self.manifest.read(change[0]).copy()
843 mf2 = self.manifest.read(change[0]).copy()
844 for f in a + c + l:
844 for f in a + c + l:
845 mf2[f] = ""
845 mf2[f] = ""
846 for f in d:
846 for f in d:
847 if f in mf2: del mf2[f]
847 if f in mf2: del mf2[f]
848 else:
848 else:
849 change = self.changelog.read(node2)
849 change = self.changelog.read(node2)
850 mf2 = self.manifest.read(change[0])
850 mf2 = self.manifest.read(change[0])
851
851
852 # flush lists from dirstate before comparing manifests
852 # flush lists from dirstate before comparing manifests
853 c, a = [], []
853 c, a = [], []
854
854
855 change = self.changelog.read(node1)
855 change = self.changelog.read(node1)
856 mf1 = self.manifest.read(change[0]).copy()
856 mf1 = self.manifest.read(change[0]).copy()
857
857
858 for fn in mf2:
858 for fn in mf2:
859 if mf1.has_key(fn):
859 if mf1.has_key(fn):
860 if mf1[fn] != mf2[fn]:
860 if mf1[fn] != mf2[fn]:
861 if mf2[fn] != "" or fcmp(fn, mf1):
861 if mf2[fn] != "" or fcmp(fn, mf1):
862 c.append(fn)
862 c.append(fn)
863 del mf1[fn]
863 del mf1[fn]
864 else:
864 else:
865 a.append(fn)
865 a.append(fn)
866
866
867 d = mf1.keys()
867 d = mf1.keys()
868
868
869 for l in c, a, d, u:
869 for l in c, a, d, u:
870 l.sort()
870 l.sort()
871
871
872 return (c, a, d, u)
872 return (c, a, d, u)
873
873
874 def add(self, list):
874 def add(self, list):
875 for f in list:
875 for f in list:
876 p = self.wjoin(f)
876 p = self.wjoin(f)
877 if not os.path.exists(p):
877 if not os.path.exists(p):
878 self.ui.warn("%s does not exist!\n" % f)
878 self.ui.warn("%s does not exist!\n" % f)
879 elif not os.path.isfile(p):
879 elif not os.path.isfile(p):
880 self.ui.warn("%s not added: mercurial only supports files currently\n" % f)
880 self.ui.warn("%s not added: mercurial only supports files currently\n" % f)
881 elif self.dirstate.state(f) == 'n':
881 elif self.dirstate.state(f) == 'n':
882 self.ui.warn("%s already tracked!\n" % f)
882 self.ui.warn("%s already tracked!\n" % f)
883 else:
883 else:
884 self.dirstate.update([f], "a")
884 self.dirstate.update([f], "a")
885
885
886 def forget(self, list):
886 def forget(self, list):
887 for f in list:
887 for f in list:
888 if self.dirstate.state(f) not in 'ai':
888 if self.dirstate.state(f) not in 'ai':
889 self.ui.warn("%s not added!\n" % f)
889 self.ui.warn("%s not added!\n" % f)
890 else:
890 else:
891 self.dirstate.forget([f])
891 self.dirstate.forget([f])
892
892
893 def remove(self, list):
893 def remove(self, list):
894 for f in list:
894 for f in list:
895 p = self.wjoin(f)
895 p = self.wjoin(f)
896 if os.path.exists(p):
896 if os.path.exists(p):
897 self.ui.warn("%s still exists!\n" % f)
897 self.ui.warn("%s still exists!\n" % f)
898 elif self.dirstate.state(f) == 'a':
898 elif self.dirstate.state(f) == 'a':
899 self.ui.warn("%s never committed!\n" % f)
899 self.ui.warn("%s never committed!\n" % f)
900 self.dirstate.forget(f)
900 self.dirstate.forget([f])
901 elif f not in self.dirstate:
901 elif f not in self.dirstate:
902 self.ui.warn("%s not tracked!\n" % f)
902 self.ui.warn("%s not tracked!\n" % f)
903 else:
903 else:
904 self.dirstate.update([f], "r")
904 self.dirstate.update([f], "r")
905
905
906 def copy(self, source, dest):
906 def copy(self, source, dest):
907 p = self.wjoin(dest)
907 p = self.wjoin(dest)
908 if not os.path.exists(dest):
908 if not os.path.exists(dest):
909 self.ui.warn("%s does not exist!\n" % dest)
909 self.ui.warn("%s does not exist!\n" % dest)
910 elif not os.path.isfile(dest):
910 elif not os.path.isfile(dest):
911 self.ui.warn("copy failed: %s is not a file\n" % dest)
911 self.ui.warn("copy failed: %s is not a file\n" % dest)
912 else:
912 else:
913 if self.dirstate.state(dest) == '?':
913 if self.dirstate.state(dest) == '?':
914 self.dirstate.update([dest], "a")
914 self.dirstate.update([dest], "a")
915 self.dirstate.copy(source, dest)
915 self.dirstate.copy(source, dest)
916
916
917 def heads(self):
917 def heads(self):
918 return self.changelog.heads()
918 return self.changelog.heads()
919
919
920 def branches(self, nodes):
920 def branches(self, nodes):
921 if not nodes: nodes = [self.changelog.tip()]
921 if not nodes: nodes = [self.changelog.tip()]
922 b = []
922 b = []
923 for n in nodes:
923 for n in nodes:
924 t = n
924 t = n
925 while n:
925 while n:
926 p = self.changelog.parents(n)
926 p = self.changelog.parents(n)
927 if p[1] != nullid or p[0] == nullid:
927 if p[1] != nullid or p[0] == nullid:
928 b.append((t, n, p[0], p[1]))
928 b.append((t, n, p[0], p[1]))
929 break
929 break
930 n = p[0]
930 n = p[0]
931 return b
931 return b
932
932
933 def between(self, pairs):
933 def between(self, pairs):
934 r = []
934 r = []
935
935
936 for top, bottom in pairs:
936 for top, bottom in pairs:
937 n, l, i = top, [], 0
937 n, l, i = top, [], 0
938 f = 1
938 f = 1
939
939
940 while n != bottom:
940 while n != bottom:
941 p = self.changelog.parents(n)[0]
941 p = self.changelog.parents(n)[0]
942 if i == f:
942 if i == f:
943 l.append(n)
943 l.append(n)
944 f = f * 2
944 f = f * 2
945 n = p
945 n = p
946 i += 1
946 i += 1
947
947
948 r.append(l)
948 r.append(l)
949
949
950 return r
950 return r
951
951
952 def newer(self, nodes):
952 def newer(self, nodes):
953 m = {}
953 m = {}
954 nl = []
954 nl = []
955 pm = {}
955 pm = {}
956 cl = self.changelog
956 cl = self.changelog
957 t = l = cl.count()
957 t = l = cl.count()
958
958
959 # find the lowest numbered node
959 # find the lowest numbered node
960 for n in nodes:
960 for n in nodes:
961 l = min(l, cl.rev(n))
961 l = min(l, cl.rev(n))
962 m[n] = 1
962 m[n] = 1
963
963
964 for i in xrange(l, t):
964 for i in xrange(l, t):
965 n = cl.node(i)
965 n = cl.node(i)
966 if n in m: # explicitly listed
966 if n in m: # explicitly listed
967 pm[n] = 1
967 pm[n] = 1
968 nl.append(n)
968 nl.append(n)
969 continue
969 continue
970 for p in cl.parents(n):
970 for p in cl.parents(n):
971 if p in pm: # parent listed
971 if p in pm: # parent listed
972 pm[n] = 1
972 pm[n] = 1
973 nl.append(n)
973 nl.append(n)
974 break
974 break
975
975
976 return nl
976 return nl
977
977
978 def findincoming(self, remote, base={}):
978 def findincoming(self, remote, base={}):
979 m = self.changelog.nodemap
979 m = self.changelog.nodemap
980 search = []
980 search = []
981 fetch = []
981 fetch = []
982 seen = {}
982 seen = {}
983 seenbranch = {}
983 seenbranch = {}
984
984
985 # assume we're closer to the tip than the root
985 # assume we're closer to the tip than the root
986 # and start by examining the heads
986 # and start by examining the heads
987 self.ui.status("searching for changes\n")
987 self.ui.status("searching for changes\n")
988 heads = remote.heads()
988 heads = remote.heads()
989 unknown = []
989 unknown = []
990 for h in heads:
990 for h in heads:
991 if h not in m:
991 if h not in m:
992 unknown.append(h)
992 unknown.append(h)
993 else:
993 else:
994 base[h] = 1
994 base[h] = 1
995
995
996 if not unknown:
996 if not unknown:
997 return None
997 return None
998
998
999 rep = {}
999 rep = {}
1000 reqcnt = 0
1000 reqcnt = 0
1001
1001
1002 # search through remote branches
1002 # search through remote branches
1003 # a 'branch' here is a linear segment of history, with four parts:
1003 # a 'branch' here is a linear segment of history, with four parts:
1004 # head, root, first parent, second parent
1004 # head, root, first parent, second parent
1005 # (a branch always has two parents (or none) by definition)
1005 # (a branch always has two parents (or none) by definition)
1006 unknown = remote.branches(unknown)
1006 unknown = remote.branches(unknown)
1007 while unknown:
1007 while unknown:
1008 r = []
1008 r = []
1009 while unknown:
1009 while unknown:
1010 n = unknown.pop(0)
1010 n = unknown.pop(0)
1011 if n[0] in seen:
1011 if n[0] in seen:
1012 continue
1012 continue
1013
1013
1014 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1014 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1015 if n[0] == nullid:
1015 if n[0] == nullid:
1016 break
1016 break
1017 if n in seenbranch:
1017 if n in seenbranch:
1018 self.ui.debug("branch already found\n")
1018 self.ui.debug("branch already found\n")
1019 continue
1019 continue
1020 if n[1] and n[1] in m: # do we know the base?
1020 if n[1] and n[1] in m: # do we know the base?
1021 self.ui.debug("found incomplete branch %s:%s\n"
1021 self.ui.debug("found incomplete branch %s:%s\n"
1022 % (short(n[0]), short(n[1])))
1022 % (short(n[0]), short(n[1])))
1023 search.append(n) # schedule branch range for scanning
1023 search.append(n) # schedule branch range for scanning
1024 seenbranch[n] = 1
1024 seenbranch[n] = 1
1025 else:
1025 else:
1026 if n[1] not in seen and n[1] not in fetch:
1026 if n[1] not in seen and n[1] not in fetch:
1027 if n[2] in m and n[3] in m:
1027 if n[2] in m and n[3] in m:
1028 self.ui.debug("found new changeset %s\n" %
1028 self.ui.debug("found new changeset %s\n" %
1029 short(n[1]))
1029 short(n[1]))
1030 fetch.append(n[1]) # earliest unknown
1030 fetch.append(n[1]) # earliest unknown
1031 base[n[2]] = 1 # latest known
1031 base[n[2]] = 1 # latest known
1032 continue
1032 continue
1033
1033
1034 for a in n[2:4]:
1034 for a in n[2:4]:
1035 if a not in rep:
1035 if a not in rep:
1036 r.append(a)
1036 r.append(a)
1037 rep[a] = 1
1037 rep[a] = 1
1038
1038
1039 seen[n[0]] = 1
1039 seen[n[0]] = 1
1040
1040
1041 if r:
1041 if r:
1042 reqcnt += 1
1042 reqcnt += 1
1043 self.ui.debug("request %d: %s\n" %
1043 self.ui.debug("request %d: %s\n" %
1044 (reqcnt, " ".join(map(short, r))))
1044 (reqcnt, " ".join(map(short, r))))
1045 for p in range(0, len(r), 10):
1045 for p in range(0, len(r), 10):
1046 for b in remote.branches(r[p:p+10]):
1046 for b in remote.branches(r[p:p+10]):
1047 self.ui.debug("received %s:%s\n" %
1047 self.ui.debug("received %s:%s\n" %
1048 (short(b[0]), short(b[1])))
1048 (short(b[0]), short(b[1])))
1049 if b[0] not in m and b[0] not in seen:
1049 if b[0] not in m and b[0] not in seen:
1050 unknown.append(b)
1050 unknown.append(b)
1051
1051
1052 # do binary search on the branches we found
1052 # do binary search on the branches we found
1053 while search:
1053 while search:
1054 n = search.pop(0)
1054 n = search.pop(0)
1055 reqcnt += 1
1055 reqcnt += 1
1056 l = remote.between([(n[0], n[1])])[0]
1056 l = remote.between([(n[0], n[1])])[0]
1057 l.append(n[1])
1057 l.append(n[1])
1058 p = n[0]
1058 p = n[0]
1059 f = 1
1059 f = 1
1060 for i in l:
1060 for i in l:
1061 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1061 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1062 if i in m:
1062 if i in m:
1063 if f <= 2:
1063 if f <= 2:
1064 self.ui.debug("found new branch changeset %s\n" %
1064 self.ui.debug("found new branch changeset %s\n" %
1065 short(p))
1065 short(p))
1066 fetch.append(p)
1066 fetch.append(p)
1067 base[i] = 1
1067 base[i] = 1
1068 else:
1068 else:
1069 self.ui.debug("narrowed branch search to %s:%s\n"
1069 self.ui.debug("narrowed branch search to %s:%s\n"
1070 % (short(p), short(i)))
1070 % (short(p), short(i)))
1071 search.append((p, i))
1071 search.append((p, i))
1072 break
1072 break
1073 p, f = i, f * 2
1073 p, f = i, f * 2
1074
1074
1075 # sanity check our fetch list
1075 # sanity check our fetch list
1076 for f in fetch:
1076 for f in fetch:
1077 if f in m:
1077 if f in m:
1078 raise RepoError("already have changeset " + short(f[:4]))
1078 raise RepoError("already have changeset " + short(f[:4]))
1079
1079
1080 if base.keys() == [nullid]:
1080 if base.keys() == [nullid]:
1081 self.ui.warn("warning: pulling from an unrelated repository!\n")
1081 self.ui.warn("warning: pulling from an unrelated repository!\n")
1082
1082
1083 self.ui.note("adding new changesets starting at " +
1083 self.ui.note("adding new changesets starting at " +
1084 " ".join([short(f) for f in fetch]) + "\n")
1084 " ".join([short(f) for f in fetch]) + "\n")
1085
1085
1086 self.ui.debug("%d total queries\n" % reqcnt)
1086 self.ui.debug("%d total queries\n" % reqcnt)
1087
1087
1088 return fetch
1088 return fetch
1089
1089
1090 def findoutgoing(self, remote):
1090 def findoutgoing(self, remote):
1091 base = {}
1091 base = {}
1092 self.findincoming(remote, base)
1092 self.findincoming(remote, base)
1093 remain = dict.fromkeys(self.changelog.nodemap)
1093 remain = dict.fromkeys(self.changelog.nodemap)
1094
1094
1095 # prune everything remote has from the tree
1095 # prune everything remote has from the tree
1096 del remain[nullid]
1096 del remain[nullid]
1097 remove = base.keys()
1097 remove = base.keys()
1098 while remove:
1098 while remove:
1099 n = remove.pop(0)
1099 n = remove.pop(0)
1100 if n in remain:
1100 if n in remain:
1101 del remain[n]
1101 del remain[n]
1102 for p in self.changelog.parents(n):
1102 for p in self.changelog.parents(n):
1103 remove.append(p)
1103 remove.append(p)
1104
1104
1105 # find every node whose parents have been pruned
1105 # find every node whose parents have been pruned
1106 subset = []
1106 subset = []
1107 for n in remain:
1107 for n in remain:
1108 p1, p2 = self.changelog.parents(n)
1108 p1, p2 = self.changelog.parents(n)
1109 if p1 not in remain and p2 not in remain:
1109 if p1 not in remain and p2 not in remain:
1110 subset.append(n)
1110 subset.append(n)
1111
1111
1112 # this is the set of all roots we have to push
1112 # this is the set of all roots we have to push
1113 return subset
1113 return subset
1114
1114
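# Illustrative sketch (toy graph, not from the original file): findoutgoing()
# prunes everything reachable from the remote-known "base" nodes, then keeps
# the roots of whatever is left; those roots seed the changegroup to push.
def toy_outgoing(parents, all_nodes, base):
    remain = set(all_nodes)
    remove = list(base)
    while remove:
        n = remove.pop(0)
        if n in remain:
            remain.discard(n)
            remove.extend(parents.get(n, ()))
    # roots: nodes none of whose parents survived the pruning
    return sorted(n for n in remain
                  if not any(p in remain for p in parents.get(n, ())))

# linear history 0-1-2-3-4 where the remote already has everything up to 2:
parents = {1: [0], 2: [1], 3: [2], 4: [3]}
assert toy_outgoing(parents, [0, 1, 2, 3, 4], base=[2]) == [3]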
1115 def pull(self, remote):
1115 def pull(self, remote):
1116 lock = self.lock()
1116 lock = self.lock()
1117
1117
1118 # if we have an empty repo, fetch everything
1118 # if we have an empty repo, fetch everything
1119 if self.changelog.tip() == nullid:
1119 if self.changelog.tip() == nullid:
1120 self.ui.status("requesting all changes\n")
1120 self.ui.status("requesting all changes\n")
1121 fetch = [nullid]
1121 fetch = [nullid]
1122 else:
1122 else:
1123 fetch = self.findincoming(remote)
1123 fetch = self.findincoming(remote)
1124
1124
1125 if not fetch:
1125 if not fetch:
1126 self.ui.status("no changes found\n")
1126 self.ui.status("no changes found\n")
1127 return 1
1127 return 1
1128
1128
1129 cg = remote.changegroup(fetch)
1129 cg = remote.changegroup(fetch)
1130 return self.addchangegroup(cg)
1130 return self.addchangegroup(cg)
1131
1131
1132 def push(self, remote):
1132 def push(self, remote):
1133 lock = remote.lock()
1133 lock = remote.lock()
1134 update = self.findoutgoing(remote)
1134 update = self.findoutgoing(remote)
1135 if not update:
1135 if not update:
1136 self.ui.status("no changes found\n")
1136 self.ui.status("no changes found\n")
1137 return 1
1137 return 1
1138
1138
1139 cg = self.changegroup(update)
1139 cg = self.changegroup(update)
1140 return remote.addchangegroup(cg)
1140 return remote.addchangegroup(cg)
1141
1141
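# Illustrative usage sketch (paths and URL are made up; the import line
# assumes this module is used as part of the mercurial package):
#
#   from mercurial import hg, ui
#   u = ui.ui()
#   local = hg.repository(u, ".")
#   remote = hg.repository(u, "http://example.com/hg/project")
#   local.pull(remote)     # fetch whatever findincoming() reports as missing
#   local.push(remote)     # send findoutgoing() roots via a changegroup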
1142 def changegroup(self, basenodes):
1142 def changegroup(self, basenodes):
1143 class genread:
1143 class genread:
1144 def __init__(self, generator):
1144 def __init__(self, generator):
1145 self.g = generator
1145 self.g = generator
1146 self.buf = ""
1146 self.buf = ""
1147 def read(self, l):
1147 def read(self, l):
1148 while l > len(self.buf):
1148 while l > len(self.buf):
1149 try:
1149 try:
1150 self.buf += self.g.next()
1150 self.buf += self.g.next()
1151 except StopIteration:
1151 except StopIteration:
1152 break
1152 break
1153 d, self.buf = self.buf[:l], self.buf[l:]
1153 d, self.buf = self.buf[:l], self.buf[l:]
1154 return d
1154 return d
1155
1155
1156 def gengroup():
1156 def gengroup():
1157 nodes = self.newer(basenodes)
1157 nodes = self.newer(basenodes)
1158
1158
1159 # construct the link map
1159 # construct the link map
1160 linkmap = {}
1160 linkmap = {}
1161 for n in nodes:
1161 for n in nodes:
1162 linkmap[self.changelog.rev(n)] = n
1162 linkmap[self.changelog.rev(n)] = n
1163
1163
1164 # construct a list of all changed files
1164 # construct a list of all changed files
1165 changed = {}
1165 changed = {}
1166 for n in nodes:
1166 for n in nodes:
1167 c = self.changelog.read(n)
1167 c = self.changelog.read(n)
1168 for f in c[3]:
1168 for f in c[3]:
1169 changed[f] = 1
1169 changed[f] = 1
1170 changed = changed.keys()
1170 changed = changed.keys()
1171 changed.sort()
1171 changed.sort()
1172
1172
1173 # the changegroup is changesets + manifests + all file revs
1173 # the changegroup is changesets + manifests + all file revs
1174 revs = [ self.changelog.rev(n) for n in nodes ]
1174 revs = [ self.changelog.rev(n) for n in nodes ]
1175
1175
1176 for y in self.changelog.group(linkmap): yield y
1176 for y in self.changelog.group(linkmap): yield y
1177 for y in self.manifest.group(linkmap): yield y
1177 for y in self.manifest.group(linkmap): yield y
1178 for f in changed:
1178 for f in changed:
1179 yield struct.pack(">l", len(f) + 4) + f
1179 yield struct.pack(">l", len(f) + 4) + f
1180 g = self.file(f).group(linkmap)
1180 g = self.file(f).group(linkmap)
1181 for y in g:
1181 for y in g:
1182 yield y
1182 yield y
1183
1183
1184 yield struct.pack(">l", 0)
1184 yield struct.pack(">l", 0)
1185
1185
1186 return genread(gengroup())
1186 return genread(gengroup())
1187
1187
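# Illustrative sketch (standalone, not from the original file): every item in
# the changegroup stream is a length-prefixed chunk whose 4-byte big-endian
# length counts itself; a length of 0 (or anything <= 4) ends a group.  This
# mirrors the framing produced above and consumed by getchunk() below.
import struct

def pack_chunk(payload):
    return struct.pack(">l", len(payload) + 4) + payload

def unpack_chunks(data):
    pos, out = 0, []
    while pos + 4 <= len(data):
        (l,) = struct.unpack(">l", data[pos:pos + 4])
        if l <= 4:                      # empty chunk: end of this group
            break
        out.append(data[pos + 4:pos + l])
        pos += l
    return out

stream = pack_chunk(b"some/file.txt") + struct.pack(">l", 0)
assert unpack_chunks(stream) == [b"some/file.txt"]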
1188 def addchangegroup(self, source):
1188 def addchangegroup(self, source):
1189
1189
1190 def getchunk():
1190 def getchunk():
1191 d = source.read(4)
1191 d = source.read(4)
1192 if not d: return ""
1192 if not d: return ""
1193 l = struct.unpack(">l", d)[0]
1193 l = struct.unpack(">l", d)[0]
1194 if l <= 4: return ""
1194 if l <= 4: return ""
1195 return source.read(l - 4)
1195 return source.read(l - 4)
1196
1196
1197 def getgroup():
1197 def getgroup():
1198 while 1:
1198 while 1:
1199 c = getchunk()
1199 c = getchunk()
1200 if not c: break
1200 if not c: break
1201 yield c
1201 yield c
1202
1202
1203 def csmap(x):
1203 def csmap(x):
1204 self.ui.debug("add changeset %s\n" % short(x))
1204 self.ui.debug("add changeset %s\n" % short(x))
1205 return self.changelog.count()
1205 return self.changelog.count()
1206
1206
1207 def revmap(x):
1207 def revmap(x):
1208 return self.changelog.rev(x)
1208 return self.changelog.rev(x)
1209
1209
1210 if not source: return
1210 if not source: return
1211 changesets = files = revisions = 0
1211 changesets = files = revisions = 0
1212
1212
1213 tr = self.transaction()
1213 tr = self.transaction()
1214
1214
1215 # pull off the changeset group
1215 # pull off the changeset group
1216 self.ui.status("adding changesets\n")
1216 self.ui.status("adding changesets\n")
1217 co = self.changelog.tip()
1217 co = self.changelog.tip()
1218 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1218 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1219 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1219 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1220
1220
1221 # pull off the manifest group
1221 # pull off the manifest group
1222 self.ui.status("adding manifests\n")
1222 self.ui.status("adding manifests\n")
1223 mm = self.manifest.tip()
1223 mm = self.manifest.tip()
1224 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1224 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1225
1225
1226 # process the files
1226 # process the files
1227 self.ui.status("adding file revisions\n")
1227 self.ui.status("adding file revisions\n")
1228 while 1:
1228 while 1:
1229 f = getchunk()
1229 f = getchunk()
1230 if not f: break
1230 if not f: break
1231 self.ui.debug("adding %s revisions\n" % f)
1231 self.ui.debug("adding %s revisions\n" % f)
1232 fl = self.file(f)
1232 fl = self.file(f)
1233 o = fl.count()
1233 o = fl.count()
1234 n = fl.addgroup(getgroup(), revmap, tr)
1234 n = fl.addgroup(getgroup(), revmap, tr)
1235 revisions += fl.count() - o
1235 revisions += fl.count() - o
1236 files += 1
1236 files += 1
1237
1237
1238 self.ui.status(("modified %d files, added %d changesets" +
1238 self.ui.status(("modified %d files, added %d changesets" +
1239 " and %d new revisions\n")
1239 " and %d new revisions\n")
1240 % (files, changesets, revisions))
1240 % (files, changesets, revisions))
1241
1241
1242 tr.close()
1242 tr.close()
1243 return
1243 return
1244
1244
1245 def update(self, node, allow=False, force=False, choose=None,
1245 def update(self, node, allow=False, force=False, choose=None,
1246 moddirstate=True):
1246 moddirstate=True):
1247 pl = self.dirstate.parents()
1247 pl = self.dirstate.parents()
1248 if not force and pl[1] != nullid:
1248 if not force and pl[1] != nullid:
1249 self.ui.warn("aborting: outstanding uncommitted merges\n")
1249 self.ui.warn("aborting: outstanding uncommitted merges\n")
1250 return
1250 return
1251
1251
1252 p1, p2 = pl[0], node
1252 p1, p2 = pl[0], node
1253 pa = self.changelog.ancestor(p1, p2)
1253 pa = self.changelog.ancestor(p1, p2)
1254 m1n = self.changelog.read(p1)[0]
1254 m1n = self.changelog.read(p1)[0]
1255 m2n = self.changelog.read(p2)[0]
1255 m2n = self.changelog.read(p2)[0]
1256 man = self.manifest.ancestor(m1n, m2n)
1256 man = self.manifest.ancestor(m1n, m2n)
1257 m1 = self.manifest.read(m1n)
1257 m1 = self.manifest.read(m1n)
1258 mf1 = self.manifest.readflags(m1n)
1258 mf1 = self.manifest.readflags(m1n)
1259 m2 = self.manifest.read(m2n)
1259 m2 = self.manifest.read(m2n)
1260 mf2 = self.manifest.readflags(m2n)
1260 mf2 = self.manifest.readflags(m2n)
1261 ma = self.manifest.read(man)
1261 ma = self.manifest.read(man)
1262 mfa = self.manifest.readflags(man)
1262 mfa = self.manifest.readflags(man)
1263
1263
1264 (c, a, d, u) = self.changes(None, None)
1264 (c, a, d, u) = self.changes(None, None)
1265
1265
1266 # is this a jump, or a merge? i.e. is there a linear path
1266 # is this a jump, or a merge? i.e. is there a linear path
1267 # from p1 to p2?
1267 # from p1 to p2?
1268 linear_path = (pa == p1 or pa == p2)
1268 linear_path = (pa == p1 or pa == p2)
1269
1269
1270 # resolve the manifest to determine which files
1270 # resolve the manifest to determine which files
1271 # we care about merging
1271 # we care about merging
1272 self.ui.note("resolving manifests\n")
1272 self.ui.note("resolving manifests\n")
1273 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1273 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1274 (force, allow, moddirstate, linear_path))
1274 (force, allow, moddirstate, linear_path))
1275 self.ui.debug(" ancestor %s local %s remote %s\n" %
1275 self.ui.debug(" ancestor %s local %s remote %s\n" %
1276 (short(man), short(m1n), short(m2n)))
1276 (short(man), short(m1n), short(m2n)))
1277
1277
1278 merge = {}
1278 merge = {}
1279 get = {}
1279 get = {}
1280 remove = []
1280 remove = []
1281 mark = {}
1281 mark = {}
1282
1282
1283 # construct a working dir manifest
1283 # construct a working dir manifest
1284 mw = m1.copy()
1284 mw = m1.copy()
1285 mfw = mf1.copy()
1285 mfw = mf1.copy()
1286 umap = dict.fromkeys(u)
1286 umap = dict.fromkeys(u)
1287
1287
1288 for f in a + c + u:
1288 for f in a + c + u:
1289 mw[f] = ""
1289 mw[f] = ""
1290 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1290 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1291
1291
1292 for f in d:
1292 for f in d:
1293 if f in mw: del mw[f]
1293 if f in mw: del mw[f]
1294
1294
1295 # If we're jumping between revisions (as opposed to merging),
1295 # If we're jumping between revisions (as opposed to merging),
1296 # and if neither the working directory nor the target rev has
1296 # and if neither the working directory nor the target rev has
1297 # the file, then we need to remove it from the dirstate, to
1297 # the file, then we need to remove it from the dirstate, to
1298 # prevent the dirstate from listing the file when it is no
1298 # prevent the dirstate from listing the file when it is no
1299 # longer in the manifest.
1299 # longer in the manifest.
1300 if moddirstate and linear_path and f not in m2:
1300 if moddirstate and linear_path and f not in m2:
1301 self.dirstate.forget((f,))
1301 self.dirstate.forget((f,))
1302
1302
1303 # Compare manifests
1303 # Compare manifests
1304 for f, n in mw.iteritems():
1304 for f, n in mw.iteritems():
1305 if choose and not choose(f): continue
1305 if choose and not choose(f): continue
1306 if f in m2:
1306 if f in m2:
1307 s = 0
1307 s = 0
1308
1308
1309 # is the wfile new since m1, and match m2?
1309 # is the wfile new since m1, and match m2?
1310 if f not in m1:
1310 if f not in m1:
1311 t1 = self.wfile(f).read()
1311 t1 = self.wfile(f).read()
1312 t2 = self.file(f).revision(m2[f])
1312 t2 = self.file(f).revision(m2[f])
1313 if cmp(t1, t2) == 0:
1313 if cmp(t1, t2) == 0:
1314 mark[f] = 1
1314 mark[f] = 1
1315 n = m2[f]
1315 n = m2[f]
1316 del t1, t2
1316 del t1, t2
1317
1317
1318 # are files different?
1318 # are files different?
1319 if n != m2[f]:
1319 if n != m2[f]:
1320 a = ma.get(f, nullid)
1320 a = ma.get(f, nullid)
1321 # are both different from the ancestor?
1321 # are both different from the ancestor?
1322 if n != a and m2[f] != a:
1322 if n != a and m2[f] != a:
1323 self.ui.debug(" %s versions differ, resolve\n" % f)
1323 self.ui.debug(" %s versions differ, resolve\n" % f)
1324 # merge executable bits
1324 # merge executable bits
1325 # "if we changed or they changed, change in merge"
1325 # "if we changed or they changed, change in merge"
1326 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1326 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1327 mode = ((a^b) | (a^c)) ^ a
1327 mode = ((a^b) | (a^c)) ^ a
1328 merge[f] = (m1.get(f, nullid), m2[f], mode)
1328 merge[f] = (m1.get(f, nullid), m2[f], mode)
1329 s = 1
1329 s = 1
1330 # are we clobbering?
1330 # are we clobbering?
1331 # is remote's version newer?
1331 # is remote's version newer?
1332 # or are we going back in time?
1332 # or are we going back in time?
1333 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1333 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1334 self.ui.debug(" remote %s is newer, get\n" % f)
1334 self.ui.debug(" remote %s is newer, get\n" % f)
1335 get[f] = m2[f]
1335 get[f] = m2[f]
1336 s = 1
1336 s = 1
1337 else:
1337 else:
1338 mark[f] = 1
1338 mark[f] = 1
1339 elif f in umap:
1339 elif f in umap:
1340 # this unknown file is the same as the checkout
1340 # this unknown file is the same as the checkout
1341 get[f] = m2[f]
1341 get[f] = m2[f]
1342
1342
1343 if not s and mfw[f] != mf2[f]:
1343 if not s and mfw[f] != mf2[f]:
1344 if force:
1344 if force:
1345 self.ui.debug(" updating permissions for %s\n" % f)
1345 self.ui.debug(" updating permissions for %s\n" % f)
1346 util.set_exec(self.wjoin(f), mf2[f])
1346 util.set_exec(self.wjoin(f), mf2[f])
1347 else:
1347 else:
1348 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1348 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1349 mode = ((a^b) | (a^c)) ^ a
1349 mode = ((a^b) | (a^c)) ^ a
1350 if mode != b:
1350 if mode != b:
1351 self.ui.debug(" updating permissions for %s\n" % f)
1351 self.ui.debug(" updating permissions for %s\n" % f)
1352 util.set_exec(self.wjoin(f), mode)
1352 util.set_exec(self.wjoin(f), mode)
1353 mark[f] = 1
1353 mark[f] = 1
1354 del m2[f]
1354 del m2[f]
1355 elif f in ma:
1355 elif f in ma:
1356 if n != ma[f]:
1356 if n != ma[f]:
1357 r = "d"
1357 r = "d"
1358 if not force and (linear_path or allow):
1358 if not force and (linear_path or allow):
1359 r = self.ui.prompt(
1359 r = self.ui.prompt(
1360 (" local changed %s which remote deleted\n" % f) +
1360 (" local changed %s which remote deleted\n" % f) +
1361 "(k)eep or (d)elete?", "[kd]", "k")
1361 "(k)eep or (d)elete?", "[kd]", "k")
1362 if r == "d":
1362 if r == "d":
1363 remove.append(f)
1363 remove.append(f)
1364 else:
1364 else:
1365 self.ui.debug("other deleted %s\n" % f)
1365 self.ui.debug("other deleted %s\n" % f)
1366 remove.append(f) # other deleted it
1366 remove.append(f) # other deleted it
1367 else:
1367 else:
1368 if n == m1.get(f, nullid): # same as parent
1368 if n == m1.get(f, nullid): # same as parent
1369 if p2 == pa: # going backwards?
1369 if p2 == pa: # going backwards?
1370 self.ui.debug("remote deleted %s\n" % f)
1370 self.ui.debug("remote deleted %s\n" % f)
1371 remove.append(f)
1371 remove.append(f)
1372 else:
1372 else:
1373 self.ui.debug("local created %s, keeping\n" % f)
1373 self.ui.debug("local created %s, keeping\n" % f)
1374 else:
1374 else:
1375 self.ui.debug("working dir created %s, keeping\n" % f)
1375 self.ui.debug("working dir created %s, keeping\n" % f)
1376
1376
1377 for f, n in m2.iteritems():
1377 for f, n in m2.iteritems():
1378 if choose and not choose(f): continue
1378 if choose and not choose(f): continue
1379 if f[0] == "/": continue
1379 if f[0] == "/": continue
1380 if f in ma and n != ma[f]:
1380 if f in ma and n != ma[f]:
1381 r = "k"
1381 r = "k"
1382 if not force and (linear_path or allow):
1382 if not force and (linear_path or allow):
1383 r = self.ui.prompt(
1383 r = self.ui.prompt(
1384 ("remote changed %s which local deleted\n" % f) +
1384 ("remote changed %s which local deleted\n" % f) +
1385 "(k)eep or (d)elete?", "[kd]", "k")
1385 "(k)eep or (d)elete?", "[kd]", "k")
1386 if r == "k": get[f] = n
1386 if r == "k": get[f] = n
1387 elif f not in ma:
1387 elif f not in ma:
1388 self.ui.debug("remote created %s\n" % f)
1388 self.ui.debug("remote created %s\n" % f)
1389 get[f] = n
1389 get[f] = n
1390 else:
1390 else:
1391 self.ui.debug("local deleted %s\n" % f)
1391 self.ui.debug("local deleted %s\n" % f)
1392 if force:
1392 if force:
1393 get[f] = n
1393 get[f] = n
1394
1394
1395 del mw, m1, m2, ma
1395 del mw, m1, m2, ma
1396
1396
1397 if force:
1397 if force:
1398 for f in merge:
1398 for f in merge:
1399 get[f] = merge[f][1]
1399 get[f] = merge[f][1]
1400 merge = {}
1400 merge = {}
1401
1401
1402 if linear_path:
1402 if linear_path:
1403 # we don't need to do any magic, just jump to the new rev
1403 # we don't need to do any magic, just jump to the new rev
1404 mode = 'n'
1404 mode = 'n'
1405 p1, p2 = p2, nullid
1405 p1, p2 = p2, nullid
1406 else:
1406 else:
1407 if not allow:
1407 if not allow:
1408 self.ui.status("this update spans a branch" +
1408 self.ui.status("this update spans a branch" +
1409 " affecting the following files:\n")
1409 " affecting the following files:\n")
1410 fl = merge.keys() + get.keys()
1410 fl = merge.keys() + get.keys()
1411 fl.sort()
1411 fl.sort()
1412 for f in fl:
1412 for f in fl:
1413 cf = ""
1413 cf = ""
1414 if f in merge: cf = " (resolve)"
1414 if f in merge: cf = " (resolve)"
1415 self.ui.status(" %s%s\n" % (f, cf))
1415 self.ui.status(" %s%s\n" % (f, cf))
1416 self.ui.warn("aborting update spanning branches!\n")
1416 self.ui.warn("aborting update spanning branches!\n")
1417 self.ui.status("(use update -m to perform a branch merge)\n")
1417 self.ui.status("(use update -m to perform a branch merge)\n")
1418 return 1
1418 return 1
1419 # we have to remember what files we needed to get/change
1419 # we have to remember what files we needed to get/change
1420 # because any file that's different from either one of its
1420 # because any file that's different from either one of its
1421 # parents must be in the changeset
1421 # parents must be in the changeset
1422 mode = 'm'
1422 mode = 'm'
1423 if moddirstate:
1423 if moddirstate:
1424 self.dirstate.update(mark.keys(), "m")
1424 self.dirstate.update(mark.keys(), "m")
1425
1425
1426 if moddirstate:
1426 if moddirstate:
1427 self.dirstate.setparents(p1, p2)
1427 self.dirstate.setparents(p1, p2)
1428
1428
1429 # get the files we don't need to change
1429 # get the files we don't need to change
1430 files = get.keys()
1430 files = get.keys()
1431 files.sort()
1431 files.sort()
1432 for f in files:
1432 for f in files:
1433 if f[0] == "/": continue
1433 if f[0] == "/": continue
1434 self.ui.note("getting %s\n" % f)
1434 self.ui.note("getting %s\n" % f)
1435 t = self.file(f).read(get[f])
1435 t = self.file(f).read(get[f])
1436 try:
1436 try:
1437 self.wfile(f, "w").write(t)
1437 self.wfile(f, "w").write(t)
1438 except IOError:
1438 except IOError:
1439 os.makedirs(os.path.dirname(self.wjoin(f)))
1439 os.makedirs(os.path.dirname(self.wjoin(f)))
1440 self.wfile(f, "w").write(t)
1440 self.wfile(f, "w").write(t)
1441 util.set_exec(self.wjoin(f), mf2[f])
1441 util.set_exec(self.wjoin(f), mf2[f])
1442 if moddirstate:
1442 if moddirstate:
1443 self.dirstate.update([f], mode)
1443 self.dirstate.update([f], mode)
1444
1444
1445 # merge the tricky bits
1445 # merge the tricky bits
1446 files = merge.keys()
1446 files = merge.keys()
1447 files.sort()
1447 files.sort()
1448 for f in files:
1448 for f in files:
1449 self.ui.status("merging %s\n" % f)
1449 self.ui.status("merging %s\n" % f)
1450 m, o, flag = merge[f]
1450 m, o, flag = merge[f]
1451 self.merge3(f, m, o)
1451 self.merge3(f, m, o)
1452 util.set_exec(self.wjoin(f), flag)
1452 util.set_exec(self.wjoin(f), flag)
1453 if moddirstate:
1453 if moddirstate:
1454 self.dirstate.update([f], 'm')
1454 self.dirstate.update([f], 'm')
1455
1455
1456 for f in remove:
1456 for f in remove:
1457 self.ui.note("removing %s\n" % f)
1457 self.ui.note("removing %s\n" % f)
1458 os.unlink(f)
1458 os.unlink(f)
1459 # try removing directories that might now be empty
1459 # try removing directories that might now be empty
1460 try: os.removedirs(os.path.dirname(f))
1460 try: os.removedirs(os.path.dirname(f))
1461 except: pass
1461 except: pass
1462 if moddirstate:
1462 if moddirstate:
1463 if mode == 'n':
1463 if mode == 'n':
1464 self.dirstate.forget(remove)
1464 self.dirstate.forget(remove)
1465 else:
1465 else:
1466 self.dirstate.update(remove, 'r')
1466 self.dirstate.update(remove, 'r')
1467
1467
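# Illustrative sketch (standalone): the executable-bit merge rule used in the
# manifest comparison above -- "if we changed or they changed, change in
# merge" -- spelled out as a truth table over the ancestor (a), working
# directory (b) and remote (c) flags.
def merge_exec_bit(a, b, c):
    return ((a ^ b) | (a ^ c)) ^ a

for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            # whichever side differs from the ancestor wins; if both differ
            # they agree on flipping the bit, so the result is "not a"
            want = b if b != a else c
            assert merge_exec_bit(a, b, c) == want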
1468 def merge3(self, fn, my, other):
1468 def merge3(self, fn, my, other):
1469 """perform a 3-way merge in the working directory"""
1469 """perform a 3-way merge in the working directory"""
1470
1470
1471 def temp(prefix, node):
1471 def temp(prefix, node):
1472 pre = "%s~%s." % (os.path.basename(fn), prefix)
1472 pre = "%s~%s." % (os.path.basename(fn), prefix)
1473 (fd, name) = tempfile.mkstemp("", pre)
1473 (fd, name) = tempfile.mkstemp("", pre)
1474 f = os.fdopen(fd, "wb")
1474 f = os.fdopen(fd, "wb")
1475 f.write(fl.revision(node))
1475 f.write(fl.revision(node))
1476 f.close()
1476 f.close()
1477 return name
1477 return name
1478
1478
1479 fl = self.file(fn)
1479 fl = self.file(fn)
1480 base = fl.ancestor(my, other)
1480 base = fl.ancestor(my, other)
1481 a = self.wjoin(fn)
1481 a = self.wjoin(fn)
1482 b = temp("base", base)
1482 b = temp("base", base)
1483 c = temp("other", other)
1483 c = temp("other", other)
1484
1484
1485 self.ui.note("resolving %s\n" % fn)
1485 self.ui.note("resolving %s\n" % fn)
1486 self.ui.debug("file %s: other %s ancestor %s\n" %
1486 self.ui.debug("file %s: other %s ancestor %s\n" %
1487 (fn, short(other), short(base)))
1487 (fn, short(other), short(base)))
1488
1488
1489 cmd = self.ui.config("ui", "merge") or \
1489 cmd = self.ui.config("ui", "merge") or \
1490 os.environ.get("HGMERGE", "hgmerge")
1490 os.environ.get("HGMERGE", "hgmerge")
1491 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1491 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1492 if r:
1492 if r:
1493 self.ui.warn("merging %s failed!\n" % fn)
1493 self.ui.warn("merging %s failed!\n" % fn)
1494
1494
1495 os.unlink(b)
1495 os.unlink(b)
1496 os.unlink(c)
1496 os.unlink(c)
1497
1497
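# Illustrative sketch (tool names are hypothetical): merge3() picks its
# external merge command in this order -- the [ui] merge setting, the HGMERGE
# environment variable, then the "hgmerge" script -- and runs it as
# "<cmd> <local> <base> <other>", treating a non-zero exit status as failure.
def pick_merge_cmd(config_value, env):
    return config_value or env.get("HGMERGE", "hgmerge")

assert pick_merge_cmd("kdiff3", {}) == "kdiff3"
assert pick_merge_cmd(None, {"HGMERGE": "merge"}) == "merge"
assert pick_merge_cmd(None, {}) == "hgmerge"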
1498 def verify(self):
1498 def verify(self):
1499 filelinkrevs = {}
1499 filelinkrevs = {}
1500 filenodes = {}
1500 filenodes = {}
1501 changesets = revisions = files = 0
1501 changesets = revisions = files = 0
1502 errors = 0
1502 errors = 0
1503
1503
1504 seen = {}
1504 seen = {}
1505 self.ui.status("checking changesets\n")
1505 self.ui.status("checking changesets\n")
1506 for i in range(self.changelog.count()):
1506 for i in range(self.changelog.count()):
1507 changesets += 1
1507 changesets += 1
1508 n = self.changelog.node(i)
1508 n = self.changelog.node(i)
1509 if n in seen:
1509 if n in seen:
1510 self.ui.warn("duplicate changeset at revision %d\n" % i)
1510 self.ui.warn("duplicate changeset at revision %d\n" % i)
1511 errors += 1
1511 errors += 1
1512 seen[n] = 1
1512 seen[n] = 1
1513
1513
1514 for p in self.changelog.parents(n):
1514 for p in self.changelog.parents(n):
1515 if p not in self.changelog.nodemap:
1515 if p not in self.changelog.nodemap:
1516 self.ui.warn("changeset %s has unknown parent %s\n" %
1516 self.ui.warn("changeset %s has unknown parent %s\n" %
1517 (short(n), short(p)))
1517 (short(n), short(p)))
1518 errors += 1
1518 errors += 1
1519 try:
1519 try:
1520 changes = self.changelog.read(n)
1520 changes = self.changelog.read(n)
1521 except Exception, inst:
1521 except Exception, inst:
1522 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1522 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1523 errors += 1
1523 errors += 1
1524
1524
1525 for f in changes[3]:
1525 for f in changes[3]:
1526 filelinkrevs.setdefault(f, []).append(i)
1526 filelinkrevs.setdefault(f, []).append(i)
1527
1527
1528 seen = {}
1528 seen = {}
1529 self.ui.status("checking manifests\n")
1529 self.ui.status("checking manifests\n")
1530 for i in range(self.manifest.count()):
1530 for i in range(self.manifest.count()):
1531 n = self.manifest.node(i)
1531 n = self.manifest.node(i)
1532 if n in seen:
1532 if n in seen:
1533 self.ui.warn("duplicate manifest at revision %d\n" % i)
1533 self.ui.warn("duplicate manifest at revision %d\n" % i)
1534 errors += 1
1534 errors += 1
1535 seen[n] = 1
1535 seen[n] = 1
1536
1536
1537 for p in self.manifest.parents(n):
1537 for p in self.manifest.parents(n):
1538 if p not in self.manifest.nodemap:
1538 if p not in self.manifest.nodemap:
1539 self.ui.warn("manifest %s has unknown parent %s\n" %
1539 self.ui.warn("manifest %s has unknown parent %s\n" %
1540 (short(n), short(p)))
1540 (short(n), short(p)))
1541 errors += 1
1541 errors += 1
1542
1542
1543 try:
1543 try:
1544 delta = mdiff.patchtext(self.manifest.delta(n))
1544 delta = mdiff.patchtext(self.manifest.delta(n))
1545 except KeyboardInterrupt:
1545 except KeyboardInterrupt:
1546 self.ui.warn("aborted")
1546 self.ui.warn("aborted")
1547 sys.exit(0)
1547 sys.exit(0)
1548 except Exception, inst:
1548 except Exception, inst:
1549 self.ui.warn("unpacking manifest %s: %s\n"
1549 self.ui.warn("unpacking manifest %s: %s\n"
1550 % (short(n), inst))
1550 % (short(n), inst))
1551 errors += 1
1551 errors += 1
1552
1552
1553 ff = [ l.split('\0') for l in delta.splitlines() ]
1553 ff = [ l.split('\0') for l in delta.splitlines() ]
1554 for f, fn in ff:
1554 for f, fn in ff:
1555 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1555 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1556
1556
1557 self.ui.status("crosschecking files in changesets and manifests\n")
1557 self.ui.status("crosschecking files in changesets and manifests\n")
1558 for f in filenodes:
1558 for f in filenodes:
1559 if f not in filelinkrevs:
1559 if f not in filelinkrevs:
1560 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1560 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1561 errors += 1
1561 errors += 1
1562
1562
1563 for f in filelinkrevs:
1563 for f in filelinkrevs:
1564 if f not in filenodes:
1564 if f not in filenodes:
1565 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1565 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1566 errors += 1
1566 errors += 1
1567
1567
1568 self.ui.status("checking files\n")
1568 self.ui.status("checking files\n")
1569 ff = filenodes.keys()
1569 ff = filenodes.keys()
1570 ff.sort()
1570 ff.sort()
1571 for f in ff:
1571 for f in ff:
1572 if f == "/dev/null": continue
1572 if f == "/dev/null": continue
1573 files += 1
1573 files += 1
1574 fl = self.file(f)
1574 fl = self.file(f)
1575 nodes = { nullid: 1 }
1575 nodes = { nullid: 1 }
1576 seen = {}
1576 seen = {}
1577 for i in range(fl.count()):
1577 for i in range(fl.count()):
1578 revisions += 1
1578 revisions += 1
1579 n = fl.node(i)
1579 n = fl.node(i)
1580
1580
1581 if n in seen:
1581 if n in seen:
1582 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1582 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1583 errors += 1
1583 errors += 1
1584
1584
1585 if n not in filenodes[f]:
1585 if n not in filenodes[f]:
1586 self.ui.warn("%s: %d:%s not in manifests\n"
1586 self.ui.warn("%s: %d:%s not in manifests\n"
1587 % (f, i, short(n)))
1587 % (f, i, short(n)))
1588 errors += 1
1588 errors += 1
1589 else:
1589 else:
1590 del filenodes[f][n]
1590 del filenodes[f][n]
1591
1591
1592 flr = fl.linkrev(n)
1592 flr = fl.linkrev(n)
1593 if flr not in filelinkrevs[f]:
1593 if flr not in filelinkrevs[f]:
1594 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1594 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1595 % (f, short(n), fl.linkrev(n)))
1595 % (f, short(n), fl.linkrev(n)))
1596 errors += 1
1596 errors += 1
1597 else:
1597 else:
1598 filelinkrevs[f].remove(flr)
1598 filelinkrevs[f].remove(flr)
1599
1599
1600 # verify contents
1600 # verify contents
1601 try:
1601 try:
1602 t = fl.read(n)
1602 t = fl.read(n)
1603 except Exception, inst:
1603 except Exception, inst:
1604 self.ui.warn("unpacking file %s %s: %s\n"
1604 self.ui.warn("unpacking file %s %s: %s\n"
1605 % (f, short(n), inst))
1605 % (f, short(n), inst))
1606 errors += 1
1606 errors += 1
1607
1607
1608 # verify parents
1608 # verify parents
1609 (p1, p2) = fl.parents(n)
1609 (p1, p2) = fl.parents(n)
1610 if p1 not in nodes:
1610 if p1 not in nodes:
1611 self.ui.warn("file %s:%s unknown parent 1 %s" %
1611 self.ui.warn("file %s:%s unknown parent 1 %s" %
1612 (f, short(n), short(p1)))
1612 (f, short(n), short(p1)))
1613 errors += 1
1613 errors += 1
1614 if p2 not in nodes:
1614 if p2 not in nodes:
1615 self.ui.warn("file %s:%s unknown parent 2 %s" %
1615 self.ui.warn("file %s:%s unknown parent 2 %s" %
1616 (f, short(n), short(p2)))
1616 (f, short(n), short(p2)))
1617 errors += 1
1617 errors += 1
1618 nodes[n] = 1
1618 nodes[n] = 1
1619
1619
1620 # cross-check
1620 # cross-check
1621 for node in filenodes[f]:
1621 for node in filenodes[f]:
1622 self.ui.warn("node %s in manifests not in %s\n"
1622 self.ui.warn("node %s in manifests not in %s\n"
1623 % (hex(node), f))
1623 % (hex(node), f))
1624 errors += 1
1624 errors += 1
1625
1625
1626 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1626 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1627 (files, changesets, revisions))
1627 (files, changesets, revisions))
1628
1628
1629 if errors:
1629 if errors:
1630 self.ui.warn("%d integrity errors encountered!\n" % errors)
1630 self.ui.warn("%d integrity errors encountered!\n" % errors)
1631 return 1
1631 return 1
1632
1632
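# Illustrative sketch (made-up values): the two indexes verify() builds before
# walking the filelogs.  filelinkrevs maps a file name to the changelog
# revisions that claim to touch it; filenodes maps a file name to the file
# nodes announced by the manifests.  Both are drained during the file walk,
# so anything left over afterwards is an integrity error.
filelinkrevs = {"hg.py": [0, 2, 5]}                    # from changelog entries
filenodes = {"hg.py": {"<node-a>": 1, "<node-b>": 1}}  # from manifest deltas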
1633 class httprepository:
1633 class httprepository:
1634 def __init__(self, ui, path):
1634 def __init__(self, ui, path):
1635 self.url = path
1635 self.url = path
1636 self.ui = ui
1636 self.ui = ui
1637 no_list = [ "localhost", "127.0.0.1" ]
1637 no_list = [ "localhost", "127.0.0.1" ]
1638 host = ui.config("http_proxy", "host")
1638 host = ui.config("http_proxy", "host")
1639 if host is None:
1639 if host is None:
1640 host = os.environ.get("http_proxy")
1640 host = os.environ.get("http_proxy")
1641 if host and host.startswith('http://'):
1641 if host and host.startswith('http://'):
1642 host = host[7:]
1642 host = host[7:]
1643 user = ui.config("http_proxy", "user")
1643 user = ui.config("http_proxy", "user")
1644 passwd = ui.config("http_proxy", "passwd")
1644 passwd = ui.config("http_proxy", "passwd")
1645 no = ui.config("http_proxy", "no")
1645 no = ui.config("http_proxy", "no")
1646 if no is None:
1646 if no is None:
1647 no = os.environ.get("no_proxy")
1647 no = os.environ.get("no_proxy")
1648 if no:
1648 if no:
1649 no_list = no_list + no.split(",")
1649 no_list = no_list + no.split(",")
1650
1650
1651 no_proxy = 0
1651 no_proxy = 0
1652 for h in no_list:
1652 for h in no_list:
1653 if (path.startswith("http://" + h + "/") or
1653 if (path.startswith("http://" + h + "/") or
1654 path.startswith("http://" + h + ":") or
1654 path.startswith("http://" + h + ":") or
1655 path == "http://" + h):
1655 path == "http://" + h):
1656 no_proxy = 1
1656 no_proxy = 1
1657
1657
1658 # Note: urllib2 takes proxy values from the environment and those will
1658 # Note: urllib2 takes proxy values from the environment and those will
1659 # take precedence
1659 # take precedence
1660 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1660 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1661 if os.environ.has_key(env):
1661 if os.environ.has_key(env):
1662 del os.environ[env]
1662 del os.environ[env]
1663
1663
1664 proxy_handler = urllib2.BaseHandler()
1664 proxy_handler = urllib2.BaseHandler()
1665 if host and not no_proxy:
1665 if host and not no_proxy:
1666 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1666 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1667
1667
1668 authinfo = None
1668 authinfo = None
1669 if user and passwd:
1669 if user and passwd:
1670 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1670 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1671 passmgr.add_password(None, host, user, passwd)
1671 passmgr.add_password(None, host, user, passwd)
1672 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1672 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1673
1673
1674 opener = urllib2.build_opener(proxy_handler, authinfo)
1674 opener = urllib2.build_opener(proxy_handler, authinfo)
1675 urllib2.install_opener(opener)
1675 urllib2.install_opener(opener)
1676
1676
1677 def dev(self):
1677 def dev(self):
1678 return -1
1678 return -1
1679
1679
1680 def do_cmd(self, cmd, **args):
1680 def do_cmd(self, cmd, **args):
1681 self.ui.debug("sending %s command\n" % cmd)
1681 self.ui.debug("sending %s command\n" % cmd)
1682 q = {"cmd": cmd}
1682 q = {"cmd": cmd}
1683 q.update(args)
1683 q.update(args)
1684 qs = urllib.urlencode(q)
1684 qs = urllib.urlencode(q)
1685 cu = "%s?%s" % (self.url, qs)
1685 cu = "%s?%s" % (self.url, qs)
1686 return urllib2.urlopen(cu)
1686 return urllib2.urlopen(cu)
1687
1687
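# Illustrative sketch (the repository URL is made up): do_cmd() turns each
# wire command into a query string appended to the repository URL, so a
# "branches" request for one node looks like this (a list of pairs is used
# here only to keep the parameter order deterministic).
import urllib

params = urllib.urlencode([("cmd", "branches"), ("nodes", "a" * 40)])
url = "http://example.com/hg/project" + "?" + params
assert url.startswith("http://example.com/hg/project?cmd=branches&nodes=aaaa")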
1688 def heads(self):
1688 def heads(self):
1689 d = self.do_cmd("heads").read()
1689 d = self.do_cmd("heads").read()
1690 try:
1690 try:
1691 return map(bin, d[:-1].split(" "))
1691 return map(bin, d[:-1].split(" "))
1692 except:
1692 except:
1693 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1693 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1694 raise
1694 raise
1695
1695
1696 def branches(self, nodes):
1696 def branches(self, nodes):
1697 n = " ".join(map(hex, nodes))
1697 n = " ".join(map(hex, nodes))
1698 d = self.do_cmd("branches", nodes=n).read()
1698 d = self.do_cmd("branches", nodes=n).read()
1699 try:
1699 try:
1700 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1700 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1701 return br
1701 return br
1702 except:
1702 except:
1703 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1703 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1704 raise
1704 raise
1705
1705
1706 def between(self, pairs):
1706 def between(self, pairs):
1707 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1707 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1708 d = self.do_cmd("between", pairs=n).read()
1708 d = self.do_cmd("between", pairs=n).read()
1709 try:
1709 try:
1710 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1710 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1711 return p
1711 return p
1712 except:
1712 except:
1713 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1713 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1714 raise
1714 raise
1715
1715
1716 def changegroup(self, nodes):
1716 def changegroup(self, nodes):
1717 n = " ".join(map(hex, nodes))
1717 n = " ".join(map(hex, nodes))
1718 f = self.do_cmd("changegroup", roots=n)
1718 f = self.do_cmd("changegroup", roots=n)
1719 bytes = 0
1719 bytes = 0
1720
1720
1721 class zread:
1721 class zread:
1722 def __init__(self, f):
1722 def __init__(self, f):
1723 self.zd = zlib.decompressobj()
1723 self.zd = zlib.decompressobj()
1724 self.f = f
1724 self.f = f
1725 self.buf = ""
1725 self.buf = ""
1726 def read(self, l):
1726 def read(self, l):
1727 while l > len(self.buf):
1727 while l > len(self.buf):
1728 r = self.f.read(4096)
1728 r = self.f.read(4096)
1729 if r:
1729 if r:
1730 self.buf += self.zd.decompress(r)
1730 self.buf += self.zd.decompress(r)
1731 else:
1731 else:
1732 self.buf += self.zd.flush()
1732 self.buf += self.zd.flush()
1733 break
1733 break
1734 d, self.buf = self.buf[:l], self.buf[l:]
1734 d, self.buf = self.buf[:l], self.buf[l:]
1735 return d
1735 return d
1736
1736
1737 return zread(f)
1737 return zread(f)
1738
1738
1739 class remotelock:
1739 class remotelock:
1740 def __init__(self, repo):
1740 def __init__(self, repo):
1741 self.repo = repo
1741 self.repo = repo
1742 def release(self):
1742 def release(self):
1743 self.repo.unlock()
1743 self.repo.unlock()
1744 self.repo = None
1744 self.repo = None
1745 def __del__(self):
1745 def __del__(self):
1746 if self.repo:
1746 if self.repo:
1747 self.release()
1747 self.release()
1748
1748
1749 class sshrepository:
1749 class sshrepository:
1750 def __init__(self, ui, path):
1750 def __init__(self, ui, path):
1751 self.url = path
1751 self.url = path
1752 self.ui = ui
1752 self.ui = ui
1753
1753
1754 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1754 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1755 if not m:
1755 if not m:
1756 raise RepoError("couldn't parse destination %s\n" % path)
1756 raise RepoError("couldn't parse destination %s\n" % path)
1757
1757
1758 self.user = m.group(2)
1758 self.user = m.group(2)
1759 self.host = m.group(3)
1759 self.host = m.group(3)
1760 self.port = m.group(5)
1760 self.port = m.group(5)
1761 self.path = m.group(7)
1761 self.path = m.group(7)
1762
1762
1763 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1763 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1764 args = self.port and ("%s -p %s") % (args, self.port) or args
1764 args = self.port and ("%s -p %s") % (args, self.port) or args
1765 path = self.path or ""
1765 path = self.path or ""
1766
1766
1767 cmd = "ssh %s 'hg -R %s serve --stdio'"
1767 cmd = "ssh %s 'hg -R %s serve --stdio'"
1768 cmd = cmd % (args, path)
1768 cmd = cmd % (args, path)
1769
1769
1770 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1770 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1771
1771
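# Illustrative sketch (user, host, port and path are made up): the regular
# expression above splits an ssh:// URL into its parts; the group numbers
# match the m.group() calls in __init__.
import re

m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?',
             "ssh://hg@example.com:2222/repos/project")
assert m.group(2) == "hg"               # user
assert m.group(3) == "example.com"      # host
assert m.group(5) == "2222"             # port
assert m.group(7) == "repos/project"    # path, without the leading slash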
1772 def readerr(self):
1772 def readerr(self):
1773 while 1:
1773 while 1:
1774 r,w,x = select.select([self.pipee], [], [], 0)
1774 r,w,x = select.select([self.pipee], [], [], 0)
1775 if not r: break
1775 if not r: break
1776 l = self.pipee.readline()
1776 l = self.pipee.readline()
1777 if not l: break
1777 if not l: break
1778 self.ui.status("remote: ", l)
1778 self.ui.status("remote: ", l)
1779
1779
1780 def __del__(self):
1780 def __del__(self):
1781 self.pipeo.close()
1781 self.pipeo.close()
1782 self.pipei.close()
1782 self.pipei.close()
1783 for l in self.pipee:
1783 for l in self.pipee:
1784 self.ui.status("remote: ", l)
1784 self.ui.status("remote: ", l)
1785 self.pipee.close()
1785 self.pipee.close()
1786
1786
1787 def dev(self):
1787 def dev(self):
1788 return -1
1788 return -1
1789
1789
1790 def do_cmd(self, cmd, **args):
1790 def do_cmd(self, cmd, **args):
1791 self.ui.debug("sending %s command\n" % cmd)
1791 self.ui.debug("sending %s command\n" % cmd)
1792 self.pipeo.write("%s\n" % cmd)
1792 self.pipeo.write("%s\n" % cmd)
1793 for k, v in args.items():
1793 for k, v in args.items():
1794 self.pipeo.write("%s %d\n" % (k, len(v)))
1794 self.pipeo.write("%s %d\n" % (k, len(v)))
1795 self.pipeo.write(v)
1795 self.pipeo.write(v)
1796 self.pipeo.flush()
1796 self.pipeo.flush()
1797
1797
1798 return self.pipei
1798 return self.pipei
1799
1799
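# Illustrative sketch (standalone): the framing do_cmd() writes to the remote
# "hg serve --stdio" process -- the command name on its own line, then one
# "key length" line per argument followed by the raw argument bytes.
def frame_command(cmd, **args):
    out = "%s\n" % cmd
    for k, v in args.items():
        out += "%s %d\n" % (k, len(v))
        out += v
    return out

assert frame_command("heads") == "heads\n"
assert frame_command("branches", nodes="abcd") == "branches\nnodes 4\nabcd"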
1800 def call(self, cmd, **args):
1800 def call(self, cmd, **args):
1801 r = self.do_cmd(cmd, **args)
1801 r = self.do_cmd(cmd, **args)
1802 l = r.readline()
1802 l = r.readline()
1803 self.readerr()
1803 self.readerr()
1804 try:
1804 try:
1805 l = int(l)
1805 l = int(l)
1806 except:
1806 except:
1807 raise RepoError("unexpected response '%s'" % l)
1807 raise RepoError("unexpected response '%s'" % l)
1808 return r.read(l)
1808 return r.read(l)
1809
1809
1810 def lock(self):
1810 def lock(self):
1811 self.call("lock")
1811 self.call("lock")
1812 return remotelock(self)
1812 return remotelock(self)
1813
1813
1814 def unlock(self):
1814 def unlock(self):
1815 self.call("unlock")
1815 self.call("unlock")
1816
1816
1817 def heads(self):
1817 def heads(self):
1818 d = self.call("heads")
1818 d = self.call("heads")
1819 try:
1819 try:
1820 return map(bin, d[:-1].split(" "))
1820 return map(bin, d[:-1].split(" "))
1821 except:
1821 except:
1822 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1822 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1823
1823
1824 def branches(self, nodes):
1824 def branches(self, nodes):
1825 n = " ".join(map(hex, nodes))
1825 n = " ".join(map(hex, nodes))
1826 d = self.call("branches", nodes=n)
1826 d = self.call("branches", nodes=n)
1827 try:
1827 try:
1828 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1828 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1829 return br
1829 return br
1830 except:
1830 except:
1831 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1831 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1832
1832
1833 def between(self, pairs):
1833 def between(self, pairs):
1834 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1834 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1835 d = self.call("between", pairs=n)
1835 d = self.call("between", pairs=n)
1836 try:
1836 try:
1837 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1837 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1838 return p
1838 return p
1839 except:
1839 except:
1840 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1840 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1841
1841
1842 def changegroup(self, nodes):
1842 def changegroup(self, nodes):
1843 n = " ".join(map(hex, nodes))
1843 n = " ".join(map(hex, nodes))
1844 f = self.do_cmd("changegroup", roots=n)
1844 f = self.do_cmd("changegroup", roots=n)
1845 return self.pipei
1845 return self.pipei
1846
1846
1847 def addchangegroup(self, cg):
1847 def addchangegroup(self, cg):
1848 d = self.call("addchangegroup")
1848 d = self.call("addchangegroup")
1849 if d:
1849 if d:
1850 raise RepoError("push refused: %s" % d)
1850 raise RepoError("push refused: %s" % d)
1851
1851
1852 while 1:
1852 while 1:
1853 d = cg.read(4096)
1853 d = cg.read(4096)
1854 if not d: break
1854 if not d: break
1855 self.pipeo.write(d)
1855 self.pipeo.write(d)
1856 self.readerr()
1856 self.readerr()
1857
1857
1858 self.pipeo.flush()
1858 self.pipeo.flush()
1859
1859
1860 self.readerr()
1860 self.readerr()
1861 l = int(self.pipei.readline())
1861 l = int(self.pipei.readline())
1862 return self.pipei.read(l) != ""
1862 return self.pipei.read(l) != ""
1863
1863
1864 def repository(ui, path=None, create=0):
1864 def repository(ui, path=None, create=0):
1865 if path:
1865 if path:
1866 if path.startswith("http://"):
1866 if path.startswith("http://"):
1867 return httprepository(ui, path)
1867 return httprepository(ui, path)
1868 if path.startswith("hg://"):
1868 if path.startswith("hg://"):
1869 return httprepository(ui, path.replace("hg://", "http://"))
1869 return httprepository(ui, path.replace("hg://", "http://"))
1870 if path.startswith("old-http://"):
1870 if path.startswith("old-http://"):
1871 return localrepository(ui, path.replace("old-http://", "http://"))
1871 return localrepository(ui, path.replace("old-http://", "http://"))
1872 if path.startswith("ssh://"):
1872 if path.startswith("ssh://"):
1873 return sshrepository(ui, path)
1873 return sshrepository(ui, path)
1874
1874
1875 return localrepository(ui, path, create)
1875 return localrepository(ui, path, create)
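# Illustrative summary (URLs are made up): how the scheme prefix above picks
# the repository class.
#
#   repository(ui, "http://example.com/hg/proj")      -> httprepository
#   repository(ui, "hg://example.com/hg/proj")        -> httprepository
#   repository(ui, "old-http://example.com/hg/proj")  -> localrepository
#                                                        (path rewritten to http://)
#   repository(ui, "ssh://user@example.com/proj")     -> sshrepository
#   repository(ui, "/home/user/proj")                 -> localrepository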