Generate a friendlier exception for failed lookups...
Matt Mackall
r658:f8098ae9 default
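The change below wraps the fallback to changelog.lookup() inside localrepository.lookup() in a try/except, so a failed lookup is reported as RepoError("unknown revision '...'") instead of letting whatever low-level exception the changelog raises escape to the user. A minimal sketch of a hypothetical caller (not part of this changeset) relying on the new behaviour:

    # hypothetical caller; 'ui' is any ui instance, 'path' a repository path
    repo = localrepository(ui, path)
    try:
        node = repo.lookup("no-such-tag-or-rev")
    except RepoError, inst:   # Python 2 syntax, matching this codebase
        ui.warn("abort: %s\n" % inst)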
@@ -1,1875 +1,1878 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff")
13 demandload(globals(), "tempfile httprangereader bdiff")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", path + ".i"),
19 os.path.join("data", path + ".i"),
20 os.path.join("data", path + ".d"))
20 os.path.join("data", path + ".d"))
21
21
22 def read(self, node):
22 def read(self, node):
23 t = self.revision(node)
23 t = self.revision(node)
24 if t[:2] != '\1\n':
24 if t[:2] != '\1\n':
25 return t
25 return t
26 s = t.find('\1\n', 2)
26 s = t.find('\1\n', 2)
27 return t[s+2:]
27 return t[s+2:]
28
28
29 def readmeta(self, node):
29 def readmeta(self, node):
30 t = self.revision(node)
30 t = self.revision(node)
31 if t[:2] != '\1\n':
31 if t[:2] != '\1\n':
32 return t
32 return t
33 s = t.find('\1\n', 2)
33 s = t.find('\1\n', 2)
34 mt = t[2:s]
34 mt = t[2:s]
35 for l in mt.splitlines():
35 for l in mt.splitlines():
36 k, v = l.split(": ", 1)
36 k, v = l.split(": ", 1)
37 m[k] = v
37 m[k] = v
38 return m
38 return m
39
39
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
41 if meta or text[:2] == '\1\n':
41 if meta or text[:2] == '\1\n':
42 mt = ""
42 mt = ""
43 if meta:
43 if meta:
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
45 text = "\1\n" + "".join(mt) + "\1\n" + text
45 text = "\1\n" + "".join(mt) + "\1\n" + text
46 return self.addrevision(text, transaction, link, p1, p2)
46 return self.addrevision(text, transaction, link, p1, p2)
47
47
48 def annotate(self, node):
48 def annotate(self, node):
49
49
50 def decorate(text, rev):
50 def decorate(text, rev):
51 return ([rev] * len(text.splitlines()), text)
51 return ([rev] * len(text.splitlines()), text)
52
52
53 def pair(parent, child):
53 def pair(parent, child):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
55 child[0][b1:b2] = parent[0][a1:a2]
55 child[0][b1:b2] = parent[0][a1:a2]
56 return child
56 return child
57
57
58 # find all ancestors
58 # find all ancestors
59 needed = {node:1}
59 needed = {node:1}
60 visit = [node]
60 visit = [node]
61 while visit:
61 while visit:
62 n = visit.pop(0)
62 n = visit.pop(0)
63 for p in self.parents(n):
63 for p in self.parents(n):
64 if p not in needed:
64 if p not in needed:
65 needed[p] = 1
65 needed[p] = 1
66 visit.append(p)
66 visit.append(p)
67 else:
67 else:
68 # count how many times we'll use this
68 # count how many times we'll use this
69 needed[p] += 1
69 needed[p] += 1
70
70
71 # sort by revision which is a topological order
71 # sort by revision which is a topological order
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
73 visit.sort()
73 visit.sort()
74 hist = {}
74 hist = {}
75
75
76 for r,n in visit:
76 for r,n in visit:
77 curr = decorate(self.read(n), self.linkrev(n))
77 curr = decorate(self.read(n), self.linkrev(n))
78 for p in self.parents(n):
78 for p in self.parents(n):
79 if p != nullid:
79 if p != nullid:
80 curr = pair(hist[p], curr)
80 curr = pair(hist[p], curr)
81 # trim the history of unneeded revs
81 # trim the history of unneeded revs
82 needed[p] -= 1
82 needed[p] -= 1
83 if not needed[p]:
83 if not needed[p]:
84 del hist[p]
84 del hist[p]
85 hist[n] = curr
85 hist[n] = curr
86
86
87 return zip(hist[n][0], hist[n][1].splitlines(1))
87 return zip(hist[n][0], hist[n][1].splitlines(1))
88
88
89 class manifest(revlog):
89 class manifest(revlog):
90 def __init__(self, opener):
90 def __init__(self, opener):
91 self.mapcache = None
91 self.mapcache = None
92 self.listcache = None
92 self.listcache = None
93 self.addlist = None
93 self.addlist = None
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
95
95
96 def read(self, node):
96 def read(self, node):
97 if node == nullid: return {} # don't upset local cache
97 if node == nullid: return {} # don't upset local cache
98 if self.mapcache and self.mapcache[0] == node:
98 if self.mapcache and self.mapcache[0] == node:
99 return self.mapcache[1]
99 return self.mapcache[1]
100 text = self.revision(node)
100 text = self.revision(node)
101 map = {}
101 map = {}
102 flag = {}
102 flag = {}
103 self.listcache = (text, text.splitlines(1))
103 self.listcache = (text, text.splitlines(1))
104 for l in self.listcache[1]:
104 for l in self.listcache[1]:
105 (f, n) = l.split('\0')
105 (f, n) = l.split('\0')
106 map[f] = bin(n[:40])
106 map[f] = bin(n[:40])
107 flag[f] = (n[40:-1] == "x")
107 flag[f] = (n[40:-1] == "x")
108 self.mapcache = (node, map, flag)
108 self.mapcache = (node, map, flag)
109 return map
109 return map
110
110
111 def readflags(self, node):
111 def readflags(self, node):
112 if node == nullid: return {} # don't upset local cache
112 if node == nullid: return {} # don't upset local cache
113 if not self.mapcache or self.mapcache[0] != node:
113 if not self.mapcache or self.mapcache[0] != node:
114 self.read(node)
114 self.read(node)
115 return self.mapcache[2]
115 return self.mapcache[2]
116
116
117 def diff(self, a, b):
117 def diff(self, a, b):
118 # this is sneaky, as we're not actually using a and b
118 # this is sneaky, as we're not actually using a and b
119 if self.listcache and self.addlist and self.listcache[0] == a:
119 if self.listcache and self.addlist and self.listcache[0] == a:
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
121 if mdiff.patch(a, d) != b:
121 if mdiff.patch(a, d) != b:
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
123 return mdiff.textdiff(a, b)
123 return mdiff.textdiff(a, b)
124 return d
124 return d
125 else:
125 else:
126 return mdiff.textdiff(a, b)
126 return mdiff.textdiff(a, b)
127
127
128 def add(self, map, flags, transaction, link, p1=None, p2=None,changed=None):
128 def add(self, map, flags, transaction, link, p1=None, p2=None,changed=None):
129 # directly generate the mdiff delta from the data collected during
129 # directly generate the mdiff delta from the data collected during
130 # the bisect loop below
130 # the bisect loop below
131 def gendelta(delta):
131 def gendelta(delta):
132 i = 0
132 i = 0
133 result = []
133 result = []
134 while i < len(delta):
134 while i < len(delta):
135 start = delta[i][2]
135 start = delta[i][2]
136 end = delta[i][3]
136 end = delta[i][3]
137 l = delta[i][4]
137 l = delta[i][4]
138 if l == None:
138 if l == None:
139 l = ""
139 l = ""
140 while i < len(delta) - 1 and start <= delta[i+1][2] and end >= delta[i+1][2]:
140 while i < len(delta) - 1 and start <= delta[i+1][2] and end >= delta[i+1][2]:
141 if delta[i+1][3] > end:
141 if delta[i+1][3] > end:
142 end = delta[i+1][3]
142 end = delta[i+1][3]
143 if delta[i+1][4]:
143 if delta[i+1][4]:
144 l += delta[i+1][4]
144 l += delta[i+1][4]
145 i += 1
145 i += 1
146 result.append(struct.pack(">lll", start, end, len(l)) + l)
146 result.append(struct.pack(">lll", start, end, len(l)) + l)
147 i += 1
147 i += 1
148 return result
148 return result
149
149
150 # apply the changes collected during the bisect loop to our addlist
150 # apply the changes collected during the bisect loop to our addlist
151 def addlistdelta(addlist, delta):
151 def addlistdelta(addlist, delta):
152 # apply the deltas to the addlist. start from the bottom up
152 # apply the deltas to the addlist. start from the bottom up
153 # so changes to the offsets don't mess things up.
153 # so changes to the offsets don't mess things up.
154 i = len(delta)
154 i = len(delta)
155 while i > 0:
155 while i > 0:
156 i -= 1
156 i -= 1
157 start = delta[i][0]
157 start = delta[i][0]
158 end = delta[i][1]
158 end = delta[i][1]
159 if delta[i][4]:
159 if delta[i][4]:
160 addlist[start:end] = [delta[i][4]]
160 addlist[start:end] = [delta[i][4]]
161 else:
161 else:
162 del addlist[start:end]
162 del addlist[start:end]
163 return addlist
163 return addlist
164
164
165 # calculate the byte offset of the start of each line in the
165 # calculate the byte offset of the start of each line in the
166 # manifest
166 # manifest
167 def calcoffsets(addlist):
167 def calcoffsets(addlist):
168 offsets = [0] * (len(addlist) + 1)
168 offsets = [0] * (len(addlist) + 1)
169 offset = 0
169 offset = 0
170 i = 0
170 i = 0
171 while i < len(addlist):
171 while i < len(addlist):
172 offsets[i] = offset
172 offsets[i] = offset
173 offset += len(addlist[i])
173 offset += len(addlist[i])
174 i += 1
174 i += 1
175 offsets[i] = offset
175 offsets[i] = offset
176 return offsets
176 return offsets
177
177
178 # if we're using the listcache, make sure it is valid and
178 # if we're using the listcache, make sure it is valid and
179 # parented by the same node we're diffing against
179 # parented by the same node we're diffing against
180 if not changed or not self.listcache or not p1 or self.mapcache[0] != p1:
180 if not changed or not self.listcache or not p1 or self.mapcache[0] != p1:
181 files = map.keys()
181 files = map.keys()
182 files.sort()
182 files.sort()
183
183
184 self.addlist = ["%s\000%s%s\n" %
184 self.addlist = ["%s\000%s%s\n" %
185 (f, hex(map[f]), flags[f] and "x" or '')
185 (f, hex(map[f]), flags[f] and "x" or '')
186 for f in files]
186 for f in files]
187 cachedelta = None
187 cachedelta = None
188 else:
188 else:
189 addlist = self.listcache[1]
189 addlist = self.listcache[1]
190
190
191 # find the starting offset for each line in the add list
191 # find the starting offset for each line in the add list
192 offsets = calcoffsets(addlist)
192 offsets = calcoffsets(addlist)
193
193
194 # combine the changed lists into one list for sorting
194 # combine the changed lists into one list for sorting
195 work = [[x, 0] for x in changed[0]]
195 work = [[x, 0] for x in changed[0]]
196 work[len(work):] = [[x, 1] for x in changed[1]]
196 work[len(work):] = [[x, 1] for x in changed[1]]
197 work.sort()
197 work.sort()
198
198
199 delta = []
199 delta = []
200 bs = 0
200 bs = 0
201
201
202 for w in work:
202 for w in work:
203 f = w[0]
203 f = w[0]
204 # bs will either be the index of the item or the insertion point
204 # bs will either be the index of the item or the insertion point
205 bs = bisect.bisect(addlist, f, bs)
205 bs = bisect.bisect(addlist, f, bs)
206 if bs < len(addlist):
206 if bs < len(addlist):
207 fn = addlist[bs][:addlist[bs].index('\0')]
207 fn = addlist[bs][:addlist[bs].index('\0')]
208 else:
208 else:
209 fn = None
209 fn = None
210 if w[1] == 0:
210 if w[1] == 0:
211 l = "%s\000%s%s\n" % (f, hex(map[f]), flags[f] and "x" or '')
211 l = "%s\000%s%s\n" % (f, hex(map[f]), flags[f] and "x" or '')
212 else:
212 else:
213 l = None
213 l = None
214 start = bs
214 start = bs
215 if fn != f:
215 if fn != f:
216 # item not found, insert a new one
216 # item not found, insert a new one
217 end = bs
217 end = bs
218 if w[1] == 1:
218 if w[1] == 1:
219 sys.stderr.write("failed to remove %s from manifest" % f)
219 sys.stderr.write("failed to remove %s from manifest" % f)
220 sys.exit(1)
220 sys.exit(1)
221 else:
221 else:
222 # item is found, replace/delete the existing line
222 # item is found, replace/delete the existing line
223 end = bs + 1
223 end = bs + 1
224 delta.append([start, end, offsets[start], offsets[end], l])
224 delta.append([start, end, offsets[start], offsets[end], l])
225
225
226 self.addlist = addlistdelta(addlist, delta)
226 self.addlist = addlistdelta(addlist, delta)
227 if self.mapcache[0] == self.tip():
227 if self.mapcache[0] == self.tip():
228 cachedelta = "".join(gendelta(delta))
228 cachedelta = "".join(gendelta(delta))
229 else:
229 else:
230 cachedelta = None
230 cachedelta = None
231
231
232 text = "".join(self.addlist)
232 text = "".join(self.addlist)
233 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
233 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
234 sys.stderr.write("manifest delta failure")
234 sys.stderr.write("manifest delta failure")
235 sys.exit(1)
235 sys.exit(1)
236 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
236 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
237 self.mapcache = (n, map, flags)
237 self.mapcache = (n, map, flags)
238 self.listcache = (text, self.addlist)
238 self.listcache = (text, self.addlist)
239 self.addlist = None
239 self.addlist = None
240
240
241 return n
241 return n
242
242
243 class changelog(revlog):
243 class changelog(revlog):
244 def __init__(self, opener):
244 def __init__(self, opener):
245 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
245 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
246
246
247 def extract(self, text):
247 def extract(self, text):
248 if not text:
248 if not text:
249 return (nullid, "", "0", [], "")
249 return (nullid, "", "0", [], "")
250 last = text.index("\n\n")
250 last = text.index("\n\n")
251 desc = text[last + 2:]
251 desc = text[last + 2:]
252 l = text[:last].splitlines()
252 l = text[:last].splitlines()
253 manifest = bin(l[0])
253 manifest = bin(l[0])
254 user = l[1]
254 user = l[1]
255 date = l[2]
255 date = l[2]
256 files = l[3:]
256 files = l[3:]
257 return (manifest, user, date, files, desc)
257 return (manifest, user, date, files, desc)
258
258
259 def read(self, node):
259 def read(self, node):
260 return self.extract(self.revision(node))
260 return self.extract(self.revision(node))
261
261
262 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
262 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
263 user=None, date=None):
263 user=None, date=None):
264 date = date or "%d %d" % (time.time(), time.timezone)
264 date = date or "%d %d" % (time.time(), time.timezone)
265 list.sort()
265 list.sort()
266 l = [hex(manifest), user, date] + list + ["", desc]
266 l = [hex(manifest), user, date] + list + ["", desc]
267 text = "\n".join(l)
267 text = "\n".join(l)
268 return self.addrevision(text, transaction, self.count(), p1, p2)
268 return self.addrevision(text, transaction, self.count(), p1, p2)
269
269
270 class dirstate:
270 class dirstate:
271 def __init__(self, opener, ui, root):
271 def __init__(self, opener, ui, root):
272 self.opener = opener
272 self.opener = opener
273 self.root = root
273 self.root = root
274 self.dirty = 0
274 self.dirty = 0
275 self.ui = ui
275 self.ui = ui
276 self.map = None
276 self.map = None
277 self.pl = None
277 self.pl = None
278 self.copies = {}
278 self.copies = {}
279
279
280 def __del__(self):
280 def __del__(self):
281 if self.dirty:
281 if self.dirty:
282 self.write()
282 self.write()
283
283
284 def __getitem__(self, key):
284 def __getitem__(self, key):
285 try:
285 try:
286 return self.map[key]
286 return self.map[key]
287 except TypeError:
287 except TypeError:
288 self.read()
288 self.read()
289 return self[key]
289 return self[key]
290
290
291 def __contains__(self, key):
291 def __contains__(self, key):
292 if not self.map: self.read()
292 if not self.map: self.read()
293 return key in self.map
293 return key in self.map
294
294
295 def parents(self):
295 def parents(self):
296 if not self.pl:
296 if not self.pl:
297 self.read()
297 self.read()
298 return self.pl
298 return self.pl
299
299
300 def setparents(self, p1, p2 = nullid):
300 def setparents(self, p1, p2 = nullid):
301 self.dirty = 1
301 self.dirty = 1
302 self.pl = p1, p2
302 self.pl = p1, p2
303
303
304 def state(self, key):
304 def state(self, key):
305 try:
305 try:
306 return self[key][0]
306 return self[key][0]
307 except KeyError:
307 except KeyError:
308 return "?"
308 return "?"
309
309
310 def read(self):
310 def read(self):
311 if self.map is not None: return self.map
311 if self.map is not None: return self.map
312
312
313 self.map = {}
313 self.map = {}
314 self.pl = [nullid, nullid]
314 self.pl = [nullid, nullid]
315 try:
315 try:
316 st = self.opener("dirstate").read()
316 st = self.opener("dirstate").read()
317 if not st: return
317 if not st: return
318 except: return
318 except: return
319
319
320 self.pl = [st[:20], st[20: 40]]
320 self.pl = [st[:20], st[20: 40]]
321
321
322 pos = 40
322 pos = 40
323 while pos < len(st):
323 while pos < len(st):
324 e = struct.unpack(">cllll", st[pos:pos+17])
324 e = struct.unpack(">cllll", st[pos:pos+17])
325 l = e[4]
325 l = e[4]
326 pos += 17
326 pos += 17
327 f = st[pos:pos + l]
327 f = st[pos:pos + l]
328 if '\0' in f:
328 if '\0' in f:
329 f, c = f.split('\0')
329 f, c = f.split('\0')
330 self.copies[f] = c
330 self.copies[f] = c
331 self.map[f] = e[:4]
331 self.map[f] = e[:4]
332 pos += l
332 pos += l
333
333
334 def copy(self, source, dest):
334 def copy(self, source, dest):
335 self.read()
335 self.read()
336 self.dirty = 1
336 self.dirty = 1
337 self.copies[dest] = source
337 self.copies[dest] = source
338
338
339 def copied(self, file):
339 def copied(self, file):
340 return self.copies.get(file, None)
340 return self.copies.get(file, None)
341
341
342 def update(self, files, state):
342 def update(self, files, state):
343 ''' current states:
343 ''' current states:
344 n normal
344 n normal
345 m needs merging
345 m needs merging
346 r marked for removal
346 r marked for removal
347 a marked for addition'''
347 a marked for addition'''
348
348
349 if not files: return
349 if not files: return
350 self.read()
350 self.read()
351 self.dirty = 1
351 self.dirty = 1
352 for f in files:
352 for f in files:
353 if state == "r":
353 if state == "r":
354 self.map[f] = ('r', 0, 0, 0)
354 self.map[f] = ('r', 0, 0, 0)
355 else:
355 else:
356 s = os.stat(os.path.join(self.root, f))
356 s = os.stat(os.path.join(self.root, f))
357 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
357 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
358
358
359 def forget(self, files):
359 def forget(self, files):
360 if not files: return
360 if not files: return
361 self.read()
361 self.read()
362 self.dirty = 1
362 self.dirty = 1
363 for f in files:
363 for f in files:
364 try:
364 try:
365 del self.map[f]
365 del self.map[f]
366 except KeyError:
366 except KeyError:
367 self.ui.warn("not in dirstate: %s!\n" % f)
367 self.ui.warn("not in dirstate: %s!\n" % f)
368 pass
368 pass
369
369
370 def clear(self):
370 def clear(self):
371 self.map = {}
371 self.map = {}
372 self.dirty = 1
372 self.dirty = 1
373
373
374 def write(self):
374 def write(self):
375 st = self.opener("dirstate", "w")
375 st = self.opener("dirstate", "w")
376 st.write("".join(self.pl))
376 st.write("".join(self.pl))
377 for f, e in self.map.items():
377 for f, e in self.map.items():
378 c = self.copied(f)
378 c = self.copied(f)
379 if c:
379 if c:
380 f = f + "\0" + c
380 f = f + "\0" + c
381 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
381 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
382 st.write(e + f)
382 st.write(e + f)
383 self.dirty = 0
383 self.dirty = 0
384
384
385 def changes(self, files, ignore):
385 def changes(self, files, ignore):
386 self.read()
386 self.read()
387 dc = self.map.copy()
387 dc = self.map.copy()
388 lookup, changed, added, unknown = [], [], [], []
388 lookup, changed, added, unknown = [], [], [], []
389
389
390 # compare all files by default
390 # compare all files by default
391 if not files: files = [self.root]
391 if not files: files = [self.root]
392
392
393 # recursive generator of all files listed
393 # recursive generator of all files listed
394 def walk(files):
394 def walk(files):
395 for f in util.unique(files):
395 for f in util.unique(files):
396 f = os.path.join(self.root, f)
396 f = os.path.join(self.root, f)
397 if os.path.isdir(f):
397 if os.path.isdir(f):
398 for dir, subdirs, fl in os.walk(f):
398 for dir, subdirs, fl in os.walk(f):
399 d = dir[len(self.root) + 1:]
399 d = dir[len(self.root) + 1:]
400 if ".hg" in subdirs: subdirs.remove(".hg")
400 if ".hg" in subdirs: subdirs.remove(".hg")
401 for fn in fl:
401 for fn in fl:
402 fn = util.pconvert(os.path.join(d, fn))
402 fn = util.pconvert(os.path.join(d, fn))
403 yield fn
403 yield fn
404 else:
404 else:
405 yield f[len(self.root) + 1:]
405 yield f[len(self.root) + 1:]
406
406
407 for fn in util.unique(walk(files)):
407 for fn in util.unique(walk(files)):
408 try: s = os.stat(os.path.join(self.root, fn))
408 try: s = os.stat(os.path.join(self.root, fn))
409 except: continue
409 except: continue
410
410
411 if fn in dc:
411 if fn in dc:
412 c = dc[fn]
412 c = dc[fn]
413 del dc[fn]
413 del dc[fn]
414
414
415 if c[0] == 'm':
415 if c[0] == 'm':
416 changed.append(fn)
416 changed.append(fn)
417 elif c[0] == 'a':
417 elif c[0] == 'a':
418 added.append(fn)
418 added.append(fn)
419 elif c[0] == 'r':
419 elif c[0] == 'r':
420 unknown.append(fn)
420 unknown.append(fn)
421 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
421 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
422 changed.append(fn)
422 changed.append(fn)
423 elif c[1] != s.st_mode or c[3] != s.st_mtime:
423 elif c[1] != s.st_mode or c[3] != s.st_mtime:
424 lookup.append(fn)
424 lookup.append(fn)
425 else:
425 else:
426 if not ignore(fn): unknown.append(fn)
426 if not ignore(fn): unknown.append(fn)
427
427
428 return (lookup, changed, added, dc.keys(), unknown)
428 return (lookup, changed, added, dc.keys(), unknown)
429
429
430 # used to avoid circular references so destructors work
430 # used to avoid circular references so destructors work
431 def opener(base):
431 def opener(base):
432 p = base
432 p = base
433 def o(path, mode="r"):
433 def o(path, mode="r"):
434 if p[:7] == "http://":
434 if p[:7] == "http://":
435 f = os.path.join(p, urllib.quote(path))
435 f = os.path.join(p, urllib.quote(path))
436 return httprangereader.httprangereader(f)
436 return httprangereader.httprangereader(f)
437
437
438 f = os.path.join(p, path)
438 f = os.path.join(p, path)
439
439
440 mode += "b" # for that other OS
440 mode += "b" # for that other OS
441
441
442 if mode[0] != "r":
442 if mode[0] != "r":
443 try:
443 try:
444 s = os.stat(f)
444 s = os.stat(f)
445 except OSError:
445 except OSError:
446 d = os.path.dirname(f)
446 d = os.path.dirname(f)
447 if not os.path.isdir(d):
447 if not os.path.isdir(d):
448 os.makedirs(d)
448 os.makedirs(d)
449 else:
449 else:
450 if s.st_nlink > 1:
450 if s.st_nlink > 1:
451 file(f + ".tmp", "wb").write(file(f, "rb").read())
451 file(f + ".tmp", "wb").write(file(f, "rb").read())
452 util.rename(f+".tmp", f)
452 util.rename(f+".tmp", f)
453
453
454 return file(f, mode)
454 return file(f, mode)
455
455
456 return o
456 return o
457
457
458 class RepoError(Exception): pass
458 class RepoError(Exception): pass
459
459
460 class localrepository:
460 class localrepository:
461 def __init__(self, ui, path=None, create=0):
461 def __init__(self, ui, path=None, create=0):
462 self.remote = 0
462 self.remote = 0
463 if path and path[:7] == "http://":
463 if path and path[:7] == "http://":
464 self.remote = 1
464 self.remote = 1
465 self.path = path
465 self.path = path
466 else:
466 else:
467 if not path:
467 if not path:
468 p = os.getcwd()
468 p = os.getcwd()
469 while not os.path.isdir(os.path.join(p, ".hg")):
469 while not os.path.isdir(os.path.join(p, ".hg")):
470 oldp = p
470 oldp = p
471 p = os.path.dirname(p)
471 p = os.path.dirname(p)
472 if p == oldp: raise RepoError("no repo found")
472 if p == oldp: raise RepoError("no repo found")
473 path = p
473 path = p
474 self.path = os.path.join(path, ".hg")
474 self.path = os.path.join(path, ".hg")
475
475
476 if not create and not os.path.isdir(self.path):
476 if not create and not os.path.isdir(self.path):
477 raise RepoError("repository %s not found" % self.path)
477 raise RepoError("repository %s not found" % self.path)
478
478
479 self.root = path
479 self.root = path
480 self.ui = ui
480 self.ui = ui
481
481
482 if create:
482 if create:
483 os.mkdir(self.path)
483 os.mkdir(self.path)
484 os.mkdir(self.join("data"))
484 os.mkdir(self.join("data"))
485
485
486 self.opener = opener(self.path)
486 self.opener = opener(self.path)
487 self.wopener = opener(self.root)
487 self.wopener = opener(self.root)
488 self.manifest = manifest(self.opener)
488 self.manifest = manifest(self.opener)
489 self.changelog = changelog(self.opener)
489 self.changelog = changelog(self.opener)
490 self.ignorefunc = None
490 self.ignorefunc = None
491 self.tagscache = None
491 self.tagscache = None
492 self.nodetagscache = None
492 self.nodetagscache = None
493
493
494 if not self.remote:
494 if not self.remote:
495 self.dirstate = dirstate(self.opener, ui, self.root)
495 self.dirstate = dirstate(self.opener, ui, self.root)
496 try:
496 try:
497 self.ui.readconfig(self.opener("hgrc"))
497 self.ui.readconfig(self.opener("hgrc"))
498 except IOError: pass
498 except IOError: pass
499
499
500 def ignore(self, f):
500 def ignore(self, f):
501 if not self.ignorefunc:
501 if not self.ignorefunc:
502 bigpat = []
502 bigpat = []
503 try:
503 try:
504 l = file(self.wjoin(".hgignore"))
504 l = file(self.wjoin(".hgignore"))
505 for pat in l:
505 for pat in l:
506 if pat != "\n":
506 if pat != "\n":
507 p = util.pconvert(pat[:-1])
507 p = util.pconvert(pat[:-1])
508 try:
508 try:
509 r = re.compile(p)
509 r = re.compile(p)
510 except:
510 except:
511 self.ui.warn("ignoring invalid ignore"
511 self.ui.warn("ignoring invalid ignore"
512 + " regular expression '%s'\n" % p)
512 + " regular expression '%s'\n" % p)
513 else:
513 else:
514 bigpat.append(util.pconvert(pat[:-1]))
514 bigpat.append(util.pconvert(pat[:-1]))
515 except IOError: pass
515 except IOError: pass
516 if bigpat:
516 if bigpat:
517 s = "(?:%s)" % (")|(?:".join(bigpat))
517 s = "(?:%s)" % (")|(?:".join(bigpat))
518 r = re.compile(s)
518 r = re.compile(s)
519 self.ignorefunc = r.search
519 self.ignorefunc = r.search
520 else:
520 else:
521 self.ignorefunc = lambda x: False
521 self.ignorefunc = lambda x: False
522
522
523 return self.ignorefunc(f)
523 return self.ignorefunc(f)
524
524
525 def hook(self, name, **args):
525 def hook(self, name, **args):
526 s = self.ui.config("hooks", name)
526 s = self.ui.config("hooks", name)
527 if s:
527 if s:
528 self.ui.note("running hook %s: %s\n" % (name, s))
528 self.ui.note("running hook %s: %s\n" % (name, s))
529 old = {}
529 old = {}
530 for k, v in args.items():
530 for k, v in args.items():
531 k = k.upper()
531 k = k.upper()
532 old[k] = os.environ.get(k, None)
532 old[k] = os.environ.get(k, None)
533 os.environ[k] = v
533 os.environ[k] = v
534
534
535 r = os.system(s)
535 r = os.system(s)
536
536
537 for k, v in old.items():
537 for k, v in old.items():
538 if v != None:
538 if v != None:
539 os.environ[k] = v
539 os.environ[k] = v
540 else:
540 else:
541 del os.environ[k]
541 del os.environ[k]
542
542
543 if r:
543 if r:
544 self.ui.warn("abort: %s hook failed with status %d!\n" %
544 self.ui.warn("abort: %s hook failed with status %d!\n" %
545 (name, r))
545 (name, r))
546 return False
546 return False
547 return True
547 return True
548
548
549 def tags(self):
549 def tags(self):
550 '''return a mapping of tag to node'''
550 '''return a mapping of tag to node'''
551 if not self.tagscache:
551 if not self.tagscache:
552 self.tagscache = {}
552 self.tagscache = {}
553 def addtag(self, k, n):
553 def addtag(self, k, n):
554 try:
554 try:
555 bin_n = bin(n)
555 bin_n = bin(n)
556 except TypeError:
556 except TypeError:
557 bin_n = ''
557 bin_n = ''
558 self.tagscache[k.strip()] = bin_n
558 self.tagscache[k.strip()] = bin_n
559
559
560 try:
560 try:
561 # read each head of the tags file, ending with the tip
561 # read each head of the tags file, ending with the tip
562 # and add each tag found to the map, with "newer" ones
562 # and add each tag found to the map, with "newer" ones
563 # taking precedence
563 # taking precedence
564 fl = self.file(".hgtags")
564 fl = self.file(".hgtags")
565 h = fl.heads()
565 h = fl.heads()
566 h.reverse()
566 h.reverse()
567 for r in h:
567 for r in h:
568 for l in fl.revision(r).splitlines():
568 for l in fl.revision(r).splitlines():
569 if l:
569 if l:
570 n, k = l.split(" ", 1)
570 n, k = l.split(" ", 1)
571 addtag(self, k, n)
571 addtag(self, k, n)
572 except KeyError:
572 except KeyError:
573 pass
573 pass
574
574
575 try:
575 try:
576 f = self.opener("localtags")
576 f = self.opener("localtags")
577 for l in f:
577 for l in f:
578 n, k = l.split(" ", 1)
578 n, k = l.split(" ", 1)
579 addtag(self, k, n)
579 addtag(self, k, n)
580 except IOError:
580 except IOError:
581 pass
581 pass
582
582
583 self.tagscache['tip'] = self.changelog.tip()
583 self.tagscache['tip'] = self.changelog.tip()
584
584
585 return self.tagscache
585 return self.tagscache
586
586
587 def tagslist(self):
587 def tagslist(self):
588 '''return a list of tags ordered by revision'''
588 '''return a list of tags ordered by revision'''
589 l = []
589 l = []
590 for t, n in self.tags().items():
590 for t, n in self.tags().items():
591 try:
591 try:
592 r = self.changelog.rev(n)
592 r = self.changelog.rev(n)
593 except:
593 except:
594 r = -2 # sort to the beginning of the list if unknown
594 r = -2 # sort to the beginning of the list if unknown
595 l.append((r,t,n))
595 l.append((r,t,n))
596 l.sort()
596 l.sort()
597 return [(t,n) for r,t,n in l]
597 return [(t,n) for r,t,n in l]
598
598
599 def nodetags(self, node):
599 def nodetags(self, node):
600 '''return the tags associated with a node'''
600 '''return the tags associated with a node'''
601 if not self.nodetagscache:
601 if not self.nodetagscache:
602 self.nodetagscache = {}
602 self.nodetagscache = {}
603 for t,n in self.tags().items():
603 for t,n in self.tags().items():
604 self.nodetagscache.setdefault(n,[]).append(t)
604 self.nodetagscache.setdefault(n,[]).append(t)
605 return self.nodetagscache.get(node, [])
605 return self.nodetagscache.get(node, [])
606
606
607 def lookup(self, key):
607 def lookup(self, key):
608 try:
608 try:
609 return self.tags()[key]
609 return self.tags()[key]
610 except KeyError:
610 except KeyError:
611 return self.changelog.lookup(key)
611 try:
612 return self.changelog.lookup(key)
613 except:
614 raise RepoError("unknown revision '%s'" % key)
612
615
613 def dev(self):
616 def dev(self):
614 if self.remote: return -1
617 if self.remote: return -1
615 return os.stat(self.path).st_dev
618 return os.stat(self.path).st_dev
616
619
617 def join(self, f):
620 def join(self, f):
618 return os.path.join(self.path, f)
621 return os.path.join(self.path, f)
619
622
620 def wjoin(self, f):
623 def wjoin(self, f):
621 return os.path.join(self.root, f)
624 return os.path.join(self.root, f)
622
625
623 def file(self, f):
626 def file(self, f):
624 if f[0] == '/': f = f[1:]
627 if f[0] == '/': f = f[1:]
625 return filelog(self.opener, f)
628 return filelog(self.opener, f)
626
629
627 def getcwd(self):
630 def getcwd(self):
628 cwd = os.getcwd()
631 cwd = os.getcwd()
629 if cwd == self.root: return ''
632 if cwd == self.root: return ''
630 return cwd[len(self.root) + 1:]
633 return cwd[len(self.root) + 1:]
631
634
632 def wfile(self, f, mode='r'):
635 def wfile(self, f, mode='r'):
633 return self.wopener(f, mode)
636 return self.wopener(f, mode)
634
637
635 def transaction(self):
638 def transaction(self):
636 # save dirstate for undo
639 # save dirstate for undo
637 try:
640 try:
638 ds = self.opener("dirstate").read()
641 ds = self.opener("dirstate").read()
639 except IOError:
642 except IOError:
640 ds = ""
643 ds = ""
641 self.opener("undo.dirstate", "w").write(ds)
644 self.opener("undo.dirstate", "w").write(ds)
642
645
643 return transaction.transaction(self.ui.warn,
646 return transaction.transaction(self.ui.warn,
644 self.opener, self.join("journal"),
647 self.opener, self.join("journal"),
645 self.join("undo"))
648 self.join("undo"))
646
649
647 def recover(self):
650 def recover(self):
648 lock = self.lock()
651 lock = self.lock()
649 if os.path.exists(self.join("journal")):
652 if os.path.exists(self.join("journal")):
650 self.ui.status("rolling back interrupted transaction\n")
653 self.ui.status("rolling back interrupted transaction\n")
651 return transaction.rollback(self.opener, self.join("journal"))
654 return transaction.rollback(self.opener, self.join("journal"))
652 else:
655 else:
653 self.ui.warn("no interrupted transaction available\n")
656 self.ui.warn("no interrupted transaction available\n")
654
657
655 def undo(self):
658 def undo(self):
656 lock = self.lock()
659 lock = self.lock()
657 if os.path.exists(self.join("undo")):
660 if os.path.exists(self.join("undo")):
658 self.ui.status("rolling back last transaction\n")
661 self.ui.status("rolling back last transaction\n")
659 transaction.rollback(self.opener, self.join("undo"))
662 transaction.rollback(self.opener, self.join("undo"))
660 self.dirstate = None
663 self.dirstate = None
661 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
664 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
662 self.dirstate = dirstate(self.opener, self.ui, self.root)
665 self.dirstate = dirstate(self.opener, self.ui, self.root)
663 else:
666 else:
664 self.ui.warn("no undo information available\n")
667 self.ui.warn("no undo information available\n")
665
668
666 def lock(self, wait = 1):
669 def lock(self, wait = 1):
667 try:
670 try:
668 return lock.lock(self.join("lock"), 0)
671 return lock.lock(self.join("lock"), 0)
669 except lock.LockHeld, inst:
672 except lock.LockHeld, inst:
670 if wait:
673 if wait:
671 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
674 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
672 return lock.lock(self.join("lock"), wait)
675 return lock.lock(self.join("lock"), wait)
673 raise inst
676 raise inst
674
677
675 def rawcommit(self, files, text, user, date, p1=None, p2=None):
678 def rawcommit(self, files, text, user, date, p1=None, p2=None):
676 orig_parent = self.dirstate.parents()[0] or nullid
679 orig_parent = self.dirstate.parents()[0] or nullid
677 p1 = p1 or self.dirstate.parents()[0] or nullid
680 p1 = p1 or self.dirstate.parents()[0] or nullid
678 p2 = p2 or self.dirstate.parents()[1] or nullid
681 p2 = p2 or self.dirstate.parents()[1] or nullid
679 c1 = self.changelog.read(p1)
682 c1 = self.changelog.read(p1)
680 c2 = self.changelog.read(p2)
683 c2 = self.changelog.read(p2)
681 m1 = self.manifest.read(c1[0])
684 m1 = self.manifest.read(c1[0])
682 mf1 = self.manifest.readflags(c1[0])
685 mf1 = self.manifest.readflags(c1[0])
683 m2 = self.manifest.read(c2[0])
686 m2 = self.manifest.read(c2[0])
684
687
685 if orig_parent == p1:
688 if orig_parent == p1:
686 update_dirstate = 1
689 update_dirstate = 1
687 else:
690 else:
688 update_dirstate = 0
691 update_dirstate = 0
689
692
690 tr = self.transaction()
693 tr = self.transaction()
691 mm = m1.copy()
694 mm = m1.copy()
692 mfm = mf1.copy()
695 mfm = mf1.copy()
693 linkrev = self.changelog.count()
696 linkrev = self.changelog.count()
694 for f in files:
697 for f in files:
695 try:
698 try:
696 t = self.wfile(f).read()
699 t = self.wfile(f).read()
697 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
700 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
698 r = self.file(f)
701 r = self.file(f)
699 mfm[f] = tm
702 mfm[f] = tm
700 mm[f] = r.add(t, {}, tr, linkrev,
703 mm[f] = r.add(t, {}, tr, linkrev,
701 m1.get(f, nullid), m2.get(f, nullid))
704 m1.get(f, nullid), m2.get(f, nullid))
702 if update_dirstate:
705 if update_dirstate:
703 self.dirstate.update([f], "n")
706 self.dirstate.update([f], "n")
704 except IOError:
707 except IOError:
705 try:
708 try:
706 del mm[f]
709 del mm[f]
707 del mfm[f]
710 del mfm[f]
708 if update_dirstate:
711 if update_dirstate:
709 self.dirstate.forget([f])
712 self.dirstate.forget([f])
710 except:
713 except:
711 # deleted from p2?
714 # deleted from p2?
712 pass
715 pass
713
716
714 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
717 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
715 user = user or self.ui.username()
718 user = user or self.ui.username()
716 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
719 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
717 tr.close()
720 tr.close()
718 if update_dirstate:
721 if update_dirstate:
719 self.dirstate.setparents(n, nullid)
722 self.dirstate.setparents(n, nullid)
720
723
721 def commit(self, files = None, text = "", user = None, date = None):
724 def commit(self, files = None, text = "", user = None, date = None):
722 commit = []
725 commit = []
723 remove = []
726 remove = []
724 if files:
727 if files:
725 for f in files:
728 for f in files:
726 s = self.dirstate.state(f)
729 s = self.dirstate.state(f)
727 if s in 'nmai':
730 if s in 'nmai':
728 commit.append(f)
731 commit.append(f)
729 elif s == 'r':
732 elif s == 'r':
730 remove.append(f)
733 remove.append(f)
731 else:
734 else:
732 self.ui.warn("%s not tracked!\n" % f)
735 self.ui.warn("%s not tracked!\n" % f)
733 else:
736 else:
734 (c, a, d, u) = self.changes(None, None)
737 (c, a, d, u) = self.changes(None, None)
735 commit = c + a
738 commit = c + a
736 remove = d
739 remove = d
737
740
738 if not commit and not remove:
741 if not commit and not remove:
739 self.ui.status("nothing changed\n")
742 self.ui.status("nothing changed\n")
740 return
743 return
741
744
742 if not self.hook("precommit"):
745 if not self.hook("precommit"):
743 return 1
746 return 1
744
747
745 p1, p2 = self.dirstate.parents()
748 p1, p2 = self.dirstate.parents()
746 c1 = self.changelog.read(p1)
749 c1 = self.changelog.read(p1)
747 c2 = self.changelog.read(p2)
750 c2 = self.changelog.read(p2)
748 m1 = self.manifest.read(c1[0])
751 m1 = self.manifest.read(c1[0])
749 mf1 = self.manifest.readflags(c1[0])
752 mf1 = self.manifest.readflags(c1[0])
750 m2 = self.manifest.read(c2[0])
753 m2 = self.manifest.read(c2[0])
751 lock = self.lock()
754 lock = self.lock()
752 tr = self.transaction()
755 tr = self.transaction()
753
756
754 # check in files
757 # check in files
755 new = {}
758 new = {}
756 linkrev = self.changelog.count()
759 linkrev = self.changelog.count()
757 commit.sort()
760 commit.sort()
758 for f in commit:
761 for f in commit:
759 self.ui.note(f + "\n")
762 self.ui.note(f + "\n")
760 try:
763 try:
761 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
764 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
762 t = self.wfile(f).read()
765 t = self.wfile(f).read()
763 except IOError:
766 except IOError:
764 self.warn("trouble committing %s!\n" % f)
767 self.warn("trouble committing %s!\n" % f)
765 raise
768 raise
766
769
767 meta = {}
770 meta = {}
768 cp = self.dirstate.copied(f)
771 cp = self.dirstate.copied(f)
769 if cp:
772 if cp:
770 meta["copy"] = cp
773 meta["copy"] = cp
771 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
774 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
772 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
775 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
773
776
774 r = self.file(f)
777 r = self.file(f)
775 fp1 = m1.get(f, nullid)
778 fp1 = m1.get(f, nullid)
776 fp2 = m2.get(f, nullid)
779 fp2 = m2.get(f, nullid)
777 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
780 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
778
781
779 # update manifest
782 # update manifest
780 m1.update(new)
783 m1.update(new)
781 for f in remove:
784 for f in remove:
782 if f in m1:
785 if f in m1:
783 del m1[f]
786 del m1[f]
784 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], (new,remove))
787 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], (new,remove))
785
788
786 # add changeset
789 # add changeset
787 new = new.keys()
790 new = new.keys()
788 new.sort()
791 new.sort()
789
792
790 if not text:
793 if not text:
791 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
794 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
792 edittext += "".join(["HG: changed %s\n" % f for f in new])
795 edittext += "".join(["HG: changed %s\n" % f for f in new])
793 edittext += "".join(["HG: removed %s\n" % f for f in remove])
796 edittext += "".join(["HG: removed %s\n" % f for f in remove])
794 edittext = self.ui.edit(edittext)
797 edittext = self.ui.edit(edittext)
795 if not edittext.rstrip():
798 if not edittext.rstrip():
796 return 1
799 return 1
797 text = edittext
800 text = edittext
798
801
799 user = user or self.ui.username()
802 user = user or self.ui.username()
800 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
803 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
801
804
802 if not self.hook("commit", node=hex(n)):
805 if not self.hook("commit", node=hex(n)):
803 return 1
806 return 1
804
807
805 tr.close()
808 tr.close()
806
809
807 self.dirstate.setparents(n)
810 self.dirstate.setparents(n)
808 self.dirstate.update(new, "n")
811 self.dirstate.update(new, "n")
809 self.dirstate.forget(remove)
812 self.dirstate.forget(remove)
810
813
811 def changes(self, node1, node2, files=None):
814 def changes(self, node1, node2, files=None):
812 mf2, u = None, []
815 mf2, u = None, []
813
816
814 def fcmp(fn, mf):
817 def fcmp(fn, mf):
815 t1 = self.wfile(fn).read()
818 t1 = self.wfile(fn).read()
816 t2 = self.file(fn).revision(mf[fn])
819 t2 = self.file(fn).revision(mf[fn])
817 return cmp(t1, t2)
820 return cmp(t1, t2)
818
821
819 # are we comparing the working directory?
822 # are we comparing the working directory?
820 if not node2:
823 if not node2:
821 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
824 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
822
825
823 # are we comparing working dir against its parent?
826 # are we comparing working dir against its parent?
824 if not node1:
827 if not node1:
825 if l:
828 if l:
826 # do a full compare of any files that might have changed
829 # do a full compare of any files that might have changed
827 change = self.changelog.read(self.dirstate.parents()[0])
830 change = self.changelog.read(self.dirstate.parents()[0])
828 mf2 = self.manifest.read(change[0])
831 mf2 = self.manifest.read(change[0])
829 for f in l:
832 for f in l:
830 if fcmp(f, mf2):
833 if fcmp(f, mf2):
831 c.append(f)
834 c.append(f)
832
835
833 for l in c, a, d, u:
836 for l in c, a, d, u:
834 l.sort()
837 l.sort()
835
838
836 return (c, a, d, u)
839 return (c, a, d, u)
837
840
838 # are we comparing working dir against non-tip?
841 # are we comparing working dir against non-tip?
839 # generate a pseudo-manifest for the working dir
842 # generate a pseudo-manifest for the working dir
840 if not node2:
843 if not node2:
841 if not mf2:
844 if not mf2:
842 change = self.changelog.read(self.dirstate.parents()[0])
845 change = self.changelog.read(self.dirstate.parents()[0])
843 mf2 = self.manifest.read(change[0]).copy()
846 mf2 = self.manifest.read(change[0]).copy()
844 for f in a + c + l:
847 for f in a + c + l:
845 mf2[f] = ""
848 mf2[f] = ""
846 for f in d:
849 for f in d:
847 if f in mf2: del mf2[f]
850 if f in mf2: del mf2[f]
848 else:
851 else:
849 change = self.changelog.read(node2)
852 change = self.changelog.read(node2)
850 mf2 = self.manifest.read(change[0])
853 mf2 = self.manifest.read(change[0])
851
854
852 # flush lists from dirstate before comparing manifests
855 # flush lists from dirstate before comparing manifests
853 c, a = [], []
856 c, a = [], []
854
857
855 change = self.changelog.read(node1)
858 change = self.changelog.read(node1)
856 mf1 = self.manifest.read(change[0]).copy()
859 mf1 = self.manifest.read(change[0]).copy()
857
860
858 for fn in mf2:
861 for fn in mf2:
859 if mf1.has_key(fn):
862 if mf1.has_key(fn):
860 if mf1[fn] != mf2[fn]:
863 if mf1[fn] != mf2[fn]:
861 if mf2[fn] != "" or fcmp(fn, mf1):
864 if mf2[fn] != "" or fcmp(fn, mf1):
862 c.append(fn)
865 c.append(fn)
863 del mf1[fn]
866 del mf1[fn]
864 else:
867 else:
865 a.append(fn)
868 a.append(fn)
866
869
867 d = mf1.keys()
870 d = mf1.keys()
868
871
869 for l in c, a, d, u:
872 for l in c, a, d, u:
870 l.sort()
873 l.sort()
871
874
872 return (c, a, d, u)
875 return (c, a, d, u)
873
876
874 def add(self, list):
877 def add(self, list):
875 for f in list:
878 for f in list:
876 p = self.wjoin(f)
879 p = self.wjoin(f)
877 if not os.path.exists(p):
880 if not os.path.exists(p):
878 self.ui.warn("%s does not exist!\n" % f)
881 self.ui.warn("%s does not exist!\n" % f)
879 elif not os.path.isfile(p):
882 elif not os.path.isfile(p):
880 self.ui.warn("%s not added: mercurial only supports files currently\n" % f)
883 self.ui.warn("%s not added: mercurial only supports files currently\n" % f)
881 elif self.dirstate.state(f) == 'n':
884 elif self.dirstate.state(f) == 'n':
882 self.ui.warn("%s already tracked!\n" % f)
885 self.ui.warn("%s already tracked!\n" % f)
883 else:
886 else:
884 self.dirstate.update([f], "a")
887 self.dirstate.update([f], "a")
885
888
886 def forget(self, list):
889 def forget(self, list):
887 for f in list:
890 for f in list:
888 if self.dirstate.state(f) not in 'ai':
891 if self.dirstate.state(f) not in 'ai':
889 self.ui.warn("%s not added!\n" % f)
892 self.ui.warn("%s not added!\n" % f)
890 else:
893 else:
891 self.dirstate.forget([f])
894 self.dirstate.forget([f])
892
895
893 def remove(self, list):
896 def remove(self, list):
894 for f in list:
897 for f in list:
895 p = self.wjoin(f)
898 p = self.wjoin(f)
896 if os.path.exists(p):
899 if os.path.exists(p):
897 self.ui.warn("%s still exists!\n" % f)
900 self.ui.warn("%s still exists!\n" % f)
898 elif self.dirstate.state(f) == 'a':
901 elif self.dirstate.state(f) == 'a':
899 self.ui.warn("%s never committed!\n" % f)
902 self.ui.warn("%s never committed!\n" % f)
900 self.dirstate.forget([f])
903 self.dirstate.forget([f])
901 elif f not in self.dirstate:
904 elif f not in self.dirstate:
902 self.ui.warn("%s not tracked!\n" % f)
905 self.ui.warn("%s not tracked!\n" % f)
903 else:
906 else:
904 self.dirstate.update([f], "r")
907 self.dirstate.update([f], "r")
905
908
906 def copy(self, source, dest):
909 def copy(self, source, dest):
907 p = self.wjoin(dest)
910 p = self.wjoin(dest)
908 if not os.path.exists(dest):
911 if not os.path.exists(dest):
909 self.ui.warn("%s does not exist!\n" % dest)
912 self.ui.warn("%s does not exist!\n" % dest)
910 elif not os.path.isfile(dest):
913 elif not os.path.isfile(dest):
911 self.ui.warn("copy failed: %s is not a file\n" % dest)
914 self.ui.warn("copy failed: %s is not a file\n" % dest)
912 else:
915 else:
913 if self.dirstate.state(dest) == '?':
916 if self.dirstate.state(dest) == '?':
914 self.dirstate.update([dest], "a")
917 self.dirstate.update([dest], "a")
915 self.dirstate.copy(source, dest)
918 self.dirstate.copy(source, dest)
916
919
917 def heads(self):
920 def heads(self):
918 return self.changelog.heads()
921 return self.changelog.heads()
919
922
920 def branches(self, nodes):
923 def branches(self, nodes):
921 if not nodes: nodes = [self.changelog.tip()]
924 if not nodes: nodes = [self.changelog.tip()]
922 b = []
925 b = []
923 for n in nodes:
926 for n in nodes:
924 t = n
927 t = n
925 while n:
928 while n:
926 p = self.changelog.parents(n)
929 p = self.changelog.parents(n)
927 if p[1] != nullid or p[0] == nullid:
930 if p[1] != nullid or p[0] == nullid:
928 b.append((t, n, p[0], p[1]))
931 b.append((t, n, p[0], p[1]))
929 break
932 break
930 n = p[0]
933 n = p[0]
931 return b
934 return b
932
935
933 def between(self, pairs):
936 def between(self, pairs):
934 r = []
937 r = []
935
938
936 for top, bottom in pairs:
939 for top, bottom in pairs:
937 n, l, i = top, [], 0
940 n, l, i = top, [], 0
938 f = 1
941 f = 1
939
942
940 while n != bottom:
943 while n != bottom:
941 p = self.changelog.parents(n)[0]
944 p = self.changelog.parents(n)[0]
942 if i == f:
945 if i == f:
943 l.append(n)
946 l.append(n)
944 f = f * 2
947 f = f * 2
945 n = p
948 n = p
946 i += 1
949 i += 1
947
950
948 r.append(l)
951 r.append(l)
949
952
950 return r
953 return r
951
954
952 def newer(self, nodes):
955 def newer(self, nodes):
953 m = {}
956 m = {}
954 nl = []
957 nl = []
955 pm = {}
958 pm = {}
956 cl = self.changelog
959 cl = self.changelog
957 t = l = cl.count()
960 t = l = cl.count()
958
961
959 # find the lowest numbered node
962 # find the lowest numbered node
960 for n in nodes:
963 for n in nodes:
961 l = min(l, cl.rev(n))
964 l = min(l, cl.rev(n))
962 m[n] = 1
965 m[n] = 1
963
966
964 for i in xrange(l, t):
967 for i in xrange(l, t):
965 n = cl.node(i)
968 n = cl.node(i)
966 if n in m: # explicitly listed
969 if n in m: # explicitly listed
967 pm[n] = 1
970 pm[n] = 1
968 nl.append(n)
971 nl.append(n)
969 continue
972 continue
970 for p in cl.parents(n):
973 for p in cl.parents(n):
971 if p in pm: # parent listed
974 if p in pm: # parent listed
972 pm[n] = 1
975 pm[n] = 1
973 nl.append(n)
976 nl.append(n)
974 break
977 break
975
978
976 return nl
979 return nl
977
980
978 def findincoming(self, remote, base={}):
981 def findincoming(self, remote, base={}):
979 m = self.changelog.nodemap
982 m = self.changelog.nodemap
980 search = []
983 search = []
981 fetch = []
984 fetch = []
982 seen = {}
985 seen = {}
983 seenbranch = {}
986 seenbranch = {}
984
987
985 # assume we're closer to the tip than the root
988 # assume we're closer to the tip than the root
986 # and start by examining the heads
989 # and start by examining the heads
987 self.ui.status("searching for changes\n")
990 self.ui.status("searching for changes\n")
988 heads = remote.heads()
991 heads = remote.heads()
989 unknown = []
992 unknown = []
990 for h in heads:
993 for h in heads:
991 if h not in m:
994 if h not in m:
992 unknown.append(h)
995 unknown.append(h)
993 else:
996 else:
994 base[h] = 1
997 base[h] = 1
995
998
996 if not unknown:
999 if not unknown:
997 return None
1000 return None
998
1001
999 rep = {}
1002 rep = {}
1000 reqcnt = 0
1003 reqcnt = 0
1001
1004
1002 # search through remote branches
1005 # search through remote branches
1003 # a 'branch' here is a linear segment of history, with four parts:
1006 # a 'branch' here is a linear segment of history, with four parts:
1004 # head, root, first parent, second parent
1007 # head, root, first parent, second parent
1005 # (a branch always has two parents (or none) by definition)
1008 # (a branch always has two parents (or none) by definition)
1006 unknown = remote.branches(unknown)
1009 unknown = remote.branches(unknown)
1007 while unknown:
1010 while unknown:
1008 r = []
1011 r = []
1009 while unknown:
1012 while unknown:
1010 n = unknown.pop(0)
1013 n = unknown.pop(0)
1011 if n[0] in seen:
1014 if n[0] in seen:
1012 continue
1015 continue
1013
1016
1014 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1017 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1015 if n[0] == nullid:
1018 if n[0] == nullid:
1016 break
1019 break
1017 if n in seenbranch:
1020 if n in seenbranch:
1018 self.ui.debug("branch already found\n")
1021 self.ui.debug("branch already found\n")
1019 continue
1022 continue
1020 if n[1] and n[1] in m: # do we know the base?
1023 if n[1] and n[1] in m: # do we know the base?
1021 self.ui.debug("found incomplete branch %s:%s\n"
1024 self.ui.debug("found incomplete branch %s:%s\n"
1022 % (short(n[0]), short(n[1])))
1025 % (short(n[0]), short(n[1])))
1023 search.append(n) # schedule branch range for scanning
1026 search.append(n) # schedule branch range for scanning
1024 seenbranch[n] = 1
1027 seenbranch[n] = 1
1025 else:
1028 else:
1026 if n[1] not in seen and n[1] not in fetch:
1029 if n[1] not in seen and n[1] not in fetch:
1027 if n[2] in m and n[3] in m:
1030 if n[2] in m and n[3] in m:
1028 self.ui.debug("found new changeset %s\n" %
1031 self.ui.debug("found new changeset %s\n" %
1029 short(n[1]))
1032 short(n[1]))
1030 fetch.append(n[1]) # earliest unknown
1033 fetch.append(n[1]) # earliest unknown
1031 base[n[2]] = 1 # latest known
1034 base[n[2]] = 1 # latest known
1032 continue
1035 continue
1033
1036
1034 for a in n[2:4]:
1037 for a in n[2:4]:
1035 if a not in rep:
1038 if a not in rep:
1036 r.append(a)
1039 r.append(a)
1037 rep[a] = 1
1040 rep[a] = 1
1038
1041
1039 seen[n[0]] = 1
1042 seen[n[0]] = 1
1040
1043
1041 if r:
1044 if r:
1042 reqcnt += 1
1045 reqcnt += 1
1043 self.ui.debug("request %d: %s\n" %
1046 self.ui.debug("request %d: %s\n" %
1044 (reqcnt, " ".join(map(short, r))))
1047 (reqcnt, " ".join(map(short, r))))
1045 for p in range(0, len(r), 10):
1048 for p in range(0, len(r), 10):
1046 for b in remote.branches(r[p:p+10]):
1049 for b in remote.branches(r[p:p+10]):
1047 self.ui.debug("received %s:%s\n" %
1050 self.ui.debug("received %s:%s\n" %
1048 (short(b[0]), short(b[1])))
1051 (short(b[0]), short(b[1])))
1049 if b[0] not in m and b[0] not in seen:
1052 if b[0] not in m and b[0] not in seen:
1050 unknown.append(b)
1053 unknown.append(b)
1051
1054
1052 # do binary search on the branches we found
1055 # do binary search on the branches we found
1053 while search:
1056 while search:
1054 n = search.pop(0)
1057 n = search.pop(0)
1055 reqcnt += 1
1058 reqcnt += 1
1056 l = remote.between([(n[0], n[1])])[0]
1059 l = remote.between([(n[0], n[1])])[0]
1057 l.append(n[1])
1060 l.append(n[1])
1058 p = n[0]
1061 p = n[0]
1059 f = 1
1062 f = 1
1060 for i in l:
1063 for i in l:
1061 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1064 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1062 if i in m:
1065 if i in m:
1063 if f <= 2:
1066 if f <= 2:
1064 self.ui.debug("found new branch changeset %s\n" %
1067 self.ui.debug("found new branch changeset %s\n" %
1065 short(p))
1068 short(p))
1066 fetch.append(p)
1069 fetch.append(p)
1067 base[i] = 1
1070 base[i] = 1
1068 else:
1071 else:
1069 self.ui.debug("narrowed branch search to %s:%s\n"
1072 self.ui.debug("narrowed branch search to %s:%s\n"
1070 % (short(p), short(i)))
1073 % (short(p), short(i)))
1071 search.append((p, i))
1074 search.append((p, i))
1072 break
1075 break
1073 p, f = i, f * 2
1076 p, f = i, f * 2
1074
1077
1075 # sanity check our fetch list
1078 # sanity check our fetch list
1076 for f in fetch:
1079 for f in fetch:
1077 if f in m:
1080 if f in m:
1078 raise RepoError("already have changeset " + short(f[:4]))
1081 raise RepoError("already have changeset " + short(f[:4]))
1079
1082
1080 if base.keys() == [nullid]:
1083 if base.keys() == [nullid]:
1081 self.ui.warn("warning: pulling from an unrelated repository!\n")
1084 self.ui.warn("warning: pulling from an unrelated repository!\n")
1082
1085
1083 self.ui.note("adding new changesets starting at " +
1086 self.ui.note("adding new changesets starting at " +
1084 " ".join([short(f) for f in fetch]) + "\n")
1087 " ".join([short(f) for f in fetch]) + "\n")
1085
1088
1086 self.ui.debug("%d total queries\n" % reqcnt)
1089 self.ui.debug("%d total queries\n" % reqcnt)
1087
1090
1088 return fetch
1091 return fetch
1089
1092
    def findoutgoing(self, remote):
        base = {}
        self.findincoming(remote, base)
        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)

        # this is the set of all roots we have to push
        return subset

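    # In other words, findoutgoing works by subtraction: findincoming marks
    # in 'base' everything the remote side already has, every ancestor of
    # those nodes is pruned from 'remain', and whatever is left whose parents
    # were both pruned is a root of the changesets that exist only locally.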
    def pull(self, remote):
        lock = self.lock()

        # if we have an empty repo, fetch everything
        if self.changelog.tip() == nullid:
            self.ui.status("requesting all changes\n")
            fetch = [nullid]
        else:
            fetch = self.findincoming(remote)

        if not fetch:
            self.ui.status("no changes found\n")
            return 1

        cg = remote.changegroup(fetch)
        return self.addchangegroup(cg)

    def push(self, remote):
        lock = remote.lock()
        update = self.findoutgoing(remote)
        if not update:
            self.ui.status("no changes found\n")
            return 1

        cg = self.changegroup(update)
        return remote.addchangegroup(cg)

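    # Rough usage sketch (paths are made up for illustration): pull and push
    # are symmetric wrappers around the discovery and changegroup steps.
    #
    #   dest = repository(ui, "/tmp/dest-repo")
    #   src = repository(ui, "/tmp/src-repo")
    #   dest.pull(src)   # findincoming + remote changegroup + addchangegroup
    #   dest.push(src)   # findoutgoing + local changegroup, applied remotely
    #
    # Both return 1 when there is nothing to transfer.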
    def changegroup(self, basenodes):
        class genread:
            def __init__(self, generator):
                self.g = generator
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    try:
                        self.buf += self.g.next()
                    except StopIteration:
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        def gengroup():
            nodes = self.newer(basenodes)

            # construct the link map
            linkmap = {}
            for n in nodes:
                linkmap[self.changelog.rev(n)] = n

            # construct a list of all changed files
            changed = {}
            for n in nodes:
                c = self.changelog.read(n)
                for f in c[3]:
                    changed[f] = 1
            changed = changed.keys()
            changed.sort()

            # the changegroup is changesets + manifests + all file revs
            revs = [ self.changelog.rev(n) for n in nodes ]

            for y in self.changelog.group(linkmap): yield y
            for y in self.manifest.group(linkmap): yield y
            for f in changed:
                yield struct.pack(">l", len(f) + 4) + f
                g = self.file(f).group(linkmap)
                for y in g:
                    yield y

            yield struct.pack(">l", 0)

        return genread(gengroup())

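    # Stream layout produced by gengroup above: one group for the changelog,
    # one for the manifest, then for every changed file a chunk holding the
    # ">l"-packed value (len(filename) + 4) followed by the file name, that
    # file's group, and finally a zero length word marking the end of the
    # stream.  addchangegroup below consumes exactly this sequence.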
    def addchangegroup(self, source):

        def getchunk():
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            return source.read(l - 4)

        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not source: return
        changesets = files = revisions = 0

        tr = self.transaction()

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        changesets = self.changelog.rev(cn) - self.changelog.rev(co)

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file revisions\n")
        while 1:
            f = getchunk()
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        self.ui.status(("modified %d files, added %d changesets" +
                        " and %d new revisions\n")
                       % (files, changesets, revisions))

        tr.close()
        return

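    # getchunk/getgroup mirror that framing on the receiving side: each chunk
    # starts with a 4-byte big-endian length that includes itself, so a value
    # of four or less means "end of group".  The changeset group is applied
    # first, then the manifest group, then one group per file until an empty
    # file-name chunk terminates the stream.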
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes(None, None)

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []
        mark = {}

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wfile(f).read()
                    t2 = self.file(f).revision(m2[f])
                    if cmp(t1, t2) == 0:
                        mark[f] = 1
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                    else:
                        mark[f] = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                            mark[f] = 1
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = "d"
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                if n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local created %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = "k"
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "k": get[f] = n
            elif f not in ma:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n
            else:
                self.ui.debug("local deleted %s\n" % f)
                if force:
                    get[f] = n

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path:
            # we don't need to do any magic, just jump to the new rev
            mode = 'n'
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to perform a branch merge)\n")
                return 1
            # we have to remember what files we needed to get/change
            # because any file that's different from either one of its
            # parents must be in the changeset
            mode = 'm'
            if moddirstate:
                self.dirstate.update(mark.keys(), "m")

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wfile(f, "w").write(t)
            except IOError:
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wfile(f, "w").write(t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                self.dirstate.update([f], mode)

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            m, o, flag = merge[f]
            self.merge3(f, m, o)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                self.dirstate.update([f], 'm')

        for f in remove:
            self.ui.note("removing %s\n" % f)
            os.unlink(f)
            # try removing directories that might now be empty
            try: os.removedirs(os.path.dirname(f))
            except: pass
        if moddirstate:
            if mode == 'n':
                self.dirstate.forget(remove)
            else:
                self.dirstate.update(remove, 'r')

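    # The permission merge used twice in update() is a three-way merge of a
    # single bit: with a = ancestor, b = working copy and c = remote,
    # ((a^b) | (a^c)) ^ a flips the ancestor bit exactly when either side
    # changed it, e.g. a=0, b=0, c=1 gives 1, while a=1, b=1, c=1 stays 1.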
    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            f.write(fl.revision(node))
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: other %s ancestor %s\n" %
                      (fn, short(other), short(base)))

        cmd = self.ui.config("ui", "merge") or \
              os.environ.get("HGMERGE", "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)

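    # merge3 shells out to an external helper: the "merge" setting in the
    # [ui] section wins, then the HGMERGE environment variable, then a
    # program literally called "hgmerge" on the PATH.  It is invoked as
    # "<cmd> <local> <base> <other>" and a non-zero exit status is reported
    # as a failed merge.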
    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = 0

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            if n in seen:
                self.ui.warn("duplicate changeset at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    self.ui.warn("changeset %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
                errors += 1

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            if n in seen:
                self.ui.warn("duplicate manifest at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    self.ui.warn("manifest %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                self.ui.warn("aborted")
                sys.exit(0)
            except Exception, inst:
                self.ui.warn("unpacking manifest %s: %s\n"
                             % (short(n), inst))
                errors += 1

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1

        self.ui.status("crosschecking files in changesets and manifests\n")
        for f in filenodes:
            if f not in filelinkrevs:
                self.ui.warn("file %s in manifest but not in changesets\n" % f)
                errors += 1

        for f in filelinkrevs:
            if f not in filenodes:
                self.ui.warn("file %s in changeset but not in manifest\n" % f)
                errors += 1

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    self.ui.warn("%s: duplicate revision %d\n" % (f, i))
                    errors += 1

                if n not in filenodes[f]:
                    self.ui.warn("%s: %d:%s not in manifests\n"
                                 % (f, i, short(n)))
                    errors += 1
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    self.ui.warn("%s:%s points to unexpected changeset %d\n"
                                 % (f, short(n), fl.linkrev(n)))
                    errors += 1
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    self.ui.warn("unpacking file %s %s: %s\n"
                                 % (f, short(n), inst))
                    errors += 1

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 1 %s" %
                                 (f, short(n), short(p1)))
                    errors += 1
                if p2 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 2 %s" %
                                 (f, short(n), short(p2)))
                    errors += 1
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                self.ui.warn("node %s in manifests not in %s\n"
                             % (hex(node), f))
                errors += 1

        self.ui.status("%d files, %d changesets, %d total revisions\n" %
                       (files, changesets, revisions))

        if errors:
            self.ui.warn("%d integrity errors encountered!\n" % errors)
            return 1

class httprepository:
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui
        no_list = [ "localhost", "127.0.0.1" ]
        host = ui.config("http_proxy", "host")
        if host is None:
            host = os.environ.get("http_proxy")
        if host and host.startswith('http://'):
            host = host[7:]
        user = ui.config("http_proxy", "user")
        passwd = ui.config("http_proxy", "passwd")
        no = ui.config("http_proxy", "no")
        if no is None:
            no = os.environ.get("no_proxy")
        if no:
            no_list = no_list + no.split(",")

        no_proxy = 0
        for h in no_list:
            if (path.startswith("http://" + h + "/") or
                path.startswith("http://" + h + ":") or
                path == "http://" + h):
                no_proxy = 1

        # Note: urllib2 takes proxy values from the environment and those will
        # take precedence
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            if os.environ.has_key(env):
                del os.environ[env]

        proxy_handler = urllib2.BaseHandler()
        if host and not no_proxy:
            proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})

        authinfo = None
        if user and passwd:
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, host, user, passwd)
            authinfo = urllib2.ProxyBasicAuthHandler(passmgr)

        opener = urllib2.build_opener(proxy_handler, authinfo)
        urllib2.install_opener(opener)

    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = urllib.urlencode(q)
        cu = "%s?%s" % (self.url, qs)
        return urllib2.urlopen(cu)

    def heads(self):
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.do_cmd("branches", nodes=n).read()
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=n).read()
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0

        class zread:
            def __init__(self, f):
                self.zd = zlib.decompressobj()
                self.f = f
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    r = f.read(4096)
                    if r:
                        self.buf += self.zd.decompress(r)
                    else:
                        self.buf += self.zd.flush()
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        return zread(f)

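# The proxy settings read in httprepository.__init__ come from an
# [http_proxy] hgrc section, falling back to the conventional environment
# variables.  A sketch of an entry matching the keys above (values are
# illustrative only):
#
#   [http_proxy]
#   host = proxy.example.com:3128
#   user = alice
#   passwd = secret
#   no = localhost,127.0.0.1,.internal.example.com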
class remotelock:
    def __init__(self, repo):
        self.repo = repo
    def release(self):
        self.repo.unlock()
        self.repo = None
    def __del__(self):
        if self.repo:
            self.release()

class sshrepository:
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

        m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
        if not m:
            raise RepoError("couldn't parse destination %s\n" % path)

        self.user = m.group(2)
        self.host = m.group(3)
        self.port = m.group(5)
        self.path = m.group(7)

        args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
        args = self.port and ("%s -p %s") % (args, self.port) or args
        path = self.path or ""

        cmd = "ssh %s 'hg -R %s serve --stdio'"
        cmd = cmd % (args, path)

        self.pipeo, self.pipei, self.pipee = os.popen3(cmd)

    def readerr(self):
        while 1:
            r, w, x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        self.pipeo.close()
        self.pipei.close()
        for l in self.pipee:
            self.ui.status("remote: ", l)
        self.pipee.close()

    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)

    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""

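# Wire format spoken by sshrepository over the "hg serve --stdio" pipes, as
# used by do_cmd/call above: the command name on its own line, then one
# "<key> <length>" line followed by exactly that many bytes for every
# argument; replies read by call() are a decimal length line followed by the
# payload.  changegroup is the exception and streams straight from the pipe.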
def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)
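# repository() is the entry point callers use to pick a repository class from
# the path scheme.  A minimal usage sketch, assuming the ui class from the
# accompanying ui module (paths are illustrative only):
#
#   from mercurial import ui, hg
#   repo = hg.repository(ui.ui(), "/path/to/repo")            # local
#   remote = hg.repository(ui.ui(), "http://example.com/hg")  # httprepository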