Warn about bogus ignore expressions...
Matt Mackall
r656:147d2fa2 default
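
The substance of this changeset is confined to localrepository.ignore(): instead of blindly collecting every .hgignore line into one big alternation, each pattern is now test-compiled and invalid ones are skipped with a warning. Below is a minimal, self-contained sketch of that behaviour, not the repository code itself; the `warn` callback and `build_ignore_matcher` helper are hypothetical stand-ins for ui.warn and the ignore() method.

```python
import re

def build_ignore_matcher(lines, warn):
    """Collect valid regex patterns and return a match function.

    Invalid patterns are skipped with a warning instead of breaking
    the combined expression when it is finally compiled.
    """
    bigpat = []
    for pat in lines:
        pat = pat.rstrip("\n")
        if not pat:
            continue
        try:
            re.compile(pat)          # validate each pattern individually
        except re.error:
            warn("ignoring invalid ignore regular expression '%s'\n" % pat)
        else:
            bigpat.append(pat)
    if not bigpat:
        return lambda fn: False      # nothing to ignore
    # one big alternation, mirroring "(?:%s)" % (")|(?:".join(bigpat))
    return re.compile("(?:%s)" % (")|(?:".join(bigpat))).search

# usage sketch: "*.orig" is not a valid regex and is skipped with a warning
match = build_ignore_matcher(["*.orig", r".*\.pyc$"],
                             warn=lambda m: print(m, end=""))
print(bool(match("build/x.pyc")))   # True
print(bool(match("hg.py")))         # False
```
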
@@ -1,1868 +1,1875 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff")
13 demandload(globals(), "tempfile httprangereader bdiff")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", path + ".i"),
19 os.path.join("data", path + ".i"),
20 os.path.join("data", path + ".d"))
20 os.path.join("data", path + ".d"))
21
21
22 def read(self, node):
22 def read(self, node):
23 t = self.revision(node)
23 t = self.revision(node)
24 if t[:2] != '\1\n':
24 if t[:2] != '\1\n':
25 return t
25 return t
26 s = t.find('\1\n', 2)
26 s = t.find('\1\n', 2)
27 return t[s+2:]
27 return t[s+2:]
28
28
29 def readmeta(self, node):
29 def readmeta(self, node):
30 t = self.revision(node)
30 t = self.revision(node)
31 if t[:2] != '\1\n':
31 if t[:2] != '\1\n':
32 return t
32 return t
33 s = t.find('\1\n', 2)
33 s = t.find('\1\n', 2)
34 mt = t[2:s]
34 mt = t[2:s]
35 for l in mt.splitlines():
35 for l in mt.splitlines():
36 k, v = l.split(": ", 1)
36 k, v = l.split(": ", 1)
37 m[k] = v
37 m[k] = v
38 return m
38 return m
39
39
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
41 if meta or text[:2] == '\1\n':
41 if meta or text[:2] == '\1\n':
42 mt = ""
42 mt = ""
43 if meta:
43 if meta:
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
45 text = "\1\n" + "".join(mt) + "\1\n" + text
45 text = "\1\n" + "".join(mt) + "\1\n" + text
46 return self.addrevision(text, transaction, link, p1, p2)
46 return self.addrevision(text, transaction, link, p1, p2)
47
47
48 def annotate(self, node):
48 def annotate(self, node):
49
49
50 def decorate(text, rev):
50 def decorate(text, rev):
51 return ([rev] * len(text.splitlines()), text)
51 return ([rev] * len(text.splitlines()), text)
52
52
53 def pair(parent, child):
53 def pair(parent, child):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
55 child[0][b1:b2] = parent[0][a1:a2]
55 child[0][b1:b2] = parent[0][a1:a2]
56 return child
56 return child
57
57
58 # find all ancestors
58 # find all ancestors
59 needed = {node:1}
59 needed = {node:1}
60 visit = [node]
60 visit = [node]
61 while visit:
61 while visit:
62 n = visit.pop(0)
62 n = visit.pop(0)
63 for p in self.parents(n):
63 for p in self.parents(n):
64 if p not in needed:
64 if p not in needed:
65 needed[p] = 1
65 needed[p] = 1
66 visit.append(p)
66 visit.append(p)
67 else:
67 else:
68 # count how many times we'll use this
68 # count how many times we'll use this
69 needed[p] += 1
69 needed[p] += 1
70
70
71 # sort by revision which is a topological order
71 # sort by revision which is a topological order
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
73 visit.sort()
73 visit.sort()
74 hist = {}
74 hist = {}
75
75
76 for r,n in visit:
76 for r,n in visit:
77 curr = decorate(self.read(n), self.linkrev(n))
77 curr = decorate(self.read(n), self.linkrev(n))
78 for p in self.parents(n):
78 for p in self.parents(n):
79 if p != nullid:
79 if p != nullid:
80 curr = pair(hist[p], curr)
80 curr = pair(hist[p], curr)
81 # trim the history of unneeded revs
81 # trim the history of unneeded revs
82 needed[p] -= 1
82 needed[p] -= 1
83 if not needed[p]:
83 if not needed[p]:
84 del hist[p]
84 del hist[p]
85 hist[n] = curr
85 hist[n] = curr
86
86
87 return zip(hist[n][0], hist[n][1].splitlines(1))
87 return zip(hist[n][0], hist[n][1].splitlines(1))
88
88
89 class manifest(revlog):
89 class manifest(revlog):
90 def __init__(self, opener):
90 def __init__(self, opener):
91 self.mapcache = None
91 self.mapcache = None
92 self.listcache = None
92 self.listcache = None
93 self.addlist = None
93 self.addlist = None
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
95
95
96 def read(self, node):
96 def read(self, node):
97 if node == nullid: return {} # don't upset local cache
97 if node == nullid: return {} # don't upset local cache
98 if self.mapcache and self.mapcache[0] == node:
98 if self.mapcache and self.mapcache[0] == node:
99 return self.mapcache[1]
99 return self.mapcache[1]
100 text = self.revision(node)
100 text = self.revision(node)
101 map = {}
101 map = {}
102 flag = {}
102 flag = {}
103 self.listcache = (text, text.splitlines(1))
103 self.listcache = (text, text.splitlines(1))
104 for l in self.listcache[1]:
104 for l in self.listcache[1]:
105 (f, n) = l.split('\0')
105 (f, n) = l.split('\0')
106 map[f] = bin(n[:40])
106 map[f] = bin(n[:40])
107 flag[f] = (n[40:-1] == "x")
107 flag[f] = (n[40:-1] == "x")
108 self.mapcache = (node, map, flag)
108 self.mapcache = (node, map, flag)
109 return map
109 return map
110
110
111 def readflags(self, node):
111 def readflags(self, node):
112 if node == nullid: return {} # don't upset local cache
112 if node == nullid: return {} # don't upset local cache
113 if not self.mapcache or self.mapcache[0] != node:
113 if not self.mapcache or self.mapcache[0] != node:
114 self.read(node)
114 self.read(node)
115 return self.mapcache[2]
115 return self.mapcache[2]
116
116
117 def diff(self, a, b):
117 def diff(self, a, b):
118 # this is sneaky, as we're not actually using a and b
118 # this is sneaky, as we're not actually using a and b
119 if self.listcache and self.addlist and self.listcache[0] == a:
119 if self.listcache and self.addlist and self.listcache[0] == a:
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
121 if mdiff.patch(a, d) != b:
121 if mdiff.patch(a, d) != b:
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
123 return mdiff.textdiff(a, b)
123 return mdiff.textdiff(a, b)
124 return d
124 return d
125 else:
125 else:
126 return mdiff.textdiff(a, b)
126 return mdiff.textdiff(a, b)
127
127
128 def add(self, map, flags, transaction, link, p1=None, p2=None,changed=None):
128 def add(self, map, flags, transaction, link, p1=None, p2=None,changed=None):
129 # directly generate the mdiff delta from the data collected during
129 # directly generate the mdiff delta from the data collected during
130 # the bisect loop below
130 # the bisect loop below
131 def gendelta(delta):
131 def gendelta(delta):
132 i = 0
132 i = 0
133 result = []
133 result = []
134 while i < len(delta):
134 while i < len(delta):
135 start = delta[i][2]
135 start = delta[i][2]
136 end = delta[i][3]
136 end = delta[i][3]
137 l = delta[i][4]
137 l = delta[i][4]
138 if l == None:
138 if l == None:
139 l = ""
139 l = ""
140 while i < len(delta) - 1 and start <= delta[i+1][2] and end >= delta[i+1][2]:
140 while i < len(delta) - 1 and start <= delta[i+1][2] and end >= delta[i+1][2]:
141 if delta[i+1][3] > end:
141 if delta[i+1][3] > end:
142 end = delta[i+1][3]
142 end = delta[i+1][3]
143 if delta[i+1][4]:
143 if delta[i+1][4]:
144 l += delta[i+1][4]
144 l += delta[i+1][4]
145 i += 1
145 i += 1
146 result.append(struct.pack(">lll", start, end, len(l)) + l)
146 result.append(struct.pack(">lll", start, end, len(l)) + l)
147 i += 1
147 i += 1
148 return result
148 return result
149
149
150 # apply the changes collected during the bisect loop to our addlist
150 # apply the changes collected during the bisect loop to our addlist
151 def addlistdelta(addlist, delta):
151 def addlistdelta(addlist, delta):
152 # apply the deltas to the addlist. start from the bottom up
152 # apply the deltas to the addlist. start from the bottom up
153 # so changes to the offsets don't mess things up.
153 # so changes to the offsets don't mess things up.
154 i = len(delta)
154 i = len(delta)
155 while i > 0:
155 while i > 0:
156 i -= 1
156 i -= 1
157 start = delta[i][0]
157 start = delta[i][0]
158 end = delta[i][1]
158 end = delta[i][1]
159 if delta[i][4]:
159 if delta[i][4]:
160 addlist[start:end] = [delta[i][4]]
160 addlist[start:end] = [delta[i][4]]
161 else:
161 else:
162 del addlist[start:end]
162 del addlist[start:end]
163 return addlist
163 return addlist
164
164
165 # calculate the byte offset of the start of each line in the
165 # calculate the byte offset of the start of each line in the
166 # manifest
166 # manifest
167 def calcoffsets(addlist):
167 def calcoffsets(addlist):
168 offsets = [0] * (len(addlist) + 1)
168 offsets = [0] * (len(addlist) + 1)
169 offset = 0
169 offset = 0
170 i = 0
170 i = 0
171 while i < len(addlist):
171 while i < len(addlist):
172 offsets[i] = offset
172 offsets[i] = offset
173 offset += len(addlist[i])
173 offset += len(addlist[i])
174 i += 1
174 i += 1
175 offsets[i] = offset
175 offsets[i] = offset
176 return offsets
176 return offsets
177
177
178 # if we're using the listcache, make sure it is valid and
178 # if we're using the listcache, make sure it is valid and
179 # parented by the same node we're diffing against
179 # parented by the same node we're diffing against
180 if not changed or not self.listcache or not p1 or self.mapcache[0] != p1:
180 if not changed or not self.listcache or not p1 or self.mapcache[0] != p1:
181 files = map.keys()
181 files = map.keys()
182 files.sort()
182 files.sort()
183
183
184 self.addlist = ["%s\000%s%s\n" %
184 self.addlist = ["%s\000%s%s\n" %
185 (f, hex(map[f]), flags[f] and "x" or '')
185 (f, hex(map[f]), flags[f] and "x" or '')
186 for f in files]
186 for f in files]
187 cachedelta = None
187 cachedelta = None
188 else:
188 else:
189 addlist = self.listcache[1]
189 addlist = self.listcache[1]
190
190
191 # find the starting offset for each line in the add list
191 # find the starting offset for each line in the add list
192 offsets = calcoffsets(addlist)
192 offsets = calcoffsets(addlist)
193
193
194 # combine the changed lists into one list for sorting
194 # combine the changed lists into one list for sorting
195 work = [[x, 0] for x in changed[0]]
195 work = [[x, 0] for x in changed[0]]
196 work[len(work):] = [[x, 1] for x in changed[1]]
196 work[len(work):] = [[x, 1] for x in changed[1]]
197 work.sort()
197 work.sort()
198
198
199 delta = []
199 delta = []
200 bs = 0
200 bs = 0
201
201
202 for w in work:
202 for w in work:
203 f = w[0]
203 f = w[0]
204 # bs will either be the index of the item or the insertion point
204 # bs will either be the index of the item or the insertion point
205 bs = bisect.bisect(addlist, f, bs)
205 bs = bisect.bisect(addlist, f, bs)
206 if bs < len(addlist):
206 if bs < len(addlist):
207 fn = addlist[bs][:addlist[bs].index('\0')]
207 fn = addlist[bs][:addlist[bs].index('\0')]
208 else:
208 else:
209 fn = None
209 fn = None
210 if w[1] == 0:
210 if w[1] == 0:
211 l = "%s\000%s%s\n" % (f, hex(map[f]), flags[f] and "x" or '')
211 l = "%s\000%s%s\n" % (f, hex(map[f]), flags[f] and "x" or '')
212 else:
212 else:
213 l = None
213 l = None
214 start = bs
214 start = bs
215 if fn != f:
215 if fn != f:
216 # item not found, insert a new one
216 # item not found, insert a new one
217 end = bs
217 end = bs
218 if w[1] == 1:
218 if w[1] == 1:
219 sys.stderr.write("failed to remove %s from manifest" % f)
219 sys.stderr.write("failed to remove %s from manifest" % f)
220 sys.exit(1)
220 sys.exit(1)
221 else:
221 else:
222 # item is found, replace/delete the existing line
222 # item is found, replace/delete the existing line
223 end = bs + 1
223 end = bs + 1
224 delta.append([start, end, offsets[start], offsets[end], l])
224 delta.append([start, end, offsets[start], offsets[end], l])
225
225
226 self.addlist = addlistdelta(addlist, delta)
226 self.addlist = addlistdelta(addlist, delta)
227 if self.mapcache[0] == self.tip():
227 if self.mapcache[0] == self.tip():
228 cachedelta = "".join(gendelta(delta))
228 cachedelta = "".join(gendelta(delta))
229 else:
229 else:
230 cachedelta = None
230 cachedelta = None
231
231
232 text = "".join(self.addlist)
232 text = "".join(self.addlist)
233 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
233 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
234 sys.stderr.write("manifest delta failure")
234 sys.stderr.write("manifest delta failure")
235 sys.exit(1)
235 sys.exit(1)
236 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
236 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
237 self.mapcache = (n, map, flags)
237 self.mapcache = (n, map, flags)
238 self.listcache = (text, self.addlist)
238 self.listcache = (text, self.addlist)
239 self.addlist = None
239 self.addlist = None
240
240
241 return n
241 return n
242
242
243 class changelog(revlog):
243 class changelog(revlog):
244 def __init__(self, opener):
244 def __init__(self, opener):
245 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
245 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
246
246
247 def extract(self, text):
247 def extract(self, text):
248 if not text:
248 if not text:
249 return (nullid, "", "0", [], "")
249 return (nullid, "", "0", [], "")
250 last = text.index("\n\n")
250 last = text.index("\n\n")
251 desc = text[last + 2:]
251 desc = text[last + 2:]
252 l = text[:last].splitlines()
252 l = text[:last].splitlines()
253 manifest = bin(l[0])
253 manifest = bin(l[0])
254 user = l[1]
254 user = l[1]
255 date = l[2]
255 date = l[2]
256 files = l[3:]
256 files = l[3:]
257 return (manifest, user, date, files, desc)
257 return (manifest, user, date, files, desc)
258
258
259 def read(self, node):
259 def read(self, node):
260 return self.extract(self.revision(node))
260 return self.extract(self.revision(node))
261
261
262 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
262 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
263 user=None, date=None):
263 user=None, date=None):
264 date = date or "%d %d" % (time.time(), time.timezone)
264 date = date or "%d %d" % (time.time(), time.timezone)
265 list.sort()
265 list.sort()
266 l = [hex(manifest), user, date] + list + ["", desc]
266 l = [hex(manifest), user, date] + list + ["", desc]
267 text = "\n".join(l)
267 text = "\n".join(l)
268 return self.addrevision(text, transaction, self.count(), p1, p2)
268 return self.addrevision(text, transaction, self.count(), p1, p2)
269
269
270 class dirstate:
270 class dirstate:
271 def __init__(self, opener, ui, root):
271 def __init__(self, opener, ui, root):
272 self.opener = opener
272 self.opener = opener
273 self.root = root
273 self.root = root
274 self.dirty = 0
274 self.dirty = 0
275 self.ui = ui
275 self.ui = ui
276 self.map = None
276 self.map = None
277 self.pl = None
277 self.pl = None
278 self.copies = {}
278 self.copies = {}
279
279
280 def __del__(self):
280 def __del__(self):
281 if self.dirty:
281 if self.dirty:
282 self.write()
282 self.write()
283
283
284 def __getitem__(self, key):
284 def __getitem__(self, key):
285 try:
285 try:
286 return self.map[key]
286 return self.map[key]
287 except TypeError:
287 except TypeError:
288 self.read()
288 self.read()
289 return self[key]
289 return self[key]
290
290
291 def __contains__(self, key):
291 def __contains__(self, key):
292 if not self.map: self.read()
292 if not self.map: self.read()
293 return key in self.map
293 return key in self.map
294
294
295 def parents(self):
295 def parents(self):
296 if not self.pl:
296 if not self.pl:
297 self.read()
297 self.read()
298 return self.pl
298 return self.pl
299
299
300 def setparents(self, p1, p2 = nullid):
300 def setparents(self, p1, p2 = nullid):
301 self.dirty = 1
301 self.dirty = 1
302 self.pl = p1, p2
302 self.pl = p1, p2
303
303
304 def state(self, key):
304 def state(self, key):
305 try:
305 try:
306 return self[key][0]
306 return self[key][0]
307 except KeyError:
307 except KeyError:
308 return "?"
308 return "?"
309
309
310 def read(self):
310 def read(self):
311 if self.map is not None: return self.map
311 if self.map is not None: return self.map
312
312
313 self.map = {}
313 self.map = {}
314 self.pl = [nullid, nullid]
314 self.pl = [nullid, nullid]
315 try:
315 try:
316 st = self.opener("dirstate").read()
316 st = self.opener("dirstate").read()
317 if not st: return
317 if not st: return
318 except: return
318 except: return
319
319
320 self.pl = [st[:20], st[20: 40]]
320 self.pl = [st[:20], st[20: 40]]
321
321
322 pos = 40
322 pos = 40
323 while pos < len(st):
323 while pos < len(st):
324 e = struct.unpack(">cllll", st[pos:pos+17])
324 e = struct.unpack(">cllll", st[pos:pos+17])
325 l = e[4]
325 l = e[4]
326 pos += 17
326 pos += 17
327 f = st[pos:pos + l]
327 f = st[pos:pos + l]
328 if '\0' in f:
328 if '\0' in f:
329 f, c = f.split('\0')
329 f, c = f.split('\0')
330 self.copies[f] = c
330 self.copies[f] = c
331 self.map[f] = e[:4]
331 self.map[f] = e[:4]
332 pos += l
332 pos += l
333
333
334 def copy(self, source, dest):
334 def copy(self, source, dest):
335 self.read()
335 self.read()
336 self.dirty = 1
336 self.dirty = 1
337 self.copies[dest] = source
337 self.copies[dest] = source
338
338
339 def copied(self, file):
339 def copied(self, file):
340 return self.copies.get(file, None)
340 return self.copies.get(file, None)
341
341
342 def update(self, files, state):
342 def update(self, files, state):
343 ''' current states:
343 ''' current states:
344 n normal
344 n normal
345 m needs merging
345 m needs merging
346 r marked for removal
346 r marked for removal
347 a marked for addition'''
347 a marked for addition'''
348
348
349 if not files: return
349 if not files: return
350 self.read()
350 self.read()
351 self.dirty = 1
351 self.dirty = 1
352 for f in files:
352 for f in files:
353 if state == "r":
353 if state == "r":
354 self.map[f] = ('r', 0, 0, 0)
354 self.map[f] = ('r', 0, 0, 0)
355 else:
355 else:
356 s = os.stat(os.path.join(self.root, f))
356 s = os.stat(os.path.join(self.root, f))
357 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
357 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
358
358
359 def forget(self, files):
359 def forget(self, files):
360 if not files: return
360 if not files: return
361 self.read()
361 self.read()
362 self.dirty = 1
362 self.dirty = 1
363 for f in files:
363 for f in files:
364 try:
364 try:
365 del self.map[f]
365 del self.map[f]
366 except KeyError:
366 except KeyError:
367 self.ui.warn("not in dirstate: %s!\n" % f)
367 self.ui.warn("not in dirstate: %s!\n" % f)
368 pass
368 pass
369
369
370 def clear(self):
370 def clear(self):
371 self.map = {}
371 self.map = {}
372 self.dirty = 1
372 self.dirty = 1
373
373
374 def write(self):
374 def write(self):
375 st = self.opener("dirstate", "w")
375 st = self.opener("dirstate", "w")
376 st.write("".join(self.pl))
376 st.write("".join(self.pl))
377 for f, e in self.map.items():
377 for f, e in self.map.items():
378 c = self.copied(f)
378 c = self.copied(f)
379 if c:
379 if c:
380 f = f + "\0" + c
380 f = f + "\0" + c
381 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
381 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
382 st.write(e + f)
382 st.write(e + f)
383 self.dirty = 0
383 self.dirty = 0
384
384
385 def changes(self, files, ignore):
385 def changes(self, files, ignore):
386 self.read()
386 self.read()
387 dc = self.map.copy()
387 dc = self.map.copy()
388 lookup, changed, added, unknown = [], [], [], []
388 lookup, changed, added, unknown = [], [], [], []
389
389
390 # compare all files by default
390 # compare all files by default
391 if not files: files = [self.root]
391 if not files: files = [self.root]
392
392
393 # recursive generator of all files listed
393 # recursive generator of all files listed
394 def walk(files):
394 def walk(files):
395 for f in util.unique(files):
395 for f in util.unique(files):
396 f = os.path.join(self.root, f)
396 f = os.path.join(self.root, f)
397 if os.path.isdir(f):
397 if os.path.isdir(f):
398 for dir, subdirs, fl in os.walk(f):
398 for dir, subdirs, fl in os.walk(f):
399 d = dir[len(self.root) + 1:]
399 d = dir[len(self.root) + 1:]
400 if ".hg" in subdirs: subdirs.remove(".hg")
400 if ".hg" in subdirs: subdirs.remove(".hg")
401 for fn in fl:
401 for fn in fl:
402 fn = util.pconvert(os.path.join(d, fn))
402 fn = util.pconvert(os.path.join(d, fn))
403 yield fn
403 yield fn
404 else:
404 else:
405 yield f[len(self.root) + 1:]
405 yield f[len(self.root) + 1:]
406
406
407 for fn in util.unique(walk(files)):
407 for fn in util.unique(walk(files)):
408 try: s = os.stat(os.path.join(self.root, fn))
408 try: s = os.stat(os.path.join(self.root, fn))
409 except: continue
409 except: continue
410
410
411 if fn in dc:
411 if fn in dc:
412 c = dc[fn]
412 c = dc[fn]
413 del dc[fn]
413 del dc[fn]
414
414
415 if c[0] == 'm':
415 if c[0] == 'm':
416 changed.append(fn)
416 changed.append(fn)
417 elif c[0] == 'a':
417 elif c[0] == 'a':
418 added.append(fn)
418 added.append(fn)
419 elif c[0] == 'r':
419 elif c[0] == 'r':
420 unknown.append(fn)
420 unknown.append(fn)
421 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
421 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
422 changed.append(fn)
422 changed.append(fn)
423 elif c[1] != s.st_mode or c[3] != s.st_mtime:
423 elif c[1] != s.st_mode or c[3] != s.st_mtime:
424 lookup.append(fn)
424 lookup.append(fn)
425 else:
425 else:
426 if not ignore(fn): unknown.append(fn)
426 if not ignore(fn): unknown.append(fn)
427
427
428 return (lookup, changed, added, dc.keys(), unknown)
428 return (lookup, changed, added, dc.keys(), unknown)
429
429
430 # used to avoid circular references so destructors work
430 # used to avoid circular references so destructors work
431 def opener(base):
431 def opener(base):
432 p = base
432 p = base
433 def o(path, mode="r"):
433 def o(path, mode="r"):
434 if p[:7] == "http://":
434 if p[:7] == "http://":
435 f = os.path.join(p, urllib.quote(path))
435 f = os.path.join(p, urllib.quote(path))
436 return httprangereader.httprangereader(f)
436 return httprangereader.httprangereader(f)
437
437
438 f = os.path.join(p, path)
438 f = os.path.join(p, path)
439
439
440 mode += "b" # for that other OS
440 mode += "b" # for that other OS
441
441
442 if mode[0] != "r":
442 if mode[0] != "r":
443 try:
443 try:
444 s = os.stat(f)
444 s = os.stat(f)
445 except OSError:
445 except OSError:
446 d = os.path.dirname(f)
446 d = os.path.dirname(f)
447 if not os.path.isdir(d):
447 if not os.path.isdir(d):
448 os.makedirs(d)
448 os.makedirs(d)
449 else:
449 else:
450 if s.st_nlink > 1:
450 if s.st_nlink > 1:
451 file(f + ".tmp", "wb").write(file(f, "rb").read())
451 file(f + ".tmp", "wb").write(file(f, "rb").read())
452 util.rename(f+".tmp", f)
452 util.rename(f+".tmp", f)
453
453
454 return file(f, mode)
454 return file(f, mode)
455
455
456 return o
456 return o
457
457
458 class RepoError(Exception): pass
458 class RepoError(Exception): pass
459
459
460 class localrepository:
460 class localrepository:
461 def __init__(self, ui, path=None, create=0):
461 def __init__(self, ui, path=None, create=0):
462 self.remote = 0
462 self.remote = 0
463 if path and path[:7] == "http://":
463 if path and path[:7] == "http://":
464 self.remote = 1
464 self.remote = 1
465 self.path = path
465 self.path = path
466 else:
466 else:
467 if not path:
467 if not path:
468 p = os.getcwd()
468 p = os.getcwd()
469 while not os.path.isdir(os.path.join(p, ".hg")):
469 while not os.path.isdir(os.path.join(p, ".hg")):
470 oldp = p
470 oldp = p
471 p = os.path.dirname(p)
471 p = os.path.dirname(p)
472 if p == oldp: raise RepoError("no repo found")
472 if p == oldp: raise RepoError("no repo found")
473 path = p
473 path = p
474 self.path = os.path.join(path, ".hg")
474 self.path = os.path.join(path, ".hg")
475
475
476 if not create and not os.path.isdir(self.path):
476 if not create and not os.path.isdir(self.path):
477 raise RepoError("repository %s not found" % self.path)
477 raise RepoError("repository %s not found" % self.path)
478
478
479 self.root = path
479 self.root = path
480 self.ui = ui
480 self.ui = ui
481
481
482 if create:
482 if create:
483 os.mkdir(self.path)
483 os.mkdir(self.path)
484 os.mkdir(self.join("data"))
484 os.mkdir(self.join("data"))
485
485
486 self.opener = opener(self.path)
486 self.opener = opener(self.path)
487 self.wopener = opener(self.root)
487 self.wopener = opener(self.root)
488 self.manifest = manifest(self.opener)
488 self.manifest = manifest(self.opener)
489 self.changelog = changelog(self.opener)
489 self.changelog = changelog(self.opener)
490 self.ignorefunc = None
490 self.ignorefunc = None
491 self.tagscache = None
491 self.tagscache = None
492 self.nodetagscache = None
492 self.nodetagscache = None
493
493
494 if not self.remote:
494 if not self.remote:
495 self.dirstate = dirstate(self.opener, ui, self.root)
495 self.dirstate = dirstate(self.opener, ui, self.root)
496 try:
496 try:
497 self.ui.readconfig(self.opener("hgrc"))
497 self.ui.readconfig(self.opener("hgrc"))
498 except IOError: pass
498 except IOError: pass
499
499
500 def ignore(self, f):
500 def ignore(self, f):
501 if not self.ignorefunc:
501 if not self.ignorefunc:
502 bigpat = []
502 bigpat = []
503 try:
503 try:
504 l = file(self.wjoin(".hgignore"))
504 l = file(self.wjoin(".hgignore"))
505 for pat in l:
505 for pat in l:
506 if pat != "\n":
506 if pat != "\n":
507 bigpat.append(util.pconvert(pat[:-1]))
507 p = util.pconvert(pat[:-1])
508 try:
509 r = re.compile(p)
510 except:
511 self.ui.warn("ignoring invalid ignore"
512 + " regular expression '%s'\n" % p)
513 else:
514 bigpat.append(util.pconvert(pat[:-1]))
508 except IOError: pass
515 except IOError: pass
509 if bigpat:
516 if bigpat:
510 s = "(?:%s)" % (")|(?:".join(bigpat))
517 s = "(?:%s)" % (")|(?:".join(bigpat))
511 r = re.compile(s)
518 r = re.compile(s)
512 self.ignorefunc = r.search
519 self.ignorefunc = r.search
513 else:
520 else:
514 self.ignorefunc = lambda x: False
521 self.ignorefunc = lambda x: False
515
522
516 return self.ignorefunc(f)
523 return self.ignorefunc(f)
517
524
518 def hook(self, name, **args):
525 def hook(self, name, **args):
519 s = self.ui.config("hooks", name)
526 s = self.ui.config("hooks", name)
520 if s:
527 if s:
521 self.ui.note("running hook %s: %s\n" % (name, s))
528 self.ui.note("running hook %s: %s\n" % (name, s))
522 old = {}
529 old = {}
523 for k, v in args.items():
530 for k, v in args.items():
524 k = k.upper()
531 k = k.upper()
525 old[k] = os.environ.get(k, None)
532 old[k] = os.environ.get(k, None)
526 os.environ[k] = v
533 os.environ[k] = v
527
534
528 r = os.system(s)
535 r = os.system(s)
529
536
530 for k, v in old.items():
537 for k, v in old.items():
531 if v != None:
538 if v != None:
532 os.environ[k] = v
539 os.environ[k] = v
533 else:
540 else:
534 del os.environ[k]
541 del os.environ[k]
535
542
536 if r:
543 if r:
537 self.ui.warn("abort: %s hook failed with status %d!\n" %
544 self.ui.warn("abort: %s hook failed with status %d!\n" %
538 (name, r))
545 (name, r))
539 return False
546 return False
540 return True
547 return True
541
548
542 def tags(self):
549 def tags(self):
543 '''return a mapping of tag to node'''
550 '''return a mapping of tag to node'''
544 if not self.tagscache:
551 if not self.tagscache:
545 self.tagscache = {}
552 self.tagscache = {}
546 def addtag(self, k, n):
553 def addtag(self, k, n):
547 try:
554 try:
548 bin_n = bin(n)
555 bin_n = bin(n)
549 except TypeError:
556 except TypeError:
550 bin_n = ''
557 bin_n = ''
551 self.tagscache[k.strip()] = bin_n
558 self.tagscache[k.strip()] = bin_n
552
559
553 try:
560 try:
554 # read each head of the tags file, ending with the tip
561 # read each head of the tags file, ending with the tip
555 # and add each tag found to the map, with "newer" ones
562 # and add each tag found to the map, with "newer" ones
556 # taking precedence
563 # taking precedence
557 fl = self.file(".hgtags")
564 fl = self.file(".hgtags")
558 h = fl.heads()
565 h = fl.heads()
559 h.reverse()
566 h.reverse()
560 for r in h:
567 for r in h:
561 for l in fl.revision(r).splitlines():
568 for l in fl.revision(r).splitlines():
562 if l:
569 if l:
563 n, k = l.split(" ", 1)
570 n, k = l.split(" ", 1)
564 addtag(self, k, n)
571 addtag(self, k, n)
565 except KeyError:
572 except KeyError:
566 pass
573 pass
567
574
568 try:
575 try:
569 f = self.opener("localtags")
576 f = self.opener("localtags")
570 for l in f:
577 for l in f:
571 n, k = l.split(" ", 1)
578 n, k = l.split(" ", 1)
572 addtag(self, k, n)
579 addtag(self, k, n)
573 except IOError:
580 except IOError:
574 pass
581 pass
575
582
576 self.tagscache['tip'] = self.changelog.tip()
583 self.tagscache['tip'] = self.changelog.tip()
577
584
578 return self.tagscache
585 return self.tagscache
579
586
580 def tagslist(self):
587 def tagslist(self):
581 '''return a list of tags ordered by revision'''
588 '''return a list of tags ordered by revision'''
582 l = []
589 l = []
583 for t, n in self.tags().items():
590 for t, n in self.tags().items():
584 try:
591 try:
585 r = self.changelog.rev(n)
592 r = self.changelog.rev(n)
586 except:
593 except:
587 r = -2 # sort to the beginning of the list if unknown
594 r = -2 # sort to the beginning of the list if unknown
588 l.append((r,t,n))
595 l.append((r,t,n))
589 l.sort()
596 l.sort()
590 return [(t,n) for r,t,n in l]
597 return [(t,n) for r,t,n in l]
591
598
592 def nodetags(self, node):
599 def nodetags(self, node):
593 '''return the tags associated with a node'''
600 '''return the tags associated with a node'''
594 if not self.nodetagscache:
601 if not self.nodetagscache:
595 self.nodetagscache = {}
602 self.nodetagscache = {}
596 for t,n in self.tags().items():
603 for t,n in self.tags().items():
597 self.nodetagscache.setdefault(n,[]).append(t)
604 self.nodetagscache.setdefault(n,[]).append(t)
598 return self.nodetagscache.get(node, [])
605 return self.nodetagscache.get(node, [])
599
606
600 def lookup(self, key):
607 def lookup(self, key):
601 try:
608 try:
602 return self.tags()[key]
609 return self.tags()[key]
603 except KeyError:
610 except KeyError:
604 return self.changelog.lookup(key)
611 return self.changelog.lookup(key)
605
612
606 def dev(self):
613 def dev(self):
607 if self.remote: return -1
614 if self.remote: return -1
608 return os.stat(self.path).st_dev
615 return os.stat(self.path).st_dev
609
616
610 def join(self, f):
617 def join(self, f):
611 return os.path.join(self.path, f)
618 return os.path.join(self.path, f)
612
619
613 def wjoin(self, f):
620 def wjoin(self, f):
614 return os.path.join(self.root, f)
621 return os.path.join(self.root, f)
615
622
616 def file(self, f):
623 def file(self, f):
617 if f[0] == '/': f = f[1:]
624 if f[0] == '/': f = f[1:]
618 return filelog(self.opener, f)
625 return filelog(self.opener, f)
619
626
620 def getcwd(self):
627 def getcwd(self):
621 cwd = os.getcwd()
628 cwd = os.getcwd()
622 if cwd == self.root: return ''
629 if cwd == self.root: return ''
623 return cwd[len(self.root) + 1:]
630 return cwd[len(self.root) + 1:]
624
631
625 def wfile(self, f, mode='r'):
632 def wfile(self, f, mode='r'):
626 return self.wopener(f, mode)
633 return self.wopener(f, mode)
627
634
628 def transaction(self):
635 def transaction(self):
629 # save dirstate for undo
636 # save dirstate for undo
630 try:
637 try:
631 ds = self.opener("dirstate").read()
638 ds = self.opener("dirstate").read()
632 except IOError:
639 except IOError:
633 ds = ""
640 ds = ""
634 self.opener("undo.dirstate", "w").write(ds)
641 self.opener("undo.dirstate", "w").write(ds)
635
642
636 return transaction.transaction(self.ui.warn,
643 return transaction.transaction(self.ui.warn,
637 self.opener, self.join("journal"),
644 self.opener, self.join("journal"),
638 self.join("undo"))
645 self.join("undo"))
639
646
640 def recover(self):
647 def recover(self):
641 lock = self.lock()
648 lock = self.lock()
642 if os.path.exists(self.join("journal")):
649 if os.path.exists(self.join("journal")):
643 self.ui.status("rolling back interrupted transaction\n")
650 self.ui.status("rolling back interrupted transaction\n")
644 return transaction.rollback(self.opener, self.join("journal"))
651 return transaction.rollback(self.opener, self.join("journal"))
645 else:
652 else:
646 self.ui.warn("no interrupted transaction available\n")
653 self.ui.warn("no interrupted transaction available\n")
647
654
648 def undo(self):
655 def undo(self):
649 lock = self.lock()
656 lock = self.lock()
650 if os.path.exists(self.join("undo")):
657 if os.path.exists(self.join("undo")):
651 self.ui.status("rolling back last transaction\n")
658 self.ui.status("rolling back last transaction\n")
652 transaction.rollback(self.opener, self.join("undo"))
659 transaction.rollback(self.opener, self.join("undo"))
653 self.dirstate = None
660 self.dirstate = None
654 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
661 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
655 self.dirstate = dirstate(self.opener, self.ui, self.root)
662 self.dirstate = dirstate(self.opener, self.ui, self.root)
656 else:
663 else:
657 self.ui.warn("no undo information available\n")
664 self.ui.warn("no undo information available\n")
658
665
659 def lock(self, wait = 1):
666 def lock(self, wait = 1):
660 try:
667 try:
661 return lock.lock(self.join("lock"), 0)
668 return lock.lock(self.join("lock"), 0)
662 except lock.LockHeld, inst:
669 except lock.LockHeld, inst:
663 if wait:
670 if wait:
664 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
671 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
665 return lock.lock(self.join("lock"), wait)
672 return lock.lock(self.join("lock"), wait)
666 raise inst
673 raise inst
667
674
668 def rawcommit(self, files, text, user, date, p1=None, p2=None):
675 def rawcommit(self, files, text, user, date, p1=None, p2=None):
669 orig_parent = self.dirstate.parents()[0] or nullid
676 orig_parent = self.dirstate.parents()[0] or nullid
670 p1 = p1 or self.dirstate.parents()[0] or nullid
677 p1 = p1 or self.dirstate.parents()[0] or nullid
671 p2 = p2 or self.dirstate.parents()[1] or nullid
678 p2 = p2 or self.dirstate.parents()[1] or nullid
672 c1 = self.changelog.read(p1)
679 c1 = self.changelog.read(p1)
673 c2 = self.changelog.read(p2)
680 c2 = self.changelog.read(p2)
674 m1 = self.manifest.read(c1[0])
681 m1 = self.manifest.read(c1[0])
675 mf1 = self.manifest.readflags(c1[0])
682 mf1 = self.manifest.readflags(c1[0])
676 m2 = self.manifest.read(c2[0])
683 m2 = self.manifest.read(c2[0])
677
684
678 if orig_parent == p1:
685 if orig_parent == p1:
679 update_dirstate = 1
686 update_dirstate = 1
680 else:
687 else:
681 update_dirstate = 0
688 update_dirstate = 0
682
689
683 tr = self.transaction()
690 tr = self.transaction()
684 mm = m1.copy()
691 mm = m1.copy()
685 mfm = mf1.copy()
692 mfm = mf1.copy()
686 linkrev = self.changelog.count()
693 linkrev = self.changelog.count()
687 for f in files:
694 for f in files:
688 try:
695 try:
689 t = self.wfile(f).read()
696 t = self.wfile(f).read()
690 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
697 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
691 r = self.file(f)
698 r = self.file(f)
692 mfm[f] = tm
699 mfm[f] = tm
693 mm[f] = r.add(t, {}, tr, linkrev,
700 mm[f] = r.add(t, {}, tr, linkrev,
694 m1.get(f, nullid), m2.get(f, nullid))
701 m1.get(f, nullid), m2.get(f, nullid))
695 if update_dirstate:
702 if update_dirstate:
696 self.dirstate.update([f], "n")
703 self.dirstate.update([f], "n")
697 except IOError:
704 except IOError:
698 try:
705 try:
699 del mm[f]
706 del mm[f]
700 del mfm[f]
707 del mfm[f]
701 if update_dirstate:
708 if update_dirstate:
702 self.dirstate.forget([f])
709 self.dirstate.forget([f])
703 except:
710 except:
704 # deleted from p2?
711 # deleted from p2?
705 pass
712 pass
706
713
707 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
714 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
708 user = user or self.ui.username()
715 user = user or self.ui.username()
709 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
716 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
710 tr.close()
717 tr.close()
711 if update_dirstate:
718 if update_dirstate:
712 self.dirstate.setparents(n, nullid)
719 self.dirstate.setparents(n, nullid)
713
720
714 def commit(self, files = None, text = "", user = None, date = None):
721 def commit(self, files = None, text = "", user = None, date = None):
715 commit = []
722 commit = []
716 remove = []
723 remove = []
717 if files:
724 if files:
718 for f in files:
725 for f in files:
719 s = self.dirstate.state(f)
726 s = self.dirstate.state(f)
720 if s in 'nmai':
727 if s in 'nmai':
721 commit.append(f)
728 commit.append(f)
722 elif s == 'r':
729 elif s == 'r':
723 remove.append(f)
730 remove.append(f)
724 else:
731 else:
725 self.ui.warn("%s not tracked!\n" % f)
732 self.ui.warn("%s not tracked!\n" % f)
726 else:
733 else:
727 (c, a, d, u) = self.changes(None, None)
734 (c, a, d, u) = self.changes(None, None)
728 commit = c + a
735 commit = c + a
729 remove = d
736 remove = d
730
737
731 if not commit and not remove:
738 if not commit and not remove:
732 self.ui.status("nothing changed\n")
739 self.ui.status("nothing changed\n")
733 return
740 return
734
741
735 if not self.hook("precommit"):
742 if not self.hook("precommit"):
736 return 1
743 return 1
737
744
738 p1, p2 = self.dirstate.parents()
745 p1, p2 = self.dirstate.parents()
739 c1 = self.changelog.read(p1)
746 c1 = self.changelog.read(p1)
740 c2 = self.changelog.read(p2)
747 c2 = self.changelog.read(p2)
741 m1 = self.manifest.read(c1[0])
748 m1 = self.manifest.read(c1[0])
742 mf1 = self.manifest.readflags(c1[0])
749 mf1 = self.manifest.readflags(c1[0])
743 m2 = self.manifest.read(c2[0])
750 m2 = self.manifest.read(c2[0])
744 lock = self.lock()
751 lock = self.lock()
745 tr = self.transaction()
752 tr = self.transaction()
746
753
747 # check in files
754 # check in files
748 new = {}
755 new = {}
749 linkrev = self.changelog.count()
756 linkrev = self.changelog.count()
750 commit.sort()
757 commit.sort()
751 for f in commit:
758 for f in commit:
752 self.ui.note(f + "\n")
759 self.ui.note(f + "\n")
753 try:
760 try:
754 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
761 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
755 t = self.wfile(f).read()
762 t = self.wfile(f).read()
756 except IOError:
763 except IOError:
757 self.warn("trouble committing %s!\n" % f)
764 self.warn("trouble committing %s!\n" % f)
758 raise
765 raise
759
766
760 meta = {}
767 meta = {}
761 cp = self.dirstate.copied(f)
768 cp = self.dirstate.copied(f)
762 if cp:
769 if cp:
763 meta["copy"] = cp
770 meta["copy"] = cp
764 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
771 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
765 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
772 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
766
773
767 r = self.file(f)
774 r = self.file(f)
768 fp1 = m1.get(f, nullid)
775 fp1 = m1.get(f, nullid)
769 fp2 = m2.get(f, nullid)
776 fp2 = m2.get(f, nullid)
770 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
777 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
771
778
772 # update manifest
779 # update manifest
773 m1.update(new)
780 m1.update(new)
774 for f in remove:
781 for f in remove:
775 if f in m1:
782 if f in m1:
776 del m1[f]
783 del m1[f]
777 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], (new,remove))
784 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], (new,remove))
778
785
779 # add changeset
786 # add changeset
780 new = new.keys()
787 new = new.keys()
781 new.sort()
788 new.sort()
782
789
783 if not text:
790 if not text:
784 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
791 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
785 edittext += "".join(["HG: changed %s\n" % f for f in new])
792 edittext += "".join(["HG: changed %s\n" % f for f in new])
786 edittext += "".join(["HG: removed %s\n" % f for f in remove])
793 edittext += "".join(["HG: removed %s\n" % f for f in remove])
787 edittext = self.ui.edit(edittext)
794 edittext = self.ui.edit(edittext)
788 if not edittext.rstrip():
795 if not edittext.rstrip():
789 return 1
796 return 1
790 text = edittext
797 text = edittext
791
798
792 user = user or self.ui.username()
799 user = user or self.ui.username()
793 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
800 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
794
801
795 if not self.hook("commit", node=hex(n)):
802 if not self.hook("commit", node=hex(n)):
796 return 1
803 return 1
797
804
798 tr.close()
805 tr.close()
799
806
800 self.dirstate.setparents(n)
807 self.dirstate.setparents(n)
801 self.dirstate.update(new, "n")
808 self.dirstate.update(new, "n")
802 self.dirstate.forget(remove)
809 self.dirstate.forget(remove)
803
810
804 def changes(self, node1, node2, files=None):
811 def changes(self, node1, node2, files=None):
805 mf2, u = None, []
812 mf2, u = None, []
806
813
807 def fcmp(fn, mf):
814 def fcmp(fn, mf):
808 t1 = self.wfile(fn).read()
815 t1 = self.wfile(fn).read()
809 t2 = self.file(fn).revision(mf[fn])
816 t2 = self.file(fn).revision(mf[fn])
810 return cmp(t1, t2)
817 return cmp(t1, t2)
811
818
812 # are we comparing the working directory?
819 # are we comparing the working directory?
813 if not node2:
820 if not node2:
814 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
821 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
815
822
816 # are we comparing working dir against its parent?
823 # are we comparing working dir against its parent?
817 if not node1:
824 if not node1:
818 if l:
825 if l:
819 # do a full compare of any files that might have changed
826 # do a full compare of any files that might have changed
820 change = self.changelog.read(self.dirstate.parents()[0])
827 change = self.changelog.read(self.dirstate.parents()[0])
821 mf2 = self.manifest.read(change[0])
828 mf2 = self.manifest.read(change[0])
822 for f in l:
829 for f in l:
823 if fcmp(f, mf2):
830 if fcmp(f, mf2):
824 c.append(f)
831 c.append(f)
825
832
826 for l in c, a, d, u:
833 for l in c, a, d, u:
827 l.sort()
834 l.sort()
828
835
829 return (c, a, d, u)
836 return (c, a, d, u)
830
837
831 # are we comparing working dir against non-tip?
838 # are we comparing working dir against non-tip?
832 # generate a pseudo-manifest for the working dir
839 # generate a pseudo-manifest for the working dir
833 if not node2:
840 if not node2:
834 if not mf2:
841 if not mf2:
835 change = self.changelog.read(self.dirstate.parents()[0])
842 change = self.changelog.read(self.dirstate.parents()[0])
836 mf2 = self.manifest.read(change[0]).copy()
843 mf2 = self.manifest.read(change[0]).copy()
837 for f in a + c + l:
844 for f in a + c + l:
838 mf2[f] = ""
845 mf2[f] = ""
839 for f in d:
846 for f in d:
840 if f in mf2: del mf2[f]
847 if f in mf2: del mf2[f]
841 else:
848 else:
842 change = self.changelog.read(node2)
849 change = self.changelog.read(node2)
843 mf2 = self.manifest.read(change[0])
850 mf2 = self.manifest.read(change[0])
844
851
845 # flush lists from dirstate before comparing manifests
852 # flush lists from dirstate before comparing manifests
846 c, a = [], []
853 c, a = [], []
847
854
848 change = self.changelog.read(node1)
855 change = self.changelog.read(node1)
849 mf1 = self.manifest.read(change[0]).copy()
856 mf1 = self.manifest.read(change[0]).copy()
850
857
851 for fn in mf2:
858 for fn in mf2:
852 if mf1.has_key(fn):
859 if mf1.has_key(fn):
853 if mf1[fn] != mf2[fn]:
860 if mf1[fn] != mf2[fn]:
854 if mf2[fn] != "" or fcmp(fn, mf1):
861 if mf2[fn] != "" or fcmp(fn, mf1):
855 c.append(fn)
862 c.append(fn)
856 del mf1[fn]
863 del mf1[fn]
857 else:
864 else:
858 a.append(fn)
865 a.append(fn)
859
866
860 d = mf1.keys()
867 d = mf1.keys()
861
868
862 for l in c, a, d, u:
869 for l in c, a, d, u:
863 l.sort()
870 l.sort()
864
871
865 return (c, a, d, u)
872 return (c, a, d, u)
866
873
867 def add(self, list):
874 def add(self, list):
868 for f in list:
875 for f in list:
869 p = self.wjoin(f)
876 p = self.wjoin(f)
870 if not os.path.exists(p):
877 if not os.path.exists(p):
871 self.ui.warn("%s does not exist!\n" % f)
878 self.ui.warn("%s does not exist!\n" % f)
872 elif not os.path.isfile(p):
879 elif not os.path.isfile(p):
873 self.ui.warn("%s not added: mercurial only supports files currently\n" % f)
880 self.ui.warn("%s not added: mercurial only supports files currently\n" % f)
874 elif self.dirstate.state(f) == 'n':
881 elif self.dirstate.state(f) == 'n':
875 self.ui.warn("%s already tracked!\n" % f)
882 self.ui.warn("%s already tracked!\n" % f)
876 else:
883 else:
877 self.dirstate.update([f], "a")
884 self.dirstate.update([f], "a")
878
885
879 def forget(self, list):
886 def forget(self, list):
880 for f in list:
887 for f in list:
881 if self.dirstate.state(f) not in 'ai':
888 if self.dirstate.state(f) not in 'ai':
882 self.ui.warn("%s not added!\n" % f)
889 self.ui.warn("%s not added!\n" % f)
883 else:
890 else:
884 self.dirstate.forget([f])
891 self.dirstate.forget([f])
885
892
886 def remove(self, list):
893 def remove(self, list):
887 for f in list:
894 for f in list:
888 p = self.wjoin(f)
895 p = self.wjoin(f)
889 if os.path.exists(p):
896 if os.path.exists(p):
890 self.ui.warn("%s still exists!\n" % f)
897 self.ui.warn("%s still exists!\n" % f)
891 elif self.dirstate.state(f) == 'a':
898 elif self.dirstate.state(f) == 'a':
892 self.ui.warn("%s never committed!\n" % f)
899 self.ui.warn("%s never committed!\n" % f)
893 self.dirstate.forget(f)
900 self.dirstate.forget(f)
894 elif f not in self.dirstate:
901 elif f not in self.dirstate:
895 self.ui.warn("%s not tracked!\n" % f)
902 self.ui.warn("%s not tracked!\n" % f)
896 else:
903 else:
897 self.dirstate.update([f], "r")
904 self.dirstate.update([f], "r")
898
905
899 def copy(self, source, dest):
906 def copy(self, source, dest):
900 p = self.wjoin(dest)
907 p = self.wjoin(dest)
901 if not os.path.exists(dest):
908 if not os.path.exists(dest):
902 self.ui.warn("%s does not exist!\n" % dest)
909 self.ui.warn("%s does not exist!\n" % dest)
903 elif not os.path.isfile(dest):
910 elif not os.path.isfile(dest):
904 self.ui.warn("copy failed: %s is not a file\n" % dest)
911 self.ui.warn("copy failed: %s is not a file\n" % dest)
905 else:
912 else:
906 if self.dirstate.state(dest) == '?':
913 if self.dirstate.state(dest) == '?':
907 self.dirstate.update([dest], "a")
914 self.dirstate.update([dest], "a")
908 self.dirstate.copy(source, dest)
915 self.dirstate.copy(source, dest)
909
916
910 def heads(self):
917 def heads(self):
911 return self.changelog.heads()
918 return self.changelog.heads()
912
919
913 def branches(self, nodes):
920 def branches(self, nodes):
914 if not nodes: nodes = [self.changelog.tip()]
921 if not nodes: nodes = [self.changelog.tip()]
915 b = []
922 b = []
916 for n in nodes:
923 for n in nodes:
917 t = n
924 t = n
918 while n:
925 while n:
919 p = self.changelog.parents(n)
926 p = self.changelog.parents(n)
920 if p[1] != nullid or p[0] == nullid:
927 if p[1] != nullid or p[0] == nullid:
921 b.append((t, n, p[0], p[1]))
928 b.append((t, n, p[0], p[1]))
922 break
929 break
923 n = p[0]
930 n = p[0]
924 return b
931 return b
925
932
926 def between(self, pairs):
933 def between(self, pairs):
927 r = []
934 r = []
928
935
929 for top, bottom in pairs:
936 for top, bottom in pairs:
930 n, l, i = top, [], 0
937 n, l, i = top, [], 0
931 f = 1
938 f = 1
932
939
933 while n != bottom:
940 while n != bottom:
934 p = self.changelog.parents(n)[0]
941 p = self.changelog.parents(n)[0]
935 if i == f:
942 if i == f:
936 l.append(n)
943 l.append(n)
937 f = f * 2
944 f = f * 2
938 n = p
945 n = p
939 i += 1
946 i += 1
940
947
941 r.append(l)
948 r.append(l)
942
949
943 return r
950 return r
944
951
945 def newer(self, nodes):
952 def newer(self, nodes):
946 m = {}
953 m = {}
947 nl = []
954 nl = []
948 pm = {}
955 pm = {}
949 cl = self.changelog
956 cl = self.changelog
950 t = l = cl.count()
957 t = l = cl.count()
951
958
952 # find the lowest numbered node
959 # find the lowest numbered node
953 for n in nodes:
960 for n in nodes:
954 l = min(l, cl.rev(n))
961 l = min(l, cl.rev(n))
955 m[n] = 1
962 m[n] = 1
956
963
957 for i in xrange(l, t):
964 for i in xrange(l, t):
958 n = cl.node(i)
965 n = cl.node(i)
959 if n in m: # explicitly listed
966 if n in m: # explicitly listed
960 pm[n] = 1
967 pm[n] = 1
961 nl.append(n)
968 nl.append(n)
962 continue
969 continue
963 for p in cl.parents(n):
970 for p in cl.parents(n):
964 if p in pm: # parent listed
971 if p in pm: # parent listed
965 pm[n] = 1
972 pm[n] = 1
966 nl.append(n)
973 nl.append(n)
967 break
974 break
968
975
969 return nl
976 return nl
970
977
971 def findincoming(self, remote, base={}):
978 def findincoming(self, remote, base={}):
972 m = self.changelog.nodemap
979 m = self.changelog.nodemap
973 search = []
980 search = []
974 fetch = []
981 fetch = []
975 seen = {}
982 seen = {}
976 seenbranch = {}
983 seenbranch = {}
977
984
978 # assume we're closer to the tip than the root
985 # assume we're closer to the tip than the root
979 # and start by examining the heads
986 # and start by examining the heads
980 self.ui.status("searching for changes\n")
987 self.ui.status("searching for changes\n")
981 heads = remote.heads()
988 heads = remote.heads()
982 unknown = []
989 unknown = []
983 for h in heads:
990 for h in heads:
984 if h not in m:
991 if h not in m:
985 unknown.append(h)
992 unknown.append(h)
986 else:
993 else:
987 base[h] = 1
994 base[h] = 1
988
995
989 if not unknown:
996 if not unknown:
990 return None
997 return None
991
998
992 rep = {}
999 rep = {}
993 reqcnt = 0
1000 reqcnt = 0
994
1001
995 # search through remote branches
1002 # search through remote branches
996 # a 'branch' here is a linear segment of history, with four parts:
1003 # a 'branch' here is a linear segment of history, with four parts:
997 # head, root, first parent, second parent
1004 # head, root, first parent, second parent
998 # (a branch always has two parents (or none) by definition)
1005 # (a branch always has two parents (or none) by definition)
999 unknown = remote.branches(unknown)
1006 unknown = remote.branches(unknown)
1000 while unknown:
1007 while unknown:
1001 r = []
1008 r = []
1002 while unknown:
1009 while unknown:
1003 n = unknown.pop(0)
1010 n = unknown.pop(0)
1004 if n[0] in seen:
1011 if n[0] in seen:
1005 continue
1012 continue
1006
1013
1007 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1014 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1008 if n[0] == nullid:
1015 if n[0] == nullid:
1009 break
1016 break
1010 if n in seenbranch:
1017 if n in seenbranch:
1011 self.ui.debug("branch already found\n")
1018 self.ui.debug("branch already found\n")
1012 continue
1019 continue
1013 if n[1] and n[1] in m: # do we know the base?
1020 if n[1] and n[1] in m: # do we know the base?
1014 self.ui.debug("found incomplete branch %s:%s\n"
1021 self.ui.debug("found incomplete branch %s:%s\n"
1015 % (short(n[0]), short(n[1])))
1022 % (short(n[0]), short(n[1])))
1016 search.append(n) # schedule branch range for scanning
1023 search.append(n) # schedule branch range for scanning
1017 seenbranch[n] = 1
1024 seenbranch[n] = 1
1018 else:
1025 else:
1019 if n[1] not in seen and n[1] not in fetch:
1026 if n[1] not in seen and n[1] not in fetch:
1020 if n[2] in m and n[3] in m:
1027 if n[2] in m and n[3] in m:
1021 self.ui.debug("found new changeset %s\n" %
1028 self.ui.debug("found new changeset %s\n" %
1022 short(n[1]))
1029 short(n[1]))
1023 fetch.append(n[1]) # earliest unknown
1030 fetch.append(n[1]) # earliest unknown
1024 base[n[2]] = 1 # latest known
1031 base[n[2]] = 1 # latest known
1025 continue
1032 continue
1026
1033
1027 for a in n[2:4]:
1034 for a in n[2:4]:
1028 if a not in rep:
1035 if a not in rep:
1029 r.append(a)
1036 r.append(a)
1030 rep[a] = 1
1037 rep[a] = 1
1031
1038
1032 seen[n[0]] = 1
1039 seen[n[0]] = 1
1033
1040
1034 if r:
1041 if r:
1035 reqcnt += 1
1042 reqcnt += 1
1036 self.ui.debug("request %d: %s\n" %
1043 self.ui.debug("request %d: %s\n" %
1037 (reqcnt, " ".join(map(short, r))))
1044 (reqcnt, " ".join(map(short, r))))
1038 for p in range(0, len(r), 10):
1045 for p in range(0, len(r), 10):
1039 for b in remote.branches(r[p:p+10]):
1046 for b in remote.branches(r[p:p+10]):
1040 self.ui.debug("received %s:%s\n" %
1047 self.ui.debug("received %s:%s\n" %
1041 (short(b[0]), short(b[1])))
1048 (short(b[0]), short(b[1])))
1042 if b[0] not in m and b[0] not in seen:
1049 if b[0] not in m and b[0] not in seen:
1043 unknown.append(b)
1050 unknown.append(b)
1044
1051
1045 # do binary search on the branches we found
1052 # do binary search on the branches we found
1046 while search:
1053 while search:
1047 n = search.pop(0)
1054 n = search.pop(0)
1048 reqcnt += 1
1055 reqcnt += 1
1049 l = remote.between([(n[0], n[1])])[0]
1056 l = remote.between([(n[0], n[1])])[0]
1050 l.append(n[1])
1057 l.append(n[1])
1051 p = n[0]
1058 p = n[0]
1052 f = 1
1059 f = 1
1053 for i in l:
1060 for i in l:
1054 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1061 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1055 if i in m:
1062 if i in m:
1056 if f <= 2:
1063 if f <= 2:
1057 self.ui.debug("found new branch changeset %s\n" %
1064 self.ui.debug("found new branch changeset %s\n" %
1058 short(p))
1065 short(p))
1059 fetch.append(p)
1066 fetch.append(p)
1060 base[i] = 1
1067 base[i] = 1
1061 else:
1068 else:
1062 self.ui.debug("narrowed branch search to %s:%s\n"
1069 self.ui.debug("narrowed branch search to %s:%s\n"
1063 % (short(p), short(i)))
1070 % (short(p), short(i)))
1064 search.append((p, i))
1071 search.append((p, i))
1065 break
1072 break
1066 p, f = i, f * 2
1073 p, f = i, f * 2
1067
1074
1068 # sanity check our fetch list
1075 # sanity check our fetch list
1069 for f in fetch:
1076 for f in fetch:
1070 if f in m:
1077 if f in m:
1071 raise RepoError("already have changeset " + short(f[:4]))
1078 raise RepoError("already have changeset " + short(f[:4]))
1072
1079
1073 if base.keys() == [nullid]:
1080 if base.keys() == [nullid]:
1074 self.ui.warn("warning: pulling from an unrelated repository!\n")
1081 self.ui.warn("warning: pulling from an unrelated repository!\n")
1075
1082
1076 self.ui.note("adding new changesets starting at " +
1083 self.ui.note("adding new changesets starting at " +
1077 " ".join([short(f) for f in fetch]) + "\n")
1084 " ".join([short(f) for f in fetch]) + "\n")
1078
1085
1079 self.ui.debug("%d total queries\n" % reqcnt)
1086 self.ui.debug("%d total queries\n" % reqcnt)
1080
1087
1081 return fetch
1088 return fetch
1082
1089
1083 def findoutgoing(self, remote):
1090 def findoutgoing(self, remote):
1084 base = {}
1091 base = {}
1085 self.findincoming(remote, base)
1092 self.findincoming(remote, base)
1086 remain = dict.fromkeys(self.changelog.nodemap)
1093 remain = dict.fromkeys(self.changelog.nodemap)
1087
1094
1088 # prune everything remote has from the tree
1095 # prune everything remote has from the tree
1089 del remain[nullid]
1096 del remain[nullid]
1090 remove = base.keys()
1097 remove = base.keys()
1091 while remove:
1098 while remove:
1092 n = remove.pop(0)
1099 n = remove.pop(0)
1093 if n in remain:
1100 if n in remain:
1094 del remain[n]
1101 del remain[n]
1095 for p in self.changelog.parents(n):
1102 for p in self.changelog.parents(n):
1096 remove.append(p)
1103 remove.append(p)
1097
1104
1098 # find every node whose parents have been pruned
1105 # find every node whose parents have been pruned
1099 subset = []
1106 subset = []
1100 for n in remain:
1107 for n in remain:
1101 p1, p2 = self.changelog.parents(n)
1108 p1, p2 = self.changelog.parents(n)
1102 if p1 not in remain and p2 not in remain:
1109 if p1 not in remain and p2 not in remain:
1103 subset.append(n)
1110 subset.append(n)
1104
1111
1105 # this is the set of all roots we have to push
1112 # this is the set of all roots we have to push
1106 return subset
1113 return subset
1107
1114
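# Standalone sketch (toy graph, not hg.py itself) of what findoutgoing() does:
# prune every node the remote already has (the `base` set and its ancestors),
# then keep the nodes whose parents were both pruned -- the roots of the
# changesets the remote side is missing.  Names here are illustrative only.

def outgoing_roots(parents, base):
    remain = set(parents)                  # every node we have locally
    remove = list(base)
    while remove:                          # prune the remote-known subgraph
        n = remove.pop()
        if n in remain:
            remain.remove(n)
            remove.extend(parents[n])
    # roots: remaining nodes whose parents are both outside `remain`
    return [n for n in remain
            if all(p not in remain for p in parents[n])]

if __name__ == "__main__":
    # a -> b -> c -> d, plus a side branch c -> e; remote already has b (and a)
    parents = {"a": ["null", "null"], "b": ["a", "null"],
               "c": ["b", "null"], "d": ["c", "null"], "e": ["c", "null"]}
    print(sorted(outgoing_roots(parents, base=["b"])))    # -> ['c']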
1108 def pull(self, remote):
1115 def pull(self, remote):
1109 lock = self.lock()
1116 lock = self.lock()
1110
1117
1111 # if we have an empty repo, fetch everything
1118 # if we have an empty repo, fetch everything
1112 if self.changelog.tip() == nullid:
1119 if self.changelog.tip() == nullid:
1113 self.ui.status("requesting all changes\n")
1120 self.ui.status("requesting all changes\n")
1114 fetch = [nullid]
1121 fetch = [nullid]
1115 else:
1122 else:
1116 fetch = self.findincoming(remote)
1123 fetch = self.findincoming(remote)
1117
1124
1118 if not fetch:
1125 if not fetch:
1119 self.ui.status("no changes found\n")
1126 self.ui.status("no changes found\n")
1120 return 1
1127 return 1
1121
1128
1122 cg = remote.changegroup(fetch)
1129 cg = remote.changegroup(fetch)
1123 return self.addchangegroup(cg)
1130 return self.addchangegroup(cg)
1124
1131
1125 def push(self, remote):
1132 def push(self, remote):
1126 lock = remote.lock()
1133 lock = remote.lock()
1127 update = self.findoutgoing(remote)
1134 update = self.findoutgoing(remote)
1128 if not update:
1135 if not update:
1129 self.ui.status("no changes found\n")
1136 self.ui.status("no changes found\n")
1130 return 1
1137 return 1
1131
1138
1132 cg = self.changegroup(update)
1139 cg = self.changegroup(update)
1133 return remote.addchangegroup(cg)
1140 return remote.addchangegroup(cg)
1134
1141
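# Hedged usage sketch: pull() and push() above are symmetric -- each computes
# what one side is missing and streams a changegroup across.  The paths and
# the `u` ui object below are made up for illustration, not tested commands.
#
#   local  = repository(u, ".")
#   remote = repository(u, "http://example.com/hg/project")
#   local.pull(remote)                      # fetch what we are missing
#   # pushing needs a writable (e.g. ssh://) remote:
#   # local.push(repository(u, "ssh://example.com//repos/project"))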
1135 def changegroup(self, basenodes):
1142 def changegroup(self, basenodes):
1136 class genread:
1143 class genread:
1137 def __init__(self, generator):
1144 def __init__(self, generator):
1138 self.g = generator
1145 self.g = generator
1139 self.buf = ""
1146 self.buf = ""
1140 def read(self, l):
1147 def read(self, l):
1141 while l > len(self.buf):
1148 while l > len(self.buf):
1142 try:
1149 try:
1143 self.buf += self.g.next()
1150 self.buf += self.g.next()
1144 except StopIteration:
1151 except StopIteration:
1145 break
1152 break
1146 d, self.buf = self.buf[:l], self.buf[l:]
1153 d, self.buf = self.buf[:l], self.buf[l:]
1147 return d
1154 return d
1148
1155
1149 def gengroup():
1156 def gengroup():
1150 nodes = self.newer(basenodes)
1157 nodes = self.newer(basenodes)
1151
1158
1152 # construct the link map
1159 # construct the link map
1153 linkmap = {}
1160 linkmap = {}
1154 for n in nodes:
1161 for n in nodes:
1155 linkmap[self.changelog.rev(n)] = n
1162 linkmap[self.changelog.rev(n)] = n
1156
1163
1157 # construct a list of all changed files
1164 # construct a list of all changed files
1158 changed = {}
1165 changed = {}
1159 for n in nodes:
1166 for n in nodes:
1160 c = self.changelog.read(n)
1167 c = self.changelog.read(n)
1161 for f in c[3]:
1168 for f in c[3]:
1162 changed[f] = 1
1169 changed[f] = 1
1163 changed = changed.keys()
1170 changed = changed.keys()
1164 changed.sort()
1171 changed.sort()
1165
1172
1166 # the changegroup is changesets + manifests + all file revs
1173 # the changegroup is changesets + manifests + all file revs
1167 revs = [ self.changelog.rev(n) for n in nodes ]
1174 revs = [ self.changelog.rev(n) for n in nodes ]
1168
1175
1169 for y in self.changelog.group(linkmap): yield y
1176 for y in self.changelog.group(linkmap): yield y
1170 for y in self.manifest.group(linkmap): yield y
1177 for y in self.manifest.group(linkmap): yield y
1171 for f in changed:
1178 for f in changed:
1172 yield struct.pack(">l", len(f) + 4) + f
1179 yield struct.pack(">l", len(f) + 4) + f
1173 g = self.file(f).group(linkmap)
1180 g = self.file(f).group(linkmap)
1174 for y in g:
1181 for y in g:
1175 yield y
1182 yield y
1176
1183
1177 yield struct.pack(">l", 0)
1184 yield struct.pack(">l", 0)
1178
1185
1179 return genread(gengroup())
1186 return genread(gengroup())
1180
1187
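# Standalone sketch of the chunk framing genread()/gengroup() emit above: each
# chunk is a 4-byte big-endian length (counting the length field itself)
# followed by the payload, and a length of 0 terminates a group.  This mirrors
# getchunk() in addchangegroup() below; the helper names are illustrative.

import struct

def write_chunk(data):
    return struct.pack(">l", len(data) + 4) + data

def read_chunks(buf):
    # yield payloads until the zero-length terminator
    pos = 0
    while pos + 4 <= len(buf):
        (l,) = struct.unpack(">l", buf[pos:pos + 4])
        if l <= 4:                          # zero (or degenerate) chunk ends the group
            return
        yield buf[pos + 4:pos + l]
        pos += l

if __name__ == "__main__":
    stream = write_chunk(b"first") + write_chunk(b"second") + struct.pack(">l", 0)
    print([c.decode() for c in read_chunks(stream)])      # ['first', 'second']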
1181 def addchangegroup(self, source):
1188 def addchangegroup(self, source):
1182
1189
1183 def getchunk():
1190 def getchunk():
1184 d = source.read(4)
1191 d = source.read(4)
1185 if not d: return ""
1192 if not d: return ""
1186 l = struct.unpack(">l", d)[0]
1193 l = struct.unpack(">l", d)[0]
1187 if l <= 4: return ""
1194 if l <= 4: return ""
1188 return source.read(l - 4)
1195 return source.read(l - 4)
1189
1196
1190 def getgroup():
1197 def getgroup():
1191 while 1:
1198 while 1:
1192 c = getchunk()
1199 c = getchunk()
1193 if not c: break
1200 if not c: break
1194 yield c
1201 yield c
1195
1202
1196 def csmap(x):
1203 def csmap(x):
1197 self.ui.debug("add changeset %s\n" % short(x))
1204 self.ui.debug("add changeset %s\n" % short(x))
1198 return self.changelog.count()
1205 return self.changelog.count()
1199
1206
1200 def revmap(x):
1207 def revmap(x):
1201 return self.changelog.rev(x)
1208 return self.changelog.rev(x)
1202
1209
1203 if not source: return
1210 if not source: return
1204 changesets = files = revisions = 0
1211 changesets = files = revisions = 0
1205
1212
1206 tr = self.transaction()
1213 tr = self.transaction()
1207
1214
1208 # pull off the changeset group
1215 # pull off the changeset group
1209 self.ui.status("adding changesets\n")
1216 self.ui.status("adding changesets\n")
1210 co = self.changelog.tip()
1217 co = self.changelog.tip()
1211 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1218 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1212 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1219 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1213
1220
1214 # pull off the manifest group
1221 # pull off the manifest group
1215 self.ui.status("adding manifests\n")
1222 self.ui.status("adding manifests\n")
1216 mm = self.manifest.tip()
1223 mm = self.manifest.tip()
1217 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1224 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1218
1225
1219 # process the files
1226 # process the files
1220 self.ui.status("adding file revisions\n")
1227 self.ui.status("adding file revisions\n")
1221 while 1:
1228 while 1:
1222 f = getchunk()
1229 f = getchunk()
1223 if not f: break
1230 if not f: break
1224 self.ui.debug("adding %s revisions\n" % f)
1231 self.ui.debug("adding %s revisions\n" % f)
1225 fl = self.file(f)
1232 fl = self.file(f)
1226 o = fl.count()
1233 o = fl.count()
1227 n = fl.addgroup(getgroup(), revmap, tr)
1234 n = fl.addgroup(getgroup(), revmap, tr)
1228 revisions += fl.count() - o
1235 revisions += fl.count() - o
1229 files += 1
1236 files += 1
1230
1237
1231 self.ui.status(("modified %d files, added %d changesets" +
1238 self.ui.status(("modified %d files, added %d changesets" +
1232 " and %d new revisions\n")
1239 " and %d new revisions\n")
1233 % (files, changesets, revisions))
1240 % (files, changesets, revisions))
1234
1241
1235 tr.close()
1242 tr.close()
1236 return
1243 return
1237
1244
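# Standalone sketch of the stream layout addchangegroup() consumes (and
# changegroup() produces): a changelog group, a manifest group, then for each
# changed file a chunk carrying the file name followed by that file's revision
# group; an empty chunk ends the file list.  Helper names are illustrative.

import io
import struct

ZERO = struct.pack(">l", 0)

def chunk(data):
    return struct.pack(">l", len(data) + 4) + data

def getchunk(read):
    d = read(4)
    if not d:
        return b""
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        return b""
    return read(l - 4)

def getgroup(read):
    out = []
    while True:
        c = getchunk(read)
        if not c:
            return out
        out.append(c)

if __name__ == "__main__":
    stream = (chunk(b"cs1") + ZERO +                      # changelog group
              chunk(b"mf1") + ZERO +                      # manifest group
              chunk(b"a.txt") + chunk(b"rev1") + ZERO +   # one file's group
              ZERO)                                       # no more files
    read = io.BytesIO(stream).read
    print(getgroup(read))                                 # changelog chunks
    print(getgroup(read))                                 # manifest chunks
    while True:
        fname = getchunk(read)
        if not fname:
            break
        print(fname, getgroup(read))                      # a.txt [b'rev1']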
1238 def update(self, node, allow=False, force=False, choose=None,
1245 def update(self, node, allow=False, force=False, choose=None,
1239 moddirstate=True):
1246 moddirstate=True):
1240 pl = self.dirstate.parents()
1247 pl = self.dirstate.parents()
1241 if not force and pl[1] != nullid:
1248 if not force and pl[1] != nullid:
1242 self.ui.warn("aborting: outstanding uncommitted merges\n")
1249 self.ui.warn("aborting: outstanding uncommitted merges\n")
1243 return
1250 return
1244
1251
1245 p1, p2 = pl[0], node
1252 p1, p2 = pl[0], node
1246 pa = self.changelog.ancestor(p1, p2)
1253 pa = self.changelog.ancestor(p1, p2)
1247 m1n = self.changelog.read(p1)[0]
1254 m1n = self.changelog.read(p1)[0]
1248 m2n = self.changelog.read(p2)[0]
1255 m2n = self.changelog.read(p2)[0]
1249 man = self.manifest.ancestor(m1n, m2n)
1256 man = self.manifest.ancestor(m1n, m2n)
1250 m1 = self.manifest.read(m1n)
1257 m1 = self.manifest.read(m1n)
1251 mf1 = self.manifest.readflags(m1n)
1258 mf1 = self.manifest.readflags(m1n)
1252 m2 = self.manifest.read(m2n)
1259 m2 = self.manifest.read(m2n)
1253 mf2 = self.manifest.readflags(m2n)
1260 mf2 = self.manifest.readflags(m2n)
1254 ma = self.manifest.read(man)
1261 ma = self.manifest.read(man)
1255 mfa = self.manifest.readflags(man)
1262 mfa = self.manifest.readflags(man)
1256
1263
1257 (c, a, d, u) = self.changes(None, None)
1264 (c, a, d, u) = self.changes(None, None)
1258
1265
1259 # is this a jump, or a merge? i.e. is there a linear path
1266 # is this a jump, or a merge? i.e. is there a linear path
1260 # from p1 to p2?
1267 # from p1 to p2?
1261 linear_path = (pa == p1 or pa == p2)
1268 linear_path = (pa == p1 or pa == p2)
1262
1269
1263 # resolve the manifest to determine which files
1270 # resolve the manifest to determine which files
1264 # we care about merging
1271 # we care about merging
1265 self.ui.note("resolving manifests\n")
1272 self.ui.note("resolving manifests\n")
1266 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1273 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1267 (force, allow, moddirstate, linear_path))
1274 (force, allow, moddirstate, linear_path))
1268 self.ui.debug(" ancestor %s local %s remote %s\n" %
1275 self.ui.debug(" ancestor %s local %s remote %s\n" %
1269 (short(man), short(m1n), short(m2n)))
1276 (short(man), short(m1n), short(m2n)))
1270
1277
1271 merge = {}
1278 merge = {}
1272 get = {}
1279 get = {}
1273 remove = []
1280 remove = []
1274 mark = {}
1281 mark = {}
1275
1282
1276 # construct a working dir manifest
1283 # construct a working dir manifest
1277 mw = m1.copy()
1284 mw = m1.copy()
1278 mfw = mf1.copy()
1285 mfw = mf1.copy()
1279 umap = dict.fromkeys(u)
1286 umap = dict.fromkeys(u)
1280
1287
1281 for f in a + c + u:
1288 for f in a + c + u:
1282 mw[f] = ""
1289 mw[f] = ""
1283 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1290 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1284
1291
1285 for f in d:
1292 for f in d:
1286 if f in mw: del mw[f]
1293 if f in mw: del mw[f]
1287
1294
1288 # If we're jumping between revisions (as opposed to merging),
1295 # If we're jumping between revisions (as opposed to merging),
1289 # and if neither the working directory nor the target rev has
1296 # and if neither the working directory nor the target rev has
1290 # the file, then we need to remove it from the dirstate, to
1297 # the file, then we need to remove it from the dirstate, to
1291 # prevent the dirstate from listing the file when it is no
1298 # prevent the dirstate from listing the file when it is no
1292 # longer in the manifest.
1299 # longer in the manifest.
1293 if moddirstate and linear_path and f not in m2:
1300 if moddirstate and linear_path and f not in m2:
1294 self.dirstate.forget((f,))
1301 self.dirstate.forget((f,))
1295
1302
1296 # Compare manifests
1303 # Compare manifests
1297 for f, n in mw.iteritems():
1304 for f, n in mw.iteritems():
1298 if choose and not choose(f): continue
1305 if choose and not choose(f): continue
1299 if f in m2:
1306 if f in m2:
1300 s = 0
1307 s = 0
1301
1308
1302 # is the wfile new since m1, and match m2?
1309 # is the wfile new since m1, and match m2?
1303 if f not in m1:
1310 if f not in m1:
1304 t1 = self.wfile(f).read()
1311 t1 = self.wfile(f).read()
1305 t2 = self.file(f).revision(m2[f])
1312 t2 = self.file(f).revision(m2[f])
1306 if cmp(t1, t2) == 0:
1313 if cmp(t1, t2) == 0:
1307 mark[f] = 1
1314 mark[f] = 1
1308 n = m2[f]
1315 n = m2[f]
1309 del t1, t2
1316 del t1, t2
1310
1317
1311 # are files different?
1318 # are files different?
1312 if n != m2[f]:
1319 if n != m2[f]:
1313 a = ma.get(f, nullid)
1320 a = ma.get(f, nullid)
1314 # are both different from the ancestor?
1321 # are both different from the ancestor?
1315 if n != a and m2[f] != a:
1322 if n != a and m2[f] != a:
1316 self.ui.debug(" %s versions differ, resolve\n" % f)
1323 self.ui.debug(" %s versions differ, resolve\n" % f)
1317 # merge executable bits
1324 # merge executable bits
1318 # "if we changed or they changed, change in merge"
1325 # "if we changed or they changed, change in merge"
1319 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1326 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1320 mode = ((a^b) | (a^c)) ^ a
1327 mode = ((a^b) | (a^c)) ^ a
1321 merge[f] = (m1.get(f, nullid), m2[f], mode)
1328 merge[f] = (m1.get(f, nullid), m2[f], mode)
1322 s = 1
1329 s = 1
1323 # are we clobbering?
1330 # are we clobbering?
1324 # is remote's version newer?
1331 # is remote's version newer?
1325 # or are we going back in time?
1332 # or are we going back in time?
1326 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1333 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1327 self.ui.debug(" remote %s is newer, get\n" % f)
1334 self.ui.debug(" remote %s is newer, get\n" % f)
1328 get[f] = m2[f]
1335 get[f] = m2[f]
1329 s = 1
1336 s = 1
1330 else:
1337 else:
1331 mark[f] = 1
1338 mark[f] = 1
1332 elif f in umap:
1339 elif f in umap:
1333 # this unknown file is the same as the checkout
1340 # this unknown file is the same as the checkout
1334 get[f] = m2[f]
1341 get[f] = m2[f]
1335
1342
1336 if not s and mfw[f] != mf2[f]:
1343 if not s and mfw[f] != mf2[f]:
1337 if force:
1344 if force:
1338 self.ui.debug(" updating permissions for %s\n" % f)
1345 self.ui.debug(" updating permissions for %s\n" % f)
1339 util.set_exec(self.wjoin(f), mf2[f])
1346 util.set_exec(self.wjoin(f), mf2[f])
1340 else:
1347 else:
1341 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1348 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1342 mode = ((a^b) | (a^c)) ^ a
1349 mode = ((a^b) | (a^c)) ^ a
1343 if mode != b:
1350 if mode != b:
1344 self.ui.debug(" updating permissions for %s\n" % f)
1351 self.ui.debug(" updating permissions for %s\n" % f)
1345 util.set_exec(self.wjoin(f), mode)
1352 util.set_exec(self.wjoin(f), mode)
1346 mark[f] = 1
1353 mark[f] = 1
1347 del m2[f]
1354 del m2[f]
1348 elif f in ma:
1355 elif f in ma:
1349 if n != ma[f]:
1356 if n != ma[f]:
1350 r = "d"
1357 r = "d"
1351 if not force and (linear_path or allow):
1358 if not force and (linear_path or allow):
1352 r = self.ui.prompt(
1359 r = self.ui.prompt(
1353 (" local changed %s which remote deleted\n" % f) +
1360 (" local changed %s which remote deleted\n" % f) +
1354 "(k)eep or (d)elete?", "[kd]", "k")
1361 "(k)eep or (d)elete?", "[kd]", "k")
1355 if r == "d":
1362 if r == "d":
1356 remove.append(f)
1363 remove.append(f)
1357 else:
1364 else:
1358 self.ui.debug("other deleted %s\n" % f)
1365 self.ui.debug("other deleted %s\n" % f)
1359 remove.append(f) # other deleted it
1366 remove.append(f) # other deleted it
1360 else:
1367 else:
1361 if n == m1.get(f, nullid): # same as parent
1368 if n == m1.get(f, nullid): # same as parent
1362 if p2 == pa: # going backwards?
1369 if p2 == pa: # going backwards?
1363 self.ui.debug("remote deleted %s\n" % f)
1370 self.ui.debug("remote deleted %s\n" % f)
1364 remove.append(f)
1371 remove.append(f)
1365 else:
1372 else:
1366 self.ui.debug("local created %s, keeping\n" % f)
1373 self.ui.debug("local created %s, keeping\n" % f)
1367 else:
1374 else:
1368 self.ui.debug("working dir created %s, keeping\n" % f)
1375 self.ui.debug("working dir created %s, keeping\n" % f)
1369
1376
1370 for f, n in m2.iteritems():
1377 for f, n in m2.iteritems():
1371 if choose and not choose(f): continue
1378 if choose and not choose(f): continue
1372 if f[0] == "/": continue
1379 if f[0] == "/": continue
1373 if f in ma and n != ma[f]:
1380 if f in ma and n != ma[f]:
1374 r = "k"
1381 r = "k"
1375 if not force and (linear_path or allow):
1382 if not force and (linear_path or allow):
1376 r = self.ui.prompt(
1383 r = self.ui.prompt(
1377 ("remote changed %s which local deleted\n" % f) +
1384 ("remote changed %s which local deleted\n" % f) +
1378 "(k)eep or (d)elete?", "[kd]", "k")
1385 "(k)eep or (d)elete?", "[kd]", "k")
1379 if r == "k": get[f] = n
1386 if r == "k": get[f] = n
1380 elif f not in ma:
1387 elif f not in ma:
1381 self.ui.debug("remote created %s\n" % f)
1388 self.ui.debug("remote created %s\n" % f)
1382 get[f] = n
1389 get[f] = n
1383 else:
1390 else:
1384 self.ui.debug("local deleted %s\n" % f)
1391 self.ui.debug("local deleted %s\n" % f)
1385 if force:
1392 if force:
1386 get[f] = n
1393 get[f] = n
1387
1394
1388 del mw, m1, m2, ma
1395 del mw, m1, m2, ma
1389
1396
1390 if force:
1397 if force:
1391 for f in merge:
1398 for f in merge:
1392 get[f] = merge[f][1]
1399 get[f] = merge[f][1]
1393 merge = {}
1400 merge = {}
1394
1401
1395 if linear_path:
1402 if linear_path:
1396 # we don't need to do any magic, just jump to the new rev
1403 # we don't need to do any magic, just jump to the new rev
1397 mode = 'n'
1404 mode = 'n'
1398 p1, p2 = p2, nullid
1405 p1, p2 = p2, nullid
1399 else:
1406 else:
1400 if not allow:
1407 if not allow:
1401 self.ui.status("this update spans a branch" +
1408 self.ui.status("this update spans a branch" +
1402 " affecting the following files:\n")
1409 " affecting the following files:\n")
1403 fl = merge.keys() + get.keys()
1410 fl = merge.keys() + get.keys()
1404 fl.sort()
1411 fl.sort()
1405 for f in fl:
1412 for f in fl:
1406 cf = ""
1413 cf = ""
1407 if f in merge: cf = " (resolve)"
1414 if f in merge: cf = " (resolve)"
1408 self.ui.status(" %s%s\n" % (f, cf))
1415 self.ui.status(" %s%s\n" % (f, cf))
1409 self.ui.warn("aborting update spanning branches!\n")
1416 self.ui.warn("aborting update spanning branches!\n")
1410 self.ui.status("(use update -m to perform a branch merge)\n")
1417 self.ui.status("(use update -m to perform a branch merge)\n")
1411 return 1
1418 return 1
1412 # we have to remember what files we needed to get/change
1419 # we have to remember what files we needed to get/change
1413 # because any file that's different from either one of its
1420 # because any file that's different from either one of its
1414 # parents must be in the changeset
1421 # parents must be in the changeset
1415 mode = 'm'
1422 mode = 'm'
1416 if moddirstate:
1423 if moddirstate:
1417 self.dirstate.update(mark.keys(), "m")
1424 self.dirstate.update(mark.keys(), "m")
1418
1425
1419 if moddirstate:
1426 if moddirstate:
1420 self.dirstate.setparents(p1, p2)
1427 self.dirstate.setparents(p1, p2)
1421
1428
1422 # get the files we don't need to change
1429 # get the files we don't need to change
1423 files = get.keys()
1430 files = get.keys()
1424 files.sort()
1431 files.sort()
1425 for f in files:
1432 for f in files:
1426 if f[0] == "/": continue
1433 if f[0] == "/": continue
1427 self.ui.note("getting %s\n" % f)
1434 self.ui.note("getting %s\n" % f)
1428 t = self.file(f).read(get[f])
1435 t = self.file(f).read(get[f])
1429 try:
1436 try:
1430 self.wfile(f, "w").write(t)
1437 self.wfile(f, "w").write(t)
1431 except IOError:
1438 except IOError:
1432 os.makedirs(os.path.dirname(self.wjoin(f)))
1439 os.makedirs(os.path.dirname(self.wjoin(f)))
1433 self.wfile(f, "w").write(t)
1440 self.wfile(f, "w").write(t)
1434 util.set_exec(self.wjoin(f), mf2[f])
1441 util.set_exec(self.wjoin(f), mf2[f])
1435 if moddirstate:
1442 if moddirstate:
1436 self.dirstate.update([f], mode)
1443 self.dirstate.update([f], mode)
1437
1444
1438 # merge the tricky bits
1445 # merge the tricky bits
1439 files = merge.keys()
1446 files = merge.keys()
1440 files.sort()
1447 files.sort()
1441 for f in files:
1448 for f in files:
1442 self.ui.status("merging %s\n" % f)
1449 self.ui.status("merging %s\n" % f)
1443 m, o, flag = merge[f]
1450 m, o, flag = merge[f]
1444 self.merge3(f, m, o)
1451 self.merge3(f, m, o)
1445 util.set_exec(self.wjoin(f), flag)
1452 util.set_exec(self.wjoin(f), flag)
1446 if moddirstate:
1453 if moddirstate:
1447 self.dirstate.update([f], 'm')
1454 self.dirstate.update([f], 'm')
1448
1455
1449 for f in remove:
1456 for f in remove:
1450 self.ui.note("removing %s\n" % f)
1457 self.ui.note("removing %s\n" % f)
1451 os.unlink(self.wjoin(f))
1458 os.unlink(self.wjoin(f))
1452 # try removing directories that might now be empty
1459 # try removing directories that might now be empty
1453 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1460 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1454 except: pass
1461 except: pass
1455 if moddirstate:
1462 if moddirstate:
1456 if mode == 'n':
1463 if mode == 'n':
1457 self.dirstate.forget(remove)
1464 self.dirstate.forget(remove)
1458 else:
1465 else:
1459 self.dirstate.update(remove, 'r')
1466 self.dirstate.update(remove, 'r')
1460
1467
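# Standalone sketch of the executable-flag merge rule used twice in update()
# above, mode = ((a ^ b) | (a ^ c)) ^ a, with a the ancestor's exec bit, b the
# working copy's and c the remote's: whoever changed the bit wins, otherwise
# the ancestor's value is kept.

def merge_flag(a, b, c):
    return ((a ^ b) | (a ^ c)) ^ a

if __name__ == "__main__":
    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                print("ancestor=%d local=%d remote=%d -> %d"
                      % (a, b, c, merge_flag(a, b, c)))
    # e.g. ancestor=0 local=1 remote=0 -> 1: we set +x, the merge keeps it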
1461 def merge3(self, fn, my, other):
1468 def merge3(self, fn, my, other):
1462 """perform a 3-way merge in the working directory"""
1469 """perform a 3-way merge in the working directory"""
1463
1470
1464 def temp(prefix, node):
1471 def temp(prefix, node):
1465 pre = "%s~%s." % (os.path.basename(fn), prefix)
1472 pre = "%s~%s." % (os.path.basename(fn), prefix)
1466 (fd, name) = tempfile.mkstemp("", pre)
1473 (fd, name) = tempfile.mkstemp("", pre)
1467 f = os.fdopen(fd, "wb")
1474 f = os.fdopen(fd, "wb")
1468 f.write(fl.revision(node))
1475 f.write(fl.revision(node))
1469 f.close()
1476 f.close()
1470 return name
1477 return name
1471
1478
1472 fl = self.file(fn)
1479 fl = self.file(fn)
1473 base = fl.ancestor(my, other)
1480 base = fl.ancestor(my, other)
1474 a = self.wjoin(fn)
1481 a = self.wjoin(fn)
1475 b = temp("base", base)
1482 b = temp("base", base)
1476 c = temp("other", other)
1483 c = temp("other", other)
1477
1484
1478 self.ui.note("resolving %s\n" % fn)
1485 self.ui.note("resolving %s\n" % fn)
1479 self.ui.debug("file %s: other %s ancestor %s\n" %
1486 self.ui.debug("file %s: other %s ancestor %s\n" %
1480 (fn, short(other), short(base)))
1487 (fn, short(other), short(base)))
1481
1488
1482 cmd = self.ui.config("ui", "merge") or \
1489 cmd = self.ui.config("ui", "merge") or \
1483 os.environ.get("HGMERGE", "hgmerge")
1490 os.environ.get("HGMERGE", "hgmerge")
1484 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1491 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1485 if r:
1492 if r:
1486 self.ui.warn("merging %s failed!\n" % fn)
1493 self.ui.warn("merging %s failed!\n" % fn)
1487
1494
1488 os.unlink(b)
1495 os.unlink(b)
1489 os.unlink(c)
1496 os.unlink(c)
1490
1497
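# Standalone sketch of how merge3() above picks its external merge tool: the
# [ui] merge config setting wins, then the HGMERGE environment variable, then
# a program literally named "hgmerge"; it is run as
#   <tool> <local file> <base tmp> <other tmp>
# and a non-zero exit status marks the merge as failed.  File names below are
# illustrative only.

import os

def pick_merge_tool(config_value=None):
    return config_value or os.environ.get("HGMERGE", "hgmerge")

if __name__ == "__main__":
    tool = pick_merge_tool()
    print("%s %s %s %s" % (tool, "file.txt",
                           "file.txt~base.XXXX", "file.txt~other.XXXX"))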
1491 def verify(self):
1498 def verify(self):
1492 filelinkrevs = {}
1499 filelinkrevs = {}
1493 filenodes = {}
1500 filenodes = {}
1494 changesets = revisions = files = 0
1501 changesets = revisions = files = 0
1495 errors = 0
1502 errors = 0
1496
1503
1497 seen = {}
1504 seen = {}
1498 self.ui.status("checking changesets\n")
1505 self.ui.status("checking changesets\n")
1499 for i in range(self.changelog.count()):
1506 for i in range(self.changelog.count()):
1500 changesets += 1
1507 changesets += 1
1501 n = self.changelog.node(i)
1508 n = self.changelog.node(i)
1502 if n in seen:
1509 if n in seen:
1503 self.ui.warn("duplicate changeset at revision %d\n" % i)
1510 self.ui.warn("duplicate changeset at revision %d\n" % i)
1504 errors += 1
1511 errors += 1
1505 seen[n] = 1
1512 seen[n] = 1
1506
1513
1507 for p in self.changelog.parents(n):
1514 for p in self.changelog.parents(n):
1508 if p not in self.changelog.nodemap:
1515 if p not in self.changelog.nodemap:
1509 self.ui.warn("changeset %s has unknown parent %s\n" %
1516 self.ui.warn("changeset %s has unknown parent %s\n" %
1510 (short(n), short(p)))
1517 (short(n), short(p)))
1511 errors += 1
1518 errors += 1
1512 try:
1519 try:
1513 changes = self.changelog.read(n)
1520 changes = self.changelog.read(n)
1514 except Exception, inst:
1521 except Exception, inst:
1515 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1522 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1516 errors += 1
1523 errors += 1
1517
1524
1518 for f in changes[3]:
1525 for f in changes[3]:
1519 filelinkrevs.setdefault(f, []).append(i)
1526 filelinkrevs.setdefault(f, []).append(i)
1520
1527
1521 seen = {}
1528 seen = {}
1522 self.ui.status("checking manifests\n")
1529 self.ui.status("checking manifests\n")
1523 for i in range(self.manifest.count()):
1530 for i in range(self.manifest.count()):
1524 n = self.manifest.node(i)
1531 n = self.manifest.node(i)
1525 if n in seen:
1532 if n in seen:
1526 self.ui.warn("duplicate manifest at revision %d\n" % i)
1533 self.ui.warn("duplicate manifest at revision %d\n" % i)
1527 errors += 1
1534 errors += 1
1528 seen[n] = 1
1535 seen[n] = 1
1529
1536
1530 for p in self.manifest.parents(n):
1537 for p in self.manifest.parents(n):
1531 if p not in self.manifest.nodemap:
1538 if p not in self.manifest.nodemap:
1532 self.ui.warn("manifest %s has unknown parent %s\n" %
1539 self.ui.warn("manifest %s has unknown parent %s\n" %
1533 (short(n), short(p)))
1540 (short(n), short(p)))
1534 errors += 1
1541 errors += 1
1535
1542
1536 try:
1543 try:
1537 delta = mdiff.patchtext(self.manifest.delta(n))
1544 delta = mdiff.patchtext(self.manifest.delta(n))
1538 except KeyboardInterrupt:
1545 except KeyboardInterrupt:
1539 self.ui.warn("aborted")
1546 self.ui.warn("aborted")
1540 sys.exit(0)
1547 sys.exit(0)
1541 except Exception, inst:
1548 except Exception, inst:
1542 self.ui.warn("unpacking manifest %s: %s\n"
1549 self.ui.warn("unpacking manifest %s: %s\n"
1543 % (short(n), inst))
1550 % (short(n), inst))
1544 errors += 1
1551 errors += 1
1545
1552
1546 ff = [ l.split('\0') for l in delta.splitlines() ]
1553 ff = [ l.split('\0') for l in delta.splitlines() ]
1547 for f, fn in ff:
1554 for f, fn in ff:
1548 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1555 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1549
1556
1550 self.ui.status("crosschecking files in changesets and manifests\n")
1557 self.ui.status("crosschecking files in changesets and manifests\n")
1551 for f in filenodes:
1558 for f in filenodes:
1552 if f not in filelinkrevs:
1559 if f not in filelinkrevs:
1553 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1560 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1554 errors += 1
1561 errors += 1
1555
1562
1556 for f in filelinkrevs:
1563 for f in filelinkrevs:
1557 if f not in filenodes:
1564 if f not in filenodes:
1558 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1565 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1559 errors += 1
1566 errors += 1
1560
1567
1561 self.ui.status("checking files\n")
1568 self.ui.status("checking files\n")
1562 ff = filenodes.keys()
1569 ff = filenodes.keys()
1563 ff.sort()
1570 ff.sort()
1564 for f in ff:
1571 for f in ff:
1565 if f == "/dev/null": continue
1572 if f == "/dev/null": continue
1566 files += 1
1573 files += 1
1567 fl = self.file(f)
1574 fl = self.file(f)
1568 nodes = { nullid: 1 }
1575 nodes = { nullid: 1 }
1569 seen = {}
1576 seen = {}
1570 for i in range(fl.count()):
1577 for i in range(fl.count()):
1571 revisions += 1
1578 revisions += 1
1572 n = fl.node(i)
1579 n = fl.node(i)
1573
1580
1574 if n in seen:
1581 if n in seen:
1575 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1582 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1576 errors += 1
1583 errors += 1
1577
1584
1578 if n not in filenodes[f]:
1585 if n not in filenodes[f]:
1579 self.ui.warn("%s: %d:%s not in manifests\n"
1586 self.ui.warn("%s: %d:%s not in manifests\n"
1580 % (f, i, short(n)))
1587 % (f, i, short(n)))
1581 errors += 1
1588 errors += 1
1582 else:
1589 else:
1583 del filenodes[f][n]
1590 del filenodes[f][n]
1584
1591
1585 flr = fl.linkrev(n)
1592 flr = fl.linkrev(n)
1586 if flr not in filelinkrevs[f]:
1593 if flr not in filelinkrevs[f]:
1587 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1594 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1588 % (f, short(n), fl.linkrev(n)))
1595 % (f, short(n), fl.linkrev(n)))
1589 errors += 1
1596 errors += 1
1590 else:
1597 else:
1591 filelinkrevs[f].remove(flr)
1598 filelinkrevs[f].remove(flr)
1592
1599
1593 # verify contents
1600 # verify contents
1594 try:
1601 try:
1595 t = fl.read(n)
1602 t = fl.read(n)
1596 except Exception, inst:
1603 except Exception, inst:
1597 self.ui.warn("unpacking file %s %s: %s\n"
1604 self.ui.warn("unpacking file %s %s: %s\n"
1598 % (f, short(n), inst))
1605 % (f, short(n), inst))
1599 errors += 1
1606 errors += 1
1600
1607
1601 # verify parents
1608 # verify parents
1602 (p1, p2) = fl.parents(n)
1609 (p1, p2) = fl.parents(n)
1603 if p1 not in nodes:
1610 if p1 not in nodes:
1604 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1611 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1605 (f, short(n), short(p1)))
1612 (f, short(n), short(p1)))
1606 errors += 1
1613 errors += 1
1607 if p2 not in nodes:
1614 if p2 not in nodes:
1608 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1615 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1609 (f, short(n), short(p2)))
1616 (f, short(n), short(p2)))
1610 errors += 1
1617 errors += 1
1611 nodes[n] = 1
1618 nodes[n] = 1
1612
1619
1613 # cross-check
1620 # cross-check
1614 for node in filenodes[f]:
1621 for node in filenodes[f]:
1615 self.ui.warn("node %s in manifests not in %s\n"
1622 self.ui.warn("node %s in manifests not in %s\n"
1616 % (hex(node), f))
1623 % (hex(node), f))
1617 errors += 1
1624 errors += 1
1618
1625
1619 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1626 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1620 (files, changesets, revisions))
1627 (files, changesets, revisions))
1621
1628
1622 if errors:
1629 if errors:
1623 self.ui.warn("%d integrity errors encountered!\n" % errors)
1630 self.ui.warn("%d integrity errors encountered!\n" % errors)
1624 return 1
1631 return 1
1625
1632
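# Standalone sketch of the two indexes verify() cross-checks above:
# filelinkrevs maps each file to the changeset revisions claiming to touch it,
# filenodes maps each file to the file-revision nodes listed in manifests; a
# file present in one map but not the other is an integrity error.

def crosscheck(filelinkrevs, filenodes):
    errors = []
    for f in filenodes:
        if f not in filelinkrevs:
            errors.append("%s in manifest but not in changesets" % f)
    for f in filelinkrevs:
        if f not in filenodes:
            errors.append("%s in changeset but not in manifest" % f)
    return errors

if __name__ == "__main__":
    print(crosscheck({"a": [0, 2]}, {"a": {"n1": 1}, "b": {"n2": 1}}))
    # ['b in manifest but not in changesets']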
1626 class httprepository:
1633 class httprepository:
1627 def __init__(self, ui, path):
1634 def __init__(self, ui, path):
1628 self.url = path
1635 self.url = path
1629 self.ui = ui
1636 self.ui = ui
1630 no_list = [ "localhost", "127.0.0.1" ]
1637 no_list = [ "localhost", "127.0.0.1" ]
1631 host = ui.config("http_proxy", "host")
1638 host = ui.config("http_proxy", "host")
1632 if host is None:
1639 if host is None:
1633 host = os.environ.get("http_proxy")
1640 host = os.environ.get("http_proxy")
1634 if host and host.startswith('http://'):
1641 if host and host.startswith('http://'):
1635 host = host[7:]
1642 host = host[7:]
1636 user = ui.config("http_proxy", "user")
1643 user = ui.config("http_proxy", "user")
1637 passwd = ui.config("http_proxy", "passwd")
1644 passwd = ui.config("http_proxy", "passwd")
1638 no = ui.config("http_proxy", "no")
1645 no = ui.config("http_proxy", "no")
1639 if no is None:
1646 if no is None:
1640 no = os.environ.get("no_proxy")
1647 no = os.environ.get("no_proxy")
1641 if no:
1648 if no:
1642 no_list = no_list + no.split(",")
1649 no_list = no_list + no.split(",")
1643
1650
1644 no_proxy = 0
1651 no_proxy = 0
1645 for h in no_list:
1652 for h in no_list:
1646 if (path.startswith("http://" + h + "/") or
1653 if (path.startswith("http://" + h + "/") or
1647 path.startswith("http://" + h + ":") or
1654 path.startswith("http://" + h + ":") or
1648 path == "http://" + h):
1655 path == "http://" + h):
1649 no_proxy = 1
1656 no_proxy = 1
1650
1657
1651 # Note: urllib2 takes proxy values from the environment and those will
1658 # Note: urllib2 takes proxy values from the environment and those will
1652 # take precedence
1659 # take precedence
1653 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1660 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1654 if os.environ.has_key(env):
1661 if os.environ.has_key(env):
1655 del os.environ[env]
1662 del os.environ[env]
1656
1663
1657 proxy_handler = urllib2.BaseHandler()
1664 proxy_handler = urllib2.BaseHandler()
1658 if host and not no_proxy:
1665 if host and not no_proxy:
1659 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1666 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1660
1667
1661 authinfo = None
1668 authinfo = None
1662 if user and passwd:
1669 if user and passwd:
1663 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1670 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1664 passmgr.add_password(None, host, user, passwd)
1671 passmgr.add_password(None, host, user, passwd)
1665 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1672 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1666
1673
1667 opener = urllib2.build_opener(proxy_handler, authinfo)
1674 opener = urllib2.build_opener(proxy_handler, authinfo)
1668 urllib2.install_opener(opener)
1675 urllib2.install_opener(opener)
1669
1676
1670 def dev(self):
1677 def dev(self):
1671 return -1
1678 return -1
1672
1679
1673 def do_cmd(self, cmd, **args):
1680 def do_cmd(self, cmd, **args):
1674 self.ui.debug("sending %s command\n" % cmd)
1681 self.ui.debug("sending %s command\n" % cmd)
1675 q = {"cmd": cmd}
1682 q = {"cmd": cmd}
1676 q.update(args)
1683 q.update(args)
1677 qs = urllib.urlencode(q)
1684 qs = urllib.urlencode(q)
1678 cu = "%s?%s" % (self.url, qs)
1685 cu = "%s?%s" % (self.url, qs)
1679 return urllib2.urlopen(cu)
1686 return urllib2.urlopen(cu)
1680
1687
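# Standalone sketch of the HTTP command protocol do_cmd() speaks above: every
# command is a GET on the repository URL with "cmd" and its arguments as query
# parameters, and the reply body is parsed by the caller (heads/branches/
# between below).  The URL and node values here are made up for illustration.

try:                                        # Python 2 / 3 import shim, sketch only
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode

def command_url(base, cmd, **args):
    q = {"cmd": cmd}
    q.update(args)
    return "%s?%s" % (base, urlencode(q))

if __name__ == "__main__":
    print(command_url("http://example.com/hg/project", "branches",
                      nodes="00aa11bb 22cc33dd"))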
1681 def heads(self):
1688 def heads(self):
1682 d = self.do_cmd("heads").read()
1689 d = self.do_cmd("heads").read()
1683 try:
1690 try:
1684 return map(bin, d[:-1].split(" "))
1691 return map(bin, d[:-1].split(" "))
1685 except:
1692 except:
1686 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1693 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1687 raise
1694 raise
1688
1695
1689 def branches(self, nodes):
1696 def branches(self, nodes):
1690 n = " ".join(map(hex, nodes))
1697 n = " ".join(map(hex, nodes))
1691 d = self.do_cmd("branches", nodes=n).read()
1698 d = self.do_cmd("branches", nodes=n).read()
1692 try:
1699 try:
1693 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1700 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1694 return br
1701 return br
1695 except:
1702 except:
1696 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1703 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1697 raise
1704 raise
1698
1705
1699 def between(self, pairs):
1706 def between(self, pairs):
1700 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1707 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1701 d = self.do_cmd("between", pairs=n).read()
1708 d = self.do_cmd("between", pairs=n).read()
1702 try:
1709 try:
1703 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1710 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1704 return p
1711 return p
1705 except:
1712 except:
1706 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1713 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1707 raise
1714 raise
1708
1715
1709 def changegroup(self, nodes):
1716 def changegroup(self, nodes):
1710 n = " ".join(map(hex, nodes))
1717 n = " ".join(map(hex, nodes))
1711 f = self.do_cmd("changegroup", roots=n)
1718 f = self.do_cmd("changegroup", roots=n)
1712 bytes = 0
1719 bytes = 0
1713
1720
1714 class zread:
1721 class zread:
1715 def __init__(self, f):
1722 def __init__(self, f):
1716 self.zd = zlib.decompressobj()
1723 self.zd = zlib.decompressobj()
1717 self.f = f
1724 self.f = f
1718 self.buf = ""
1725 self.buf = ""
1719 def read(self, l):
1726 def read(self, l):
1720 while l > len(self.buf):
1727 while l > len(self.buf):
1721 r = f.read(4096)
1728 r = f.read(4096)
1722 if r:
1729 if r:
1723 self.buf += self.zd.decompress(r)
1730 self.buf += self.zd.decompress(r)
1724 else:
1731 else:
1725 self.buf += self.zd.flush()
1732 self.buf += self.zd.flush()
1726 break
1733 break
1727 d, self.buf = self.buf[:l], self.buf[l:]
1734 d, self.buf = self.buf[:l], self.buf[l:]
1728 return d
1735 return d
1729
1736
1730 return zread(f)
1737 return zread(f)
1731
1738
1732 class remotelock:
1739 class remotelock:
1733 def __init__(self, repo):
1740 def __init__(self, repo):
1734 self.repo = repo
1741 self.repo = repo
1735 def release(self):
1742 def release(self):
1736 self.repo.unlock()
1743 self.repo.unlock()
1737 self.repo = None
1744 self.repo = None
1738 def __del__(self):
1745 def __del__(self):
1739 if self.repo:
1746 if self.repo:
1740 self.release()
1747 self.release()
1741
1748
1742 class sshrepository:
1749 class sshrepository:
1743 def __init__(self, ui, path):
1750 def __init__(self, ui, path):
1744 self.url = path
1751 self.url = path
1745 self.ui = ui
1752 self.ui = ui
1746
1753
1747 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1754 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1748 if not m:
1755 if not m:
1749 raise RepoError("couldn't parse destination %s\n" % path)
1756 raise RepoError("couldn't parse destination %s\n" % path)
1750
1757
1751 self.user = m.group(2)
1758 self.user = m.group(2)
1752 self.host = m.group(3)
1759 self.host = m.group(3)
1753 self.port = m.group(5)
1760 self.port = m.group(5)
1754 self.path = m.group(7)
1761 self.path = m.group(7)
1755
1762
1756 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1763 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1757 args = self.port and ("%s -p %s") % (args, self.port) or args
1764 args = self.port and ("%s -p %s") % (args, self.port) or args
1758 path = self.path or ""
1765 path = self.path or ""
1759
1766
1760 cmd = "ssh %s 'hg -R %s serve --stdio'"
1767 cmd = "ssh %s 'hg -R %s serve --stdio'"
1761 cmd = cmd % (args, path)
1768 cmd = cmd % (args, path)
1762
1769
1763 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1770 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1764
1771
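# Standalone sketch of how the ssh:// URL above becomes a command line: user,
# host, port and path come out of the regular expression, and the remote end
# runs "hg -R <path> serve --stdio" over the ssh channel.  The URL is made up.

import re

def ssh_command(url):
    m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', url)
    user, host, port, path = m.group(2), m.group(3), m.group(5), m.group(7) or ""
    args = "%s@%s" % (user, host) if user else host
    if port:
        args = "%s -p %s" % (args, port)
    return "ssh %s 'hg -R %s serve --stdio'" % (args, path)

if __name__ == "__main__":
    print(ssh_command("ssh://hg@example.com:2222/repos/project"))
    # ssh hg@example.com -p 2222 'hg -R repos/project serve --stdio'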
1765 def readerr(self):
1772 def readerr(self):
1766 while 1:
1773 while 1:
1767 r,w,x = select.select([self.pipee], [], [], 0)
1774 r,w,x = select.select([self.pipee], [], [], 0)
1768 if not r: break
1775 if not r: break
1769 l = self.pipee.readline()
1776 l = self.pipee.readline()
1770 if not l: break
1777 if not l: break
1771 self.ui.status("remote: ", l)
1778 self.ui.status("remote: ", l)
1772
1779
1773 def __del__(self):
1780 def __del__(self):
1774 self.pipeo.close()
1781 self.pipeo.close()
1775 self.pipei.close()
1782 self.pipei.close()
1776 for l in self.pipee:
1783 for l in self.pipee:
1777 self.ui.status("remote: ", l)
1784 self.ui.status("remote: ", l)
1778 self.pipee.close()
1785 self.pipee.close()
1779
1786
1780 def dev(self):
1787 def dev(self):
1781 return -1
1788 return -1
1782
1789
1783 def do_cmd(self, cmd, **args):
1790 def do_cmd(self, cmd, **args):
1784 self.ui.debug("sending %s command\n" % cmd)
1791 self.ui.debug("sending %s command\n" % cmd)
1785 self.pipeo.write("%s\n" % cmd)
1792 self.pipeo.write("%s\n" % cmd)
1786 for k, v in args.items():
1793 for k, v in args.items():
1787 self.pipeo.write("%s %d\n" % (k, len(v)))
1794 self.pipeo.write("%s %d\n" % (k, len(v)))
1788 self.pipeo.write(v)
1795 self.pipeo.write(v)
1789 self.pipeo.flush()
1796 self.pipeo.flush()
1790
1797
1791 return self.pipei
1798 return self.pipei
1792
1799
1793 def call(self, cmd, **args):
1800 def call(self, cmd, **args):
1794 r = self.do_cmd(cmd, **args)
1801 r = self.do_cmd(cmd, **args)
1795 l = r.readline()
1802 l = r.readline()
1796 self.readerr()
1803 self.readerr()
1797 try:
1804 try:
1798 l = int(l)
1805 l = int(l)
1799 except:
1806 except:
1800 raise RepoError("unexpected response '%s'" % l)
1807 raise RepoError("unexpected response '%s'" % l)
1801 return r.read(l)
1808 return r.read(l)
1802
1809
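# Standalone sketch of the stdio protocol do_cmd()/call() speak over the ssh
# pipes above: the client sends the command name on its own line, then one
# "key length" line per argument followed by that many bytes of value; the
# server replies with a decimal length line and exactly that many bytes.

def encode_request(cmd, **args):
    out = "%s\n" % cmd
    for k, v in sorted(args.items()):
        out += "%s %d\n%s" % (k, len(v), v)
    return out

def decode_response(data):
    length, _, rest = data.partition("\n")
    return rest[:int(length)]

if __name__ == "__main__":
    print(repr(encode_request("branches", nodes="00aa11bb")))
    print(decode_response("4\nabcdtrailing bytes ignored"))   # -> 'abcd'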
1803 def lock(self):
1810 def lock(self):
1804 self.call("lock")
1811 self.call("lock")
1805 return remotelock(self)
1812 return remotelock(self)
1806
1813
1807 def unlock(self):
1814 def unlock(self):
1808 self.call("unlock")
1815 self.call("unlock")
1809
1816
1810 def heads(self):
1817 def heads(self):
1811 d = self.call("heads")
1818 d = self.call("heads")
1812 try:
1819 try:
1813 return map(bin, d[:-1].split(" "))
1820 return map(bin, d[:-1].split(" "))
1814 except:
1821 except:
1815 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1822 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1816
1823
1817 def branches(self, nodes):
1824 def branches(self, nodes):
1818 n = " ".join(map(hex, nodes))
1825 n = " ".join(map(hex, nodes))
1819 d = self.call("branches", nodes=n)
1826 d = self.call("branches", nodes=n)
1820 try:
1827 try:
1821 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1828 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1822 return br
1829 return br
1823 except:
1830 except:
1824 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1831 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1825
1832
1826 def between(self, pairs):
1833 def between(self, pairs):
1827 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1834 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1828 d = self.call("between", pairs=n)
1835 d = self.call("between", pairs=n)
1829 try:
1836 try:
1830 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1837 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1831 return p
1838 return p
1832 except:
1839 except:
1833 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1840 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1834
1841
1835 def changegroup(self, nodes):
1842 def changegroup(self, nodes):
1836 n = " ".join(map(hex, nodes))
1843 n = " ".join(map(hex, nodes))
1837 f = self.do_cmd("changegroup", roots=n)
1844 f = self.do_cmd("changegroup", roots=n)
1838 return self.pipei
1845 return self.pipei
1839
1846
1840 def addchangegroup(self, cg):
1847 def addchangegroup(self, cg):
1841 d = self.call("addchangegroup")
1848 d = self.call("addchangegroup")
1842 if d:
1849 if d:
1843 raise RepoError("push refused: %s" % d)
1850 raise RepoError("push refused: %s" % d)
1844
1851
1845 while 1:
1852 while 1:
1846 d = cg.read(4096)
1853 d = cg.read(4096)
1847 if not d: break
1854 if not d: break
1848 self.pipeo.write(d)
1855 self.pipeo.write(d)
1849 self.readerr()
1856 self.readerr()
1850
1857
1851 self.pipeo.flush()
1858 self.pipeo.flush()
1852
1859
1853 self.readerr()
1860 self.readerr()
1854 l = int(self.pipei.readline())
1861 l = int(self.pipei.readline())
1855 return self.pipei.read(l) != ""
1862 return self.pipei.read(l) != ""
1856
1863
1857 def repository(ui, path=None, create=0):
1864 def repository(ui, path=None, create=0):
1858 if path:
1865 if path:
1859 if path.startswith("http://"):
1866 if path.startswith("http://"):
1860 return httprepository(ui, path)
1867 return httprepository(ui, path)
1861 if path.startswith("hg://"):
1868 if path.startswith("hg://"):
1862 return httprepository(ui, path.replace("hg://", "http://"))
1869 return httprepository(ui, path.replace("hg://", "http://"))
1863 if path.startswith("old-http://"):
1870 if path.startswith("old-http://"):
1864 return localrepository(ui, path.replace("old-http://", "http://"))
1871 return localrepository(ui, path.replace("old-http://", "http://"))
1865 if path.startswith("ssh://"):
1872 if path.startswith("ssh://"):
1866 return sshrepository(ui, path)
1873 return sshrepository(ui, path)
1867
1874
1868 return localrepository(ui, path, create)
1875 return localrepository(ui, path, create)
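# Hedged usage sketch of the dispatch above (paths are illustrative only):
# "http://" and "hg://" URLs get an httprepository, "old-http://" a
# localrepository reading over HTTP range requests, "ssh://" an sshrepository,
# and anything else (or no path) a local repository, created when create=1.
#
#   repo = repository(u, "http://example.com/hg/project")   # httprepository
#   repo = repository(u, "ssh://hg@example.com/project")    # sshrepository
#   repo = repository(u, "/home/me/project")                # localrepository
#   repo = repository(u, "/tmp/new", create=1)              # make a new repo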