Show number of new heads when doing a pull
mpm@selenic.com
r1040:35e883d1 default
# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff urlparse")
demandload(globals(), "bisect errno select stat")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", self.encodedir(path + ".i")),
                        os.path.join("data", self.encodedir(path + ".d")))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

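    # A filelog revision that starts with the marker "\1\n" carries
    # metadata: "key: value" lines up to a second "\1\n", as written by
    # add() below.  read() strips the metadata and returns only the file
    # data; readmeta() parses just the metadata block.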
    def read(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

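    # annotate() maps each line of the file at `node` to the changelog
    # revision that introduced it: every ancestor is decorated with its
    # linkrev, then annotations are carried forward across the unchanged
    # blocks that bdiff reports between parent and child texts.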
    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

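    # Each manifest line has the form "<filename>\0<40-char hex filenode>"
    # plus an optional trailing "x" flag (executable) and a newline;
    # read() parses these lines and readflags() exposes the flag map.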
    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        # directly generate the mdiff delta from the data collected during
        # the bisect loop below
        def gendelta(delta):
            i = 0
            result = []
            while i < len(delta):
                start = delta[i][2]
                end = delta[i][3]
                l = delta[i][4]
                if l == None:
                    l = ""
                while i < len(delta) - 1 and start <= delta[i+1][2] \
                          and end >= delta[i+1][2]:
                    if delta[i+1][3] > end:
                        end = delta[i+1][3]
                    if delta[i+1][4]:
                        l += delta[i+1][4]
                    i += 1
                result.append(struct.pack(">lll", start, end, len(l)) + l)
                i += 1
            return result

        # apply the changes collected during the bisect loop to our addlist
        def addlistdelta(addlist, delta):
            # apply the deltas to the addlist. start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(delta)
            while i > 0:
                i -= 1
                start = delta[i][0]
                end = delta[i][1]
                if delta[i][4]:
                    addlist[start:end] = [delta[i][4]]
                else:
                    del addlist[start:end]
            return addlist

        # calculate the byte offset of the start of each line in the
        # manifest
        def calcoffsets(addlist):
            offsets = [0] * (len(addlist) + 1)
            offset = 0
            i = 0
            while i < len(addlist):
                offsets[i] = offset
                offset += len(addlist[i])
                i += 1
            offsets[i] = offset
            return offsets

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            self.addlist = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                            for f in files]
            cachedelta = None
        else:
            addlist = self.listcache[1]

            # find the starting offset for each line in the add list
            offsets = calcoffsets(addlist)

            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            bs = 0

            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                bs = bisect.bisect(addlist, f, bs)
                if bs < len(addlist):
                    fn = addlist[bs][:addlist[bs].index('\0')]
                else:
                    fn = None
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = None
                start = bs
                if fn != f:
                    # item not found, insert a new one
                    end = bs
                    if w[1] == 1:
                        sys.stderr.write("failed to remove %s from manifest\n"
                                         % f)
                        sys.exit(1)
                else:
                    # item is found, replace/delete the existing line
                    end = bs + 1
                delta.append([start, end, offsets[start], offsets[end], l])

            self.addlist = addlistdelta(addlist, delta)
            if self.mapcache[0] == self.tip():
                cachedelta = "".join(gendelta(delta))
            else:
                cachedelta = None

        text = "".join(self.addlist)
        if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
            sys.stderr.write("manifest delta failure\n")
            sys.exit(1)
        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

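# A changelog entry is laid out as: the manifest node in hex, the user,
# the date as "seconds timezone-offset", one changed file per line, then a
# blank line followed by the free-form description; extract() below parses
# this back apart.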
class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        if " " not in date:
            date += " 0" # some tools used -d without a timezone
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        if not date:
            if time.daylight: offset = time.altzone
            else: offset = time.timezone
            date = "%d %d" % (time.time(), offset)
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        self.map = None
        self.pl = None
        self.copies = {}
        self.ignorefunc = None

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self.root: return ''
        return cwd[len(self.root) + 1:]

    def ignore(self, f):
        if not self.ignorefunc:
            bigpat = []
            try:
                l = file(self.wjoin(".hgignore"))
                for pat in l:
                    p = pat.rstrip()
                    if p:
                        try:
                            re.compile(p)
                        except:
                            self.ui.warn("ignoring invalid ignore"
                                         + " regular expression '%s'\n" % p)
                        else:
                            bigpat.append(p)
            except IOError: pass

            if bigpat:
                s = "(?:%s)" % (")|(?:".join(bigpat))
                r = re.compile(s)
                self.ignorefunc = r.search
            else:
                self.ignorefunc = util.never

        return self.ignorefunc(f)

    def __del__(self):
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def parents(self):
        if not self.pl:
            self.read()
        return self.pl

    def markdirty(self):
        if not self.dirty:
            self.dirty = 1

    def setparents(self, p1, p2 = nullid):
        self.markdirty()
        self.pl = p1, p2

    def state(self, key):
        try:
            return self[key][0]
        except KeyError:
            return "?"

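    # The dirstate file starts with the two 20-byte parent nodes, followed
    # by one record per tracked file: a ">cllll" struct (state, mode, size,
    # mtime, name length) and then the name itself, with any copy source
    # appended after a "\0"; read() and write() below implement this layout.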
    def read(self):
        if self.map is not None: return self.map

        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if not st: return
        except: return

        self.pl = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
            self.map[f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.read()
        self.markdirty()
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state, **kw):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                s = os.stat(os.path.join(self.root, f))
                st_size = kw.get('st_size', s.st_size)
                st_mtime = kw.get('st_mtime', s.st_mtime)
                self.map[f] = (state, s.st_mode, st_size, st_mtime)

    def forget(self, files):
        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)
                pass

    def clear(self):
        self.map = {}
        self.markdirty()

    def write(self):
        st = self.opener("dirstate", "w")
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def filterfiles(self, files):
        ret = {}
        unknown = []

        for x in files:
            if x is '.':
                return self.map.copy()
            if x not in self.map:
                unknown.append(x)
            else:
                ret[x] = self.map[x]

        if not unknown:
            return ret

        b = self.map.keys()
        b.sort()
        blen = len(b)

        for x in unknown:
            bs = bisect.bisect(b, x)
            if bs != 0 and b[bs-1] == x:
                ret[x] = self.map[x]
                continue
            while bs < blen:
                s = b[bs]
                if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
                    ret[s] = self.map[s]
                else:
                    break
                bs += 1
        return ret

    def walk(self, files = None, match = util.always, dc=None):
        self.read()

        # walk all files by default
        if not files:
            files = [self.root]
            if not dc:
                dc = self.map.copy()
        elif not dc:
            dc = self.filterfiles(files)

        known = {'.hg': 1}
        def seen(fn):
            if fn in known: return True
            known[fn] = 1
        def traverse():
            for ff in util.unique(files):
                f = os.path.join(self.root, ff)
                try:
                    st = os.stat(f)
                except OSError, inst:
                    if ff not in dc: self.ui.warn('%s: %s\n' % (
                        util.pathto(self.getcwd(), ff),
                        inst.strerror))
                    continue
                if stat.S_ISDIR(st.st_mode):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        nd = util.normpath(d)
                        if nd == '.': nd = ''
                        if seen(nd):
                            subdirs[:] = []
                            continue
                        for sd in subdirs:
                            ds = os.path.join(nd, sd +'/')
                            if self.ignore(ds) or not match(ds):
                                subdirs.remove(sd)
                        subdirs.sort()
                        fl.sort()
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield 'f', fn
                elif stat.S_ISREG(st.st_mode):
                    yield 'f', ff
                else:
                    kind = 'unknown'
                    if stat.S_ISCHR(st.st_mode): kind = 'character device'
                    elif stat.S_ISBLK(st.st_mode): kind = 'block device'
                    elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
                    elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
                    elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
                    self.ui.warn('%s: unsupported file type (type is %s)\n' % (
                        util.pathto(self.getcwd(), ff),
                        kind))

            ks = dc.keys()
            ks.sort()
            for k in ks:
                yield 'm', k

        # yield only files that match: all in dirstate, others only if
        # not in .hgignore

        for src, fn in util.unique(traverse()):
            fn = util.normpath(fn)
            if seen(fn): continue
            if fn not in dc and self.ignore(fn):
                continue
            if match(fn):
                yield src, fn

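    # changes() compares the dirstate against the working directory and
    # returns (lookup, modified, added, removed+deleted, unknown); files in
    # "lookup" have matching size and mode but a changed mtime, so the
    # caller still has to compare their contents.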
    def changes(self, files=None, match=util.always):
        self.read()
        if not files:
            dc = self.map.copy()
        else:
            dc = self.filterfiles(files)
        lookup, modified, added, unknown = [], [], [], []
        removed, deleted = [], []

        for src, fn in self.walk(files, match, dc=dc):
            try:
                s = os.stat(os.path.join(self.root, fn))
            except OSError:
                continue
            if not stat.S_ISREG(s.st_mode):
                continue
            c = dc.get(fn)
            if c:
                del dc[fn]
                if c[0] == 'm':
                    modified.append(fn)
                elif c[0] == 'a':
                    added.append(fn)
                elif c[0] == 'r':
                    unknown.append(fn)
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                    modified.append(fn)
                elif c[3] != s.st_mtime:
                    lookup.append(fn)
            else:
                unknown.append(fn)

        for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
            if c[0] == 'r':
                removed.append(fn)
            else:
                deleted.append(fn)
        return (lookup, modified, added, removed + deleted, unknown)

# used to avoid circular references so destructors work
def opener(base):
    p = base
    def o(path, mode="r"):
        if p.startswith("http://"):
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

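# Illustrative sketch only (the paths here are made up): given
#   repo_opener = opener("/some/repo/.hg")
# repo_opener("00changelog.i") opens an existing file for reading, while
# repo_opener("journal", "w") opens one for writing, creating any missing
# parent directories and copying hardlinked files before they are modified.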
class RepoError(Exception): pass

class localrepository:
    def __init__(self, ui, path=None, create=0):
        self.remote = 0
        if path and path.startswith("http://"):
            self.remote = 1
            self.path = path
        else:
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = os.path.abspath(path)
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            try:
                self.ui.readconfig(self.opener("hgrc"))
            except IOError: pass

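    # Hooks are shell commands configured in the [hooks] section of hgrc;
    # the keyword arguments passed here are exported to the command as
    # upper-cased environment variables, and a non-zero exit status makes
    # hook() return False, which callers such as commit() treat as an abort.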
    def hook(self, name, **args):
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            r = os.system(s)

            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True

    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.read(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                pass

            try:
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r,t,n))
        l.sort()
        return [(t,n) for r,t,n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t,n in self.tags().items():
                self.nodetagscache.setdefault(n,[]).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                raise RepoError("unknown revision '%s'" % key)

    def dev(self):
        if self.remote: return -1
        return os.stat(self.path).st_dev

    def local(self):
        return not self.remote

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/': f = f[1:]
        return filelog(self.opener, f)

    def getcwd(self):
        return self.dirstate.getcwd()

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def wread(self, filename):
        return self.wopener(filename, 'r').read()

    def wwrite(self, filename, data, fd=None):
        if fd:
            return fd.write(data)
        return self.wopener(filename, 'w').write(data)

    def transaction(self):
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        def after():
            util.rename(self.join("journal"), self.join("undo"))
            util.rename(self.join("journal.dirstate"),
                        self.join("undo.dirstate"))

        return transaction.transaction(self.ui.warn, self.opener,
                                       self.join("journal"), after)

    def recover(self):
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

    def lock(self, wait = 1):
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                fp1 = m1.get(f, nullid)
                fp2 = m2.get(f, nullid)

                # is the same revision on two branches of a merge?
                if fp2 == fp1:
                    fp2 = nullid

                if fp2 != nullid:
                    # is one parent an ancestor of the other?
                    fpa = r.ancestor(fp1, fp2)
                    if fpa == fp1:
                        fp1, fp2 = fp2, nullid
                    elif fpa == fp2:
                        fp2 = nullid

                # is the file unmodified from the parent?
                if t == r.read(fp1):
                    # record the proper existing parent in manifest
                    # no need to add a revision
                    mm[f] = fp1
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

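    # commit() flow: figure out what to commit (explicit files or the
    # current dirstate changes), run the "precommit" hook, check each file
    # into its filelog, write a new manifest and changelog entry inside a
    # transaction, then update the dirstate and run the "commit" hook.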
    def commit(self, files = None, text = "", user = None, date = None,
               match = util.always, force=False):
        commit = []
        remove = []
        changed = []

        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes(match = match)
            commit = c + a
            remove = d

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if not commit and not remove and not force and p2 == nullid:
            self.ui.status("nothing changed\n")
            return None

        if not self.hook("precommit"):
            return None

        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)

            # is the same revision on two branches of a merge?
            if fp2 == fp1:
                fp2 = nullid

            if fp2 != nullid:
                # is one parent an ancestor of the other?
                fpa = r.ancestor(fp1, fp2)
                if fpa == fp1:
                    fp1, fp2 = fp2, nullid
                elif fpa == fp2:
                    fp2 = nullid

            # is the file unmodified from the parent?
            if not meta and t == r.read(fp1):
                # record the proper existing parent in manifest
                # no need to add a revision
                new[f] = fp1
                continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            edittext = ""
            if p2 != nullid:
                edittext += "HG: branch merge\n"
            edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in changed])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            if not changed and not remove:
                edittext += "HG: no files changed\n"
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return None
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return None
        return n

    def walk(self, node = None, files = [], match = util.always):
        if node:
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                if match(fn): yield 'm', fn
        else:
            for src, fn in self.dirstate.walk(files, match):
                yield src, fn

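    # changes() compares two revisions, or a revision against the working
    # directory, and returns sorted lists of (changed, added, deleted,
    # unknown) files restricted to the given match function.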
    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        mf2, u = None, []

        def fcmp(fn, mf):
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)

1100 def add(self, list):
1100 def add(self, list):
1101 for f in list:
1101 for f in list:
1102 p = self.wjoin(f)
1102 p = self.wjoin(f)
1103 if not os.path.exists(p):
1103 if not os.path.exists(p):
1104 self.ui.warn("%s does not exist!\n" % f)
1104 self.ui.warn("%s does not exist!\n" % f)
1105 elif not os.path.isfile(p):
1105 elif not os.path.isfile(p):
1106 self.ui.warn("%s not added: only files supported currently\n" % f)
1106 self.ui.warn("%s not added: only files supported currently\n" % f)
1107 elif self.dirstate.state(f) in 'an':
1107 elif self.dirstate.state(f) in 'an':
1108 self.ui.warn("%s already tracked!\n" % f)
1108 self.ui.warn("%s already tracked!\n" % f)
1109 else:
1109 else:
1110 self.dirstate.update([f], "a")
1110 self.dirstate.update([f], "a")
1111
1111
1112 def forget(self, list):
1112 def forget(self, list):
1113 for f in list:
1113 for f in list:
1114 if self.dirstate.state(f) not in 'ai':
1114 if self.dirstate.state(f) not in 'ai':
1115 self.ui.warn("%s not added!\n" % f)
1115 self.ui.warn("%s not added!\n" % f)
1116 else:
1116 else:
1117 self.dirstate.forget([f])
1117 self.dirstate.forget([f])
1118
1118
1119 def remove(self, list):
1119 def remove(self, list):
1120 for f in list:
1120 for f in list:
1121 p = self.wjoin(f)
1121 p = self.wjoin(f)
1122 if os.path.exists(p):
1122 if os.path.exists(p):
1123 self.ui.warn("%s still exists!\n" % f)
1123 self.ui.warn("%s still exists!\n" % f)
1124 elif self.dirstate.state(f) == 'a':
1124 elif self.dirstate.state(f) == 'a':
1125 self.ui.warn("%s never committed!\n" % f)
1125 self.ui.warn("%s never committed!\n" % f)
1126 self.dirstate.forget([f])
1126 self.dirstate.forget([f])
1127 elif f not in self.dirstate:
1127 elif f not in self.dirstate:
1128 self.ui.warn("%s not tracked!\n" % f)
1128 self.ui.warn("%s not tracked!\n" % f)
1129 else:
1129 else:
1130 self.dirstate.update([f], "r")
1130 self.dirstate.update([f], "r")
1131
1131
1132 def copy(self, source, dest):
1132 def copy(self, source, dest):
1133 p = self.wjoin(dest)
1133 p = self.wjoin(dest)
1134 if not os.path.exists(p):
1134 if not os.path.exists(p):
1135 self.ui.warn("%s does not exist!\n" % dest)
1135 self.ui.warn("%s does not exist!\n" % dest)
1136 elif not os.path.isfile(p):
1136 elif not os.path.isfile(p):
1137 self.ui.warn("copy failed: %s is not a file\n" % dest)
1137 self.ui.warn("copy failed: %s is not a file\n" % dest)
1138 else:
1138 else:
1139 if self.dirstate.state(dest) == '?':
1139 if self.dirstate.state(dest) == '?':
1140 self.dirstate.update([dest], "a")
1140 self.dirstate.update([dest], "a")
1141 self.dirstate.copy(source, dest)
1141 self.dirstate.copy(source, dest)
1142
1142
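# Editorial note (not part of hg.py): the single-letter dirstate states used
# by add/forget/remove/copy above and by update() further down are, as far as
# this file shows them: 'n' normally tracked, 'a' added, 'r' removed,
# 'm' merged, and '?' untracked/unknown.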
1143 def heads(self):
1143 def heads(self):
1144 return self.changelog.heads()
1144 return self.changelog.heads()
1145
1145
1146 # branchlookup returns a dict giving a list of branches for
1146 # branchlookup returns a dict giving a list of branches for
1147 # each head. A branch is defined as the tag of a node or
1147 # each head. A branch is defined as the tag of a node or
1148 # the branch of the node's parents. If a node has multiple
1148 # the branch of the node's parents. If a node has multiple
1149 # branch tags, tags are eliminated if they are visible from other
1149 # branch tags, tags are eliminated if they are visible from other
1150 # branch tags.
1150 # branch tags.
1151 #
1151 #
1152 # So, for this graph:  a->b->c->d->e
1152 # So, for this graph:  a->b->c->d->e
1153 #                       \         /
1153 #                       \         /
1154 #                        aa -----/
1154 #                        aa -----/
1155 # a has tag 2.6.12
1155 # a has tag 2.6.12
1156 # d has tag 2.6.13
1156 # d has tag 2.6.13
1157 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1157 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1158 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1158 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1159 # from the list.
1159 # from the list.
1160 #
1160 #
1161 # It is possible that more than one head will have the same branch tag.
1161 # It is possible that more than one head will have the same branch tag.
1162 # Callers need to check the result for multiple heads under the same
1162 # Callers need to check the result for multiple heads under the same
1163 # branch tag if that is a problem for them (i.e. checkout of a specific
1163 # branch tag if that is a problem for them (i.e. checkout of a specific
1164 # branch).
1164 # branch).
1165 #
1165 #
1166 # passing in a specific branch will limit the depth of the search
1166 # passing in a specific branch will limit the depth of the search
1167 # through the parents. It won't limit the branches returned in the
1167 # through the parents. It won't limit the branches returned in the
1168 # result though.
1168 # result though.
1169 def branchlookup(self, heads=None, branch=None):
1169 def branchlookup(self, heads=None, branch=None):
1170 if not heads:
1170 if not heads:
1171 heads = self.heads()
1171 heads = self.heads()
1172 headt = [ h for h in heads ]
1172 headt = [ h for h in heads ]
1173 chlog = self.changelog
1173 chlog = self.changelog
1174 branches = {}
1174 branches = {}
1175 merges = []
1175 merges = []
1176 seenmerge = {}
1176 seenmerge = {}
1177
1177
1178 # traverse the tree once for each head, recording in the branches
1178 # traverse the tree once for each head, recording in the branches
1179 # dict which tags are visible from this head. The branches
1179 # dict which tags are visible from this head. The branches
1180 # dict also records which tags are visible from each tag
1180 # dict also records which tags are visible from each tag
1181 # while we traverse.
1181 # while we traverse.
1182 while headt or merges:
1182 while headt or merges:
1183 if merges:
1183 if merges:
1184 n, found = merges.pop()
1184 n, found = merges.pop()
1185 visit = [n]
1185 visit = [n]
1186 else:
1186 else:
1187 h = headt.pop()
1187 h = headt.pop()
1188 visit = [h]
1188 visit = [h]
1189 found = [h]
1189 found = [h]
1190 seen = {}
1190 seen = {}
1191 while visit:
1191 while visit:
1192 n = visit.pop()
1192 n = visit.pop()
1193 if n in seen:
1193 if n in seen:
1194 continue
1194 continue
1195 pp = chlog.parents(n)
1195 pp = chlog.parents(n)
1196 tags = self.nodetags(n)
1196 tags = self.nodetags(n)
1197 if tags:
1197 if tags:
1198 for x in tags:
1198 for x in tags:
1199 if x == 'tip':
1199 if x == 'tip':
1200 continue
1200 continue
1201 for f in found:
1201 for f in found:
1202 branches.setdefault(f, {})[n] = 1
1202 branches.setdefault(f, {})[n] = 1
1203 branches.setdefault(n, {})[n] = 1
1203 branches.setdefault(n, {})[n] = 1
1204 break
1204 break
1205 if n not in found:
1205 if n not in found:
1206 found.append(n)
1206 found.append(n)
1207 if branch in tags:
1207 if branch in tags:
1208 continue
1208 continue
1209 seen[n] = 1
1209 seen[n] = 1
1210 if pp[1] != nullid and n not in seenmerge:
1210 if pp[1] != nullid and n not in seenmerge:
1211 merges.append((pp[1], [x for x in found]))
1211 merges.append((pp[1], [x for x in found]))
1212 seenmerge[n] = 1
1212 seenmerge[n] = 1
1213 if pp[0] != nullid:
1213 if pp[0] != nullid:
1214 visit.append(pp[0])
1214 visit.append(pp[0])
1215 # traverse the branches dict, eliminating branch tags from each
1215 # traverse the branches dict, eliminating branch tags from each
1216 # head that are visible from another branch tag for that head.
1216 # head that are visible from another branch tag for that head.
1217 out = {}
1217 out = {}
1218 viscache = {}
1218 viscache = {}
1219 for h in heads:
1219 for h in heads:
1220 def visible(node):
1220 def visible(node):
1221 if node in viscache:
1221 if node in viscache:
1222 return viscache[node]
1222 return viscache[node]
1223 ret = {}
1223 ret = {}
1224 visit = [node]
1224 visit = [node]
1225 while visit:
1225 while visit:
1226 x = visit.pop()
1226 x = visit.pop()
1227 if x in viscache:
1227 if x in viscache:
1228 ret.update(viscache[x])
1228 ret.update(viscache[x])
1229 elif x not in ret:
1229 elif x not in ret:
1230 ret[x] = 1
1230 ret[x] = 1
1231 if x in branches:
1231 if x in branches:
1232 visit[len(visit):] = branches[x].keys()
1232 visit[len(visit):] = branches[x].keys()
1233 viscache[node] = ret
1233 viscache[node] = ret
1234 return ret
1234 return ret
1235 if h not in branches:
1235 if h not in branches:
1236 continue
1236 continue
1237 # O(n^2), but somewhat limited. This only searches the
1237 # O(n^2), but somewhat limited. This only searches the
1238 # tags visible from a specific head, not all the tags in the
1238 # tags visible from a specific head, not all the tags in the
1239 # whole repo.
1239 # whole repo.
1240 for b in branches[h]:
1240 for b in branches[h]:
1241 vis = False
1241 vis = False
1242 for bb in branches[h].keys():
1242 for bb in branches[h].keys():
1243 if b != bb:
1243 if b != bb:
1244 if b in visible(bb):
1244 if b in visible(bb):
1245 vis = True
1245 vis = True
1246 break
1246 break
1247 if not vis:
1247 if not vis:
1248 l = out.setdefault(h, [])
1248 l = out.setdefault(h, [])
1249 l[len(l):] = self.nodetags(b)
1249 l[len(l):] = self.nodetags(b)
1250 return out
1250 return out
1251
1251
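# Editorial sketch (not part of hg.py): a minimal, self-contained illustration
# of the tag-visibility rule described in the comment above branchlookup(),
# using its example graph a->b->c->d->e with a side path a->aa->e, where "a"
# is tagged 2.6.12 and "d" is tagged 2.6.13.  The helper below and its data
# structures are illustrative assumptions, not Mercurial APIs.
def _branchtags_for_head(head, parents, tags):
    # collect every tagged ancestor reachable from head
    seen, stack, tagged = {}, [head], []
    while stack:
        n = stack.pop()
        if n in seen:
            continue
        seen[n] = 1
        if n in tags:
            tagged.append(n)
        stack.extend(parents.get(n, []))

    def reachable(src, dst):
        # is dst an ancestor of (i.e. visible from) src?
        seen, stack = {}, [src]
        while stack:
            n = stack.pop()
            if n == dst:
                return True
            if n in seen:
                continue
            seen[n] = 1
            stack.extend(parents.get(n, []))
        return False

    # drop a tag if it is visible from another tag reachable from this head
    return [tags[t] for t in tagged
            if not [o for o in tagged if o != t and reachable(o, t)]]

# "e" is a merge of "d" and "aa"; parents maps node -> list of parent nodes
parents = {"b": ["a"], "c": ["b"], "d": ["c"], "e": ["d", "aa"], "aa": ["a"]}
tags = {"a": "2.6.12", "d": "2.6.13"}
assert _branchtags_for_head("e", parents, tags) == ["2.6.13"]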
1252 def branches(self, nodes):
1252 def branches(self, nodes):
1253 if not nodes: nodes = [self.changelog.tip()]
1253 if not nodes: nodes = [self.changelog.tip()]
1254 b = []
1254 b = []
1255 for n in nodes:
1255 for n in nodes:
1256 t = n
1256 t = n
1257 while n:
1257 while n:
1258 p = self.changelog.parents(n)
1258 p = self.changelog.parents(n)
1259 if p[1] != nullid or p[0] == nullid:
1259 if p[1] != nullid or p[0] == nullid:
1260 b.append((t, n, p[0], p[1]))
1260 b.append((t, n, p[0], p[1]))
1261 break
1261 break
1262 n = p[0]
1262 n = p[0]
1263 return b
1263 return b
1264
1264
1265 def between(self, pairs):
1265 def between(self, pairs):
1266 r = []
1266 r = []
1267
1267
1268 for top, bottom in pairs:
1268 for top, bottom in pairs:
1269 n, l, i = top, [], 0
1269 n, l, i = top, [], 0
1270 f = 1
1270 f = 1
1271
1271
1272 while n != bottom:
1272 while n != bottom:
1273 p = self.changelog.parents(n)[0]
1273 p = self.changelog.parents(n)[0]
1274 if i == f:
1274 if i == f:
1275 l.append(n)
1275 l.append(n)
1276 f = f * 2
1276 f = f * 2
1277 n = p
1277 n = p
1278 i += 1
1278 i += 1
1279
1279
1280 r.append(l)
1280 r.append(l)
1281
1281
1282 return r
1282 return r
1283
1283
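# Editorial sketch (not part of hg.py): between() above walks the first-parent
# chain from top towards bottom and records nodes at exponentially growing
# offsets (1, 2, 4, 8, ...), which is what lets findincoming() below narrow a
# branch with a logarithmic number of round trips.  A toy version for a purely
# linear history where the parent of revision r is simply r - 1:
def _sample_between(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f = f * 2
        n = n - 1   # first parent in a linear chain
        i = i + 1
    return l

assert _sample_between(20, 0) == [19, 18, 16, 12, 4]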
1284 def newer(self, nodes):
1284 def newer(self, nodes):
1285 m = {}
1285 m = {}
1286 nl = []
1286 nl = []
1287 pm = {}
1287 pm = {}
1288 cl = self.changelog
1288 cl = self.changelog
1289 t = l = cl.count()
1289 t = l = cl.count()
1290
1290
1291 # find the lowest numbered node
1291 # find the lowest numbered node
1292 for n in nodes:
1292 for n in nodes:
1293 l = min(l, cl.rev(n))
1293 l = min(l, cl.rev(n))
1294 m[n] = 1
1294 m[n] = 1
1295
1295
1296 for i in xrange(l, t):
1296 for i in xrange(l, t):
1297 n = cl.node(i)
1297 n = cl.node(i)
1298 if n in m: # explicitly listed
1298 if n in m: # explicitly listed
1299 pm[n] = 1
1299 pm[n] = 1
1300 nl.append(n)
1300 nl.append(n)
1301 continue
1301 continue
1302 for p in cl.parents(n):
1302 for p in cl.parents(n):
1303 if p in pm: # parent listed
1303 if p in pm: # parent listed
1304 pm[n] = 1
1304 pm[n] = 1
1305 nl.append(n)
1305 nl.append(n)
1306 break
1306 break
1307
1307
1308 return nl
1308 return nl
1309
1309
1310 def findincoming(self, remote, base=None, heads=None):
1310 def findincoming(self, remote, base=None, heads=None):
1311 m = self.changelog.nodemap
1311 m = self.changelog.nodemap
1312 search = []
1312 search = []
1313 fetch = []
1313 fetch = []
1314 seen = {}
1314 seen = {}
1315 seenbranch = {}
1315 seenbranch = {}
1316 if base is None:
1316 if base is None:
1317 base = {}
1317 base = {}
1318
1318
1319 # assume we're closer to the tip than the root
1319 # assume we're closer to the tip than the root
1320 # and start by examining the heads
1320 # and start by examining the heads
1321 self.ui.status("searching for changes\n")
1321 self.ui.status("searching for changes\n")
1322
1322
1323 if not heads:
1323 if not heads:
1324 heads = remote.heads()
1324 heads = remote.heads()
1325
1325
1326 unknown = []
1326 unknown = []
1327 for h in heads:
1327 for h in heads:
1328 if h not in m:
1328 if h not in m:
1329 unknown.append(h)
1329 unknown.append(h)
1330 else:
1330 else:
1331 base[h] = 1
1331 base[h] = 1
1332
1332
1333 if not unknown:
1333 if not unknown:
1334 return None
1334 return None
1335
1335
1336 rep = {}
1336 rep = {}
1337 reqcnt = 0
1337 reqcnt = 0
1338
1338
1339 # search through remote branches
1339 # search through remote branches
1340 # a 'branch' here is a linear segment of history, with four parts:
1340 # a 'branch' here is a linear segment of history, with four parts:
1341 # head, root, first parent, second parent
1341 # head, root, first parent, second parent
1342 # (a branch always has two parents (or none) by definition)
1342 # (a branch always has two parents (or none) by definition)
1343 unknown = remote.branches(unknown)
1343 unknown = remote.branches(unknown)
1344 while unknown:
1344 while unknown:
1345 r = []
1345 r = []
1346 while unknown:
1346 while unknown:
1347 n = unknown.pop(0)
1347 n = unknown.pop(0)
1348 if n[0] in seen:
1348 if n[0] in seen:
1349 continue
1349 continue
1350
1350
1351 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1351 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1352 if n[0] == nullid:
1352 if n[0] == nullid:
1353 break
1353 break
1354 if n in seenbranch:
1354 if n in seenbranch:
1355 self.ui.debug("branch already found\n")
1355 self.ui.debug("branch already found\n")
1356 continue
1356 continue
1357 if n[1] and n[1] in m: # do we know the base?
1357 if n[1] and n[1] in m: # do we know the base?
1358 self.ui.debug("found incomplete branch %s:%s\n"
1358 self.ui.debug("found incomplete branch %s:%s\n"
1359 % (short(n[0]), short(n[1])))
1359 % (short(n[0]), short(n[1])))
1360 search.append(n) # schedule branch range for scanning
1360 search.append(n) # schedule branch range for scanning
1361 seenbranch[n] = 1
1361 seenbranch[n] = 1
1362 else:
1362 else:
1363 if n[1] not in seen and n[1] not in fetch:
1363 if n[1] not in seen and n[1] not in fetch:
1364 if n[2] in m and n[3] in m:
1364 if n[2] in m and n[3] in m:
1365 self.ui.debug("found new changeset %s\n" %
1365 self.ui.debug("found new changeset %s\n" %
1366 short(n[1]))
1366 short(n[1]))
1367 fetch.append(n[1]) # earliest unknown
1367 fetch.append(n[1]) # earliest unknown
1368 base[n[2]] = 1 # latest known
1368 base[n[2]] = 1 # latest known
1369 continue
1369 continue
1370
1370
1371 for a in n[2:4]:
1371 for a in n[2:4]:
1372 if a not in rep:
1372 if a not in rep:
1373 r.append(a)
1373 r.append(a)
1374 rep[a] = 1
1374 rep[a] = 1
1375
1375
1376 seen[n[0]] = 1
1376 seen[n[0]] = 1
1377
1377
1378 if r:
1378 if r:
1379 reqcnt += 1
1379 reqcnt += 1
1380 self.ui.debug("request %d: %s\n" %
1380 self.ui.debug("request %d: %s\n" %
1381 (reqcnt, " ".join(map(short, r))))
1381 (reqcnt, " ".join(map(short, r))))
1382 for p in range(0, len(r), 10):
1382 for p in range(0, len(r), 10):
1383 for b in remote.branches(r[p:p+10]):
1383 for b in remote.branches(r[p:p+10]):
1384 self.ui.debug("received %s:%s\n" %
1384 self.ui.debug("received %s:%s\n" %
1385 (short(b[0]), short(b[1])))
1385 (short(b[0]), short(b[1])))
1386 if b[0] not in m and b[0] not in seen:
1386 if b[0] not in m and b[0] not in seen:
1387 unknown.append(b)
1387 unknown.append(b)
1388
1388
1389 # do binary search on the branches we found
1389 # do binary search on the branches we found
1390 while search:
1390 while search:
1391 n = search.pop(0)
1391 n = search.pop(0)
1392 reqcnt += 1
1392 reqcnt += 1
1393 l = remote.between([(n[0], n[1])])[0]
1393 l = remote.between([(n[0], n[1])])[0]
1394 l.append(n[1])
1394 l.append(n[1])
1395 p = n[0]
1395 p = n[0]
1396 f = 1
1396 f = 1
1397 for i in l:
1397 for i in l:
1398 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1398 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1399 if i in m:
1399 if i in m:
1400 if f <= 2:
1400 if f <= 2:
1401 self.ui.debug("found new branch changeset %s\n" %
1401 self.ui.debug("found new branch changeset %s\n" %
1402 short(p))
1402 short(p))
1403 fetch.append(p)
1403 fetch.append(p)
1404 base[i] = 1
1404 base[i] = 1
1405 else:
1405 else:
1406 self.ui.debug("narrowed branch search to %s:%s\n"
1406 self.ui.debug("narrowed branch search to %s:%s\n"
1407 % (short(p), short(i)))
1407 % (short(p), short(i)))
1408 search.append((p, i))
1408 search.append((p, i))
1409 break
1409 break
1410 p, f = i, f * 2
1410 p, f = i, f * 2
1411
1411
1412 # sanity check our fetch list
1412 # sanity check our fetch list
1413 for f in fetch:
1413 for f in fetch:
1414 if f in m:
1414 if f in m:
1415 raise RepoError("already have changeset " + short(f))
1415 raise RepoError("already have changeset " + short(f))
1416
1416
1417 if base.keys() == [nullid]:
1417 if base.keys() == [nullid]:
1418 self.ui.warn("warning: pulling from an unrelated repository!\n")
1418 self.ui.warn("warning: pulling from an unrelated repository!\n")
1419
1419
1420 self.ui.note("adding new changesets starting at " +
1420 self.ui.note("adding new changesets starting at " +
1421 " ".join([short(f) for f in fetch]) + "\n")
1421 " ".join([short(f) for f in fetch]) + "\n")
1422
1422
1423 self.ui.debug("%d total queries\n" % reqcnt)
1423 self.ui.debug("%d total queries\n" % reqcnt)
1424
1424
1425 return fetch
1425 return fetch
1426
1426
1427 def findoutgoing(self, remote, base=None, heads=None):
1427 def findoutgoing(self, remote, base=None, heads=None):
1428 if base is None:
1428 if base is None:
1429 base = {}
1429 base = {}
1430 self.findincoming(remote, base, heads)
1430 self.findincoming(remote, base, heads)
1431
1431
1432 remain = dict.fromkeys(self.changelog.nodemap)
1432 remain = dict.fromkeys(self.changelog.nodemap)
1433
1433
1434 # prune everything remote has from the tree
1434 # prune everything remote has from the tree
1435 del remain[nullid]
1435 del remain[nullid]
1436 remove = base.keys()
1436 remove = base.keys()
1437 while remove:
1437 while remove:
1438 n = remove.pop(0)
1438 n = remove.pop(0)
1439 if n in remain:
1439 if n in remain:
1440 del remain[n]
1440 del remain[n]
1441 for p in self.changelog.parents(n):
1441 for p in self.changelog.parents(n):
1442 remove.append(p)
1442 remove.append(p)
1443
1443
1444 # find every node whose parents have been pruned
1444 # find every node whose parents have been pruned
1445 subset = []
1445 subset = []
1446 for n in remain:
1446 for n in remain:
1447 p1, p2 = self.changelog.parents(n)
1447 p1, p2 = self.changelog.parents(n)
1448 if p1 not in remain and p2 not in remain:
1448 if p1 not in remain and p2 not in remain:
1449 subset.append(n)
1449 subset.append(n)
1450
1450
1451 # this is the set of all roots we have to push
1451 # this is the set of all roots we have to push
1452 return subset
1452 return subset
1453
1453
1454 def pull(self, remote):
1454 def pull(self, remote):
1455 lock = self.lock()
1455 lock = self.lock()
1456
1456
1457 # if we have an empty repo, fetch everything
1457 # if we have an empty repo, fetch everything
1458 if self.changelog.tip() == nullid:
1458 if self.changelog.tip() == nullid:
1459 self.ui.status("requesting all changes\n")
1459 self.ui.status("requesting all changes\n")
1460 fetch = [nullid]
1460 fetch = [nullid]
1461 else:
1461 else:
1462 fetch = self.findincoming(remote)
1462 fetch = self.findincoming(remote)
1463
1463
1464 if not fetch:
1464 if not fetch:
1465 self.ui.status("no changes found\n")
1465 self.ui.status("no changes found\n")
1466 return 1
1466 return 1
1467
1467
1468 cg = remote.changegroup(fetch)
1468 cg = remote.changegroup(fetch)
1469 return self.addchangegroup(cg)
1469 return self.addchangegroup(cg)
1470
1470
1471 def push(self, remote, force=False):
1471 def push(self, remote, force=False):
1472 lock = remote.lock()
1472 lock = remote.lock()
1473
1473
1474 base = {}
1474 base = {}
1475 heads = remote.heads()
1475 heads = remote.heads()
1476 inc = self.findincoming(remote, base, heads)
1476 inc = self.findincoming(remote, base, heads)
1477 if not force and inc:
1477 if not force and inc:
1478 self.ui.warn("abort: unsynced remote changes!\n")
1478 self.ui.warn("abort: unsynced remote changes!\n")
1479 self.ui.status("(did you forget to sync? use push -f to force)\n")
1479 self.ui.status("(did you forget to sync? use push -f to force)\n")
1480 return 1
1480 return 1
1481
1481
1482 update = self.findoutgoing(remote, base)
1482 update = self.findoutgoing(remote, base)
1483 if not update:
1483 if not update:
1484 self.ui.status("no changes found\n")
1484 self.ui.status("no changes found\n")
1485 return 1
1485 return 1
1486 elif not force:
1486 elif not force:
1487 if len(heads) < len(self.changelog.heads()):
1487 if len(heads) < len(self.changelog.heads()):
1488 self.ui.warn("abort: push creates new remote branches!\n")
1488 self.ui.warn("abort: push creates new remote branches!\n")
1489 self.ui.status("(did you forget to merge?" +
1489 self.ui.status("(did you forget to merge?" +
1490 " use push -f to force)\n")
1490 " use push -f to force)\n")
1491 return 1
1491 return 1
1492
1492
1493 cg = self.changegroup(update)
1493 cg = self.changegroup(update)
1494 return remote.addchangegroup(cg)
1494 return remote.addchangegroup(cg)
1495
1495
1496 def changegroup(self, basenodes):
1496 def changegroup(self, basenodes):
1497 class genread:
1497 class genread:
1498 def __init__(self, generator):
1498 def __init__(self, generator):
1499 self.g = generator
1499 self.g = generator
1500 self.buf = ""
1500 self.buf = ""
1501 def fillbuf(self):
1501 def fillbuf(self):
1502 self.buf += "".join(self.g)
1502 self.buf += "".join(self.g)
1503
1503
1504 def read(self, l):
1504 def read(self, l):
1505 while l > len(self.buf):
1505 while l > len(self.buf):
1506 try:
1506 try:
1507 self.buf += self.g.next()
1507 self.buf += self.g.next()
1508 except StopIteration:
1508 except StopIteration:
1509 break
1509 break
1510 d, self.buf = self.buf[:l], self.buf[l:]
1510 d, self.buf = self.buf[:l], self.buf[l:]
1511 return d
1511 return d
1512
1512
1513 def gengroup():
1513 def gengroup():
1514 nodes = self.newer(basenodes)
1514 nodes = self.newer(basenodes)
1515
1515
1516 # construct the link map
1516 # construct the link map
1517 linkmap = {}
1517 linkmap = {}
1518 for n in nodes:
1518 for n in nodes:
1519 linkmap[self.changelog.rev(n)] = n
1519 linkmap[self.changelog.rev(n)] = n
1520
1520
1521 # construct a list of all changed files
1521 # construct a list of all changed files
1522 changed = {}
1522 changed = {}
1523 for n in nodes:
1523 for n in nodes:
1524 c = self.changelog.read(n)
1524 c = self.changelog.read(n)
1525 for f in c[3]:
1525 for f in c[3]:
1526 changed[f] = 1
1526 changed[f] = 1
1527 changed = changed.keys()
1527 changed = changed.keys()
1528 changed.sort()
1528 changed.sort()
1529
1529
1530 # the changegroup is changesets + manifests + all file revs
1530 # the changegroup is changesets + manifests + all file revs
1531 revs = [ self.changelog.rev(n) for n in nodes ]
1531 revs = [ self.changelog.rev(n) for n in nodes ]
1532
1532
1533 for y in self.changelog.group(linkmap): yield y
1533 for y in self.changelog.group(linkmap): yield y
1534 for y in self.manifest.group(linkmap): yield y
1534 for y in self.manifest.group(linkmap): yield y
1535 for f in changed:
1535 for f in changed:
1536 yield struct.pack(">l", len(f) + 4) + f
1536 yield struct.pack(">l", len(f) + 4) + f
1537 g = self.file(f).group(linkmap)
1537 g = self.file(f).group(linkmap)
1538 for y in g:
1538 for y in g:
1539 yield y
1539 yield y
1540
1540
1541 yield struct.pack(">l", 0)
1541 yield struct.pack(">l", 0)
1542
1542
1543 return genread(gengroup())
1543 return genread(gengroup())
1544
1544
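# Editorial sketch (not part of hg.py): the stream produced by gengroup()
# above is a sequence of chunks, each prefixed by a 4-byte big-endian length
# that includes the prefix itself; a length of 4 or less marks the end of a
# group (see getchunk() in addchangegroup() below).  A round trip through
# that framing:
import struct

def _frame(data):
    return struct.pack(">l", len(data) + 4) + data

def _unframe(buf):
    l = struct.unpack(">l", buf[:4])[0]
    if l <= 4:
        return b"", buf[4:]          # terminator chunk
    return buf[4:l], buf[l:]

chunk, rest = _unframe(_frame(b"hello") + struct.pack(">l", 0))
assert chunk == b"hello" and _unframe(rest)[0] == b""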
1545 def addchangegroup(self, source):
1545 def addchangegroup(self, source):
1546
1546
1547 def getchunk():
1547 def getchunk():
1548 d = source.read(4)
1548 d = source.read(4)
1549 if not d: return ""
1549 if not d: return ""
1550 l = struct.unpack(">l", d)[0]
1550 l = struct.unpack(">l", d)[0]
1551 if l <= 4: return ""
1551 if l <= 4: return ""
1552 return source.read(l - 4)
1552 return source.read(l - 4)
1553
1553
1554 def getgroup():
1554 def getgroup():
1555 while 1:
1555 while 1:
1556 c = getchunk()
1556 c = getchunk()
1557 if not c: break
1557 if not c: break
1558 yield c
1558 yield c
1559
1559
1560 def csmap(x):
1560 def csmap(x):
1561 self.ui.debug("add changeset %s\n" % short(x))
1561 self.ui.debug("add changeset %s\n" % short(x))
1562 return self.changelog.count()
1562 return self.changelog.count()
1563
1563
1564 def revmap(x):
1564 def revmap(x):
1565 return self.changelog.rev(x)
1565 return self.changelog.rev(x)
1566
1566
1567 if not source: return
1567 if not source: return
1568 changesets = files = revisions = 0
1568 changesets = files = revisions = 0
1569
1569
1570 tr = self.transaction()
1570 tr = self.transaction()
1571
1571
1572 oldheads = len(self.changelog.heads())
1573
1572 # pull off the changeset group
1574 # pull off the changeset group
1573 self.ui.status("adding changesets\n")
1575 self.ui.status("adding changesets\n")
1574 co = self.changelog.tip()
1576 co = self.changelog.tip()
1575 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1577 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1576 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1578 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1577
1579
1578 # pull off the manifest group
1580 # pull off the manifest group
1579 self.ui.status("adding manifests\n")
1581 self.ui.status("adding manifests\n")
1580 mm = self.manifest.tip()
1582 mm = self.manifest.tip()
1581 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1583 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1582
1584
1583 # process the files
1585 # process the files
1584 self.ui.status("adding file changes\n")
1586 self.ui.status("adding file changes\n")
1585 while 1:
1587 while 1:
1586 f = getchunk()
1588 f = getchunk()
1587 if not f: break
1589 if not f: break
1588 self.ui.debug("adding %s revisions\n" % f)
1590 self.ui.debug("adding %s revisions\n" % f)
1589 fl = self.file(f)
1591 fl = self.file(f)
1590 o = fl.count()
1592 o = fl.count()
1591 n = fl.addgroup(getgroup(), revmap, tr)
1593 n = fl.addgroup(getgroup(), revmap, tr)
1592 revisions += fl.count() - o
1594 revisions += fl.count() - o
1593 files += 1
1595 files += 1
1594
1596
1597 newheads = len(self.changelog.heads())
1598 heads = ""
1599 if oldheads and newheads > oldheads:
1600 heads = " (+%d heads)" % (newheads - oldheads)
1601
1595 self.ui.status(("added %d changesets" +
1602 self.ui.status(("added %d changesets" +
1596 " with %d changes to %d files\n")
1603 " with %d changes to %d files%s\n")
1597 % (changesets, revisions, files))
1604 % (changesets, revisions, files, heads))
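# Editorial note (not part of hg.py): with the head counting added in this
# change, a pull that introduces one extra head would now report, e.g.:
#   added 3 changesets with 7 changes to 4 files (+1 heads)
# while a pull that leaves the head count unchanged keeps the old wording.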
1598
1605
1599 tr.close()
1606 tr.close()
1600
1607
1601 if not self.hook("changegroup"):
1608 if not self.hook("changegroup"):
1602 return 1
1609 return 1
1603
1610
1604 return
1611 return
1605
1612
1606 def update(self, node, allow=False, force=False, choose=None,
1613 def update(self, node, allow=False, force=False, choose=None,
1607 moddirstate=True):
1614 moddirstate=True):
1608 pl = self.dirstate.parents()
1615 pl = self.dirstate.parents()
1609 if not force and pl[1] != nullid:
1616 if not force and pl[1] != nullid:
1610 self.ui.warn("aborting: outstanding uncommitted merges\n")
1617 self.ui.warn("aborting: outstanding uncommitted merges\n")
1611 return 1
1618 return 1
1612
1619
1613 p1, p2 = pl[0], node
1620 p1, p2 = pl[0], node
1614 pa = self.changelog.ancestor(p1, p2)
1621 pa = self.changelog.ancestor(p1, p2)
1615 m1n = self.changelog.read(p1)[0]
1622 m1n = self.changelog.read(p1)[0]
1616 m2n = self.changelog.read(p2)[0]
1623 m2n = self.changelog.read(p2)[0]
1617 man = self.manifest.ancestor(m1n, m2n)
1624 man = self.manifest.ancestor(m1n, m2n)
1618 m1 = self.manifest.read(m1n)
1625 m1 = self.manifest.read(m1n)
1619 mf1 = self.manifest.readflags(m1n)
1626 mf1 = self.manifest.readflags(m1n)
1620 m2 = self.manifest.read(m2n)
1627 m2 = self.manifest.read(m2n)
1621 mf2 = self.manifest.readflags(m2n)
1628 mf2 = self.manifest.readflags(m2n)
1622 ma = self.manifest.read(man)
1629 ma = self.manifest.read(man)
1623 mfa = self.manifest.readflags(man)
1630 mfa = self.manifest.readflags(man)
1624
1631
1625 (c, a, d, u) = self.changes()
1632 (c, a, d, u) = self.changes()
1626
1633
1627 # is this a jump, or a merge? i.e. is there a linear path
1634 # is this a jump, or a merge? i.e. is there a linear path
1628 # from p1 to p2?
1635 # from p1 to p2?
1629 linear_path = (pa == p1 or pa == p2)
1636 linear_path = (pa == p1 or pa == p2)
1630
1637
1631 # resolve the manifest to determine which files
1638 # resolve the manifest to determine which files
1632 # we care about merging
1639 # we care about merging
1633 self.ui.note("resolving manifests\n")
1640 self.ui.note("resolving manifests\n")
1634 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1641 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1635 (force, allow, moddirstate, linear_path))
1642 (force, allow, moddirstate, linear_path))
1636 self.ui.debug(" ancestor %s local %s remote %s\n" %
1643 self.ui.debug(" ancestor %s local %s remote %s\n" %
1637 (short(man), short(m1n), short(m2n)))
1644 (short(man), short(m1n), short(m2n)))
1638
1645
1639 merge = {}
1646 merge = {}
1640 get = {}
1647 get = {}
1641 remove = []
1648 remove = []
1642
1649
1643 # construct a working dir manifest
1650 # construct a working dir manifest
1644 mw = m1.copy()
1651 mw = m1.copy()
1645 mfw = mf1.copy()
1652 mfw = mf1.copy()
1646 umap = dict.fromkeys(u)
1653 umap = dict.fromkeys(u)
1647
1654
1648 for f in a + c + u:
1655 for f in a + c + u:
1649 mw[f] = ""
1656 mw[f] = ""
1650 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1657 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1651
1658
1652 for f in d:
1659 for f in d:
1653 if f in mw: del mw[f]
1660 if f in mw: del mw[f]
1654
1661
1655 # If we're jumping between revisions (as opposed to merging),
1662 # If we're jumping between revisions (as opposed to merging),
1656 # and if neither the working directory nor the target rev has
1663 # and if neither the working directory nor the target rev has
1657 # the file, then we need to remove it from the dirstate, to
1664 # the file, then we need to remove it from the dirstate, to
1658 # prevent the dirstate from listing the file when it is no
1665 # prevent the dirstate from listing the file when it is no
1659 # longer in the manifest.
1666 # longer in the manifest.
1660 if moddirstate and linear_path and f not in m2:
1667 if moddirstate and linear_path and f not in m2:
1661 self.dirstate.forget((f,))
1668 self.dirstate.forget((f,))
1662
1669
1663 # Compare manifests
1670 # Compare manifests
1664 for f, n in mw.iteritems():
1671 for f, n in mw.iteritems():
1665 if choose and not choose(f): continue
1672 if choose and not choose(f): continue
1666 if f in m2:
1673 if f in m2:
1667 s = 0
1674 s = 0
1668
1675
1669 # is the wfile new since m1, and match m2?
1676 # is the wfile new since m1, and match m2?
1670 if f not in m1:
1677 if f not in m1:
1671 t1 = self.wread(f)
1678 t1 = self.wread(f)
1672 t2 = self.file(f).read(m2[f])
1679 t2 = self.file(f).read(m2[f])
1673 if cmp(t1, t2) == 0:
1680 if cmp(t1, t2) == 0:
1674 n = m2[f]
1681 n = m2[f]
1675 del t1, t2
1682 del t1, t2
1676
1683
1677 # are files different?
1684 # are files different?
1678 if n != m2[f]:
1685 if n != m2[f]:
1679 a = ma.get(f, nullid)
1686 a = ma.get(f, nullid)
1680 # are both different from the ancestor?
1687 # are both different from the ancestor?
1681 if n != a and m2[f] != a:
1688 if n != a and m2[f] != a:
1682 self.ui.debug(" %s versions differ, resolve\n" % f)
1689 self.ui.debug(" %s versions differ, resolve\n" % f)
1683 # merge executable bits
1690 # merge executable bits
1684 # "if we changed or they changed, change in merge"
1691 # "if we changed or they changed, change in merge"
1685 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1692 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1686 mode = ((a^b) | (a^c)) ^ a
1693 mode = ((a^b) | (a^c)) ^ a
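# Editorial note: sanity-checking the bit merge above.  With ancestor bit a,
# working-dir bit b and remote bit c, ((a^b) | (a^c)) ^ a flips a exactly
# when at least one side changed it, e.g.:
#   a=0 b=0 c=1  ->  ((0^0)|(0^1))^0 = 1   (they set +x, keep it)
#   a=1 b=0 c=1  ->  ((1^0)|(1^1))^1 = 0   (we cleared +x, that wins)
#   a=1 b=1 c=1  ->  ((1^1)|(1^1))^1 = 1   (nobody changed it)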
1687 merge[f] = (m1.get(f, nullid), m2[f], mode)
1694 merge[f] = (m1.get(f, nullid), m2[f], mode)
1688 s = 1
1695 s = 1
1689 # are we clobbering?
1696 # are we clobbering?
1690 # is remote's version newer?
1697 # is remote's version newer?
1691 # or are we going back in time?
1698 # or are we going back in time?
1692 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1699 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1693 self.ui.debug(" remote %s is newer, get\n" % f)
1700 self.ui.debug(" remote %s is newer, get\n" % f)
1694 get[f] = m2[f]
1701 get[f] = m2[f]
1695 s = 1
1702 s = 1
1696 elif f in umap:
1703 elif f in umap:
1697 # this unknown file is the same as the checkout
1704 # this unknown file is the same as the checkout
1698 get[f] = m2[f]
1705 get[f] = m2[f]
1699
1706
1700 if not s and mfw[f] != mf2[f]:
1707 if not s and mfw[f] != mf2[f]:
1701 if force:
1708 if force:
1702 self.ui.debug(" updating permissions for %s\n" % f)
1709 self.ui.debug(" updating permissions for %s\n" % f)
1703 util.set_exec(self.wjoin(f), mf2[f])
1710 util.set_exec(self.wjoin(f), mf2[f])
1704 else:
1711 else:
1705 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1712 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1706 mode = ((a^b) | (a^c)) ^ a
1713 mode = ((a^b) | (a^c)) ^ a
1707 if mode != b:
1714 if mode != b:
1708 self.ui.debug(" updating permissions for %s\n" % f)
1715 self.ui.debug(" updating permissions for %s\n" % f)
1709 util.set_exec(self.wjoin(f), mode)
1716 util.set_exec(self.wjoin(f), mode)
1710 del m2[f]
1717 del m2[f]
1711 elif f in ma:
1718 elif f in ma:
1712 if n != ma[f]:
1719 if n != ma[f]:
1713 r = "d"
1720 r = "d"
1714 if not force and (linear_path or allow):
1721 if not force and (linear_path or allow):
1715 r = self.ui.prompt(
1722 r = self.ui.prompt(
1716 (" local changed %s which remote deleted\n" % f) +
1723 (" local changed %s which remote deleted\n" % f) +
1717 "(k)eep or (d)elete?", "[kd]", "k")
1724 "(k)eep or (d)elete?", "[kd]", "k")
1718 if r == "d":
1725 if r == "d":
1719 remove.append(f)
1726 remove.append(f)
1720 else:
1727 else:
1721 self.ui.debug("other deleted %s\n" % f)
1728 self.ui.debug("other deleted %s\n" % f)
1722 remove.append(f) # other deleted it
1729 remove.append(f) # other deleted it
1723 else:
1730 else:
1724 if n == m1.get(f, nullid): # same as parent
1731 if n == m1.get(f, nullid): # same as parent
1725 if p2 == pa: # going backwards?
1732 if p2 == pa: # going backwards?
1726 self.ui.debug("remote deleted %s\n" % f)
1733 self.ui.debug("remote deleted %s\n" % f)
1727 remove.append(f)
1734 remove.append(f)
1728 else:
1735 else:
1729 self.ui.debug("local created %s, keeping\n" % f)
1736 self.ui.debug("local created %s, keeping\n" % f)
1730 else:
1737 else:
1731 self.ui.debug("working dir created %s, keeping\n" % f)
1738 self.ui.debug("working dir created %s, keeping\n" % f)
1732
1739
1733 for f, n in m2.iteritems():
1740 for f, n in m2.iteritems():
1734 if choose and not choose(f): continue
1741 if choose and not choose(f): continue
1735 if f[0] == "/": continue
1742 if f[0] == "/": continue
1736 if f in ma and n != ma[f]:
1743 if f in ma and n != ma[f]:
1737 r = "k"
1744 r = "k"
1738 if not force and (linear_path or allow):
1745 if not force and (linear_path or allow):
1739 r = self.ui.prompt(
1746 r = self.ui.prompt(
1740 ("remote changed %s which local deleted\n" % f) +
1747 ("remote changed %s which local deleted\n" % f) +
1741 "(k)eep or (d)elete?", "[kd]", "k")
1748 "(k)eep or (d)elete?", "[kd]", "k")
1742 if r == "k": get[f] = n
1749 if r == "k": get[f] = n
1743 elif f not in ma:
1750 elif f not in ma:
1744 self.ui.debug("remote created %s\n" % f)
1751 self.ui.debug("remote created %s\n" % f)
1745 get[f] = n
1752 get[f] = n
1746 else:
1753 else:
1747 if force or p2 == pa: # going backwards?
1754 if force or p2 == pa: # going backwards?
1748 self.ui.debug("local deleted %s, recreating\n" % f)
1755 self.ui.debug("local deleted %s, recreating\n" % f)
1749 get[f] = n
1756 get[f] = n
1750 else:
1757 else:
1751 self.ui.debug("local deleted %s\n" % f)
1758 self.ui.debug("local deleted %s\n" % f)
1752
1759
1753 del mw, m1, m2, ma
1760 del mw, m1, m2, ma
1754
1761
1755 if force:
1762 if force:
1756 for f in merge:
1763 for f in merge:
1757 get[f] = merge[f][1]
1764 get[f] = merge[f][1]
1758 merge = {}
1765 merge = {}
1759
1766
1760 if linear_path or force:
1767 if linear_path or force:
1761 # we don't need to do any magic, just jump to the new rev
1768 # we don't need to do any magic, just jump to the new rev
1762 branch_merge = False
1769 branch_merge = False
1763 p1, p2 = p2, nullid
1770 p1, p2 = p2, nullid
1764 else:
1771 else:
1765 if not allow:
1772 if not allow:
1766 self.ui.status("this update spans a branch" +
1773 self.ui.status("this update spans a branch" +
1767 " affecting the following files:\n")
1774 " affecting the following files:\n")
1768 fl = merge.keys() + get.keys()
1775 fl = merge.keys() + get.keys()
1769 fl.sort()
1776 fl.sort()
1770 for f in fl:
1777 for f in fl:
1771 cf = ""
1778 cf = ""
1772 if f in merge: cf = " (resolve)"
1779 if f in merge: cf = " (resolve)"
1773 self.ui.status(" %s%s\n" % (f, cf))
1780 self.ui.status(" %s%s\n" % (f, cf))
1774 self.ui.warn("aborting update spanning branches!\n")
1781 self.ui.warn("aborting update spanning branches!\n")
1775 self.ui.status("(use update -m to merge across branches" +
1782 self.ui.status("(use update -m to merge across branches" +
1776 " or -C to lose changes)\n")
1783 " or -C to lose changes)\n")
1777 return 1
1784 return 1
1778 branch_merge = True
1785 branch_merge = True
1779
1786
1780 if moddirstate:
1787 if moddirstate:
1781 self.dirstate.setparents(p1, p2)
1788 self.dirstate.setparents(p1, p2)
1782
1789
1783 # get the files we don't need to change
1790 # get the files we don't need to change
1784 files = get.keys()
1791 files = get.keys()
1785 files.sort()
1792 files.sort()
1786 for f in files:
1793 for f in files:
1787 if f[0] == "/": continue
1794 if f[0] == "/": continue
1788 self.ui.note("getting %s\n" % f)
1795 self.ui.note("getting %s\n" % f)
1789 t = self.file(f).read(get[f])
1796 t = self.file(f).read(get[f])
1790 try:
1797 try:
1791 self.wwrite(f, t)
1798 self.wwrite(f, t)
1792 except IOError:
1799 except IOError:
1793 os.makedirs(os.path.dirname(self.wjoin(f)))
1800 os.makedirs(os.path.dirname(self.wjoin(f)))
1794 self.wwrite(f, t)
1801 self.wwrite(f, t)
1795 util.set_exec(self.wjoin(f), mf2[f])
1802 util.set_exec(self.wjoin(f), mf2[f])
1796 if moddirstate:
1803 if moddirstate:
1797 if branch_merge:
1804 if branch_merge:
1798 self.dirstate.update([f], 'n', st_mtime=-1)
1805 self.dirstate.update([f], 'n', st_mtime=-1)
1799 else:
1806 else:
1800 self.dirstate.update([f], 'n')
1807 self.dirstate.update([f], 'n')
1801
1808
1802 # merge the tricky bits
1809 # merge the tricky bits
1803 files = merge.keys()
1810 files = merge.keys()
1804 files.sort()
1811 files.sort()
1805 for f in files:
1812 for f in files:
1806 self.ui.status("merging %s\n" % f)
1813 self.ui.status("merging %s\n" % f)
1807 my, other, flag = merge[f]
1814 my, other, flag = merge[f]
1808 self.merge3(f, my, other)
1815 self.merge3(f, my, other)
1809 util.set_exec(self.wjoin(f), flag)
1816 util.set_exec(self.wjoin(f), flag)
1810 if moddirstate:
1817 if moddirstate:
1811 if branch_merge:
1818 if branch_merge:
1812 # We've done a branch merge, mark this file as merged
1819 # We've done a branch merge, mark this file as merged
1813 # so that we properly record the merger later
1820 # so that we properly record the merger later
1814 self.dirstate.update([f], 'm')
1821 self.dirstate.update([f], 'm')
1815 else:
1822 else:
1816 # We've update-merged a locally modified file, so
1823 # We've update-merged a locally modified file, so
1817 # we set the dirstate to emulate a normal checkout
1824 # we set the dirstate to emulate a normal checkout
1818 # of that file some time in the past. Thus our
1825 # of that file some time in the past. Thus our
1819 # merge will appear as a normal local file
1826 # merge will appear as a normal local file
1820 # modification.
1827 # modification.
1821 f_len = len(self.file(f).read(other))
1828 f_len = len(self.file(f).read(other))
1822 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1829 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1823
1830
1824 remove.sort()
1831 remove.sort()
1825 for f in remove:
1832 for f in remove:
1826 self.ui.note("removing %s\n" % f)
1833 self.ui.note("removing %s\n" % f)
1827 try:
1834 try:
1828 os.unlink(self.wjoin(f))
1835 os.unlink(self.wjoin(f))
1829 except OSError, inst:
1836 except OSError, inst:
1830 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1837 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1831 # try removing directories that might now be empty
1838 # try removing directories that might now be empty
1832 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1839 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1833 except OSError: pass
1840 except OSError: pass
1834 if moddirstate:
1841 if moddirstate:
1835 if branch_merge:
1842 if branch_merge:
1836 self.dirstate.update(remove, 'r')
1843 self.dirstate.update(remove, 'r')
1837 else:
1844 else:
1838 self.dirstate.forget(remove)
1845 self.dirstate.forget(remove)
1839
1846
1840 def merge3(self, fn, my, other):
1847 def merge3(self, fn, my, other):
1841 """perform a 3-way merge in the working directory"""
1848 """perform a 3-way merge in the working directory"""
1842
1849
1843 def temp(prefix, node):
1850 def temp(prefix, node):
1844 pre = "%s~%s." % (os.path.basename(fn), prefix)
1851 pre = "%s~%s." % (os.path.basename(fn), prefix)
1845 (fd, name) = tempfile.mkstemp("", pre)
1852 (fd, name) = tempfile.mkstemp("", pre)
1846 f = os.fdopen(fd, "wb")
1853 f = os.fdopen(fd, "wb")
1847 self.wwrite(fn, fl.read(node), f)
1854 self.wwrite(fn, fl.read(node), f)
1848 f.close()
1855 f.close()
1849 return name
1856 return name
1850
1857
1851 fl = self.file(fn)
1858 fl = self.file(fn)
1852 base = fl.ancestor(my, other)
1859 base = fl.ancestor(my, other)
1853 a = self.wjoin(fn)
1860 a = self.wjoin(fn)
1854 b = temp("base", base)
1861 b = temp("base", base)
1855 c = temp("other", other)
1862 c = temp("other", other)
1856
1863
1857 self.ui.note("resolving %s\n" % fn)
1864 self.ui.note("resolving %s\n" % fn)
1858 self.ui.debug("file %s: other %s ancestor %s\n" %
1865 self.ui.debug("file %s: other %s ancestor %s\n" %
1859 (fn, short(other), short(base)))
1866 (fn, short(other), short(base)))
1860
1867
1861 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1868 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1862 or "hgmerge")
1869 or "hgmerge")
1863 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1870 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1864 if r:
1871 if r:
1865 self.ui.warn("merging %s failed!\n" % fn)
1872 self.ui.warn("merging %s failed!\n" % fn)
1866
1873
1867 os.unlink(b)
1874 os.unlink(b)
1868 os.unlink(c)
1875 os.unlink(c)
1869
1876
1870 def verify(self):
1877 def verify(self):
1871 filelinkrevs = {}
1878 filelinkrevs = {}
1872 filenodes = {}
1879 filenodes = {}
1873 changesets = revisions = files = 0
1880 changesets = revisions = files = 0
1874 errors = 0
1881 errors = 0
1875
1882
1876 seen = {}
1883 seen = {}
1877 self.ui.status("checking changesets\n")
1884 self.ui.status("checking changesets\n")
1878 for i in range(self.changelog.count()):
1885 for i in range(self.changelog.count()):
1879 changesets += 1
1886 changesets += 1
1880 n = self.changelog.node(i)
1887 n = self.changelog.node(i)
1881 if n in seen:
1888 if n in seen:
1882 self.ui.warn("duplicate changeset at revision %d\n" % i)
1889 self.ui.warn("duplicate changeset at revision %d\n" % i)
1883 errors += 1
1890 errors += 1
1884 seen[n] = 1
1891 seen[n] = 1
1885
1892
1886 for p in self.changelog.parents(n):
1893 for p in self.changelog.parents(n):
1887 if p not in self.changelog.nodemap:
1894 if p not in self.changelog.nodemap:
1888 self.ui.warn("changeset %s has unknown parent %s\n" %
1895 self.ui.warn("changeset %s has unknown parent %s\n" %
1889 (short(n), short(p)))
1896 (short(n), short(p)))
1890 errors += 1
1897 errors += 1
1891 try:
1898 try:
1892 changes = self.changelog.read(n)
1899 changes = self.changelog.read(n)
1893 except Exception, inst:
1900 except Exception, inst:
1894 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1901 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1895 errors += 1
1902 errors += 1
1896
1903
1897 for f in changes[3]:
1904 for f in changes[3]:
1898 filelinkrevs.setdefault(f, []).append(i)
1905 filelinkrevs.setdefault(f, []).append(i)
1899
1906
1900 seen = {}
1907 seen = {}
1901 self.ui.status("checking manifests\n")
1908 self.ui.status("checking manifests\n")
1902 for i in range(self.manifest.count()):
1909 for i in range(self.manifest.count()):
1903 n = self.manifest.node(i)
1910 n = self.manifest.node(i)
1904 if n in seen:
1911 if n in seen:
1905 self.ui.warn("duplicate manifest at revision %d\n" % i)
1912 self.ui.warn("duplicate manifest at revision %d\n" % i)
1906 errors += 1
1913 errors += 1
1907 seen[n] = 1
1914 seen[n] = 1
1908
1915
1909 for p in self.manifest.parents(n):
1916 for p in self.manifest.parents(n):
1910 if p not in self.manifest.nodemap:
1917 if p not in self.manifest.nodemap:
1911 self.ui.warn("manifest %s has unknown parent %s\n" %
1918 self.ui.warn("manifest %s has unknown parent %s\n" %
1912 (short(n), short(p)))
1919 (short(n), short(p)))
1913 errors += 1
1920 errors += 1
1914
1921
1915 try:
1922 try:
1916 delta = mdiff.patchtext(self.manifest.delta(n))
1923 delta = mdiff.patchtext(self.manifest.delta(n))
1917 except KeyboardInterrupt:
1924 except KeyboardInterrupt:
1918 self.ui.warn("aborted")
1925 self.ui.warn("aborted")
1919 sys.exit(0)
1926 sys.exit(0)
1920 except Exception, inst:
1927 except Exception, inst:
1921 self.ui.warn("unpacking manifest %s: %s\n"
1928 self.ui.warn("unpacking manifest %s: %s\n"
1922 % (short(n), inst))
1929 % (short(n), inst))
1923 errors += 1
1930 errors += 1
1924
1931
1925 ff = [ l.split('\0') for l in delta.splitlines() ]
1932 ff = [ l.split('\0') for l in delta.splitlines() ]
1926 for f, fn in ff:
1933 for f, fn in ff:
1927 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1934 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1928
1935
1929 self.ui.status("crosschecking files in changesets and manifests\n")
1936 self.ui.status("crosschecking files in changesets and manifests\n")
1930 for f in filenodes:
1937 for f in filenodes:
1931 if f not in filelinkrevs:
1938 if f not in filelinkrevs:
1932 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1939 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1933 errors += 1
1940 errors += 1
1934
1941
1935 for f in filelinkrevs:
1942 for f in filelinkrevs:
1936 if f not in filenodes:
1943 if f not in filenodes:
1937 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1944 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1938 errors += 1
1945 errors += 1
1939
1946
1940 self.ui.status("checking files\n")
1947 self.ui.status("checking files\n")
1941 ff = filenodes.keys()
1948 ff = filenodes.keys()
1942 ff.sort()
1949 ff.sort()
1943 for f in ff:
1950 for f in ff:
1944 if f == "/dev/null": continue
1951 if f == "/dev/null": continue
1945 files += 1
1952 files += 1
1946 fl = self.file(f)
1953 fl = self.file(f)
1947 nodes = { nullid: 1 }
1954 nodes = { nullid: 1 }
1948 seen = {}
1955 seen = {}
1949 for i in range(fl.count()):
1956 for i in range(fl.count()):
1950 revisions += 1
1957 revisions += 1
1951 n = fl.node(i)
1958 n = fl.node(i)
1952
1959
1953 if n in seen:
1960 if n in seen:
1954 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1961 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1955 errors += 1
1962 errors += 1
1956
1963
1957 if n not in filenodes[f]:
1964 if n not in filenodes[f]:
1958 self.ui.warn("%s: %d:%s not in manifests\n"
1965 self.ui.warn("%s: %d:%s not in manifests\n"
1959 % (f, i, short(n)))
1966 % (f, i, short(n)))
1960 errors += 1
1967 errors += 1
1961 else:
1968 else:
1962 del filenodes[f][n]
1969 del filenodes[f][n]
1963
1970
1964 flr = fl.linkrev(n)
1971 flr = fl.linkrev(n)
1965 if flr not in filelinkrevs[f]:
1972 if flr not in filelinkrevs[f]:
1966 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1973 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1967 % (f, short(n), fl.linkrev(n)))
1974 % (f, short(n), fl.linkrev(n)))
1968 errors += 1
1975 errors += 1
1969 else:
1976 else:
1970 filelinkrevs[f].remove(flr)
1977 filelinkrevs[f].remove(flr)
1971
1978
1972 # verify contents
1979 # verify contents
1973 try:
1980 try:
1974 t = fl.read(n)
1981 t = fl.read(n)
1975 except Exception, inst:
1982 except Exception, inst:
1976 self.ui.warn("unpacking file %s %s: %s\n"
1983 self.ui.warn("unpacking file %s %s: %s\n"
1977 % (f, short(n), inst))
1984 % (f, short(n), inst))
1978 errors += 1
1985 errors += 1
1979
1986
1980 # verify parents
1987 # verify parents
1981 (p1, p2) = fl.parents(n)
1988 (p1, p2) = fl.parents(n)
1982 if p1 not in nodes:
1989 if p1 not in nodes:
1983 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1990 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1984 (f, short(n), short(p1)))
1991 (f, short(n), short(p1)))
1985 errors += 1
1992 errors += 1
1986 if p2 not in nodes:
1993 if p2 not in nodes:
1987 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1994 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1988 (f, short(n), short(p2)))
1995 (f, short(n), short(p2)))
1989 errors += 1
1996 errors += 1
1990 nodes[n] = 1
1997 nodes[n] = 1
1991
1998
1992 # cross-check
1999 # cross-check
1993 for node in filenodes[f]:
2000 for node in filenodes[f]:
1994 self.ui.warn("node %s in manifests not in %s\n"
2001 self.ui.warn("node %s in manifests not in %s\n"
1995 % (hex(node), f))
2002 % (hex(node), f))
1996 errors += 1
2003 errors += 1
1997
2004
1998 self.ui.status("%d files, %d changesets, %d total revisions\n" %
2005 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1999 (files, changesets, revisions))
2006 (files, changesets, revisions))
2000
2007
2001 if errors:
2008 if errors:
2002 self.ui.warn("%d integrity errors encountered!\n" % errors)
2009 self.ui.warn("%d integrity errors encountered!\n" % errors)
2003 return 1
2010 return 1
2004
2011
2005 class remoterepository:
2012 class remoterepository:
2006 def local(self):
2013 def local(self):
2007 return False
2014 return False
2008
2015
2009 class httprepository(remoterepository):
2016 class httprepository(remoterepository):
2010 def __init__(self, ui, path):
2017 def __init__(self, ui, path):
2011 # fix missing / after hostname
2018 # fix missing / after hostname
2012 s = urlparse.urlsplit(path)
2019 s = urlparse.urlsplit(path)
2013 partial = s[2]
2020 partial = s[2]
2014 if not partial: partial = "/"
2021 if not partial: partial = "/"
2015 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
2022 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
2016 self.ui = ui
2023 self.ui = ui
2017 no_list = [ "localhost", "127.0.0.1" ]
2024 no_list = [ "localhost", "127.0.0.1" ]
2018 host = ui.config("http_proxy", "host")
2025 host = ui.config("http_proxy", "host")
2019 if host is None:
2026 if host is None:
2020 host = os.environ.get("http_proxy")
2027 host = os.environ.get("http_proxy")
2021 if host and host.startswith('http://'):
2028 if host and host.startswith('http://'):
2022 host = host[7:]
2029 host = host[7:]
2023 user = ui.config("http_proxy", "user")
2030 user = ui.config("http_proxy", "user")
2024 passwd = ui.config("http_proxy", "passwd")
2031 passwd = ui.config("http_proxy", "passwd")
2025 no = ui.config("http_proxy", "no")
2032 no = ui.config("http_proxy", "no")
2026 if no is None:
2033 if no is None:
2027 no = os.environ.get("no_proxy")
2034 no = os.environ.get("no_proxy")
2028 if no:
2035 if no:
2029 no_list = no_list + no.split(",")
2036 no_list = no_list + no.split(",")
2030
2037
2031 no_proxy = 0
2038 no_proxy = 0
2032 for h in no_list:
2039 for h in no_list:
2033 if (path.startswith("http://" + h + "/") or
2040 if (path.startswith("http://" + h + "/") or
2034 path.startswith("http://" + h + ":") or
2041 path.startswith("http://" + h + ":") or
2035 path == "http://" + h):
2042 path == "http://" + h):
2036 no_proxy = 1
2043 no_proxy = 1
2037
2044
2038 # Note: urllib2 takes proxy values from the environment and those will
2045 # Note: urllib2 takes proxy values from the environment and those will
2039 # take precedence
2046 # take precedence
2040 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2047 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2041 try:
2048 try:
2042 if os.environ.has_key(env):
2049 if os.environ.has_key(env):
2043 del os.environ[env]
2050 del os.environ[env]
2044 except OSError:
2051 except OSError:
2045 pass
2052 pass
2046
2053
2047 proxy_handler = urllib2.BaseHandler()
2054 proxy_handler = urllib2.BaseHandler()
2048 if host and not no_proxy:
2055 if host and not no_proxy:
2049 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2056 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2050
2057
2051 authinfo = None
2058 authinfo = None
2052 if user and passwd:
2059 if user and passwd:
2053 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2060 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2054 passmgr.add_password(None, host, user, passwd)
2061 passmgr.add_password(None, host, user, passwd)
2055 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2062 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2056
2063
2057 opener = urllib2.build_opener(proxy_handler, authinfo)
2064 opener = urllib2.build_opener(proxy_handler, authinfo)
2058 urllib2.install_opener(opener)
2065 urllib2.install_opener(opener)
2059
2066
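# Editorial note (not part of hg.py): the proxy settings read above come from
# an [http_proxy] hgrc section; an illustrative configuration (all values are
# made up) would look like:
#   [http_proxy]
#   host = proxy.example.com:3128
#   user = alice
#   passwd = secret
#   no = intranet.example.com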
2060 def dev(self):
2067 def dev(self):
2061 return -1
2068 return -1
2062
2069
2063 def do_cmd(self, cmd, **args):
2070 def do_cmd(self, cmd, **args):
2064 self.ui.debug("sending %s command\n" % cmd)
2071 self.ui.debug("sending %s command\n" % cmd)
2065 q = {"cmd": cmd}
2072 q = {"cmd": cmd}
2066 q.update(args)
2073 q.update(args)
2067 qs = urllib.urlencode(q)
2074 qs = urllib.urlencode(q)
2068 cu = "%s?%s" % (self.url, qs)
2075 cu = "%s?%s" % (self.url, qs)
2069 resp = urllib2.urlopen(cu)
2076 resp = urllib2.urlopen(cu)
2070 proto = resp.headers['content-type']
2077 proto = resp.headers['content-type']
2071
2078
2072 # accept old "text/plain" and "application/hg-changegroup" for now
2079 # accept old "text/plain" and "application/hg-changegroup" for now
2073 if not proto.startswith('application/mercurial') and \
2080 if not proto.startswith('application/mercurial') and \
2074 not proto.startswith('text/plain') and \
2081 not proto.startswith('text/plain') and \
2075 not proto.startswith('application/hg-changegroup'):
2082 not proto.startswith('application/hg-changegroup'):
2076 raise RepoError("'%s' does not appear to be an hg repository"
2083 raise RepoError("'%s' does not appear to be an hg repository"
2077 % self.url)
2084 % self.url)
2078
2085
2079 if proto.startswith('application/mercurial'):
2086 if proto.startswith('application/mercurial'):
2080 version = proto[22:]
2087 version = proto[22:]
2081 if float(version) > 0.1:
2088 if float(version) > 0.1:
2082 raise RepoError("'%s' uses newer protocol %s" %
2089 raise RepoError("'%s' uses newer protocol %s" %
2083 (self.url, version))
2090 (self.url, version))
2084
2091
2085 return resp
2092 return resp
2086
2093
    def heads(self):
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.do_cmd("branches", nodes=n).read()
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=n).read()
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0

        # minimal streaming wrapper: the changegroup arrives zlib-compressed,
        # so inflate it incrementally as the caller reads
        class zread:
            def __init__(self, f):
                self.zd = zlib.decompressobj()
                self.f = f
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    r = self.f.read(4096)
                    if r:
                        self.buf += self.zd.decompress(r)
                    else:
                        self.buf += self.zd.flush()
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        return zread(f)
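
# Illustrative sketch only (hypothetical helper, not part of the original
# module): a minimal read-only walk over the HTTP protocol implemented above,
# assuming `ui` is a mercurial ui object and `url` points at a served repo.
def _example_http_walkthrough(ui, url):
    """Fetch the remote heads, then the branch information for them."""
    repo = httprepository(ui, url)
    heads = repo.heads()
    return repo.branches(heads)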

class remotelock:
    def __init__(self, repo):
        self.repo = repo
    def release(self):
        self.repo.unlock()
        self.repo = None
    def __del__(self):
        if self.repo:
            self.release()
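
# Illustrative sketch only: remotelock ties a lock taken on the remote side
# to the lifetime of a Python object, so __del__ drops it even if the caller
# forgets.  A careful caller still releases it explicitly; this hypothetical
# helper shows the intended pattern:
def _example_locked_push(remote, cg):
    """Hold the remote lock for the duration of a push of changegroup cg."""
    l = remote.lock()
    try:
        return remote.addchangegroup(cg)
    finally:
        l.release()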

class sshrepository(remoterepository):
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

        m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
        if not m:
            raise RepoError("couldn't parse destination %s" % path)

        self.user = m.group(2)
        self.host = m.group(3)
        self.port = m.group(5)
        self.path = m.group(7)

        args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
        args = self.port and ("%s -p %s") % (args, self.port) or args
        path = self.path or ""

        if not path:
            raise RepoError("no remote repository path specified")

        sshcmd = self.ui.config("ui", "ssh", "ssh")
        remotecmd = self.ui.config("ui", "remotecmd", "hg")
        cmd = "%s %s '%s -R %s serve --stdio'"
        cmd = cmd % (sshcmd, args, remotecmd, path)

        self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
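
    # For an illustrative URL such as ssh://user@host:2222/path/to/repo, the
    # command assembled above comes out roughly as
    #
    #     ssh user@host -p 2222 'hg -R path/to/repo serve --stdio'
    #
    # with both the ssh binary and the remote hg command overridable through
    # the ui.ssh and ui.remotecmd settings read just before.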
    def readerr(self):
        # drain anything the remote side wrote to stderr, without blocking
        while 1:
            r, w, x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        try:
            self.pipeo.close()
            self.pipei.close()
            for l in self.pipee:
                self.ui.status("remote: ", l)
            self.pipee.close()
        except:
            pass

    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei
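
    # Framing over the ssh pipes, as implemented by do_cmd and call below:
    # the command name goes out on its own line, then one "name length\n"
    # header per argument followed by exactly that many raw bytes; replies
    # read via call() are a decimal length on one line followed by that many
    # bytes of payload.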
    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)

    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""
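
    # Push protocol sketch: "addchangegroup" is sent first and an empty reply
    # means the server accepted (anything else is the refusal message); the
    # changegroup is then streamed in 4k chunks with stderr drained as we go,
    # and the final length-prefixed reply reports the remote outcome.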

class httpsrepository(httprepository):
    pass
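    # nothing to override here: urllib2 dispatches on the URL scheme, so the
    # https case presumably works wherever Python was built with SSL support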

def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("https://"):
            return httpsrepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)
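
# Illustrative sketch only (hypothetical helper, not part of the original
# module): repository() picks the repository class from the URL scheme, so a
# caller such as a pull command never constructs the classes directly.
def _example_pull(ui, localpath, remoteurl):
    """Open a local repo and pull from whichever remote type the URL names."""
    local = repository(ui, localpath)
    remote = repository(ui, remoteurl)
    return local.pull(remote)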