Fix up some bugs introduced by recent merge changes...
mpm@selenic.com
r992:f859e9cb default
@@ -1,2263 +1,2278 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect errno select stat")
14 demandload(globals(), "bisect errno select stat")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 if not date:
283 if not date:
284 if time.daylight: offset = time.altzone
284 if time.daylight: offset = time.altzone
285 else: offset = time.timezone
285 else: offset = time.timezone
286 date = "%d %d" % (time.time(), offset)
286 date = "%d %d" % (time.time(), offset)
287 list.sort()
287 list.sort()
288 l = [hex(manifest), user, date] + list + ["", desc]
288 l = [hex(manifest), user, date] + list + ["", desc]
289 text = "\n".join(l)
289 text = "\n".join(l)
290 return self.addrevision(text, transaction, self.count(), p1, p2)
290 return self.addrevision(text, transaction, self.count(), p1, p2)
291
291
292 class dirstate:
292 class dirstate:
293 def __init__(self, opener, ui, root):
293 def __init__(self, opener, ui, root):
294 self.opener = opener
294 self.opener = opener
295 self.root = root
295 self.root = root
296 self.dirty = 0
296 self.dirty = 0
297 self.ui = ui
297 self.ui = ui
298 self.map = None
298 self.map = None
299 self.pl = None
299 self.pl = None
300 self.copies = {}
300 self.copies = {}
301 self.ignorefunc = None
301 self.ignorefunc = None
302
302
303 def wjoin(self, f):
303 def wjoin(self, f):
304 return os.path.join(self.root, f)
304 return os.path.join(self.root, f)
305
305
306 def getcwd(self):
306 def getcwd(self):
307 cwd = os.getcwd()
307 cwd = os.getcwd()
308 if cwd == self.root: return ''
308 if cwd == self.root: return ''
309 return cwd[len(self.root) + 1:]
309 return cwd[len(self.root) + 1:]
310
310
311 def ignore(self, f):
311 def ignore(self, f):
312 if not self.ignorefunc:
312 if not self.ignorefunc:
313 bigpat = []
313 bigpat = []
314 try:
314 try:
315 l = file(self.wjoin(".hgignore"))
315 l = file(self.wjoin(".hgignore"))
316 for pat in l:
316 for pat in l:
317 p = pat.rstrip()
317 p = pat.rstrip()
318 if p:
318 if p:
319 try:
319 try:
320 re.compile(p)
320 re.compile(p)
321 except:
321 except:
322 self.ui.warn("ignoring invalid ignore"
322 self.ui.warn("ignoring invalid ignore"
323 + " regular expression '%s'\n" % p)
323 + " regular expression '%s'\n" % p)
324 else:
324 else:
325 bigpat.append(p)
325 bigpat.append(p)
326 except IOError: pass
326 except IOError: pass
327
327
328 if bigpat:
328 if bigpat:
329 s = "(?:%s)" % (")|(?:".join(bigpat))
329 s = "(?:%s)" % (")|(?:".join(bigpat))
330 r = re.compile(s)
330 r = re.compile(s)
331 self.ignorefunc = r.search
331 self.ignorefunc = r.search
332 else:
332 else:
333 self.ignorefunc = util.never
333 self.ignorefunc = util.never
334
334
335 return self.ignorefunc(f)
335 return self.ignorefunc(f)
336
336
337 def __del__(self):
337 def __del__(self):
338 if self.dirty:
338 if self.dirty:
339 self.write()
339 self.write()
340
340
341 def __getitem__(self, key):
341 def __getitem__(self, key):
342 try:
342 try:
343 return self.map[key]
343 return self.map[key]
344 except TypeError:
344 except TypeError:
345 self.read()
345 self.read()
346 return self[key]
346 return self[key]
347
347
348 def __contains__(self, key):
348 def __contains__(self, key):
349 if not self.map: self.read()
349 if not self.map: self.read()
350 return key in self.map
350 return key in self.map
351
351
352 def parents(self):
352 def parents(self):
353 if not self.pl:
353 if not self.pl:
354 self.read()
354 self.read()
355 return self.pl
355 return self.pl
356
356
357 def markdirty(self):
357 def markdirty(self):
358 if not self.dirty:
358 if not self.dirty:
359 self.dirty = 1
359 self.dirty = 1
360
360
361 def setparents(self, p1, p2 = nullid):
361 def setparents(self, p1, p2 = nullid):
362 self.markdirty()
362 self.markdirty()
363 self.pl = p1, p2
363 self.pl = p1, p2
364
364
365 def state(self, key):
365 def state(self, key):
366 try:
366 try:
367 return self[key][0]
367 return self[key][0]
368 except KeyError:
368 except KeyError:
369 return "?"
369 return "?"
370
370
371 def read(self):
371 def read(self):
372 if self.map is not None: return self.map
372 if self.map is not None: return self.map
373
373
374 self.map = {}
374 self.map = {}
375 self.pl = [nullid, nullid]
375 self.pl = [nullid, nullid]
376 try:
376 try:
377 st = self.opener("dirstate").read()
377 st = self.opener("dirstate").read()
378 if not st: return
378 if not st: return
379 except: return
379 except: return
380
380
381 self.pl = [st[:20], st[20: 40]]
381 self.pl = [st[:20], st[20: 40]]
382
382
383 pos = 40
383 pos = 40
384 while pos < len(st):
384 while pos < len(st):
385 e = struct.unpack(">cllll", st[pos:pos+17])
385 e = struct.unpack(">cllll", st[pos:pos+17])
386 l = e[4]
386 l = e[4]
387 pos += 17
387 pos += 17
388 f = st[pos:pos + l]
388 f = st[pos:pos + l]
389 if '\0' in f:
389 if '\0' in f:
390 f, c = f.split('\0')
390 f, c = f.split('\0')
391 self.copies[f] = c
391 self.copies[f] = c
392 self.map[f] = e[:4]
392 self.map[f] = e[:4]
393 pos += l
393 pos += l
394
394
395 def copy(self, source, dest):
395 def copy(self, source, dest):
396 self.read()
396 self.read()
397 self.markdirty()
397 self.markdirty()
398 self.copies[dest] = source
398 self.copies[dest] = source
399
399
400 def copied(self, file):
400 def copied(self, file):
401 return self.copies.get(file, None)
401 return self.copies.get(file, None)
402
402
403 def update(self, files, state, **kw):
403 def update(self, files, state, **kw):
404 ''' current states:
404 ''' current states:
405 n normal
405 n normal
406 m needs merging
406 m needs merging
407 r marked for removal
407 r marked for removal
408 a marked for addition'''
408 a marked for addition'''
409
409
410 if not files: return
410 if not files: return
411 self.read()
411 self.read()
412 self.markdirty()
412 self.markdirty()
413 for f in files:
413 for f in files:
414 if state == "r":
414 if state == "r":
415 self.map[f] = ('r', 0, 0, 0)
415 self.map[f] = ('r', 0, 0, 0)
416 else:
416 else:
417 s = os.stat(os.path.join(self.root, f))
417 s = os.stat(os.path.join(self.root, f))
418 st_size = kw.get('st_size', s.st_size)
418 st_size = kw.get('st_size', s.st_size)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
421
421
422 def forget(self, files):
422 def forget(self, files):
423 if not files: return
423 if not files: return
424 self.read()
424 self.read()
425 self.markdirty()
425 self.markdirty()
426 for f in files:
426 for f in files:
427 try:
427 try:
428 del self.map[f]
428 del self.map[f]
429 except KeyError:
429 except KeyError:
430 self.ui.warn("not in dirstate: %s!\n" % f)
430 self.ui.warn("not in dirstate: %s!\n" % f)
431 pass
431 pass
432
432
433 def clear(self):
433 def clear(self):
434 self.map = {}
434 self.map = {}
435 self.markdirty()
435 self.markdirty()
436
436
437 def write(self):
437 def write(self):
438 st = self.opener("dirstate", "w")
438 st = self.opener("dirstate", "w")
439 st.write("".join(self.pl))
439 st.write("".join(self.pl))
440 for f, e in self.map.items():
440 for f, e in self.map.items():
441 c = self.copied(f)
441 c = self.copied(f)
442 if c:
442 if c:
443 f = f + "\0" + c
443 f = f + "\0" + c
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
445 st.write(e + f)
445 st.write(e + f)
446 self.dirty = 0
446 self.dirty = 0
447
447
448 def filterfiles(self, files):
448 def filterfiles(self, files):
449 ret = {}
449 ret = {}
450 unknown = []
450 unknown = []
451
451
452 for x in files:
452 for x in files:
453 if x is '.':
453 if x is '.':
454 return self.map.copy()
454 return self.map.copy()
455 if x not in self.map:
455 if x not in self.map:
456 unknown.append(x)
456 unknown.append(x)
457 else:
457 else:
458 ret[x] = self.map[x]
458 ret[x] = self.map[x]
459
459
460 if not unknown:
460 if not unknown:
461 return ret
461 return ret
462
462
463 b = self.map.keys()
463 b = self.map.keys()
464 b.sort()
464 b.sort()
465 blen = len(b)
465 blen = len(b)
466
466
467 for x in unknown:
467 for x in unknown:
468 bs = bisect.bisect(b, x)
468 bs = bisect.bisect(b, x)
469 if bs != 0 and b[bs-1] == x:
469 if bs != 0 and b[bs-1] == x:
470 ret[x] = self.map[x]
470 ret[x] = self.map[x]
471 continue
471 continue
472 while bs < blen:
472 while bs < blen:
473 s = b[bs]
473 s = b[bs]
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
475 ret[s] = self.map[s]
475 ret[s] = self.map[s]
476 else:
476 else:
477 break
477 break
478 bs += 1
478 bs += 1
479 return ret
479 return ret
480
480
481 def walk(self, files = None, match = util.always, dc=None):
481 def walk(self, files = None, match = util.always, dc=None):
482 self.read()
482 self.read()
483
483
484 # walk all files by default
484 # walk all files by default
485 if not files:
485 if not files:
486 files = [self.root]
486 files = [self.root]
487 if not dc:
487 if not dc:
488 dc = self.map.copy()
488 dc = self.map.copy()
489 elif not dc:
489 elif not dc:
490 dc = self.filterfiles(files)
490 dc = self.filterfiles(files)
491
491
492 known = {'.hg': 1}
492 known = {'.hg': 1}
493 def seen(fn):
493 def seen(fn):
494 if fn in known: return True
494 if fn in known: return True
495 known[fn] = 1
495 known[fn] = 1
496 def traverse():
496 def traverse():
497 for ff in util.unique(files):
497 for ff in util.unique(files):
498 f = os.path.join(self.root, ff)
498 f = os.path.join(self.root, ff)
499 try:
499 try:
500 st = os.stat(f)
500 st = os.stat(f)
501 except OSError, inst:
501 except OSError, inst:
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
503 util.pathto(self.getcwd(), ff),
503 util.pathto(self.getcwd(), ff),
504 inst.strerror))
504 inst.strerror))
505 continue
505 continue
506 if stat.S_ISDIR(st.st_mode):
506 if stat.S_ISDIR(st.st_mode):
507 for dir, subdirs, fl in os.walk(f):
507 for dir, subdirs, fl in os.walk(f):
508 d = dir[len(self.root) + 1:]
508 d = dir[len(self.root) + 1:]
509 nd = util.normpath(d)
509 nd = util.normpath(d)
510 if nd == '.': nd = ''
510 if nd == '.': nd = ''
511 if seen(nd):
511 if seen(nd):
512 subdirs[:] = []
512 subdirs[:] = []
513 continue
513 continue
514 for sd in subdirs:
514 for sd in subdirs:
515 ds = os.path.join(nd, sd +'/')
515 ds = os.path.join(nd, sd +'/')
516 if self.ignore(ds) or not match(ds):
516 if self.ignore(ds) or not match(ds):
517 subdirs.remove(sd)
517 subdirs.remove(sd)
518 subdirs.sort()
518 subdirs.sort()
519 fl.sort()
519 fl.sort()
520 for fn in fl:
520 for fn in fl:
521 fn = util.pconvert(os.path.join(d, fn))
521 fn = util.pconvert(os.path.join(d, fn))
522 yield 'f', fn
522 yield 'f', fn
523 elif stat.S_ISREG(st.st_mode):
523 elif stat.S_ISREG(st.st_mode):
524 yield 'f', ff
524 yield 'f', ff
525 else:
525 else:
526 kind = 'unknown'
526 kind = 'unknown'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
533 util.pathto(self.getcwd(), ff),
533 util.pathto(self.getcwd(), ff),
534 kind))
534 kind))
535
535
536 ks = dc.keys()
536 ks = dc.keys()
537 ks.sort()
537 ks.sort()
538 for k in ks:
538 for k in ks:
539 yield 'm', k
539 yield 'm', k
540
540
541 # yield only files that match: all in dirstate, others only if
541 # yield only files that match: all in dirstate, others only if
542 # not in .hgignore
542 # not in .hgignore
543
543
544 for src, fn in util.unique(traverse()):
544 for src, fn in util.unique(traverse()):
545 fn = util.normpath(fn)
545 fn = util.normpath(fn)
546 if seen(fn): continue
546 if seen(fn): continue
547 if fn not in dc and self.ignore(fn):
547 if fn not in dc and self.ignore(fn):
548 continue
548 continue
549 if match(fn):
549 if match(fn):
550 yield src, fn
550 yield src, fn
551
551
552 def changes(self, files=None, match=util.always):
552 def changes(self, files=None, match=util.always):
553 self.read()
553 self.read()
554 if not files:
554 if not files:
555 dc = self.map.copy()
555 dc = self.map.copy()
556 else:
556 else:
557 dc = self.filterfiles(files)
557 dc = self.filterfiles(files)
558 lookup, modified, added, unknown = [], [], [], []
558 lookup, modified, added, unknown = [], [], [], []
559 removed, deleted = [], []
559 removed, deleted = [], []
560
560
561 for src, fn in self.walk(files, match, dc=dc):
561 for src, fn in self.walk(files, match, dc=dc):
562 try:
562 try:
563 s = os.stat(os.path.join(self.root, fn))
563 s = os.stat(os.path.join(self.root, fn))
564 except OSError:
564 except OSError:
565 continue
565 continue
566 if not stat.S_ISREG(s.st_mode):
566 if not stat.S_ISREG(s.st_mode):
567 continue
567 continue
568 c = dc.get(fn)
568 c = dc.get(fn)
569 if c:
569 if c:
570 del dc[fn]
570 del dc[fn]
571 if c[0] == 'm':
571 if c[0] == 'm':
572 modified.append(fn)
572 modified.append(fn)
573 elif c[0] == 'a':
573 elif c[0] == 'a':
574 added.append(fn)
574 added.append(fn)
575 elif c[0] == 'r':
575 elif c[0] == 'r':
576 unknown.append(fn)
576 unknown.append(fn)
577 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
577 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
578 modified.append(fn)
578 modified.append(fn)
579 elif c[3] != s.st_mtime:
579 elif c[3] != s.st_mtime:
580 lookup.append(fn)
580 lookup.append(fn)
581 else:
581 else:
582 unknown.append(fn)
582 unknown.append(fn)
583
583
584 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
584 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
585 if c[0] == 'r':
585 if c[0] == 'r':
586 removed.append(fn)
586 removed.append(fn)
587 else:
587 else:
588 deleted.append(fn)
588 deleted.append(fn)
589 return (lookup, modified, added, removed + deleted, unknown)
589 return (lookup, modified, added, removed + deleted, unknown)
590
590
591 # used to avoid circular references so destructors work
591 # used to avoid circular references so destructors work
592 def opener(base):
592 def opener(base):
593 p = base
593 p = base
594 def o(path, mode="r"):
594 def o(path, mode="r"):
595 if p.startswith("http://"):
595 if p.startswith("http://"):
596 f = os.path.join(p, urllib.quote(path))
596 f = os.path.join(p, urllib.quote(path))
597 return httprangereader.httprangereader(f)
597 return httprangereader.httprangereader(f)
598
598
599 f = os.path.join(p, path)
599 f = os.path.join(p, path)
600
600
601 mode += "b" # for that other OS
601 mode += "b" # for that other OS
602
602
603 if mode[0] != "r":
603 if mode[0] != "r":
604 try:
604 try:
605 s = os.stat(f)
605 s = os.stat(f)
606 except OSError:
606 except OSError:
607 d = os.path.dirname(f)
607 d = os.path.dirname(f)
608 if not os.path.isdir(d):
608 if not os.path.isdir(d):
609 os.makedirs(d)
609 os.makedirs(d)
610 else:
610 else:
611 if s.st_nlink > 1:
611 if s.st_nlink > 1:
612 file(f + ".tmp", "wb").write(file(f, "rb").read())
612 file(f + ".tmp", "wb").write(file(f, "rb").read())
613 util.rename(f+".tmp", f)
613 util.rename(f+".tmp", f)
614
614
615 return file(f, mode)
615 return file(f, mode)
616
616
617 return o
617 return o
618
618
619 class RepoError(Exception): pass
619 class RepoError(Exception): pass
620
620
621 class localrepository:
621 class localrepository:
622 def __init__(self, ui, path=None, create=0):
622 def __init__(self, ui, path=None, create=0):
623 self.remote = 0
623 self.remote = 0
624 if path and path.startswith("http://"):
624 if path and path.startswith("http://"):
625 self.remote = 1
625 self.remote = 1
626 self.path = path
626 self.path = path
627 else:
627 else:
628 if not path:
628 if not path:
629 p = os.getcwd()
629 p = os.getcwd()
630 while not os.path.isdir(os.path.join(p, ".hg")):
630 while not os.path.isdir(os.path.join(p, ".hg")):
631 oldp = p
631 oldp = p
632 p = os.path.dirname(p)
632 p = os.path.dirname(p)
633 if p == oldp: raise RepoError("no repo found")
633 if p == oldp: raise RepoError("no repo found")
634 path = p
634 path = p
635 self.path = os.path.join(path, ".hg")
635 self.path = os.path.join(path, ".hg")
636
636
637 if not create and not os.path.isdir(self.path):
637 if not create and not os.path.isdir(self.path):
638 raise RepoError("repository %s not found" % self.path)
638 raise RepoError("repository %s not found" % self.path)
639
639
640 self.root = os.path.abspath(path)
640 self.root = os.path.abspath(path)
641 self.ui = ui
641 self.ui = ui
642
642
643 if create:
643 if create:
644 os.mkdir(self.path)
644 os.mkdir(self.path)
645 os.mkdir(self.join("data"))
645 os.mkdir(self.join("data"))
646
646
647 self.opener = opener(self.path)
647 self.opener = opener(self.path)
648 self.wopener = opener(self.root)
648 self.wopener = opener(self.root)
649 self.manifest = manifest(self.opener)
649 self.manifest = manifest(self.opener)
650 self.changelog = changelog(self.opener)
650 self.changelog = changelog(self.opener)
651 self.tagscache = None
651 self.tagscache = None
652 self.nodetagscache = None
652 self.nodetagscache = None
653
653
654 if not self.remote:
654 if not self.remote:
655 self.dirstate = dirstate(self.opener, ui, self.root)
655 self.dirstate = dirstate(self.opener, ui, self.root)
656 try:
656 try:
657 self.ui.readconfig(self.opener("hgrc"))
657 self.ui.readconfig(self.opener("hgrc"))
658 except IOError: pass
658 except IOError: pass
659
659
660 def hook(self, name, **args):
660 def hook(self, name, **args):
661 s = self.ui.config("hooks", name)
661 s = self.ui.config("hooks", name)
662 if s:
662 if s:
663 self.ui.note("running hook %s: %s\n" % (name, s))
663 self.ui.note("running hook %s: %s\n" % (name, s))
664 old = {}
664 old = {}
665 for k, v in args.items():
665 for k, v in args.items():
666 k = k.upper()
666 k = k.upper()
667 old[k] = os.environ.get(k, None)
667 old[k] = os.environ.get(k, None)
668 os.environ[k] = v
668 os.environ[k] = v
669
669
670 r = os.system(s)
670 r = os.system(s)
671
671
672 for k, v in old.items():
672 for k, v in old.items():
673 if v != None:
673 if v != None:
674 os.environ[k] = v
674 os.environ[k] = v
675 else:
675 else:
676 del os.environ[k]
676 del os.environ[k]
677
677
678 if r:
678 if r:
679 self.ui.warn("abort: %s hook failed with status %d!\n" %
679 self.ui.warn("abort: %s hook failed with status %d!\n" %
680 (name, r))
680 (name, r))
681 return False
681 return False
682 return True
682 return True
683
683
684 def tags(self):
684 def tags(self):
685 '''return a mapping of tag to node'''
685 '''return a mapping of tag to node'''
686 if not self.tagscache:
686 if not self.tagscache:
687 self.tagscache = {}
687 self.tagscache = {}
688 def addtag(self, k, n):
688 def addtag(self, k, n):
689 try:
689 try:
690 bin_n = bin(n)
690 bin_n = bin(n)
691 except TypeError:
691 except TypeError:
692 bin_n = ''
692 bin_n = ''
693 self.tagscache[k.strip()] = bin_n
693 self.tagscache[k.strip()] = bin_n
694
694
695 try:
695 try:
696 # read each head of the tags file, ending with the tip
696 # read each head of the tags file, ending with the tip
697 # and add each tag found to the map, with "newer" ones
697 # and add each tag found to the map, with "newer" ones
698 # taking precedence
698 # taking precedence
699 fl = self.file(".hgtags")
699 fl = self.file(".hgtags")
700 h = fl.heads()
700 h = fl.heads()
701 h.reverse()
701 h.reverse()
702 for r in h:
702 for r in h:
703 for l in fl.revision(r).splitlines():
703 for l in fl.revision(r).splitlines():
704 if l:
704 if l:
705 n, k = l.split(" ", 1)
705 n, k = l.split(" ", 1)
706 addtag(self, k, n)
706 addtag(self, k, n)
707 except KeyError:
707 except KeyError:
708 pass
708 pass
709
709
710 try:
710 try:
711 f = self.opener("localtags")
711 f = self.opener("localtags")
712 for l in f:
712 for l in f:
713 n, k = l.split(" ", 1)
713 n, k = l.split(" ", 1)
714 addtag(self, k, n)
714 addtag(self, k, n)
715 except IOError:
715 except IOError:
716 pass
716 pass
717
717
718 self.tagscache['tip'] = self.changelog.tip()
718 self.tagscache['tip'] = self.changelog.tip()
719
719
720 return self.tagscache
720 return self.tagscache
721
721
722 def tagslist(self):
722 def tagslist(self):
723 '''return a list of tags ordered by revision'''
723 '''return a list of tags ordered by revision'''
724 l = []
724 l = []
725 for t, n in self.tags().items():
725 for t, n in self.tags().items():
726 try:
726 try:
727 r = self.changelog.rev(n)
727 r = self.changelog.rev(n)
728 except:
728 except:
729 r = -2 # sort to the beginning of the list if unknown
729 r = -2 # sort to the beginning of the list if unknown
730 l.append((r,t,n))
730 l.append((r,t,n))
731 l.sort()
731 l.sort()
732 return [(t,n) for r,t,n in l]
732 return [(t,n) for r,t,n in l]
733
733
734 def nodetags(self, node):
734 def nodetags(self, node):
735 '''return the tags associated with a node'''
735 '''return the tags associated with a node'''
736 if not self.nodetagscache:
736 if not self.nodetagscache:
737 self.nodetagscache = {}
737 self.nodetagscache = {}
738 for t,n in self.tags().items():
738 for t,n in self.tags().items():
739 self.nodetagscache.setdefault(n,[]).append(t)
739 self.nodetagscache.setdefault(n,[]).append(t)
740 return self.nodetagscache.get(node, [])
740 return self.nodetagscache.get(node, [])
741
741
742 def lookup(self, key):
742 def lookup(self, key):
743 try:
743 try:
744 return self.tags()[key]
744 return self.tags()[key]
745 except KeyError:
745 except KeyError:
746 try:
746 try:
747 return self.changelog.lookup(key)
747 return self.changelog.lookup(key)
748 except:
748 except:
749 raise RepoError("unknown revision '%s'" % key)
749 raise RepoError("unknown revision '%s'" % key)
750
750
751 def dev(self):
751 def dev(self):
752 if self.remote: return -1
752 if self.remote: return -1
753 return os.stat(self.path).st_dev
753 return os.stat(self.path).st_dev
754
754
755 def local(self):
755 def local(self):
756 return not self.remote
756 return not self.remote
757
757
758 def join(self, f):
758 def join(self, f):
759 return os.path.join(self.path, f)
759 return os.path.join(self.path, f)
760
760
761 def wjoin(self, f):
761 def wjoin(self, f):
762 return os.path.join(self.root, f)
762 return os.path.join(self.root, f)
763
763
764 def file(self, f):
764 def file(self, f):
765 if f[0] == '/': f = f[1:]
765 if f[0] == '/': f = f[1:]
766 return filelog(self.opener, f)
766 return filelog(self.opener, f)
767
767
768 def getcwd(self):
768 def getcwd(self):
769 return self.dirstate.getcwd()
769 return self.dirstate.getcwd()
770
770
771 def wfile(self, f, mode='r'):
771 def wfile(self, f, mode='r'):
772 return self.wopener(f, mode)
772 return self.wopener(f, mode)
773
773
774 def transaction(self):
774 def transaction(self):
775 # save dirstate for undo
775 # save dirstate for undo
776 try:
776 try:
777 ds = self.opener("dirstate").read()
777 ds = self.opener("dirstate").read()
778 except IOError:
778 except IOError:
779 ds = ""
779 ds = ""
780 self.opener("journal.dirstate", "w").write(ds)
780 self.opener("journal.dirstate", "w").write(ds)
781
781
782 def after():
782 def after():
783 util.rename(self.join("journal"), self.join("undo"))
783 util.rename(self.join("journal"), self.join("undo"))
784 util.rename(self.join("journal.dirstate"),
784 util.rename(self.join("journal.dirstate"),
785 self.join("undo.dirstate"))
785 self.join("undo.dirstate"))
786
786
787 return transaction.transaction(self.ui.warn, self.opener,
787 return transaction.transaction(self.ui.warn, self.opener,
788 self.join("journal"), after)
788 self.join("journal"), after)
789
789
790 def recover(self):
790 def recover(self):
791 lock = self.lock()
791 lock = self.lock()
792 if os.path.exists(self.join("journal")):
792 if os.path.exists(self.join("journal")):
793 self.ui.status("rolling back interrupted transaction\n")
793 self.ui.status("rolling back interrupted transaction\n")
794 return transaction.rollback(self.opener, self.join("journal"))
794 return transaction.rollback(self.opener, self.join("journal"))
795 else:
795 else:
796 self.ui.warn("no interrupted transaction available\n")
796 self.ui.warn("no interrupted transaction available\n")
797
797
798 def undo(self):
798 def undo(self):
799 lock = self.lock()
799 lock = self.lock()
800 if os.path.exists(self.join("undo")):
800 if os.path.exists(self.join("undo")):
801 self.ui.status("rolling back last transaction\n")
801 self.ui.status("rolling back last transaction\n")
802 transaction.rollback(self.opener, self.join("undo"))
802 transaction.rollback(self.opener, self.join("undo"))
803 self.dirstate = None
803 self.dirstate = None
804 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
804 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
805 self.dirstate = dirstate(self.opener, self.ui, self.root)
805 self.dirstate = dirstate(self.opener, self.ui, self.root)
806 else:
806 else:
807 self.ui.warn("no undo information available\n")
807 self.ui.warn("no undo information available\n")
808
808
809 def lock(self, wait = 1):
809 def lock(self, wait = 1):
810 try:
810 try:
811 return lock.lock(self.join("lock"), 0)
811 return lock.lock(self.join("lock"), 0)
812 except lock.LockHeld, inst:
812 except lock.LockHeld, inst:
813 if wait:
813 if wait:
814 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
814 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
815 return lock.lock(self.join("lock"), wait)
815 return lock.lock(self.join("lock"), wait)
816 raise inst
816 raise inst
817
817
818 def rawcommit(self, files, text, user, date, p1=None, p2=None):
818 def rawcommit(self, files, text, user, date, p1=None, p2=None):
819 orig_parent = self.dirstate.parents()[0] or nullid
819 orig_parent = self.dirstate.parents()[0] or nullid
820 p1 = p1 or self.dirstate.parents()[0] or nullid
820 p1 = p1 or self.dirstate.parents()[0] or nullid
821 p2 = p2 or self.dirstate.parents()[1] or nullid
821 p2 = p2 or self.dirstate.parents()[1] or nullid
822 c1 = self.changelog.read(p1)
822 c1 = self.changelog.read(p1)
823 c2 = self.changelog.read(p2)
823 c2 = self.changelog.read(p2)
824 m1 = self.manifest.read(c1[0])
824 m1 = self.manifest.read(c1[0])
825 mf1 = self.manifest.readflags(c1[0])
825 mf1 = self.manifest.readflags(c1[0])
826 m2 = self.manifest.read(c2[0])
826 m2 = self.manifest.read(c2[0])
827 changed = []
827
828
828 if orig_parent == p1:
829 if orig_parent == p1:
829 update_dirstate = 1
830 update_dirstate = 1
830 else:
831 else:
831 update_dirstate = 0
832 update_dirstate = 0
832
833
833 tr = self.transaction()
834 tr = self.transaction()
834 mm = m1.copy()
835 mm = m1.copy()
835 mfm = mf1.copy()
836 mfm = mf1.copy()
836 linkrev = self.changelog.count()
837 linkrev = self.changelog.count()
837 for f in files:
838 for f in files:
838 try:
839 try:
839 t = self.wfile(f).read()
840 t = self.wfile(f).read()
840 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
841 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
841 r = self.file(f)
842 r = self.file(f)
842 mfm[f] = tm
843 mfm[f] = tm
843
844
844 fp1 = m1.get(f, nullid)
845 fp1 = m1.get(f, nullid)
845 fp2 = m2.get(f, nullid)
846 fp2 = m2.get(f, nullid)
846
847
847 # is the same revision on two branches of a merge?
848 # is the same revision on two branches of a merge?
848 if fp2 == fp1:
849 if fp2 == fp1:
849 fp2 = nullid
850 fp2 = nullid
850
851
851 if fp2 != nullid:
852 if fp2 != nullid:
852 # is one parent an ancestor of the other?
853 # is one parent an ancestor of the other?
853 fpa = r.ancestor(fp1, fp2)
854 fpa = r.ancestor(fp1, fp2)
854 if fpa == fp1:
855 if fpa == fp1:
855 fp1, fp2 = fp2, nullid
856 fp1, fp2 = fp2, nullid
856 elif fpa == fp2:
857 elif fpa == fp2:
857 fp2 = nullid
858 fp2 = nullid
858
859
859 # is the file unmodified from the parent?
860 # is the file unmodified from the parent?
860 if t == r.read(fp1):
861 if t == r.read(fp1):
861 # record the proper existing parent in manifest
862 # record the proper existing parent in manifest
862 # no need to add a revision
863 # no need to add a revision
863 mm[f] = fp1
864 mm[f] = fp1
864 continue
865 continue
865
866
866 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
867 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
868 changed.append(f)
867 if update_dirstate:
869 if update_dirstate:
868 self.dirstate.update([f], "n")
870 self.dirstate.update([f], "n")
869 except IOError:
871 except IOError:
870 try:
872 try:
871 del mm[f]
873 del mm[f]
872 del mfm[f]
874 del mfm[f]
873 if update_dirstate:
875 if update_dirstate:
874 self.dirstate.forget([f])
876 self.dirstate.forget([f])
875 except:
877 except:
876 # deleted from p2?
878 # deleted from p2?
877 pass
879 pass
878
880
879 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
881 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
880 user = user or self.ui.username()
882 user = user or self.ui.username()
881 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
883 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
882 tr.close()
884 tr.close()
883 if update_dirstate:
885 if update_dirstate:
884 self.dirstate.setparents(n, nullid)
886 self.dirstate.setparents(n, nullid)
885
887
886 def commit(self, files = None, text = "", user = None, date = None,
888 def commit(self, files = None, text = "", user = None, date = None,
887 match = util.always, force=False):
889 match = util.always, force=False):
888 commit = []
890 commit = []
889 remove = []
891 remove = []
892 changed = []
893
890 if files:
894 if files:
891 for f in files:
895 for f in files:
892 s = self.dirstate.state(f)
896 s = self.dirstate.state(f)
893 if s in 'nmai':
897 if s in 'nmai':
894 commit.append(f)
898 commit.append(f)
895 elif s == 'r':
899 elif s == 'r':
896 remove.append(f)
900 remove.append(f)
897 else:
901 else:
898 self.ui.warn("%s not tracked!\n" % f)
902 self.ui.warn("%s not tracked!\n" % f)
899 else:
903 else:
900 (c, a, d, u) = self.changes(match = match)
904 (c, a, d, u) = self.changes(match = match)
901 commit = c + a
905 commit = c + a
902 remove = d
906 remove = d
903
907
904 p1, p2 = self.dirstate.parents()
908 p1, p2 = self.dirstate.parents()
905 c1 = self.changelog.read(p1)
909 c1 = self.changelog.read(p1)
906 c2 = self.changelog.read(p2)
910 c2 = self.changelog.read(p2)
907 m1 = self.manifest.read(c1[0])
911 m1 = self.manifest.read(c1[0])
908 mf1 = self.manifest.readflags(c1[0])
912 mf1 = self.manifest.readflags(c1[0])
909 m2 = self.manifest.read(c2[0])
913 m2 = self.manifest.read(c2[0])
910
914
911 if not commit and not remove and not force and p2 == nullid:
915 if not commit and not remove and not force and p2 == nullid:
912 self.ui.status("nothing changed\n")
916 self.ui.status("nothing changed\n")
913 return None
917 return None
914
918
915 if not self.hook("precommit"):
919 if not self.hook("precommit"):
916 return None
920 return None
917
921
918 lock = self.lock()
922 lock = self.lock()
919 tr = self.transaction()
923 tr = self.transaction()
920
924
921 # check in files
925 # check in files
922 new = {}
926 new = {}
923 linkrev = self.changelog.count()
927 linkrev = self.changelog.count()
924 commit.sort()
928 commit.sort()
925 for f in commit:
929 for f in commit:
926 self.ui.note(f + "\n")
930 self.ui.note(f + "\n")
927 try:
931 try:
928 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
932 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
929 t = self.wfile(f).read()
933 t = self.wfile(f).read()
930 except IOError:
934 except IOError:
931 self.ui.warn("trouble committing %s!\n" % f)
935 self.ui.warn("trouble committing %s!\n" % f)
932 raise
936 raise
933
937
934 meta = {}
938 meta = {}
935 cp = self.dirstate.copied(f)
939 cp = self.dirstate.copied(f)
936 if cp:
940 if cp:
937 meta["copy"] = cp
941 meta["copy"] = cp
938 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
942 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
939 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
943 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
940
944
941 r = self.file(f)
945 r = self.file(f)
942 fp1 = m1.get(f, nullid)
946 fp1 = m1.get(f, nullid)
943 fp2 = m2.get(f, nullid)
947 fp2 = m2.get(f, nullid)
944
948
945 # is the same revision on two branches of a merge?
949 # is the same revision on two branches of a merge?
946 if fp2 == fp1:
950 if fp2 == fp1:
947 fp2 = nullid
951 fp2 = nullid
948
952
949 if fp2 != nullid:
953 if fp2 != nullid:
950 # is one parent an ancestor of the other?
954 # is one parent an ancestor of the other?
951 fpa = r.ancestor(fp1, fp2)
955 fpa = r.ancestor(fp1, fp2)
952 if fpa == fp1:
956 if fpa == fp1:
953 fp1, fp2 = fp2, nullid
957 fp1, fp2 = fp2, nullid
954 elif fpa == fp2:
958 elif fpa == fp2:
955 fp2 = nullid
959 fp2 = nullid
956
960
957 # is the file unmodified from the parent?
961 # is the file unmodified from the parent?
958 if not meta and t == r.read(fp1):
962 if not meta and t == r.read(fp1):
959 # record the proper existing parent in manifest
963 # record the proper existing parent in manifest
960 # no need to add a revision
964 # no need to add a revision
961 new[f] = fp1
965 new[f] = fp1
962 continue
966 continue
963
967
964 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
968 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
969 # remember what we've added so that we can later calculate
970 # the files to pull from a set of changesets
971 changed.append(f)
965
972
966 # update manifest
973 # update manifest
967 m1.update(new)
974 m1.update(new)
968 for f in remove:
975 for f in remove:
969 if f in m1:
976 if f in m1:
970 del m1[f]
977 del m1[f]
971 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
978 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
972 (new, remove))
979 (new, remove))
973
980
974 # add changeset
981 # add changeset
975 new = new.keys()
982 new = new.keys()
976 new.sort()
983 new.sort()
977
984
978 if not text:
985 if not text:
979 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
986 edittext = ""
980 edittext += "".join(["HG: changed %s\n" % f for f in new])
987 if p2 != nullid:
988 edittext += "HG: branch merge\n"
989 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
990 edittext += "".join(["HG: changed %s\n" % f for f in changed])
981 edittext += "".join(["HG: removed %s\n" % f for f in remove])
991 edittext += "".join(["HG: removed %s\n" % f for f in remove])
992 if not changed and not remove:
993 edittext += "HG: no files changed\n"
982 edittext = self.ui.edit(edittext)
994 edittext = self.ui.edit(edittext)
983 if not edittext.rstrip():
995 if not edittext.rstrip():
984 return None
996 return None
985 text = edittext
997 text = edittext
986
998
987 user = user or self.ui.username()
999 user = user or self.ui.username()
988 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
1000 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
989 tr.close()
1001 tr.close()
990
1002
991 self.dirstate.setparents(n)
1003 self.dirstate.setparents(n)
992 self.dirstate.update(new, "n")
1004 self.dirstate.update(new, "n")
993 self.dirstate.forget(remove)
1005 self.dirstate.forget(remove)
994
1006
995 if not self.hook("commit", node=hex(n)):
1007 if not self.hook("commit", node=hex(n)):
996 return None
1008 return None
997 return n
1009 return n
998
1010
999 def walk(self, node = None, files = [], match = util.always):
1011 def walk(self, node = None, files = [], match = util.always):
1000 if node:
1012 if node:
1001 for fn in self.manifest.read(self.changelog.read(node)[0]):
1013 for fn in self.manifest.read(self.changelog.read(node)[0]):
1002 if match(fn): yield 'm', fn
1014 if match(fn): yield 'm', fn
1003 else:
1015 else:
1004 for src, fn in self.dirstate.walk(files, match):
1016 for src, fn in self.dirstate.walk(files, match):
1005 yield src, fn
1017 yield src, fn
1006
1018
1007 def changes(self, node1 = None, node2 = None, files = [],
1019 def changes(self, node1 = None, node2 = None, files = [],
1008 match = util.always):
1020 match = util.always):
1009 mf2, u = None, []
1021 mf2, u = None, []
1010
1022
1011 def fcmp(fn, mf):
1023 def fcmp(fn, mf):
1012 t1 = self.wfile(fn).read()
1024 t1 = self.wfile(fn).read()
1013 t2 = self.file(fn).revision(mf[fn])
1025 t2 = self.file(fn).revision(mf.get(fn, nullid))
1014 return cmp(t1, t2)
1026 return cmp(t1, t2)
1015
1027
1016 def mfmatches(node):
1028 def mfmatches(node):
1017 mf = dict(self.manifest.read(node))
1029 mf = dict(self.manifest.read(node))
1018 for fn in mf.keys():
1030 for fn in mf.keys():
1019 if not match(fn):
1031 if not match(fn):
1020 del mf[fn]
1032 del mf[fn]
1021 return mf
1033 return mf
1022
1034
1023 # are we comparing the working directory?
1035 # are we comparing the working directory?
1024 if not node2:
1036 if not node2:
1025 l, c, a, d, u = self.dirstate.changes(files, match)
1037 l, c, a, d, u = self.dirstate.changes(files, match)
1026
1038
1027 # are we comparing working dir against its parent?
1039 # are we comparing working dir against its parent?
1028 if not node1:
1040 if not node1:
1029 if l:
1041 if l:
1030 # do a full compare of any files that might have changed
1042 # do a full compare of any files that might have changed
1031 change = self.changelog.read(self.dirstate.parents()[0])
1043 change = self.changelog.read(self.dirstate.parents()[0])
1032 mf2 = mfmatches(change[0])
1044 mf2 = mfmatches(change[0])
1033 for f in l:
1045 for f in l:
1034 if fcmp(f, mf2):
1046 if fcmp(f, mf2):
1035 c.append(f)
1047 c.append(f)
1036
1048
1037 for l in c, a, d, u:
1049 for l in c, a, d, u:
1038 l.sort()
1050 l.sort()
1039
1051
1040 return (c, a, d, u)
1052 return (c, a, d, u)
1041
1053
1042 # are we comparing working dir against non-tip?
1054 # are we comparing working dir against non-tip?
1043 # generate a pseudo-manifest for the working dir
1055 # generate a pseudo-manifest for the working dir
1044 if not node2:
1056 if not node2:
1045 if not mf2:
1057 if not mf2:
1046 change = self.changelog.read(self.dirstate.parents()[0])
1058 change = self.changelog.read(self.dirstate.parents()[0])
1047 mf2 = mfmatches(change[0])
1059 mf2 = mfmatches(change[0])
1048 for f in a + c + l:
1060 for f in a + c + l:
1049 mf2[f] = ""
1061 mf2[f] = ""
1050 for f in d:
1062 for f in d:
1051 if f in mf2: del mf2[f]
1063 if f in mf2: del mf2[f]
1052 else:
1064 else:
1053 change = self.changelog.read(node2)
1065 change = self.changelog.read(node2)
1054 mf2 = mfmatches(change[0])
1066 mf2 = mfmatches(change[0])
1055
1067
1056 # flush lists from dirstate before comparing manifests
1068 # flush lists from dirstate before comparing manifests
1057 c, a = [], []
1069 c, a = [], []
1058
1070
1059 change = self.changelog.read(node1)
1071 change = self.changelog.read(node1)
1060 mf1 = mfmatches(change[0])
1072 mf1 = mfmatches(change[0])
1061
1073
1062 for fn in mf2:
1074 for fn in mf2:
1063 if mf1.has_key(fn):
1075 if mf1.has_key(fn):
1064 if mf1[fn] != mf2[fn]:
1076 if mf1[fn] != mf2[fn]:
1065 if mf2[fn] != "" or fcmp(fn, mf1):
1077 if mf2[fn] != "" or fcmp(fn, mf1):
1066 c.append(fn)
1078 c.append(fn)
1067 del mf1[fn]
1079 del mf1[fn]
1068 else:
1080 else:
1069 a.append(fn)
1081 a.append(fn)
1070
1082
1071 d = mf1.keys()
1083 d = mf1.keys()
1072
1084
1073 for l in c, a, d, u:
1085 for l in c, a, d, u:
1074 l.sort()
1086 l.sort()
1075
1087
1076 return (c, a, d, u)
1088 return (c, a, d, u)
1077
1089
1078 def add(self, list):
1090 def add(self, list):
1079 for f in list:
1091 for f in list:
1080 p = self.wjoin(f)
1092 p = self.wjoin(f)
1081 if not os.path.exists(p):
1093 if not os.path.exists(p):
1082 self.ui.warn("%s does not exist!\n" % f)
1094 self.ui.warn("%s does not exist!\n" % f)
1083 elif not os.path.isfile(p):
1095 elif not os.path.isfile(p):
1084 self.ui.warn("%s not added: only files supported currently\n" % f)
1096 self.ui.warn("%s not added: only files supported currently\n" % f)
1085 elif self.dirstate.state(f) in 'an':
1097 elif self.dirstate.state(f) in 'an':
1086 self.ui.warn("%s already tracked!\n" % f)
1098 self.ui.warn("%s already tracked!\n" % f)
1087 else:
1099 else:
1088 self.dirstate.update([f], "a")
1100 self.dirstate.update([f], "a")
1089
1101
1090 def forget(self, list):
1102 def forget(self, list):
1091 for f in list:
1103 for f in list:
1092 if self.dirstate.state(f) not in 'ai':
1104 if self.dirstate.state(f) not in 'ai':
1093 self.ui.warn("%s not added!\n" % f)
1105 self.ui.warn("%s not added!\n" % f)
1094 else:
1106 else:
1095 self.dirstate.forget([f])
1107 self.dirstate.forget([f])
1096
1108
1097 def remove(self, list):
1109 def remove(self, list):
1098 for f in list:
1110 for f in list:
1099 p = self.wjoin(f)
1111 p = self.wjoin(f)
1100 if os.path.exists(p):
1112 if os.path.exists(p):
1101 self.ui.warn("%s still exists!\n" % f)
1113 self.ui.warn("%s still exists!\n" % f)
1102 elif self.dirstate.state(f) == 'a':
1114 elif self.dirstate.state(f) == 'a':
1103 self.ui.warn("%s never committed!\n" % f)
1115 self.ui.warn("%s never committed!\n" % f)
1104 self.dirstate.forget([f])
1116 self.dirstate.forget([f])
1105 elif f not in self.dirstate:
1117 elif f not in self.dirstate:
1106 self.ui.warn("%s not tracked!\n" % f)
1118 self.ui.warn("%s not tracked!\n" % f)
1107 else:
1119 else:
1108 self.dirstate.update([f], "r")
1120 self.dirstate.update([f], "r")
1109
1121
1110 def copy(self, source, dest):
1122 def copy(self, source, dest):
1111 p = self.wjoin(dest)
1123 p = self.wjoin(dest)
1112 if not os.path.exists(p):
1124 if not os.path.exists(p):
1113 self.ui.warn("%s does not exist!\n" % dest)
1125 self.ui.warn("%s does not exist!\n" % dest)
1114 elif not os.path.isfile(p):
1126 elif not os.path.isfile(p):
1115 self.ui.warn("copy failed: %s is not a file\n" % dest)
1127 self.ui.warn("copy failed: %s is not a file\n" % dest)
1116 else:
1128 else:
1117 if self.dirstate.state(dest) == '?':
1129 if self.dirstate.state(dest) == '?':
1118 self.dirstate.update([dest], "a")
1130 self.dirstate.update([dest], "a")
1119 self.dirstate.copy(source, dest)
1131 self.dirstate.copy(source, dest)
1120
1132
1121 def heads(self):
1133 def heads(self):
1122 return self.changelog.heads()
1134 return self.changelog.heads()
1123
1135
1124 # branchlookup returns a dict giving a list of branches for
1136 # branchlookup returns a dict giving a list of branches for
1125 # each head. A branch is defined as the tag of a node or
1137 # each head. A branch is defined as the tag of a node or
1126 # the branch of the node's parents. If a node has multiple
1138 # the branch of the node's parents. If a node has multiple
1127 # branch tags, tags are eliminated if they are visible from other
1139 # branch tags, tags are eliminated if they are visible from other
1128 # branch tags.
1140 # branch tags.
1129 #
1141 #
1130 # So, for this graph:  a->b->c->d->e
1142 # So, for this graph:  a->b->c->d->e
1131 #                       \         /
1143 #                       \         /
1132 #                        aa -----/
1144 #                        aa -----/
1133 # a has tag 2.6.12
1145 # a has tag 2.6.12
1134 # d has tag 2.6.13
1146 # d has tag 2.6.13
1135 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1147 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1136 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is
1148 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is
1137 # eliminated from the list.
1149 # eliminated from the list.
1138 #
1150 #
1139 # It is possible that more than one head will have the same branch tag.
1151 # It is possible that more than one head will have the same branch tag.
1140 # Callers need to check the result for multiple heads under the same
1152 # Callers need to check the result for multiple heads under the same
1141 # branch tag if that is a problem for them (i.e. checkout of a specific
1153 # branch tag if that is a problem for them (i.e. checkout of a specific
1142 # branch).
1154 # branch).
1143 #
1155 #
1144 # passing in a specific branch will limit the depth of the search
1156 # passing in a specific branch will limit the depth of the search
1145 # through the parents. It won't limit the branches returned in the
1157 # through the parents. It won't limit the branches returned in the
1146 # result though.
1158 # result though.
1147 def branchlookup(self, heads=None, branch=None):
1159 def branchlookup(self, heads=None, branch=None):
1148 if not heads:
1160 if not heads:
1149 heads = self.heads()
1161 heads = self.heads()
1150 headt = [ h for h in heads ]
1162 headt = [ h for h in heads ]
1151 chlog = self.changelog
1163 chlog = self.changelog
1152 branches = {}
1164 branches = {}
1153 merges = []
1165 merges = []
1154 seenmerge = {}
1166 seenmerge = {}
1155
1167
1156 # traverse the tree once for each head, recording in the branches
1168 # traverse the tree once for each head, recording in the branches
1157 # dict which tags are visible from this head. The branches
1169 # dict which tags are visible from this head. The branches
1158 # dict also records which tags are visible from each tag
1170 # dict also records which tags are visible from each tag
1159 # while we traverse.
1171 # while we traverse.
1160 while headt or merges:
1172 while headt or merges:
1161 if merges:
1173 if merges:
1162 n, found = merges.pop()
1174 n, found = merges.pop()
1163 visit = [n]
1175 visit = [n]
1164 else:
1176 else:
1165 h = headt.pop()
1177 h = headt.pop()
1166 visit = [h]
1178 visit = [h]
1167 found = [h]
1179 found = [h]
1168 seen = {}
1180 seen = {}
1169 while visit:
1181 while visit:
1170 n = visit.pop()
1182 n = visit.pop()
1171 if n in seen:
1183 if n in seen:
1172 continue
1184 continue
1173 pp = chlog.parents(n)
1185 pp = chlog.parents(n)
1174 tags = self.nodetags(n)
1186 tags = self.nodetags(n)
1175 if tags:
1187 if tags:
1176 for x in tags:
1188 for x in tags:
1177 if x == 'tip':
1189 if x == 'tip':
1178 continue
1190 continue
1179 for f in found:
1191 for f in found:
1180 branches.setdefault(f, {})[n] = 1
1192 branches.setdefault(f, {})[n] = 1
1181 branches.setdefault(n, {})[n] = 1
1193 branches.setdefault(n, {})[n] = 1
1182 break
1194 break
1183 if n not in found:
1195 if n not in found:
1184 found.append(n)
1196 found.append(n)
1185 if branch in tags:
1197 if branch in tags:
1186 continue
1198 continue
1187 seen[n] = 1
1199 seen[n] = 1
1188 if pp[1] != nullid and n not in seenmerge:
1200 if pp[1] != nullid and n not in seenmerge:
1189 merges.append((pp[1], [x for x in found]))
1201 merges.append((pp[1], [x for x in found]))
1190 seenmerge[n] = 1
1202 seenmerge[n] = 1
1191 if pp[0] != nullid:
1203 if pp[0] != nullid:
1192 visit.append(pp[0])
1204 visit.append(pp[0])
1193 # traverse the branches dict, eliminating branch tags from each
1205 # traverse the branches dict, eliminating branch tags from each
1194 # head that are visible from another branch tag for that head.
1206 # head that are visible from another branch tag for that head.
1195 out = {}
1207 out = {}
1196 viscache = {}
1208 viscache = {}
1197 for h in heads:
1209 for h in heads:
1198 def visible(node):
1210 def visible(node):
1199 if node in viscache:
1211 if node in viscache:
1200 return viscache[node]
1212 return viscache[node]
1201 ret = {}
1213 ret = {}
1202 visit = [node]
1214 visit = [node]
1203 while visit:
1215 while visit:
1204 x = visit.pop()
1216 x = visit.pop()
1205 if x in viscache:
1217 if x in viscache:
1206 ret.update(viscache[x])
1218 ret.update(viscache[x])
1207 elif x not in ret:
1219 elif x not in ret:
1208 ret[x] = 1
1220 ret[x] = 1
1209 if x in branches:
1221 if x in branches:
1210 visit[len(visit):] = branches[x].keys()
1222 visit[len(visit):] = branches[x].keys()
1211 viscache[node] = ret
1223 viscache[node] = ret
1212 return ret
1224 return ret
1213 if h not in branches:
1225 if h not in branches:
1214 continue
1226 continue
1215 # O(n^2), but somewhat limited. This only searches the
1227 # O(n^2), but somewhat limited. This only searches the
1216 # tags visible from a specific head, not all the tags in the
1228 # tags visible from a specific head, not all the tags in the
1217 # whole repo.
1229 # whole repo.
1218 for b in branches[h]:
1230 for b in branches[h]:
1219 vis = False
1231 vis = False
1220 for bb in branches[h].keys():
1232 for bb in branches[h].keys():
1221 if b != bb:
1233 if b != bb:
1222 if b in visible(bb):
1234 if b in visible(bb):
1223 vis = True
1235 vis = True
1224 break
1236 break
1225 if not vis:
1237 if not vis:
1226 l = out.setdefault(h, [])
1238 l = out.setdefault(h, [])
1227 l[len(l):] = self.nodetags(b)
1239 l[len(l):] = self.nodetags(b)
1228 return out
1240 return out
1229
1241
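# Editor's sketch (not part of hg.py): the elimination rule documented in
# the comment above branchlookup(), applied to the example graph.  The
# parents/tags dicts and the ancestors() helper are invented for
# illustration only.
parents = {'b': ['a'], 'c': ['b'], 'd': ['c'], 'aa': ['a'], 'e': ['d', 'aa']}
tags = {'a': '2.6.12', 'd': '2.6.13'}

def ancestors(node):
    seen, stack = {}, [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen[n] = 1
            stack.extend(parents.get(n, []))
    return seen

visible = [n for n in ancestors('e') if n in tags]  # tags visible from head e
kept = [n for n in visible                          # drop any tag that is
        if not [m for m in visible                  # visible from another
                if m != n and n in ancestors(m)]]   # visible tag
print [tags[n] for n in kept]                       # -> ['2.6.13']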
1230 def branches(self, nodes):
1242 def branches(self, nodes):
1231 if not nodes: nodes = [self.changelog.tip()]
1243 if not nodes: nodes = [self.changelog.tip()]
1232 b = []
1244 b = []
1233 for n in nodes:
1245 for n in nodes:
1234 t = n
1246 t = n
1235 while n:
1247 while n:
1236 p = self.changelog.parents(n)
1248 p = self.changelog.parents(n)
1237 if p[1] != nullid or p[0] == nullid:
1249 if p[1] != nullid or p[0] == nullid:
1238 b.append((t, n, p[0], p[1]))
1250 b.append((t, n, p[0], p[1]))
1239 break
1251 break
1240 n = p[0]
1252 n = p[0]
1241 return b
1253 return b
1242
1254
1243 def between(self, pairs):
1255 def between(self, pairs):
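# For each (top, bottom) pair, walk the first-parent chain down from top
# and collect the nodes 1, 2, 4, 8, ... steps below it (stopping at
# bottom); the discovery code uses this to binary-search a branch.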
1244 r = []
1256 r = []
1245
1257
1246 for top, bottom in pairs:
1258 for top, bottom in pairs:
1247 n, l, i = top, [], 0
1259 n, l, i = top, [], 0
1248 f = 1
1260 f = 1
1249
1261
1250 while n != bottom:
1262 while n != bottom:
1251 p = self.changelog.parents(n)[0]
1263 p = self.changelog.parents(n)[0]
1252 if i == f:
1264 if i == f:
1253 l.append(n)
1265 l.append(n)
1254 f = f * 2
1266 f = f * 2
1255 n = p
1267 n = p
1256 i += 1
1268 i += 1
1257
1269
1258 r.append(l)
1270 r.append(l)
1259
1271
1260 return r
1272 return r
1261
1273
1262 def newer(self, nodes):
1274 def newer(self, nodes):
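# Return the given nodes plus all of their descendants, found in a
# single pass over revision numbers from the lowest given rev to tip.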
1263 m = {}
1275 m = {}
1264 nl = []
1276 nl = []
1265 pm = {}
1277 pm = {}
1266 cl = self.changelog
1278 cl = self.changelog
1267 t = l = cl.count()
1279 t = l = cl.count()
1268
1280
1269 # find the lowest numbered node
1281 # find the lowest numbered node
1270 for n in nodes:
1282 for n in nodes:
1271 l = min(l, cl.rev(n))
1283 l = min(l, cl.rev(n))
1272 m[n] = 1
1284 m[n] = 1
1273
1285
1274 for i in xrange(l, t):
1286 for i in xrange(l, t):
1275 n = cl.node(i)
1287 n = cl.node(i)
1276 if n in m: # explicitly listed
1288 if n in m: # explicitly listed
1277 pm[n] = 1
1289 pm[n] = 1
1278 nl.append(n)
1290 nl.append(n)
1279 continue
1291 continue
1280 for p in cl.parents(n):
1292 for p in cl.parents(n):
1281 if p in pm: # parent listed
1293 if p in pm: # parent listed
1282 pm[n] = 1
1294 pm[n] = 1
1283 nl.append(n)
1295 nl.append(n)
1284 break
1296 break
1285
1297
1286 return nl
1298 return nl
1287
1299
1288 def findincoming(self, remote, base=None, heads=None):
1300 def findincoming(self, remote, base=None, heads=None):
1289 m = self.changelog.nodemap
1301 m = self.changelog.nodemap
1290 search = []
1302 search = []
1291 fetch = []
1303 fetch = []
1292 seen = {}
1304 seen = {}
1293 seenbranch = {}
1305 seenbranch = {}
1294 if base is None:
1306 if base is None:
1295 base = {}
1307 base = {}
1296
1308
1297 # assume we're closer to the tip than the root
1309 # assume we're closer to the tip than the root
1298 # and start by examining the heads
1310 # and start by examining the heads
1299 self.ui.status("searching for changes\n")
1311 self.ui.status("searching for changes\n")
1300
1312
1301 if not heads:
1313 if not heads:
1302 heads = remote.heads()
1314 heads = remote.heads()
1303
1315
1304 unknown = []
1316 unknown = []
1305 for h in heads:
1317 for h in heads:
1306 if h not in m:
1318 if h not in m:
1307 unknown.append(h)
1319 unknown.append(h)
1308 else:
1320 else:
1309 base[h] = 1
1321 base[h] = 1
1310
1322
1311 if not unknown:
1323 if not unknown:
1312 return None
1324 return None
1313
1325
1314 rep = {}
1326 rep = {}
1315 reqcnt = 0
1327 reqcnt = 0
1316
1328
1317 # search through remote branches
1329 # search through remote branches
1318 # a 'branch' here is a linear segment of history, with four parts:
1330 # a 'branch' here is a linear segment of history, with four parts:
1319 # head, root, first parent, second parent
1331 # head, root, first parent, second parent
1320 # (a branch always has two parents (or none) by definition)
1332 # (a branch always has two parents (or none) by definition)
1321 unknown = remote.branches(unknown)
1333 unknown = remote.branches(unknown)
1322 while unknown:
1334 while unknown:
1323 r = []
1335 r = []
1324 while unknown:
1336 while unknown:
1325 n = unknown.pop(0)
1337 n = unknown.pop(0)
1326 if n[0] in seen:
1338 if n[0] in seen:
1327 continue
1339 continue
1328
1340
1329 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1341 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1330 if n[0] == nullid:
1342 if n[0] == nullid:
1331 break
1343 break
1332 if n in seenbranch:
1344 if n in seenbranch:
1333 self.ui.debug("branch already found\n")
1345 self.ui.debug("branch already found\n")
1334 continue
1346 continue
1335 if n[1] and n[1] in m: # do we know the base?
1347 if n[1] and n[1] in m: # do we know the base?
1336 self.ui.debug("found incomplete branch %s:%s\n"
1348 self.ui.debug("found incomplete branch %s:%s\n"
1337 % (short(n[0]), short(n[1])))
1349 % (short(n[0]), short(n[1])))
1338 search.append(n) # schedule branch range for scanning
1350 search.append(n) # schedule branch range for scanning
1339 seenbranch[n] = 1
1351 seenbranch[n] = 1
1340 else:
1352 else:
1341 if n[1] not in seen and n[1] not in fetch:
1353 if n[1] not in seen and n[1] not in fetch:
1342 if n[2] in m and n[3] in m:
1354 if n[2] in m and n[3] in m:
1343 self.ui.debug("found new changeset %s\n" %
1355 self.ui.debug("found new changeset %s\n" %
1344 short(n[1]))
1356 short(n[1]))
1345 fetch.append(n[1]) # earliest unknown
1357 fetch.append(n[1]) # earliest unknown
1346 base[n[2]] = 1 # latest known
1358 base[n[2]] = 1 # latest known
1347 continue
1359 continue
1348
1360
1349 for a in n[2:4]:
1361 for a in n[2:4]:
1350 if a not in rep:
1362 if a not in rep:
1351 r.append(a)
1363 r.append(a)
1352 rep[a] = 1
1364 rep[a] = 1
1353
1365
1354 seen[n[0]] = 1
1366 seen[n[0]] = 1
1355
1367
1356 if r:
1368 if r:
1357 reqcnt += 1
1369 reqcnt += 1
1358 self.ui.debug("request %d: %s\n" %
1370 self.ui.debug("request %d: %s\n" %
1359 (reqcnt, " ".join(map(short, r))))
1371 (reqcnt, " ".join(map(short, r))))
1360 for p in range(0, len(r), 10):
1372 for p in range(0, len(r), 10):
1361 for b in remote.branches(r[p:p+10]):
1373 for b in remote.branches(r[p:p+10]):
1362 self.ui.debug("received %s:%s\n" %
1374 self.ui.debug("received %s:%s\n" %
1363 (short(b[0]), short(b[1])))
1375 (short(b[0]), short(b[1])))
1364 if b[0] not in m and b[0] not in seen:
1376 if b[0] not in m and b[0] not in seen:
1365 unknown.append(b)
1377 unknown.append(b)
1366
1378
1367 # do binary search on the branches we found
1379 # do binary search on the branches we found
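# between() returns samples at exponentially growing distances, so each
# pass narrows the known/unknown boundary of a branch and only a
# logarithmic number of round trips is needed per branch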
1368 while search:
1380 while search:
1369 n = search.pop(0)
1381 n = search.pop(0)
1370 reqcnt += 1
1382 reqcnt += 1
1371 l = remote.between([(n[0], n[1])])[0]
1383 l = remote.between([(n[0], n[1])])[0]
1372 l.append(n[1])
1384 l.append(n[1])
1373 p = n[0]
1385 p = n[0]
1374 f = 1
1386 f = 1
1375 for i in l:
1387 for i in l:
1376 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1388 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1377 if i in m:
1389 if i in m:
1378 if f <= 2:
1390 if f <= 2:
1379 self.ui.debug("found new branch changeset %s\n" %
1391 self.ui.debug("found new branch changeset %s\n" %
1380 short(p))
1392 short(p))
1381 fetch.append(p)
1393 fetch.append(p)
1382 base[i] = 1
1394 base[i] = 1
1383 else:
1395 else:
1384 self.ui.debug("narrowed branch search to %s:%s\n"
1396 self.ui.debug("narrowed branch search to %s:%s\n"
1385 % (short(p), short(i)))
1397 % (short(p), short(i)))
1386 search.append((p, i))
1398 search.append((p, i))
1387 break
1399 break
1388 p, f = i, f * 2
1400 p, f = i, f * 2
1389
1401
1390 # sanity check our fetch list
1402 # sanity check our fetch list
1391 for f in fetch:
1403 for f in fetch:
1392 if f in m:
1404 if f in m:
1393 raise RepoError("already have changeset " + short(f[:4]))
1405 raise RepoError("already have changeset " + short(f[:4]))
1394
1406
1395 if base.keys() == [nullid]:
1407 if base.keys() == [nullid]:
1396 self.ui.warn("warning: pulling from an unrelated repository!\n")
1408 self.ui.warn("warning: pulling from an unrelated repository!\n")
1397
1409
1398 self.ui.note("adding new changesets starting at " +
1410 self.ui.note("adding new changesets starting at " +
1399 " ".join([short(f) for f in fetch]) + "\n")
1411 " ".join([short(f) for f in fetch]) + "\n")
1400
1412
1401 self.ui.debug("%d total queries\n" % reqcnt)
1413 self.ui.debug("%d total queries\n" % reqcnt)
1402
1414
1403 return fetch
1415 return fetch
1404
1416
1405 def findoutgoing(self, remote, base=None, heads=None):
1417 def findoutgoing(self, remote, base=None, heads=None):
1406 if base is None:
1418 if base is None:
1407 base = {}
1419 base = {}
1408 self.findincoming(remote, base, heads)
1420 self.findincoming(remote, base, heads)
1409
1421
1410 remain = dict.fromkeys(self.changelog.nodemap)
1422 remain = dict.fromkeys(self.changelog.nodemap)
1411
1423
1412 # prune everything remote has from the tree
1424 # prune everything remote has from the tree
1413 del remain[nullid]
1425 del remain[nullid]
1414 remove = base.keys()
1426 remove = base.keys()
1415 while remove:
1427 while remove:
1416 n = remove.pop(0)
1428 n = remove.pop(0)
1417 if n in remain:
1429 if n in remain:
1418 del remain[n]
1430 del remain[n]
1419 for p in self.changelog.parents(n):
1431 for p in self.changelog.parents(n):
1420 remove.append(p)
1432 remove.append(p)
1421
1433
1422 # find every node whose parents have been pruned
1434 # find every node whose parents have been pruned
1423 subset = []
1435 subset = []
1424 for n in remain:
1436 for n in remain:
1425 p1, p2 = self.changelog.parents(n)
1437 p1, p2 = self.changelog.parents(n)
1426 if p1 not in remain and p2 not in remain:
1438 if p1 not in remain and p2 not in remain:
1427 subset.append(n)
1439 subset.append(n)
1428
1440
1429 # this is the set of all roots we have to push
1441 # this is the set of all roots we have to push
1430 return subset
1442 return subset
1431
1443
1432 def pull(self, remote):
1444 def pull(self, remote):
1433 lock = self.lock()
1445 lock = self.lock()
1434
1446
1435 # if we have an empty repo, fetch everything
1447 # if we have an empty repo, fetch everything
1436 if self.changelog.tip() == nullid:
1448 if self.changelog.tip() == nullid:
1437 self.ui.status("requesting all changes\n")
1449 self.ui.status("requesting all changes\n")
1438 fetch = [nullid]
1450 fetch = [nullid]
1439 else:
1451 else:
1440 fetch = self.findincoming(remote)
1452 fetch = self.findincoming(remote)
1441
1453
1442 if not fetch:
1454 if not fetch:
1443 self.ui.status("no changes found\n")
1455 self.ui.status("no changes found\n")
1444 return 1
1456 return 1
1445
1457
1446 cg = remote.changegroup(fetch)
1458 cg = remote.changegroup(fetch)
1447 return self.addchangegroup(cg)
1459 return self.addchangegroup(cg)
1448
1460
1449 def push(self, remote, force=False):
1461 def push(self, remote, force=False):
1450 lock = remote.lock()
1462 lock = remote.lock()
1451
1463
1452 base = {}
1464 base = {}
1453 heads = remote.heads()
1465 heads = remote.heads()
1454 inc = self.findincoming(remote, base, heads)
1466 inc = self.findincoming(remote, base, heads)
1455 if not force and inc:
1467 if not force and inc:
1456 self.ui.warn("abort: unsynced remote changes!\n")
1468 self.ui.warn("abort: unsynced remote changes!\n")
1457 self.ui.status("(did you forget to sync? use push -f to force)\n")
1469 self.ui.status("(did you forget to sync? use push -f to force)\n")
1458 return 1
1470 return 1
1459
1471
1460 update = self.findoutgoing(remote, base)
1472 update = self.findoutgoing(remote, base)
1461 if not update:
1473 if not update:
1462 self.ui.status("no changes found\n")
1474 self.ui.status("no changes found\n")
1463 return 1
1475 return 1
1464 elif not force:
1476 elif not force:
1465 if len(heads) < len(self.changelog.heads()):
1477 if len(heads) < len(self.changelog.heads()):
1466 self.ui.warn("abort: push creates new remote branches!\n")
1478 self.ui.warn("abort: push creates new remote branches!\n")
1467 self.ui.status("(did you forget to merge?" +
1479 self.ui.status("(did you forget to merge?" +
1468 " use push -f to force)\n")
1480 " use push -f to force)\n")
1469 return 1
1481 return 1
1470
1482
1471 cg = self.changegroup(update)
1483 cg = self.changegroup(update)
1472 return remote.addchangegroup(cg)
1484 return remote.addchangegroup(cg)
1473
1485
1474 def changegroup(self, basenodes):
1486 def changegroup(self, basenodes):
1475 class genread:
1487 class genread:
1476 def __init__(self, generator):
1488 def __init__(self, generator):
1477 self.g = generator
1489 self.g = generator
1478 self.buf = ""
1490 self.buf = ""
1479 def fillbuf(self):
1491 def fillbuf(self):
1480 self.buf += "".join(self.g)
1492 self.buf += "".join(self.g)
1481
1493
1482 def read(self, l):
1494 def read(self, l):
1483 while l > len(self.buf):
1495 while l > len(self.buf):
1484 try:
1496 try:
1485 self.buf += self.g.next()
1497 self.buf += self.g.next()
1486 except StopIteration:
1498 except StopIteration:
1487 break
1499 break
1488 d, self.buf = self.buf[:l], self.buf[l:]
1500 d, self.buf = self.buf[:l], self.buf[l:]
1489 return d
1501 return d
1490
1502
1491 def gengroup():
1503 def gengroup():
1492 nodes = self.newer(basenodes)
1504 nodes = self.newer(basenodes)
1493
1505
1494 # construct the link map
1506 # construct the link map
1495 linkmap = {}
1507 linkmap = {}
1496 for n in nodes:
1508 for n in nodes:
1497 linkmap[self.changelog.rev(n)] = n
1509 linkmap[self.changelog.rev(n)] = n
1498
1510
1499 # construct a list of all changed files
1511 # construct a list of all changed files
1500 changed = {}
1512 changed = {}
1501 for n in nodes:
1513 for n in nodes:
1502 c = self.changelog.read(n)
1514 c = self.changelog.read(n)
1503 for f in c[3]:
1515 for f in c[3]:
1504 changed[f] = 1
1516 changed[f] = 1
1505 changed = changed.keys()
1517 changed = changed.keys()
1506 changed.sort()
1518 changed.sort()
1507
1519
1508 # the changegroup is changesets + manifests + all file revs
1520 # the changegroup is changesets + manifests + all file revs
1509 revs = [ self.changelog.rev(n) for n in nodes ]
1521 revs = [ self.changelog.rev(n) for n in nodes ]
1510
1522
1511 for y in self.changelog.group(linkmap): yield y
1523 for y in self.changelog.group(linkmap): yield y
1512 for y in self.manifest.group(linkmap): yield y
1524 for y in self.manifest.group(linkmap): yield y
1513 for f in changed:
1525 for f in changed:
1514 yield struct.pack(">l", len(f) + 4) + f
1526 yield struct.pack(">l", len(f) + 4) + f
1515 g = self.file(f).group(linkmap)
1527 g = self.file(f).group(linkmap)
1516 for y in g:
1528 for y in g:
1517 yield y
1529 yield y
1518
1530
1519 yield struct.pack(">l", 0)
1531 yield struct.pack(">l", 0)
1520
1532
1521 return genread(gengroup())
1533 return genread(gengroup())
1522
1534
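# Editor's sketch (not part of hg.py): the chunk framing produced by
# changegroup() above and consumed by getchunk() in addchangegroup()
# below.  Each chunk is a 4-byte big-endian length (which counts the
# length field itself) followed by the payload; a length <= 4 ends a
# group.  frame()/unframe() are invented names for illustration only.
import struct

def frame(payload):
    return struct.pack(">l", len(payload) + 4) + payload

def unframe(data, offset=0):
    l = struct.unpack(">l", data[offset:offset + 4])[0]
    if l <= 4:
        return None, offset + 4                 # end-of-group marker
    return data[offset + 4:offset + l], offset + l

buf = frame("some/filename.py") + struct.pack(">l", 0)
chunk, pos = unframe(buf)
print repr(chunk)                               # -> 'some/filename.py'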
1523 def addchangegroup(self, source):
1535 def addchangegroup(self, source):
1524
1536
1525 def getchunk():
1537 def getchunk():
1526 d = source.read(4)
1538 d = source.read(4)
1527 if not d: return ""
1539 if not d: return ""
1528 l = struct.unpack(">l", d)[0]
1540 l = struct.unpack(">l", d)[0]
1529 if l <= 4: return ""
1541 if l <= 4: return ""
1530 return source.read(l - 4)
1542 return source.read(l - 4)
1531
1543
1532 def getgroup():
1544 def getgroup():
1533 while 1:
1545 while 1:
1534 c = getchunk()
1546 c = getchunk()
1535 if not c: break
1547 if not c: break
1536 yield c
1548 yield c
1537
1549
1538 def csmap(x):
1550 def csmap(x):
1539 self.ui.debug("add changeset %s\n" % short(x))
1551 self.ui.debug("add changeset %s\n" % short(x))
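# count() is the revision number this changeset is about to be
# assigned, so each new changelog entry is linked to itself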
1540 return self.changelog.count()
1552 return self.changelog.count()
1541
1553
1542 def revmap(x):
1554 def revmap(x):
1543 return self.changelog.rev(x)
1555 return self.changelog.rev(x)
1544
1556
1545 if not source: return
1557 if not source: return
1546 changesets = files = revisions = 0
1558 changesets = files = revisions = 0
1547
1559
1548 tr = self.transaction()
1560 tr = self.transaction()
1549
1561
1550 # pull off the changeset group
1562 # pull off the changeset group
1551 self.ui.status("adding changesets\n")
1563 self.ui.status("adding changesets\n")
1552 co = self.changelog.tip()
1564 co = self.changelog.tip()
1553 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1565 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1554 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1566 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1555
1567
1556 # pull off the manifest group
1568 # pull off the manifest group
1557 self.ui.status("adding manifests\n")
1569 self.ui.status("adding manifests\n")
1558 mm = self.manifest.tip()
1570 mm = self.manifest.tip()
1559 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1571 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1560
1572
1561 # process the files
1573 # process the files
1562 self.ui.status("adding file changes\n")
1574 self.ui.status("adding file changes\n")
1563 while 1:
1575 while 1:
1564 f = getchunk()
1576 f = getchunk()
1565 if not f: break
1577 if not f: break
1566 self.ui.debug("adding %s revisions\n" % f)
1578 self.ui.debug("adding %s revisions\n" % f)
1567 fl = self.file(f)
1579 fl = self.file(f)
1568 o = fl.count()
1580 o = fl.count()
1569 n = fl.addgroup(getgroup(), revmap, tr)
1581 n = fl.addgroup(getgroup(), revmap, tr)
1570 revisions += fl.count() - o
1582 revisions += fl.count() - o
1571 files += 1
1583 files += 1
1572
1584
1573 self.ui.status(("added %d changesets" +
1585 self.ui.status(("added %d changesets" +
1574 " with %d changes to %d files\n")
1586 " with %d changes to %d files\n")
1575 % (changesets, revisions, files))
1587 % (changesets, revisions, files))
1576
1588
1577 tr.close()
1589 tr.close()
1578
1590
1579 if not self.hook("changegroup"):
1591 if not self.hook("changegroup"):
1580 return 1
1592 return 1
1581
1593
1582 return
1594 return
1583
1595
1584 def update(self, node, allow=False, force=False, choose=None,
1596 def update(self, node, allow=False, force=False, choose=None,
1585 moddirstate=True):
1597 moddirstate=True):
1586 pl = self.dirstate.parents()
1598 pl = self.dirstate.parents()
1587 if not force and pl[1] != nullid:
1599 if not force and pl[1] != nullid:
1588 self.ui.warn("aborting: outstanding uncommitted merges\n")
1600 self.ui.warn("aborting: outstanding uncommitted merges\n")
1589 return 1
1601 return 1
1590
1602
1591 p1, p2 = pl[0], node
1603 p1, p2 = pl[0], node
1592 pa = self.changelog.ancestor(p1, p2)
1604 pa = self.changelog.ancestor(p1, p2)
1593 m1n = self.changelog.read(p1)[0]
1605 m1n = self.changelog.read(p1)[0]
1594 m2n = self.changelog.read(p2)[0]
1606 m2n = self.changelog.read(p2)[0]
1595 man = self.manifest.ancestor(m1n, m2n)
1607 man = self.manifest.ancestor(m1n, m2n)
1596 m1 = self.manifest.read(m1n)
1608 m1 = self.manifest.read(m1n)
1597 mf1 = self.manifest.readflags(m1n)
1609 mf1 = self.manifest.readflags(m1n)
1598 m2 = self.manifest.read(m2n)
1610 m2 = self.manifest.read(m2n)
1599 mf2 = self.manifest.readflags(m2n)
1611 mf2 = self.manifest.readflags(m2n)
1600 ma = self.manifest.read(man)
1612 ma = self.manifest.read(man)
1601 mfa = self.manifest.readflags(man)
1613 mfa = self.manifest.readflags(man)
1602
1614
1603 (c, a, d, u) = self.changes()
1615 (c, a, d, u) = self.changes()
1604
1616
1605 # is this a jump, or a merge? i.e. is there a linear path
1617 # is this a jump, or a merge? i.e. is there a linear path
1606 # from p1 to p2?
1618 # from p1 to p2?
1607 linear_path = (pa == p1 or pa == p2)
1619 linear_path = (pa == p1 or pa == p2)
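# if the ancestor is p1 or p2, one revision is an ancestor of the
# other, so this is a plain jump along history rather than a merge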
1608
1620
1609 # resolve the manifest to determine which files
1621 # resolve the manifest to determine which files
1610 # we care about merging
1622 # we care about merging
1611 self.ui.note("resolving manifests\n")
1623 self.ui.note("resolving manifests\n")
1612 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1624 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1613 (force, allow, moddirstate, linear_path))
1625 (force, allow, moddirstate, linear_path))
1614 self.ui.debug(" ancestor %s local %s remote %s\n" %
1626 self.ui.debug(" ancestor %s local %s remote %s\n" %
1615 (short(man), short(m1n), short(m2n)))
1627 (short(man), short(m1n), short(m2n)))
1616
1628
1617 merge = {}
1629 merge = {}
1618 get = {}
1630 get = {}
1619 remove = []
1631 remove = []
1620
1632
1621 # construct a working dir manifest
1633 # construct a working dir manifest
1622 mw = m1.copy()
1634 mw = m1.copy()
1623 mfw = mf1.copy()
1635 mfw = mf1.copy()
1624 umap = dict.fromkeys(u)
1636 umap = dict.fromkeys(u)
1625
1637
1626 for f in a + c + u:
1638 for f in a + c + u:
1627 mw[f] = ""
1639 mw[f] = ""
1628 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1640 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1629
1641
1630 for f in d:
1642 for f in d:
1631 if f in mw: del mw[f]
1643 if f in mw: del mw[f]
1632
1644
1633 # If we're jumping between revisions (as opposed to merging),
1645 # If we're jumping between revisions (as opposed to merging),
1634 # and if neither the working directory nor the target rev has
1646 # and if neither the working directory nor the target rev has
1635 # the file, then we need to remove it from the dirstate, to
1647 # the file, then we need to remove it from the dirstate, to
1636 # prevent the dirstate from listing the file when it is no
1648 # prevent the dirstate from listing the file when it is no
1637 # longer in the manifest.
1649 # longer in the manifest.
1638 if moddirstate and linear_path and f not in m2:
1650 if moddirstate and linear_path and f not in m2:
1639 self.dirstate.forget((f,))
1651 self.dirstate.forget((f,))
1640
1652
1641 # Compare manifests
1653 # Compare manifests
1642 for f, n in mw.iteritems():
1654 for f, n in mw.iteritems():
1643 if choose and not choose(f): continue
1655 if choose and not choose(f): continue
1644 if f in m2:
1656 if f in m2:
1645 s = 0
1657 s = 0
1646
1658
1647 # is the wfile new since m1, and match m2?
1659 # is the wfile new since m1, and match m2?
1648 if f not in m1:
1660 if f not in m1:
1649 t1 = self.wfile(f).read()
1661 t1 = self.wfile(f).read()
1650 t2 = self.file(f).revision(m2[f])
1662 t2 = self.file(f).revision(m2[f])
1651 if cmp(t1, t2) == 0:
1663 if cmp(t1, t2) == 0:
1652 n = m2[f]
1664 n = m2[f]
1653 del t1, t2
1665 del t1, t2
1654
1666
1655 # are files different?
1667 # are files different?
1656 if n != m2[f]:
1668 if n != m2[f]:
1657 a = ma.get(f, nullid)
1669 a = ma.get(f, nullid)
1658 # are both different from the ancestor?
1670 # are both different from the ancestor?
1659 if n != a and m2[f] != a:
1671 if n != a and m2[f] != a:
1660 self.ui.debug(" %s versions differ, resolve\n" % f)
1672 self.ui.debug(" %s versions differ, resolve\n" % f)
1661 # merge executable bits
1673 # merge executable bits
1662 # "if we changed or they changed, change in merge"
1674 # "if we changed or they changed, change in merge"
1663 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1675 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1664 mode = ((a^b) | (a^c)) ^ a
1676 mode = ((a^b) | (a^c)) ^ a
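# with 0/1 flags this keeps whichever side changed the bit relative
# to the ancestor; if both sides changed it, they agree anyway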
1665 merge[f] = (m1.get(f, nullid), m2[f], mode)
1677 merge[f] = (m1.get(f, nullid), m2[f], mode)
1666 s = 1
1678 s = 1
1667 # are we clobbering?
1679 # are we clobbering?
1668 # is remote's version newer?
1680 # is remote's version newer?
1669 # or are we going back in time?
1681 # or are we going back in time?
1670 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1682 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1671 self.ui.debug(" remote %s is newer, get\n" % f)
1683 self.ui.debug(" remote %s is newer, get\n" % f)
1672 get[f] = m2[f]
1684 get[f] = m2[f]
1673 s = 1
1685 s = 1
1674 elif f in umap:
1686 elif f in umap:
1675 # this unknown file is the same as the checkout
1687 # this unknown file is the same as the checkout
1676 get[f] = m2[f]
1688 get[f] = m2[f]
1677
1689
1678 if not s and mfw[f] != mf2[f]:
1690 if not s and mfw[f] != mf2[f]:
1679 if force:
1691 if force:
1680 self.ui.debug(" updating permissions for %s\n" % f)
1692 self.ui.debug(" updating permissions for %s\n" % f)
1681 util.set_exec(self.wjoin(f), mf2[f])
1693 util.set_exec(self.wjoin(f), mf2[f])
1682 else:
1694 else:
1683 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1695 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1684 mode = ((a^b) | (a^c)) ^ a
1696 mode = ((a^b) | (a^c)) ^ a
1685 if mode != b:
1697 if mode != b:
1686 self.ui.debug(" updating permissions for %s\n" % f)
1698 self.ui.debug(" updating permissions for %s\n" % f)
1687 util.set_exec(self.wjoin(f), mode)
1699 util.set_exec(self.wjoin(f), mode)
1688 del m2[f]
1700 del m2[f]
1689 elif f in ma:
1701 elif f in ma:
1690 if n != ma[f]:
1702 if n != ma[f]:
1691 r = "d"
1703 r = "d"
1692 if not force and (linear_path or allow):
1704 if not force and (linear_path or allow):
1693 r = self.ui.prompt(
1705 r = self.ui.prompt(
1694 (" local changed %s which remote deleted\n" % f) +
1706 (" local changed %s which remote deleted\n" % f) +
1695 "(k)eep or (d)elete?", "[kd]", "k")
1707 "(k)eep or (d)elete?", "[kd]", "k")
1696 if r == "d":
1708 if r == "d":
1697 remove.append(f)
1709 remove.append(f)
1698 else:
1710 else:
1699 self.ui.debug("other deleted %s\n" % f)
1711 self.ui.debug("other deleted %s\n" % f)
1700 remove.append(f) # other deleted it
1712 remove.append(f) # other deleted it
1701 else:
1713 else:
1702 if n == m1.get(f, nullid): # same as parent
1714 if n == m1.get(f, nullid): # same as parent
1703 if p2 == pa: # going backwards?
1715 if p2 == pa: # going backwards?
1704 self.ui.debug("remote deleted %s\n" % f)
1716 self.ui.debug("remote deleted %s\n" % f)
1705 remove.append(f)
1717 remove.append(f)
1706 else:
1718 else:
1707 self.ui.debug("local created %s, keeping\n" % f)
1719 self.ui.debug("local created %s, keeping\n" % f)
1708 else:
1720 else:
1709 self.ui.debug("working dir created %s, keeping\n" % f)
1721 self.ui.debug("working dir created %s, keeping\n" % f)
1710
1722
1711 for f, n in m2.iteritems():
1723 for f, n in m2.iteritems():
1712 if choose and not choose(f): continue
1724 if choose and not choose(f): continue
1713 if f[0] == "/": continue
1725 if f[0] == "/": continue
1714 if f in ma and n != ma[f]:
1726 if f in ma and n != ma[f]:
1715 r = "k"
1727 r = "k"
1716 if not force and (linear_path or allow):
1728 if not force and (linear_path or allow):
1717 r = self.ui.prompt(
1729 r = self.ui.prompt(
1718 ("remote changed %s which local deleted\n" % f) +
1730 ("remote changed %s which local deleted\n" % f) +
1719 "(k)eep or (d)elete?", "[kd]", "k")
1731 "(k)eep or (d)elete?", "[kd]", "k")
1720 if r == "k": get[f] = n
1732 if r == "k": get[f] = n
1721 elif f not in ma:
1733 elif f not in ma:
1722 self.ui.debug("remote created %s\n" % f)
1734 self.ui.debug("remote created %s\n" % f)
1723 get[f] = n
1735 get[f] = n
1724 else:
1736 else:
1725 if force or p2 == pa: # going backwards?
1737 if force or p2 == pa: # going backwards?
1726 self.ui.debug("local deleted %s, recreating\n" % f)
1738 self.ui.debug("local deleted %s, recreating\n" % f)
1727 get[f] = n
1739 get[f] = n
1728 else:
1740 else:
1729 self.ui.debug("local deleted %s\n" % f)
1741 self.ui.debug("local deleted %s\n" % f)
1730
1742
1731 del mw, m1, m2, ma
1743 del mw, m1, m2, ma
1732
1744
1733 if force:
1745 if force:
1734 for f in merge:
1746 for f in merge:
1735 get[f] = merge[f][1]
1747 get[f] = merge[f][1]
1736 merge = {}
1748 merge = {}
1737
1749
1738 if linear_path or force:
1750 if linear_path or force:
1739 # we don't need to do any magic, just jump to the new rev
1751 # we don't need to do any magic, just jump to the new rev
1740 mode = 'n'
1752 mode = 'n'
1741 p1, p2 = p2, nullid
1753 p1, p2 = p2, nullid
1742 else:
1754 else:
1743 if not allow:
1755 if not allow:
1744 self.ui.status("this update spans a branch" +
1756 self.ui.status("this update spans a branch" +
1745 " affecting the following files:\n")
1757 " affecting the following files:\n")
1746 fl = merge.keys() + get.keys()
1758 fl = merge.keys() + get.keys()
1747 fl.sort()
1759 fl.sort()
1748 for f in fl:
1760 for f in fl:
1749 cf = ""
1761 cf = ""
1750 if f in merge: cf = " (resolve)"
1762 if f in merge: cf = " (resolve)"
1751 self.ui.status(" %s%s\n" % (f, cf))
1763 self.ui.status(" %s%s\n" % (f, cf))
1752 self.ui.warn("aborting update spanning branches!\n")
1764 self.ui.warn("aborting update spanning branches!\n")
1753 self.ui.status("(use update -m to merge across branches" +
1765 self.ui.status("(use update -m to merge across branches" +
1754 " or -C to lose changes)\n")
1766 " or -C to lose changes)\n")
1755 return 1
1767 return 1
1756 mode = 'm'
1768 mode = 'm'
1757
1769
1758 if moddirstate:
1770 if moddirstate:
1759 self.dirstate.setparents(p1, p2)
1771 self.dirstate.setparents(p1, p2)
1760
1772
1761 # get the files we don't need to change
1773 # get the files we don't need to change
1762 files = get.keys()
1774 files = get.keys()
1763 files.sort()
1775 files.sort()
1764 for f in files:
1776 for f in files:
1765 if f[0] == "/": continue
1777 if f[0] == "/": continue
1766 self.ui.note("getting %s\n" % f)
1778 self.ui.note("getting %s\n" % f)
1767 t = self.file(f).read(get[f])
1779 t = self.file(f).read(get[f])
1768 try:
1780 try:
1769 self.wfile(f, "w").write(t)
1781 self.wfile(f, "w").write(t)
1770 except IOError:
1782 except IOError:
1771 os.makedirs(os.path.dirname(self.wjoin(f)))
1783 os.makedirs(os.path.dirname(self.wjoin(f)))
1772 self.wfile(f, "w").write(t)
1784 self.wfile(f, "w").write(t)
1773 util.set_exec(self.wjoin(f), mf2[f])
1785 util.set_exec(self.wjoin(f), mf2[f])
1774 if moddirstate:
1786 if moddirstate:
1787 if mode == 'm':
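# recording st_mtime=0 makes the cached stat stale, so a later
# status recompares the file contents instead of trusting the cache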
1788 self.dirstate.update([f], 'n', st_mtime=0)
1789 else:
1775 self.dirstate.update([f], 'n')
1790 self.dirstate.update([f], 'n')
1776
1791
1777 # merge the tricky bits
1792 # merge the tricky bits
1778 files = merge.keys()
1793 files = merge.keys()
1779 files.sort()
1794 files.sort()
1780 for f in files:
1795 for f in files:
1781 self.ui.status("merging %s\n" % f)
1796 self.ui.status("merging %s\n" % f)
1782 m, o, flag = merge[f]
1797 m, o, flag = merge[f]
1783 self.merge3(f, m, o)
1798 self.merge3(f, m, o)
1784 util.set_exec(self.wjoin(f), flag)
1799 util.set_exec(self.wjoin(f), flag)
1785 if moddirstate:
1800 if moddirstate:
1786 if mode == 'm':
1801 if mode == 'm':
1787 # only update dirstate on branch merge, otherwise we
1802 # only update dirstate on branch merge, otherwise we
1788 # could mark files with changes as unchanged
1803 # could mark files with changes as unchanged
1789 self.dirstate.update([f], mode)
1804 self.dirstate.update([f], mode)
1790 elif p2 == nullid:
1805 elif p2 == nullid:
1791 # update dirstate from parent1's manifest
1806 # update dirstate from parent1's manifest
1792 m1n = self.changelog.read(p1)[0]
1807 m1n = self.changelog.read(p1)[0]
1793 m1 = self.manifest.read(m1n)
1808 m1 = self.manifest.read(m1n)
1794 f_len = len(self.file(f).read(m1[f]))
1809 f_len = len(self.file(f).read(m1[f]))
1795 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1810 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1796 else:
1811 else:
1797 self.ui.warn("Second parent without branch merge!?\n"
1812 self.ui.warn("Second parent without branch merge!?\n"
1798 "Dirstate for file %s may be wrong.\n" % f)
1813 "Dirstate for file %s may be wrong.\n" % f)
1799
1814
1800 remove.sort()
1815 remove.sort()
1801 for f in remove:
1816 for f in remove:
1802 self.ui.note("removing %s\n" % f)
1817 self.ui.note("removing %s\n" % f)
1803 try:
1818 try:
1804 os.unlink(self.wjoin(f))
1819 os.unlink(self.wjoin(f))
1805 except OSError, inst:
1820 except OSError, inst:
1806 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1821 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1807 # try removing directories that might now be empty
1822 # try removing directories that might now be empty
1808 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1823 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1809 except: pass
1824 except: pass
1810 if moddirstate:
1825 if moddirstate:
1811 if mode == 'n':
1826 if mode == 'n':
1812 self.dirstate.forget(remove)
1827 self.dirstate.forget(remove)
1813 else:
1828 else:
1814 self.dirstate.update(remove, 'r')
1829 self.dirstate.update(remove, 'r')
1815
1830
1816 def merge3(self, fn, my, other):
1831 def merge3(self, fn, my, other):
1817 """perform a 3-way merge in the working directory"""
1832 """perform a 3-way merge in the working directory"""
1818
1833
1819 def temp(prefix, node):
1834 def temp(prefix, node):
1820 pre = "%s~%s." % (os.path.basename(fn), prefix)
1835 pre = "%s~%s." % (os.path.basename(fn), prefix)
1821 (fd, name) = tempfile.mkstemp("", pre)
1836 (fd, name) = tempfile.mkstemp("", pre)
1822 f = os.fdopen(fd, "wb")
1837 f = os.fdopen(fd, "wb")
1823 f.write(fl.revision(node))
1838 f.write(fl.revision(node))
1824 f.close()
1839 f.close()
1825 return name
1840 return name
1826
1841
1827 fl = self.file(fn)
1842 fl = self.file(fn)
1828 base = fl.ancestor(my, other)
1843 base = fl.ancestor(my, other)
1829 a = self.wjoin(fn)
1844 a = self.wjoin(fn)
1830 b = temp("base", base)
1845 b = temp("base", base)
1831 c = temp("other", other)
1846 c = temp("other", other)
1832
1847
1833 self.ui.note("resolving %s\n" % fn)
1848 self.ui.note("resolving %s\n" % fn)
1834 self.ui.debug("file %s: other %s ancestor %s\n" %
1849 self.ui.debug("file %s: other %s ancestor %s\n" %
1835 (fn, short(other), short(base)))
1850 (fn, short(other), short(base)))
1836
1851
1837 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1852 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1838 or "hgmerge")
1853 or "hgmerge")
1839 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1854 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1840 if r:
1855 if r:
1841 self.ui.warn("merging %s failed!\n" % fn)
1856 self.ui.warn("merging %s failed!\n" % fn)
1842
1857
1843 os.unlink(b)
1858 os.unlink(b)
1844 os.unlink(c)
1859 os.unlink(c)
1845
1860
1846 def verify(self):
1861 def verify(self):
1847 filelinkrevs = {}
1862 filelinkrevs = {}
1848 filenodes = {}
1863 filenodes = {}
1849 changesets = revisions = files = 0
1864 changesets = revisions = files = 0
1850 errors = 0
1865 errors = 0
1851
1866
1852 seen = {}
1867 seen = {}
1853 self.ui.status("checking changesets\n")
1868 self.ui.status("checking changesets\n")
1854 for i in range(self.changelog.count()):
1869 for i in range(self.changelog.count()):
1855 changesets += 1
1870 changesets += 1
1856 n = self.changelog.node(i)
1871 n = self.changelog.node(i)
1857 if n in seen:
1872 if n in seen:
1858 self.ui.warn("duplicate changeset at revision %d\n" % i)
1873 self.ui.warn("duplicate changeset at revision %d\n" % i)
1859 errors += 1
1874 errors += 1
1860 seen[n] = 1
1875 seen[n] = 1
1861
1876
1862 for p in self.changelog.parents(n):
1877 for p in self.changelog.parents(n):
1863 if p not in self.changelog.nodemap:
1878 if p not in self.changelog.nodemap:
1864 self.ui.warn("changeset %s has unknown parent %s\n" %
1879 self.ui.warn("changeset %s has unknown parent %s\n" %
1865 (short(n), short(p)))
1880 (short(n), short(p)))
1866 errors += 1
1881 errors += 1
1867 try:
1882 try:
1868 changes = self.changelog.read(n)
1883 changes = self.changelog.read(n)
1869 except Exception, inst:
1884 except Exception, inst:
1870 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1885 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1871 errors += 1
1886 errors += 1
1872
1887
1873 for f in changes[3]:
1888 for f in changes[3]:
1874 filelinkrevs.setdefault(f, []).append(i)
1889 filelinkrevs.setdefault(f, []).append(i)
1875
1890
1876 seen = {}
1891 seen = {}
1877 self.ui.status("checking manifests\n")
1892 self.ui.status("checking manifests\n")
1878 for i in range(self.manifest.count()):
1893 for i in range(self.manifest.count()):
1879 n = self.manifest.node(i)
1894 n = self.manifest.node(i)
1880 if n in seen:
1895 if n in seen:
1881 self.ui.warn("duplicate manifest at revision %d\n" % i)
1896 self.ui.warn("duplicate manifest at revision %d\n" % i)
1882 errors += 1
1897 errors += 1
1883 seen[n] = 1
1898 seen[n] = 1
1884
1899
1885 for p in self.manifest.parents(n):
1900 for p in self.manifest.parents(n):
1886 if p not in self.manifest.nodemap:
1901 if p not in self.manifest.nodemap:
1887 self.ui.warn("manifest %s has unknown parent %s\n" %
1902 self.ui.warn("manifest %s has unknown parent %s\n" %
1888 (short(n), short(p)))
1903 (short(n), short(p)))
1889 errors += 1
1904 errors += 1
1890
1905
1891 try:
1906 try:
1892 delta = mdiff.patchtext(self.manifest.delta(n))
1907 delta = mdiff.patchtext(self.manifest.delta(n))
1893 except KeyboardInterrupt:
1908 except KeyboardInterrupt:
1894 self.ui.warn("aborted\n")
1909 self.ui.warn("aborted\n")
1895 sys.exit(0)
1910 sys.exit(0)
1896 except Exception, inst:
1911 except Exception, inst:
1897 self.ui.warn("unpacking manifest %s: %s\n"
1912 self.ui.warn("unpacking manifest %s: %s\n"
1898 % (short(n), inst))
1913 % (short(n), inst))
1899 errors += 1
1914 errors += 1
1900
1915
1901 ff = [ l.split('\0') for l in delta.splitlines() ]
1916 ff = [ l.split('\0') for l in delta.splitlines() ]
1902 for f, fn in ff:
1917 for f, fn in ff:
1903 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1918 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1904
1919
1905 self.ui.status("crosschecking files in changesets and manifests\n")
1920 self.ui.status("crosschecking files in changesets and manifests\n")
1906 for f in filenodes:
1921 for f in filenodes:
1907 if f not in filelinkrevs:
1922 if f not in filelinkrevs:
1908 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1923 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1909 errors += 1
1924 errors += 1
1910
1925
1911 for f in filelinkrevs:
1926 for f in filelinkrevs:
1912 if f not in filenodes:
1927 if f not in filenodes:
1913 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1928 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1914 errors += 1
1929 errors += 1
1915
1930
1916 self.ui.status("checking files\n")
1931 self.ui.status("checking files\n")
1917 ff = filenodes.keys()
1932 ff = filenodes.keys()
1918 ff.sort()
1933 ff.sort()
1919 for f in ff:
1934 for f in ff:
1920 if f == "/dev/null": continue
1935 if f == "/dev/null": continue
1921 files += 1
1936 files += 1
1922 fl = self.file(f)
1937 fl = self.file(f)
1923 nodes = { nullid: 1 }
1938 nodes = { nullid: 1 }
1924 seen = {}
1939 seen = {}
1925 for i in range(fl.count()):
1940 for i in range(fl.count()):
1926 revisions += 1
1941 revisions += 1
1927 n = fl.node(i)
1942 n = fl.node(i)
1928
1943
1929 if n in seen:
1944 if n in seen:
1930 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1945 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1931 errors += 1
1946 errors += 1
1932
1947
1933 if n not in filenodes[f]:
1948 if n not in filenodes[f]:
1934 self.ui.warn("%s: %d:%s not in manifests\n"
1949 self.ui.warn("%s: %d:%s not in manifests\n"
1935 % (f, i, short(n)))
1950 % (f, i, short(n)))
1936 errors += 1
1951 errors += 1
1937 else:
1952 else:
1938 del filenodes[f][n]
1953 del filenodes[f][n]
1939
1954
1940 flr = fl.linkrev(n)
1955 flr = fl.linkrev(n)
1941 if flr not in filelinkrevs[f]:
1956 if flr not in filelinkrevs[f]:
1942 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1957 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1943 % (f, short(n), fl.linkrev(n)))
1958 % (f, short(n), fl.linkrev(n)))
1944 errors += 1
1959 errors += 1
1945 else:
1960 else:
1946 filelinkrevs[f].remove(flr)
1961 filelinkrevs[f].remove(flr)
1947
1962
1948 # verify contents
1963 # verify contents
1949 try:
1964 try:
1950 t = fl.read(n)
1965 t = fl.read(n)
1951 except Exception, inst:
1966 except Exception, inst:
1952 self.ui.warn("unpacking file %s %s: %s\n"
1967 self.ui.warn("unpacking file %s %s: %s\n"
1953 % (f, short(n), inst))
1968 % (f, short(n), inst))
1954 errors += 1
1969 errors += 1
1955
1970
1956 # verify parents
1971 # verify parents
1957 (p1, p2) = fl.parents(n)
1972 (p1, p2) = fl.parents(n)
1958 if p1 not in nodes:
1973 if p1 not in nodes:
1959 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1974 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1960 (f, short(n), short(p1)))
1975 (f, short(n), short(p1)))
1961 errors += 1
1976 errors += 1
1962 if p2 not in nodes:
1977 if p2 not in nodes:
1963 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1978 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1964 (f, short(n), short(p2)))
1979 (f, short(n), short(p2)))
1965 errors += 1
1980 errors += 1
1966 nodes[n] = 1
1981 nodes[n] = 1
1967
1982
1968 # cross-check
1983 # cross-check
1969 for node in filenodes[f]:
1984 for node in filenodes[f]:
1970 self.ui.warn("node %s in manifests not in %s\n"
1985 self.ui.warn("node %s in manifests not in %s\n"
1971 % (hex(node), f))
1986 % (hex(node), f))
1972 errors += 1
1987 errors += 1
1973
1988
1974 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1989 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1975 (files, changesets, revisions))
1990 (files, changesets, revisions))
1976
1991
1977 if errors:
1992 if errors:
1978 self.ui.warn("%d integrity errors encountered!\n" % errors)
1993 self.ui.warn("%d integrity errors encountered!\n" % errors)
1979 return 1
1994 return 1
1980
1995
1981 class remoterepository:
1996 class remoterepository:
1982 def local(self):
1997 def local(self):
1983 return False
1998 return False
1984
1999
1985 class httprepository(remoterepository):
2000 class httprepository(remoterepository):
1986 def __init__(self, ui, path):
2001 def __init__(self, ui, path):
1987 # fix missing / after hostname
2002 # fix missing / after hostname
1988 s = urlparse.urlsplit(path)
2003 s = urlparse.urlsplit(path)
1989 partial = s[2]
2004 partial = s[2]
1990 if not partial: partial = "/"
2005 if not partial: partial = "/"
1991 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
2006 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1992 self.ui = ui
2007 self.ui = ui
1993 no_list = [ "localhost", "127.0.0.1" ]
2008 no_list = [ "localhost", "127.0.0.1" ]
1994 host = ui.config("http_proxy", "host")
2009 host = ui.config("http_proxy", "host")
1995 if host is None:
2010 if host is None:
1996 host = os.environ.get("http_proxy")
2011 host = os.environ.get("http_proxy")
1997 if host and host.startswith('http://'):
2012 if host and host.startswith('http://'):
1998 host = host[7:]
2013 host = host[7:]
1999 user = ui.config("http_proxy", "user")
2014 user = ui.config("http_proxy", "user")
2000 passwd = ui.config("http_proxy", "passwd")
2015 passwd = ui.config("http_proxy", "passwd")
2001 no = ui.config("http_proxy", "no")
2016 no = ui.config("http_proxy", "no")
2002 if no is None:
2017 if no is None:
2003 no = os.environ.get("no_proxy")
2018 no = os.environ.get("no_proxy")
2004 if no:
2019 if no:
2005 no_list = no_list + no.split(",")
2020 no_list = no_list + no.split(",")
2006
2021
2007 no_proxy = 0
2022 no_proxy = 0
2008 for h in no_list:
2023 for h in no_list:
2009 if (path.startswith("http://" + h + "/") or
2024 if (path.startswith("http://" + h + "/") or
2010 path.startswith("http://" + h + ":") or
2025 path.startswith("http://" + h + ":") or
2011 path == "http://" + h):
2026 path == "http://" + h):
2012 no_proxy = 1
2027 no_proxy = 1
2013
2028
2014 # Note: urllib2 takes proxy values from the environment and those will
2029 # Note: urllib2 takes proxy values from the environment and those will
2015 # take precedence
2030 # take precedence
2016 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2031 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2017 try:
2032 try:
2018 if os.environ.has_key(env):
2033 if os.environ.has_key(env):
2019 del os.environ[env]
2034 del os.environ[env]
2020 except OSError:
2035 except OSError:
2021 pass
2036 pass
2022
2037
2023 proxy_handler = urllib2.BaseHandler()
2038 proxy_handler = urllib2.BaseHandler()
2024 if host and not no_proxy:
2039 if host and not no_proxy:
2025 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2040 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2026
2041
2027 authinfo = None
2042 authinfo = None
2028 if user and passwd:
2043 if user and passwd:
2029 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2044 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2030 passmgr.add_password(None, host, user, passwd)
2045 passmgr.add_password(None, host, user, passwd)
2031 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2046 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2032
2047
2033 opener = urllib2.build_opener(proxy_handler, authinfo)
2048 opener = urllib2.build_opener(proxy_handler, authinfo)
2034 urllib2.install_opener(opener)
2049 urllib2.install_opener(opener)
2035
2050
2036 def dev(self):
2051 def dev(self):
2037 return -1
2052 return -1
2038
2053
2039 def do_cmd(self, cmd, **args):
2054 def do_cmd(self, cmd, **args):
2040 self.ui.debug("sending %s command\n" % cmd)
2055 self.ui.debug("sending %s command\n" % cmd)
2041 q = {"cmd": cmd}
2056 q = {"cmd": cmd}
2042 q.update(args)
2057 q.update(args)
2043 qs = urllib.urlencode(q)
2058 qs = urllib.urlencode(q)
2044 cu = "%s?%s" % (self.url, qs)
2059 cu = "%s?%s" % (self.url, qs)
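# e.g. http://example.com/repo?cmd=branches&nodes=<hex> (host is illustrative)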
2045 resp = urllib2.urlopen(cu)
2060 resp = urllib2.urlopen(cu)
2046 proto = resp.headers['content-type']
2061 proto = resp.headers['content-type']
2047
2062
2048 # accept old "text/plain" and "application/hg-changegroup" for now
2063 # accept old "text/plain" and "application/hg-changegroup" for now
2049 if not proto.startswith('application/mercurial') and \
2064 if not proto.startswith('application/mercurial') and \
2050 not proto.startswith('text/plain') and \
2065 not proto.startswith('text/plain') and \
2051 not proto.startswith('application/hg-changegroup'):
2066 not proto.startswith('application/hg-changegroup'):
2052 raise RepoError("'%s' does not appear to be an hg repository"
2067 raise RepoError("'%s' does not appear to be an hg repository"
2053 % self.url)
2068 % self.url)
2054
2069
2055 if proto.startswith('application/mercurial'):
2070 if proto.startswith('application/mercurial'):
2056 version = proto[22:]
2071 version = proto[22:]
2057 if float(version) > 0.1:
2072 if float(version) > 0.1:
2058 raise RepoError("'%s' uses newer protocol %s" %
2073 raise RepoError("'%s' uses newer protocol %s" %
2059 (self.url, version))
2074 (self.url, version))
2060
2075
2061 return resp
2076 return resp
2062
2077
2063 def heads(self):
2078 def heads(self):
2064 d = self.do_cmd("heads").read()
2079 d = self.do_cmd("heads").read()
2065 try:
2080 try:
2066 return map(bin, d[:-1].split(" "))
2081 return map(bin, d[:-1].split(" "))
2067 except:
2082 except:
2068 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2083 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2069 raise
2084 raise
2070
2085
2071 def branches(self, nodes):
2086 def branches(self, nodes):
2072 n = " ".join(map(hex, nodes))
2087 n = " ".join(map(hex, nodes))
2073 d = self.do_cmd("branches", nodes=n).read()
2088 d = self.do_cmd("branches", nodes=n).read()
2074 try:
2089 try:
2075 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2090 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2076 return br
2091 return br
2077 except:
2092 except:
2078 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2093 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2079 raise
2094 raise
2080
2095
2081 def between(self, pairs):
2096 def between(self, pairs):
2082 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2097 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2083 d = self.do_cmd("between", pairs=n).read()
2098 d = self.do_cmd("between", pairs=n).read()
2084 try:
2099 try:
2085 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2100 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2086 return p
2101 return p
2087 except:
2102 except:
2088 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2103 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2089 raise
2104 raise
2090
2105

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0

        class zread:
            def __init__(self, f):
                self.zd = zlib.decompressobj()
                self.f = f
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    r = self.f.read(4096)
                    if r:
                        self.buf += self.zd.decompress(r)
                    else:
                        self.buf += self.zd.flush()
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        return zread(f)
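
# Illustrative sketch only, not part of this change: the zread helper above
# decompresses a zlib stream incrementally, 4k at a time.  The hypothetical
# self-test below exercises the same technique against an in-memory buffer;
# it is never called by hg itself.
def _zread_example():
    import zlib, StringIO
    payload = "changegroup data " * 512
    src = StringIO.StringIO(zlib.compress(payload))
    zd = zlib.decompressobj()
    out, chunk = "", src.read(4096)
    while chunk:
        out += zd.decompress(chunk)
        chunk = src.read(4096)
    out += zd.flush()
    assert out == payload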

class remotelock:
    def __init__(self, repo):
        self.repo = repo
    def release(self):
        self.repo.unlock()
        self.repo = None
    def __del__(self):
        if self.repo:
            self.release()
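
# Illustrative note, not part of this change: remotelock pairs a remote
# "lock" call with a guaranteed "unlock", either explicitly via release()
# or as a last resort from __del__.  Typical hypothetical use:
#
#   lock = repo.lock()      # sends "lock", returns a remotelock
#   try:
#       ...                 # push/pull work under the lock
#   finally:
#       lock.release()      # sends "unlock" exactly once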

class sshrepository(remoterepository):
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

        m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
        if not m:
            raise RepoError("couldn't parse destination %s" % path)

        self.user = m.group(2)
        self.host = m.group(3)
        self.port = m.group(5)
        self.path = m.group(7)

        args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
        args = self.port and ("%s -p %s") % (args, self.port) or args
        path = self.path or ""

        if not path:
            raise RepoError("no remote repository path specified")

        sshcmd = self.ui.config("ui", "ssh", "ssh")
        remotecmd = self.ui.config("ui", "remotecmd", "hg")
        cmd = "%s %s '%s -R %s serve --stdio'"
        cmd = cmd % (sshcmd, args, remotecmd, path)

        self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
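
    # Illustrative note, not part of this change: for a URL such as
    # "ssh://hg@example.com:2222/path/to/repo" (made-up host and path) the
    # regex above yields user="hg", host="example.com", port="2222" and
    # path="path/to/repo", so the spawned command line becomes roughly:
    #
    #   ssh hg@example.com -p 2222 'hg -R path/to/repo serve --stdio'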

    def readerr(self):
        while 1:
            r, w, x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        try:
            self.pipeo.close()
            self.pipei.close()
            for l in self.pipee:
                self.ui.status("remote: ", l)
            self.pipee.close()
        except:
            pass

    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)
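
    # Illustrative note, not part of this change: on the wire, do_cmd/call
    # frame a request as the command name, then "key length" header lines
    # each followed by that many bytes of value, and read back a decimal
    # length line plus that many bytes of reply.  A "branches" exchange
    # (lengths and payloads made up) looks roughly like:
    #
    #   -> branches\n
    #   -> nodes 40\n
    #   -> <40 bytes: one hex node id>
    #   <- 164\n
    #   <- <164 bytes of reply>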

    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            # any text in the reply means the remote side refused the push
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""

class httpsrepository(httprepository):
    pass

def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("https://"):
            return httpsrepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)
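
# Illustrative usage sketch, not part of this change: repository() dispatches
# on the URL scheme and falls back to a local repository, so callers only
# pass a path (host names and paths below are made up, and "u" stands for a
# ui object obtained elsewhere):
#
#   repository(u, "http://hg.example.com/repo")      # httprepository
#   repository(u, "ssh://hg@example.com/repo")       # sshrepository
#   repository(u, "/home/me/repo")                   # localrepository
#   repository(u, "/home/me/new", create=1)          # creates a local repo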