##// END OF EJS Templates
Clean up some merge logic...
mpm@selenic.com -
r993:6f274afc default
parent child Browse files
Show More
@@ -1,2278 +1,2277 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect errno select stat")
14 demandload(globals(), "bisect errno select stat")
15
15
class filelog(revlog):
    """Revlog storing the revision history of a single tracked file."""

    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", self.encodedir(path + ".i")),
                        os.path.join("data", self.encodedir(path + ".d")))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        # exact inverse of encodedir: undo the replacements in reverse order
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        """Return the file text for node, with any '\\1\\n'-delimited
        metadata header stripped off."""
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        """Return the metadata stored in node's header as a dict.

        Returns the raw revision text unchanged when there is no
        '\\1\\n' metadata header (mirrors read()'s early-exit).
        """
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {}  # fix: m was used below without ever being initialized
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        """Add a revision; prepend a '\\1\\n' metadata header when meta is
        given or when the text itself would be mistaken for one."""
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):
        """Return (linkrev, line) pairs attributing each line of node to
        the changeset that introduced it."""

        def decorate(text, rev):
            # tag every line of text with rev
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            # lines unchanged since parent inherit the parent's annotation
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        # n is the last node visited, i.e. the one we were asked about
        return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
class manifest(revlog):
    """Revlog of the manifest: the sorted list mapping each tracked file
    name to its file revision node and executable flag."""

    def __init__(self, opener):
        self.mapcache = None   # (node, filemap, flagmap) of last read()
        self.listcache = None  # (text, line list) of last read/written text
        self.addlist = None    # line list being assembled by add()
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        """Return {filename: filenode} for the given manifest node."""
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            # each line is "<name>\0<40-hex node>[x]\n"
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        """Return {filename: is_executable} for the given manifest node."""
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                # sanity check failed; fall back to a real text diff
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        """Add a new manifest revision built from map/flags.

        When the (added, removed) file lists in `changed` and a valid
        listcache are available, the new text and its delta are built
        incrementally via a bisect loop instead of from scratch.
        Returns the new manifest node.
        """
        # directly generate the mdiff delta from the data collected during
        # the bisect loop below
        def gendelta(delta):
            i = 0
            result = []
            while i < len(delta):
                start = delta[i][2]
                end = delta[i][3]
                l = delta[i][4]
                if l is None:  # fix: identity comparison, not == None
                    l = ""
                # coalesce adjacent/overlapping hunks into one
                while i < len(delta) - 1 and start <= delta[i+1][2] \
                          and end >= delta[i+1][2]:
                    if delta[i+1][3] > end:
                        end = delta[i+1][3]
                    if delta[i+1][4]:
                        l += delta[i+1][4]
                    i += 1
                result.append(struct.pack(">lll", start, end, len(l)) + l)
                i += 1
            return result

        # apply the changes collected during the bisect loop to our addlist
        def addlistdelta(addlist, delta):
            # apply the deltas to the addlist.  start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(delta)
            while i > 0:
                i -= 1
                start = delta[i][0]
                end = delta[i][1]
                if delta[i][4]:
                    addlist[start:end] = [delta[i][4]]
                else:
                    del addlist[start:end]
            return addlist

        # calculate the byte offset of the start of each line in the
        # manifest
        def calcoffsets(addlist):
            offsets = [0] * (len(addlist) + 1)
            offset = 0
            i = 0
            while i < len(addlist):
                offsets[i] = offset
                offset += len(addlist[i])
                i += 1
            offsets[i] = offset
            return offsets

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            # rebuild the full text from scratch
            self.addlist = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                            for f in files]
            cachedelta = None
        else:
            addlist = self.listcache[1]

            # find the starting offset for each line in the add list
            offsets = calcoffsets(addlist)

            # combine the changed lists into one list for sorting
            # (0 = added/modified, 1 = removed)
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            bs = 0

            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                bs = bisect.bisect(addlist, f, bs)
                if bs < len(addlist):
                    fn = addlist[bs][:addlist[bs].index('\0')]
                else:
                    fn = None
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = None
                start = bs
                if fn != f:
                    # item not found, insert a new one
                    end = bs
                    if w[1] == 1:
                        sys.stderr.write("failed to remove %s from manifest\n"
                                         % f)
                        sys.exit(1)
                else:
                    # item is found, replace/delete the existing line
                    end = bs + 1
                delta.append([start, end, offsets[start], offsets[end], l])

            self.addlist = addlistdelta(addlist, delta)
            if self.mapcache[0] == self.tip():
                cachedelta = "".join(gendelta(delta))
            else:
                cachedelta = None

        text = "".join(self.addlist)
        if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
            sys.stderr.write("manifest delta failure\n")
            sys.exit(1)
        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n
261
261
class changelog(revlog):
    """Revlog holding the changelog: one entry per changeset."""

    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        """Split a changelog entry into (manifest, user, date, files, desc).

        An empty/missing entry yields the null manifest and empty fields.
        """
        if not text:
            return (nullid, "", "0", [], "")
        # the header is separated from the description by a blank line
        header, desc = text.split("\n\n", 1)
        lines = header.splitlines()
        return (bin(lines[0]), lines[1], lines[2], lines[3:], desc)

    def read(self, node):
        """Return the parsed entry tuple for the given changelog node."""
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        """Append a changeset entry and return its node.

        When no date is given, the current time with the local UTC
        offset is recorded.
        """
        if not date:
            if time.daylight:
                offset = time.altzone
            else:
                offset = time.timezone
            date = "%d %d" % (time.time(), offset)
        list.sort()
        entry = [hex(manifest), user, date] + list + ["", desc]
        return self.addrevision("\n".join(entry), transaction,
                                self.count(), p1, p2)
291
291
292 class dirstate:
292 class dirstate:
293 def __init__(self, opener, ui, root):
293 def __init__(self, opener, ui, root):
294 self.opener = opener
294 self.opener = opener
295 self.root = root
295 self.root = root
296 self.dirty = 0
296 self.dirty = 0
297 self.ui = ui
297 self.ui = ui
298 self.map = None
298 self.map = None
299 self.pl = None
299 self.pl = None
300 self.copies = {}
300 self.copies = {}
301 self.ignorefunc = None
301 self.ignorefunc = None
302
302
303 def wjoin(self, f):
303 def wjoin(self, f):
304 return os.path.join(self.root, f)
304 return os.path.join(self.root, f)
305
305
306 def getcwd(self):
306 def getcwd(self):
307 cwd = os.getcwd()
307 cwd = os.getcwd()
308 if cwd == self.root: return ''
308 if cwd == self.root: return ''
309 return cwd[len(self.root) + 1:]
309 return cwd[len(self.root) + 1:]
310
310
311 def ignore(self, f):
311 def ignore(self, f):
312 if not self.ignorefunc:
312 if not self.ignorefunc:
313 bigpat = []
313 bigpat = []
314 try:
314 try:
315 l = file(self.wjoin(".hgignore"))
315 l = file(self.wjoin(".hgignore"))
316 for pat in l:
316 for pat in l:
317 p = pat.rstrip()
317 p = pat.rstrip()
318 if p:
318 if p:
319 try:
319 try:
320 re.compile(p)
320 re.compile(p)
321 except:
321 except:
322 self.ui.warn("ignoring invalid ignore"
322 self.ui.warn("ignoring invalid ignore"
323 + " regular expression '%s'\n" % p)
323 + " regular expression '%s'\n" % p)
324 else:
324 else:
325 bigpat.append(p)
325 bigpat.append(p)
326 except IOError: pass
326 except IOError: pass
327
327
328 if bigpat:
328 if bigpat:
329 s = "(?:%s)" % (")|(?:".join(bigpat))
329 s = "(?:%s)" % (")|(?:".join(bigpat))
330 r = re.compile(s)
330 r = re.compile(s)
331 self.ignorefunc = r.search
331 self.ignorefunc = r.search
332 else:
332 else:
333 self.ignorefunc = util.never
333 self.ignorefunc = util.never
334
334
335 return self.ignorefunc(f)
335 return self.ignorefunc(f)
336
336
337 def __del__(self):
337 def __del__(self):
338 if self.dirty:
338 if self.dirty:
339 self.write()
339 self.write()
340
340
341 def __getitem__(self, key):
341 def __getitem__(self, key):
342 try:
342 try:
343 return self.map[key]
343 return self.map[key]
344 except TypeError:
344 except TypeError:
345 self.read()
345 self.read()
346 return self[key]
346 return self[key]
347
347
348 def __contains__(self, key):
348 def __contains__(self, key):
349 if not self.map: self.read()
349 if not self.map: self.read()
350 return key in self.map
350 return key in self.map
351
351
352 def parents(self):
352 def parents(self):
353 if not self.pl:
353 if not self.pl:
354 self.read()
354 self.read()
355 return self.pl
355 return self.pl
356
356
357 def markdirty(self):
357 def markdirty(self):
358 if not self.dirty:
358 if not self.dirty:
359 self.dirty = 1
359 self.dirty = 1
360
360
361 def setparents(self, p1, p2 = nullid):
361 def setparents(self, p1, p2 = nullid):
362 self.markdirty()
362 self.markdirty()
363 self.pl = p1, p2
363 self.pl = p1, p2
364
364
365 def state(self, key):
365 def state(self, key):
366 try:
366 try:
367 return self[key][0]
367 return self[key][0]
368 except KeyError:
368 except KeyError:
369 return "?"
369 return "?"
370
370
371 def read(self):
371 def read(self):
372 if self.map is not None: return self.map
372 if self.map is not None: return self.map
373
373
374 self.map = {}
374 self.map = {}
375 self.pl = [nullid, nullid]
375 self.pl = [nullid, nullid]
376 try:
376 try:
377 st = self.opener("dirstate").read()
377 st = self.opener("dirstate").read()
378 if not st: return
378 if not st: return
379 except: return
379 except: return
380
380
381 self.pl = [st[:20], st[20: 40]]
381 self.pl = [st[:20], st[20: 40]]
382
382
383 pos = 40
383 pos = 40
384 while pos < len(st):
384 while pos < len(st):
385 e = struct.unpack(">cllll", st[pos:pos+17])
385 e = struct.unpack(">cllll", st[pos:pos+17])
386 l = e[4]
386 l = e[4]
387 pos += 17
387 pos += 17
388 f = st[pos:pos + l]
388 f = st[pos:pos + l]
389 if '\0' in f:
389 if '\0' in f:
390 f, c = f.split('\0')
390 f, c = f.split('\0')
391 self.copies[f] = c
391 self.copies[f] = c
392 self.map[f] = e[:4]
392 self.map[f] = e[:4]
393 pos += l
393 pos += l
394
394
395 def copy(self, source, dest):
395 def copy(self, source, dest):
396 self.read()
396 self.read()
397 self.markdirty()
397 self.markdirty()
398 self.copies[dest] = source
398 self.copies[dest] = source
399
399
400 def copied(self, file):
400 def copied(self, file):
401 return self.copies.get(file, None)
401 return self.copies.get(file, None)
402
402
403 def update(self, files, state, **kw):
403 def update(self, files, state, **kw):
404 ''' current states:
404 ''' current states:
405 n normal
405 n normal
406 m needs merging
406 m needs merging
407 r marked for removal
407 r marked for removal
408 a marked for addition'''
408 a marked for addition'''
409
409
410 if not files: return
410 if not files: return
411 self.read()
411 self.read()
412 self.markdirty()
412 self.markdirty()
413 for f in files:
413 for f in files:
414 if state == "r":
414 if state == "r":
415 self.map[f] = ('r', 0, 0, 0)
415 self.map[f] = ('r', 0, 0, 0)
416 else:
416 else:
417 s = os.stat(os.path.join(self.root, f))
417 s = os.stat(os.path.join(self.root, f))
418 st_size = kw.get('st_size', s.st_size)
418 st_size = kw.get('st_size', s.st_size)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
421
421
422 def forget(self, files):
422 def forget(self, files):
423 if not files: return
423 if not files: return
424 self.read()
424 self.read()
425 self.markdirty()
425 self.markdirty()
426 for f in files:
426 for f in files:
427 try:
427 try:
428 del self.map[f]
428 del self.map[f]
429 except KeyError:
429 except KeyError:
430 self.ui.warn("not in dirstate: %s!\n" % f)
430 self.ui.warn("not in dirstate: %s!\n" % f)
431 pass
431 pass
432
432
433 def clear(self):
433 def clear(self):
434 self.map = {}
434 self.map = {}
435 self.markdirty()
435 self.markdirty()
436
436
437 def write(self):
437 def write(self):
438 st = self.opener("dirstate", "w")
438 st = self.opener("dirstate", "w")
439 st.write("".join(self.pl))
439 st.write("".join(self.pl))
440 for f, e in self.map.items():
440 for f, e in self.map.items():
441 c = self.copied(f)
441 c = self.copied(f)
442 if c:
442 if c:
443 f = f + "\0" + c
443 f = f + "\0" + c
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
445 st.write(e + f)
445 st.write(e + f)
446 self.dirty = 0
446 self.dirty = 0
447
447
448 def filterfiles(self, files):
448 def filterfiles(self, files):
449 ret = {}
449 ret = {}
450 unknown = []
450 unknown = []
451
451
452 for x in files:
452 for x in files:
453 if x is '.':
453 if x is '.':
454 return self.map.copy()
454 return self.map.copy()
455 if x not in self.map:
455 if x not in self.map:
456 unknown.append(x)
456 unknown.append(x)
457 else:
457 else:
458 ret[x] = self.map[x]
458 ret[x] = self.map[x]
459
459
460 if not unknown:
460 if not unknown:
461 return ret
461 return ret
462
462
463 b = self.map.keys()
463 b = self.map.keys()
464 b.sort()
464 b.sort()
465 blen = len(b)
465 blen = len(b)
466
466
467 for x in unknown:
467 for x in unknown:
468 bs = bisect.bisect(b, x)
468 bs = bisect.bisect(b, x)
469 if bs != 0 and b[bs-1] == x:
469 if bs != 0 and b[bs-1] == x:
470 ret[x] = self.map[x]
470 ret[x] = self.map[x]
471 continue
471 continue
472 while bs < blen:
472 while bs < blen:
473 s = b[bs]
473 s = b[bs]
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
475 ret[s] = self.map[s]
475 ret[s] = self.map[s]
476 else:
476 else:
477 break
477 break
478 bs += 1
478 bs += 1
479 return ret
479 return ret
480
480
481 def walk(self, files = None, match = util.always, dc=None):
481 def walk(self, files = None, match = util.always, dc=None):
482 self.read()
482 self.read()
483
483
484 # walk all files by default
484 # walk all files by default
485 if not files:
485 if not files:
486 files = [self.root]
486 files = [self.root]
487 if not dc:
487 if not dc:
488 dc = self.map.copy()
488 dc = self.map.copy()
489 elif not dc:
489 elif not dc:
490 dc = self.filterfiles(files)
490 dc = self.filterfiles(files)
491
491
492 known = {'.hg': 1}
492 known = {'.hg': 1}
493 def seen(fn):
493 def seen(fn):
494 if fn in known: return True
494 if fn in known: return True
495 known[fn] = 1
495 known[fn] = 1
496 def traverse():
496 def traverse():
497 for ff in util.unique(files):
497 for ff in util.unique(files):
498 f = os.path.join(self.root, ff)
498 f = os.path.join(self.root, ff)
499 try:
499 try:
500 st = os.stat(f)
500 st = os.stat(f)
501 except OSError, inst:
501 except OSError, inst:
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
503 util.pathto(self.getcwd(), ff),
503 util.pathto(self.getcwd(), ff),
504 inst.strerror))
504 inst.strerror))
505 continue
505 continue
506 if stat.S_ISDIR(st.st_mode):
506 if stat.S_ISDIR(st.st_mode):
507 for dir, subdirs, fl in os.walk(f):
507 for dir, subdirs, fl in os.walk(f):
508 d = dir[len(self.root) + 1:]
508 d = dir[len(self.root) + 1:]
509 nd = util.normpath(d)
509 nd = util.normpath(d)
510 if nd == '.': nd = ''
510 if nd == '.': nd = ''
511 if seen(nd):
511 if seen(nd):
512 subdirs[:] = []
512 subdirs[:] = []
513 continue
513 continue
514 for sd in subdirs:
514 for sd in subdirs:
515 ds = os.path.join(nd, sd +'/')
515 ds = os.path.join(nd, sd +'/')
516 if self.ignore(ds) or not match(ds):
516 if self.ignore(ds) or not match(ds):
517 subdirs.remove(sd)
517 subdirs.remove(sd)
518 subdirs.sort()
518 subdirs.sort()
519 fl.sort()
519 fl.sort()
520 for fn in fl:
520 for fn in fl:
521 fn = util.pconvert(os.path.join(d, fn))
521 fn = util.pconvert(os.path.join(d, fn))
522 yield 'f', fn
522 yield 'f', fn
523 elif stat.S_ISREG(st.st_mode):
523 elif stat.S_ISREG(st.st_mode):
524 yield 'f', ff
524 yield 'f', ff
525 else:
525 else:
526 kind = 'unknown'
526 kind = 'unknown'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
533 util.pathto(self.getcwd(), ff),
533 util.pathto(self.getcwd(), ff),
534 kind))
534 kind))
535
535
536 ks = dc.keys()
536 ks = dc.keys()
537 ks.sort()
537 ks.sort()
538 for k in ks:
538 for k in ks:
539 yield 'm', k
539 yield 'm', k
540
540
541 # yield only files that match: all in dirstate, others only if
541 # yield only files that match: all in dirstate, others only if
542 # not in .hgignore
542 # not in .hgignore
543
543
544 for src, fn in util.unique(traverse()):
544 for src, fn in util.unique(traverse()):
545 fn = util.normpath(fn)
545 fn = util.normpath(fn)
546 if seen(fn): continue
546 if seen(fn): continue
547 if fn not in dc and self.ignore(fn):
547 if fn not in dc and self.ignore(fn):
548 continue
548 continue
549 if match(fn):
549 if match(fn):
550 yield src, fn
550 yield src, fn
551
551
552 def changes(self, files=None, match=util.always):
552 def changes(self, files=None, match=util.always):
553 self.read()
553 self.read()
554 if not files:
554 if not files:
555 dc = self.map.copy()
555 dc = self.map.copy()
556 else:
556 else:
557 dc = self.filterfiles(files)
557 dc = self.filterfiles(files)
558 lookup, modified, added, unknown = [], [], [], []
558 lookup, modified, added, unknown = [], [], [], []
559 removed, deleted = [], []
559 removed, deleted = [], []
560
560
561 for src, fn in self.walk(files, match, dc=dc):
561 for src, fn in self.walk(files, match, dc=dc):
562 try:
562 try:
563 s = os.stat(os.path.join(self.root, fn))
563 s = os.stat(os.path.join(self.root, fn))
564 except OSError:
564 except OSError:
565 continue
565 continue
566 if not stat.S_ISREG(s.st_mode):
566 if not stat.S_ISREG(s.st_mode):
567 continue
567 continue
568 c = dc.get(fn)
568 c = dc.get(fn)
569 if c:
569 if c:
570 del dc[fn]
570 del dc[fn]
571 if c[0] == 'm':
571 if c[0] == 'm':
572 modified.append(fn)
572 modified.append(fn)
573 elif c[0] == 'a':
573 elif c[0] == 'a':
574 added.append(fn)
574 added.append(fn)
575 elif c[0] == 'r':
575 elif c[0] == 'r':
576 unknown.append(fn)
576 unknown.append(fn)
577 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
577 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
578 modified.append(fn)
578 modified.append(fn)
579 elif c[3] != s.st_mtime:
579 elif c[3] != s.st_mtime:
580 lookup.append(fn)
580 lookup.append(fn)
581 else:
581 else:
582 unknown.append(fn)
582 unknown.append(fn)
583
583
584 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
584 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
585 if c[0] == 'r':
585 if c[0] == 'r':
586 removed.append(fn)
586 removed.append(fn)
587 else:
587 else:
588 deleted.append(fn)
588 deleted.append(fn)
589 return (lookup, modified, added, removed + deleted, unknown)
589 return (lookup, modified, added, removed + deleted, unknown)
590
590
# used to avoid circular references so destructors work
def opener(base):
    root = base

    def o(path, mode="r"):
        """Open path relative to root.

        http:// bases get a range-capable reader; local writes create
        missing parent directories and break hardlinks first.
        """
        if root.startswith("http://"):
            return httprangereader.httprangereader(
                os.path.join(root, urllib.quote(path)))

        fname = os.path.join(root, path)

        mode += "b"  # binary mode, for that other OS

        if mode[0] != "r":
            try:
                st = os.stat(fname)
            except OSError:
                # file doesn't exist yet: make sure its directory does
                parent = os.path.dirname(fname)
                if not os.path.isdir(parent):
                    os.makedirs(parent)
            else:
                if st.st_nlink > 1:
                    # break the hardlink by copying, then rename over it
                    file(fname + ".tmp", "wb").write(file(fname, "rb").read())
                    util.rename(fname + ".tmp", fname)

        return file(fname, mode)

    return o
618
618
class RepoError(Exception):
    """Raised when a repository cannot be found or opened."""
620
620
class localrepository:
    """A Mercurial repository on local disk (or a thin handle on an
    http:// repository, which has no dirstate)."""

    def __init__(self, ui, path=None, create=0):
        """Open (or, with create=1, initialize) a repository.

        With no path, walk upward from the cwd looking for a .hg
        directory.  Raises RepoError when no repository is found.
        """
        self.remote = 0
        if path and path.startswith("http://"):
            self.remote = 1
            self.path = path
        else:
            if not path:
                # search upward from the cwd for a .hg directory
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    # dirname() is a fixpoint at the filesystem root
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = os.path.abspath(path)
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        # lazily-built tag caches; see tags() and nodetags()
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            try:
                # per-repository configuration, if present
                self.ui.readconfig(self.opener("hgrc"))
            except IOError: pass
659
659
660 def hook(self, name, **args):
660 def hook(self, name, **args):
661 s = self.ui.config("hooks", name)
661 s = self.ui.config("hooks", name)
662 if s:
662 if s:
663 self.ui.note("running hook %s: %s\n" % (name, s))
663 self.ui.note("running hook %s: %s\n" % (name, s))
664 old = {}
664 old = {}
665 for k, v in args.items():
665 for k, v in args.items():
666 k = k.upper()
666 k = k.upper()
667 old[k] = os.environ.get(k, None)
667 old[k] = os.environ.get(k, None)
668 os.environ[k] = v
668 os.environ[k] = v
669
669
670 r = os.system(s)
670 r = os.system(s)
671
671
672 for k, v in old.items():
672 for k, v in old.items():
673 if v != None:
673 if v != None:
674 os.environ[k] = v
674 os.environ[k] = v
675 else:
675 else:
676 del os.environ[k]
676 del os.environ[k]
677
677
678 if r:
678 if r:
679 self.ui.warn("abort: %s hook failed with status %d!\n" %
679 self.ui.warn("abort: %s hook failed with status %d!\n" %
680 (name, r))
680 (name, r))
681 return False
681 return False
682 return True
682 return True
683
683
    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}

            # record one tag entry; tolerates malformed node hashes by
            # storing an empty string instead of raising
            def addtag(self, k, n):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.revision(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                # presumably raised when no .hgtags filelog exists yet
                # -- TODO confirm which call raises KeyError here
                pass

            try:
                # repo-local tags override those from .hgtags
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            # 'tip' is always implicitly defined
            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
721
721
722 def tagslist(self):
722 def tagslist(self):
723 '''return a list of tags ordered by revision'''
723 '''return a list of tags ordered by revision'''
724 l = []
724 l = []
725 for t, n in self.tags().items():
725 for t, n in self.tags().items():
726 try:
726 try:
727 r = self.changelog.rev(n)
727 r = self.changelog.rev(n)
728 except:
728 except:
729 r = -2 # sort to the beginning of the list if unknown
729 r = -2 # sort to the beginning of the list if unknown
730 l.append((r,t,n))
730 l.append((r,t,n))
731 l.sort()
731 l.sort()
732 return [(t,n) for r,t,n in l]
732 return [(t,n) for r,t,n in l]
733
733
734 def nodetags(self, node):
734 def nodetags(self, node):
735 '''return the tags associated with a node'''
735 '''return the tags associated with a node'''
736 if not self.nodetagscache:
736 if not self.nodetagscache:
737 self.nodetagscache = {}
737 self.nodetagscache = {}
738 for t,n in self.tags().items():
738 for t,n in self.tags().items():
739 self.nodetagscache.setdefault(n,[]).append(t)
739 self.nodetagscache.setdefault(n,[]).append(t)
740 return self.nodetagscache.get(node, [])
740 return self.nodetagscache.get(node, [])
741
741
742 def lookup(self, key):
742 def lookup(self, key):
743 try:
743 try:
744 return self.tags()[key]
744 return self.tags()[key]
745 except KeyError:
745 except KeyError:
746 try:
746 try:
747 return self.changelog.lookup(key)
747 return self.changelog.lookup(key)
748 except:
748 except:
749 raise RepoError("unknown revision '%s'" % key)
749 raise RepoError("unknown revision '%s'" % key)
750
750
751 def dev(self):
751 def dev(self):
752 if self.remote: return -1
752 if self.remote: return -1
753 return os.stat(self.path).st_dev
753 return os.stat(self.path).st_dev
754
754
755 def local(self):
755 def local(self):
756 return not self.remote
756 return not self.remote
757
757
758 def join(self, f):
758 def join(self, f):
759 return os.path.join(self.path, f)
759 return os.path.join(self.path, f)
760
760
761 def wjoin(self, f):
761 def wjoin(self, f):
762 return os.path.join(self.root, f)
762 return os.path.join(self.root, f)
763
763
764 def file(self, f):
764 def file(self, f):
765 if f[0] == '/': f = f[1:]
765 if f[0] == '/': f = f[1:]
766 return filelog(self.opener, f)
766 return filelog(self.opener, f)
767
767
768 def getcwd(self):
768 def getcwd(self):
769 return self.dirstate.getcwd()
769 return self.dirstate.getcwd()
770
770
771 def wfile(self, f, mode='r'):
771 def wfile(self, f, mode='r'):
772 return self.wopener(f, mode)
772 return self.wopener(f, mode)
773
773
774 def transaction(self):
774 def transaction(self):
775 # save dirstate for undo
775 # save dirstate for undo
776 try:
776 try:
777 ds = self.opener("dirstate").read()
777 ds = self.opener("dirstate").read()
778 except IOError:
778 except IOError:
779 ds = ""
779 ds = ""
780 self.opener("journal.dirstate", "w").write(ds)
780 self.opener("journal.dirstate", "w").write(ds)
781
781
782 def after():
782 def after():
783 util.rename(self.join("journal"), self.join("undo"))
783 util.rename(self.join("journal"), self.join("undo"))
784 util.rename(self.join("journal.dirstate"),
784 util.rename(self.join("journal.dirstate"),
785 self.join("undo.dirstate"))
785 self.join("undo.dirstate"))
786
786
787 return transaction.transaction(self.ui.warn, self.opener,
787 return transaction.transaction(self.ui.warn, self.opener,
788 self.join("journal"), after)
788 self.join("journal"), after)
789
789
790 def recover(self):
790 def recover(self):
791 lock = self.lock()
791 lock = self.lock()
792 if os.path.exists(self.join("journal")):
792 if os.path.exists(self.join("journal")):
793 self.ui.status("rolling back interrupted transaction\n")
793 self.ui.status("rolling back interrupted transaction\n")
794 return transaction.rollback(self.opener, self.join("journal"))
794 return transaction.rollback(self.opener, self.join("journal"))
795 else:
795 else:
796 self.ui.warn("no interrupted transaction available\n")
796 self.ui.warn("no interrupted transaction available\n")
797
797
798 def undo(self):
798 def undo(self):
799 lock = self.lock()
799 lock = self.lock()
800 if os.path.exists(self.join("undo")):
800 if os.path.exists(self.join("undo")):
801 self.ui.status("rolling back last transaction\n")
801 self.ui.status("rolling back last transaction\n")
802 transaction.rollback(self.opener, self.join("undo"))
802 transaction.rollback(self.opener, self.join("undo"))
803 self.dirstate = None
803 self.dirstate = None
804 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
804 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
805 self.dirstate = dirstate(self.opener, self.ui, self.root)
805 self.dirstate = dirstate(self.opener, self.ui, self.root)
806 else:
806 else:
807 self.ui.warn("no undo information available\n")
807 self.ui.warn("no undo information available\n")
808
808
    def lock(self, wait = 1):
        """Acquire the repository lock.

        Tries a non-blocking acquire first; if the lock is held and
        *wait* is true, warns about the holder and blocks until it is
        available.  Re-raises lock.LockHeld when wait is false.
        """
        try:
            # first attempt is non-blocking so we can report the holder
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst
817
817
    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        """Commit *files* with explicit parents, bypassing the normal
        status checks (used e.g. by import/convert paths).

        Parents default to the dirstate parents; the dirstate is only
        updated when p1 matches the current first parent.
        """
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # only touch the dirstate when committing on top of the
        # working directory's current parent
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                fp1 = m1.get(f, nullid)
                fp2 = m2.get(f, nullid)

                # is the same revision on two branches of a merge?
                if fp2 == fp1:
                    fp2 = nullid

                if fp2 != nullid:
                    # is one parent an ancestor of the other?
                    fpa = r.ancestor(fp1, fp2)
                    if fpa == fp1:
                        fp1, fp2 = fp2, nullid
                    elif fpa == fp2:
                        fp2 = nullid

                # is the file unmodified from the parent?
                if t == r.read(fp1):
                    # record the proper existing parent in manifest
                    # no need to add a revision
                    mm[f] = fp1
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # file is gone from the working dir: treat as removed
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
887
887
    def commit(self, files = None, text = "", user = None, date = None,
               match = util.always, force=False):
        """Commit changes in the working directory.

        With *files*, commit only those (they must be tracked);
        otherwise commit everything changes() reports.  Runs the
        precommit/commit hooks, updates the dirstate, and returns the
        new changeset node, or None when nothing was committed.
        """
        commit = []
        remove = []
        changed = []

        if files:
            # explicit file list: classify by dirstate status
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes(match = match)
            commit = c + a
            remove = d

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 set) is committed even with no file changes
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status("nothing changed\n")
            return None

        if not self.hook("precommit"):
            return None

        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            # record copy metadata, if the dirstate has any
            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)

            # is the same revision on two branches of a merge?
            if fp2 == fp1:
                fp2 = nullid

            if fp2 != nullid:
                # is one parent an ancestor of the other?
                fpa = r.ancestor(fp1, fp2)
                if fpa == fp1:
                    fp1, fp2 = fp2, nullid
                elif fpa == fp2:
                    fp2 = nullid

            # is the file unmodified from the parent?
            if not meta and t == r.read(fp1):
                # record the proper existing parent in manifest
                # no need to add a revision
                new[f] = fp1
                continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            # no message given: open the user's editor with a summary
            edittext = ""
            if p2 != nullid:
                edittext += "HG: branch merge\n"
            edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in changed])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            if not changed and not remove:
                edittext += "HG: no files changed\n"
            edittext = self.ui.edit(edittext)
            # an empty message aborts the commit
            if not edittext.rstrip():
                return None
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return None
        return n
1010
1010
1011 def walk(self, node = None, files = [], match = util.always):
1011 def walk(self, node = None, files = [], match = util.always):
1012 if node:
1012 if node:
1013 for fn in self.manifest.read(self.changelog.read(node)[0]):
1013 for fn in self.manifest.read(self.changelog.read(node)[0]):
1014 if match(fn): yield 'm', fn
1014 if match(fn): yield 'm', fn
1015 else:
1015 else:
1016 for src, fn in self.dirstate.walk(files, match):
1016 for src, fn in self.dirstate.walk(files, match):
1017 yield src, fn
1017 yield src, fn
1018
1018
1019 def changes(self, node1 = None, node2 = None, files = [],
1019 def changes(self, node1 = None, node2 = None, files = [],
1020 match = util.always):
1020 match = util.always):
1021 mf2, u = None, []
1021 mf2, u = None, []
1022
1022
1023 def fcmp(fn, mf):
1023 def fcmp(fn, mf):
1024 t1 = self.wfile(fn).read()
1024 t1 = self.wfile(fn).read()
1025 t2 = self.file(fn).revision(mf.get(fn, nullid))
1025 t2 = self.file(fn).revision(mf.get(fn, nullid))
1026 return cmp(t1, t2)
1026 return cmp(t1, t2)
1027
1027
1028 def mfmatches(node):
1028 def mfmatches(node):
1029 mf = dict(self.manifest.read(node))
1029 mf = dict(self.manifest.read(node))
1030 for fn in mf.keys():
1030 for fn in mf.keys():
1031 if not match(fn):
1031 if not match(fn):
1032 del mf[fn]
1032 del mf[fn]
1033 return mf
1033 return mf
1034
1034
1035 # are we comparing the working directory?
1035 # are we comparing the working directory?
1036 if not node2:
1036 if not node2:
1037 l, c, a, d, u = self.dirstate.changes(files, match)
1037 l, c, a, d, u = self.dirstate.changes(files, match)
1038
1038
1039 # are we comparing working dir against its parent?
1039 # are we comparing working dir against its parent?
1040 if not node1:
1040 if not node1:
1041 if l:
1041 if l:
1042 # do a full compare of any files that might have changed
1042 # do a full compare of any files that might have changed
1043 change = self.changelog.read(self.dirstate.parents()[0])
1043 change = self.changelog.read(self.dirstate.parents()[0])
1044 mf2 = mfmatches(change[0])
1044 mf2 = mfmatches(change[0])
1045 for f in l:
1045 for f in l:
1046 if fcmp(f, mf2):
1046 if fcmp(f, mf2):
1047 c.append(f)
1047 c.append(f)
1048
1048
1049 for l in c, a, d, u:
1049 for l in c, a, d, u:
1050 l.sort()
1050 l.sort()
1051
1051
1052 return (c, a, d, u)
1052 return (c, a, d, u)
1053
1053
1054 # are we comparing working dir against non-tip?
1054 # are we comparing working dir against non-tip?
1055 # generate a pseudo-manifest for the working dir
1055 # generate a pseudo-manifest for the working dir
1056 if not node2:
1056 if not node2:
1057 if not mf2:
1057 if not mf2:
1058 change = self.changelog.read(self.dirstate.parents()[0])
1058 change = self.changelog.read(self.dirstate.parents()[0])
1059 mf2 = mfmatches(change[0])
1059 mf2 = mfmatches(change[0])
1060 for f in a + c + l:
1060 for f in a + c + l:
1061 mf2[f] = ""
1061 mf2[f] = ""
1062 for f in d:
1062 for f in d:
1063 if f in mf2: del mf2[f]
1063 if f in mf2: del mf2[f]
1064 else:
1064 else:
1065 change = self.changelog.read(node2)
1065 change = self.changelog.read(node2)
1066 mf2 = mfmatches(change[0])
1066 mf2 = mfmatches(change[0])
1067
1067
1068 # flush lists from dirstate before comparing manifests
1068 # flush lists from dirstate before comparing manifests
1069 c, a = [], []
1069 c, a = [], []
1070
1070
1071 change = self.changelog.read(node1)
1071 change = self.changelog.read(node1)
1072 mf1 = mfmatches(change[0])
1072 mf1 = mfmatches(change[0])
1073
1073
1074 for fn in mf2:
1074 for fn in mf2:
1075 if mf1.has_key(fn):
1075 if mf1.has_key(fn):
1076 if mf1[fn] != mf2[fn]:
1076 if mf1[fn] != mf2[fn]:
1077 if mf2[fn] != "" or fcmp(fn, mf1):
1077 if mf2[fn] != "" or fcmp(fn, mf1):
1078 c.append(fn)
1078 c.append(fn)
1079 del mf1[fn]
1079 del mf1[fn]
1080 else:
1080 else:
1081 a.append(fn)
1081 a.append(fn)
1082
1082
1083 d = mf1.keys()
1083 d = mf1.keys()
1084
1084
1085 for l in c, a, d, u:
1085 for l in c, a, d, u:
1086 l.sort()
1086 l.sort()
1087
1087
1088 return (c, a, d, u)
1088 return (c, a, d, u)
1089
1089
1090 def add(self, list):
1090 def add(self, list):
1091 for f in list:
1091 for f in list:
1092 p = self.wjoin(f)
1092 p = self.wjoin(f)
1093 if not os.path.exists(p):
1093 if not os.path.exists(p):
1094 self.ui.warn("%s does not exist!\n" % f)
1094 self.ui.warn("%s does not exist!\n" % f)
1095 elif not os.path.isfile(p):
1095 elif not os.path.isfile(p):
1096 self.ui.warn("%s not added: only files supported currently\n" % f)
1096 self.ui.warn("%s not added: only files supported currently\n" % f)
1097 elif self.dirstate.state(f) in 'an':
1097 elif self.dirstate.state(f) in 'an':
1098 self.ui.warn("%s already tracked!\n" % f)
1098 self.ui.warn("%s already tracked!\n" % f)
1099 else:
1099 else:
1100 self.dirstate.update([f], "a")
1100 self.dirstate.update([f], "a")
1101
1101
1102 def forget(self, list):
1102 def forget(self, list):
1103 for f in list:
1103 for f in list:
1104 if self.dirstate.state(f) not in 'ai':
1104 if self.dirstate.state(f) not in 'ai':
1105 self.ui.warn("%s not added!\n" % f)
1105 self.ui.warn("%s not added!\n" % f)
1106 else:
1106 else:
1107 self.dirstate.forget([f])
1107 self.dirstate.forget([f])
1108
1108
1109 def remove(self, list):
1109 def remove(self, list):
1110 for f in list:
1110 for f in list:
1111 p = self.wjoin(f)
1111 p = self.wjoin(f)
1112 if os.path.exists(p):
1112 if os.path.exists(p):
1113 self.ui.warn("%s still exists!\n" % f)
1113 self.ui.warn("%s still exists!\n" % f)
1114 elif self.dirstate.state(f) == 'a':
1114 elif self.dirstate.state(f) == 'a':
1115 self.ui.warn("%s never committed!\n" % f)
1115 self.ui.warn("%s never committed!\n" % f)
1116 self.dirstate.forget([f])
1116 self.dirstate.forget([f])
1117 elif f not in self.dirstate:
1117 elif f not in self.dirstate:
1118 self.ui.warn("%s not tracked!\n" % f)
1118 self.ui.warn("%s not tracked!\n" % f)
1119 else:
1119 else:
1120 self.dirstate.update([f], "r")
1120 self.dirstate.update([f], "r")
1121
1121
1122 def copy(self, source, dest):
1122 def copy(self, source, dest):
1123 p = self.wjoin(dest)
1123 p = self.wjoin(dest)
1124 if not os.path.exists(p):
1124 if not os.path.exists(p):
1125 self.ui.warn("%s does not exist!\n" % dest)
1125 self.ui.warn("%s does not exist!\n" % dest)
1126 elif not os.path.isfile(p):
1126 elif not os.path.isfile(p):
1127 self.ui.warn("copy failed: %s is not a file\n" % dest)
1127 self.ui.warn("copy failed: %s is not a file\n" % dest)
1128 else:
1128 else:
1129 if self.dirstate.state(dest) == '?':
1129 if self.dirstate.state(dest) == '?':
1130 self.dirstate.update([dest], "a")
1130 self.dirstate.update([dest], "a")
1131 self.dirstate.copy(source, dest)
1131 self.dirstate.copy(source, dest)
1132
1132
1133 def heads(self):
1133 def heads(self):
1134 return self.changelog.heads()
1134 return self.changelog.heads()
1135
1135
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph: a->b->c->d->e
    # \ /
    # aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Return {head: [branch tags]} as described in the comment
        block above."""
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                # resume at a previously queued second parent,
                # inheriting the tags found on the path so far
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # limit search depth once the requested branch
                        # has been reached
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            # visible(node) -> set (as dict) of nodes reachable from
            # *node* through the recorded branch links, memoized
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
1241
1241
1242 def branches(self, nodes):
1242 def branches(self, nodes):
1243 if not nodes: nodes = [self.changelog.tip()]
1243 if not nodes: nodes = [self.changelog.tip()]
1244 b = []
1244 b = []
1245 for n in nodes:
1245 for n in nodes:
1246 t = n
1246 t = n
1247 while n:
1247 while n:
1248 p = self.changelog.parents(n)
1248 p = self.changelog.parents(n)
1249 if p[1] != nullid or p[0] == nullid:
1249 if p[1] != nullid or p[0] == nullid:
1250 b.append((t, n, p[0], p[1]))
1250 b.append((t, n, p[0], p[1]))
1251 break
1251 break
1252 n = p[0]
1252 n = p[0]
1253 return b
1253 return b
1254
1254
1255 def between(self, pairs):
1255 def between(self, pairs):
1256 r = []
1256 r = []
1257
1257
1258 for top, bottom in pairs:
1258 for top, bottom in pairs:
1259 n, l, i = top, [], 0
1259 n, l, i = top, [], 0
1260 f = 1
1260 f = 1
1261
1261
1262 while n != bottom:
1262 while n != bottom:
1263 p = self.changelog.parents(n)[0]
1263 p = self.changelog.parents(n)[0]
1264 if i == f:
1264 if i == f:
1265 l.append(n)
1265 l.append(n)
1266 f = f * 2
1266 f = f * 2
1267 n = p
1267 n = p
1268 i += 1
1268 i += 1
1269
1269
1270 r.append(l)
1270 r.append(l)
1271
1271
1272 return r
1272 return r
1273
1273
1274 def newer(self, nodes):
1274 def newer(self, nodes):
1275 m = {}
1275 m = {}
1276 nl = []
1276 nl = []
1277 pm = {}
1277 pm = {}
1278 cl = self.changelog
1278 cl = self.changelog
1279 t = l = cl.count()
1279 t = l = cl.count()
1280
1280
1281 # find the lowest numbered node
1281 # find the lowest numbered node
1282 for n in nodes:
1282 for n in nodes:
1283 l = min(l, cl.rev(n))
1283 l = min(l, cl.rev(n))
1284 m[n] = 1
1284 m[n] = 1
1285
1285
1286 for i in xrange(l, t):
1286 for i in xrange(l, t):
1287 n = cl.node(i)
1287 n = cl.node(i)
1288 if n in m: # explicitly listed
1288 if n in m: # explicitly listed
1289 pm[n] = 1
1289 pm[n] = 1
1290 nl.append(n)
1290 nl.append(n)
1291 continue
1291 continue
1292 for p in cl.parents(n):
1292 for p in cl.parents(n):
1293 if p in pm: # parent listed
1293 if p in pm: # parent listed
1294 pm[n] = 1
1294 pm[n] = 1
1295 nl.append(n)
1295 nl.append(n)
1296 break
1296 break
1297
1297
1298 return nl
1298 return nl
1299
1299
1300 def findincoming(self, remote, base=None, heads=None):
1300 def findincoming(self, remote, base=None, heads=None):
1301 m = self.changelog.nodemap
1301 m = self.changelog.nodemap
1302 search = []
1302 search = []
1303 fetch = []
1303 fetch = []
1304 seen = {}
1304 seen = {}
1305 seenbranch = {}
1305 seenbranch = {}
1306 if base == None:
1306 if base == None:
1307 base = {}
1307 base = {}
1308
1308
1309 # assume we're closer to the tip than the root
1309 # assume we're closer to the tip than the root
1310 # and start by examining the heads
1310 # and start by examining the heads
1311 self.ui.status("searching for changes\n")
1311 self.ui.status("searching for changes\n")
1312
1312
1313 if not heads:
1313 if not heads:
1314 heads = remote.heads()
1314 heads = remote.heads()
1315
1315
1316 unknown = []
1316 unknown = []
1317 for h in heads:
1317 for h in heads:
1318 if h not in m:
1318 if h not in m:
1319 unknown.append(h)
1319 unknown.append(h)
1320 else:
1320 else:
1321 base[h] = 1
1321 base[h] = 1
1322
1322
1323 if not unknown:
1323 if not unknown:
1324 return None
1324 return None
1325
1325
1326 rep = {}
1326 rep = {}
1327 reqcnt = 0
1327 reqcnt = 0
1328
1328
1329 # search through remote branches
1329 # search through remote branches
1330 # a 'branch' here is a linear segment of history, with four parts:
1330 # a 'branch' here is a linear segment of history, with four parts:
1331 # head, root, first parent, second parent
1331 # head, root, first parent, second parent
1332 # (a branch always has two parents (or none) by definition)
1332 # (a branch always has two parents (or none) by definition)
1333 unknown = remote.branches(unknown)
1333 unknown = remote.branches(unknown)
1334 while unknown:
1334 while unknown:
1335 r = []
1335 r = []
1336 while unknown:
1336 while unknown:
1337 n = unknown.pop(0)
1337 n = unknown.pop(0)
1338 if n[0] in seen:
1338 if n[0] in seen:
1339 continue
1339 continue
1340
1340
1341 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1341 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1342 if n[0] == nullid:
1342 if n[0] == nullid:
1343 break
1343 break
1344 if n in seenbranch:
1344 if n in seenbranch:
1345 self.ui.debug("branch already found\n")
1345 self.ui.debug("branch already found\n")
1346 continue
1346 continue
1347 if n[1] and n[1] in m: # do we know the base?
1347 if n[1] and n[1] in m: # do we know the base?
1348 self.ui.debug("found incomplete branch %s:%s\n"
1348 self.ui.debug("found incomplete branch %s:%s\n"
1349 % (short(n[0]), short(n[1])))
1349 % (short(n[0]), short(n[1])))
1350 search.append(n) # schedule branch range for scanning
1350 search.append(n) # schedule branch range for scanning
1351 seenbranch[n] = 1
1351 seenbranch[n] = 1
1352 else:
1352 else:
1353 if n[1] not in seen and n[1] not in fetch:
1353 if n[1] not in seen and n[1] not in fetch:
1354 if n[2] in m and n[3] in m:
1354 if n[2] in m and n[3] in m:
1355 self.ui.debug("found new changeset %s\n" %
1355 self.ui.debug("found new changeset %s\n" %
1356 short(n[1]))
1356 short(n[1]))
1357 fetch.append(n[1]) # earliest unknown
1357 fetch.append(n[1]) # earliest unknown
1358 base[n[2]] = 1 # latest known
1358 base[n[2]] = 1 # latest known
1359 continue
1359 continue
1360
1360
1361 for a in n[2:4]:
1361 for a in n[2:4]:
1362 if a not in rep:
1362 if a not in rep:
1363 r.append(a)
1363 r.append(a)
1364 rep[a] = 1
1364 rep[a] = 1
1365
1365
1366 seen[n[0]] = 1
1366 seen[n[0]] = 1
1367
1367
1368 if r:
1368 if r:
1369 reqcnt += 1
1369 reqcnt += 1
1370 self.ui.debug("request %d: %s\n" %
1370 self.ui.debug("request %d: %s\n" %
1371 (reqcnt, " ".join(map(short, r))))
1371 (reqcnt, " ".join(map(short, r))))
1372 for p in range(0, len(r), 10):
1372 for p in range(0, len(r), 10):
1373 for b in remote.branches(r[p:p+10]):
1373 for b in remote.branches(r[p:p+10]):
1374 self.ui.debug("received %s:%s\n" %
1374 self.ui.debug("received %s:%s\n" %
1375 (short(b[0]), short(b[1])))
1375 (short(b[0]), short(b[1])))
1376 if b[0] not in m and b[0] not in seen:
1376 if b[0] not in m and b[0] not in seen:
1377 unknown.append(b)
1377 unknown.append(b)
1378
1378
1379 # do binary search on the branches we found
1379 # do binary search on the branches we found
1380 while search:
1380 while search:
1381 n = search.pop(0)
1381 n = search.pop(0)
1382 reqcnt += 1
1382 reqcnt += 1
1383 l = remote.between([(n[0], n[1])])[0]
1383 l = remote.between([(n[0], n[1])])[0]
1384 l.append(n[1])
1384 l.append(n[1])
1385 p = n[0]
1385 p = n[0]
1386 f = 1
1386 f = 1
1387 for i in l:
1387 for i in l:
1388 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1388 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1389 if i in m:
1389 if i in m:
1390 if f <= 2:
1390 if f <= 2:
1391 self.ui.debug("found new branch changeset %s\n" %
1391 self.ui.debug("found new branch changeset %s\n" %
1392 short(p))
1392 short(p))
1393 fetch.append(p)
1393 fetch.append(p)
1394 base[i] = 1
1394 base[i] = 1
1395 else:
1395 else:
1396 self.ui.debug("narrowed branch search to %s:%s\n"
1396 self.ui.debug("narrowed branch search to %s:%s\n"
1397 % (short(p), short(i)))
1397 % (short(p), short(i)))
1398 search.append((p, i))
1398 search.append((p, i))
1399 break
1399 break
1400 p, f = i, f * 2
1400 p, f = i, f * 2
1401
1401
1402 # sanity check our fetch list
1402 # sanity check our fetch list
1403 for f in fetch:
1403 for f in fetch:
1404 if f in m:
1404 if f in m:
1405 raise RepoError("already have changeset " + short(f[:4]))
1405 raise RepoError("already have changeset " + short(f[:4]))
1406
1406
1407 if base.keys() == [nullid]:
1407 if base.keys() == [nullid]:
1408 self.ui.warn("warning: pulling from an unrelated repository!\n")
1408 self.ui.warn("warning: pulling from an unrelated repository!\n")
1409
1409
1410 self.ui.note("adding new changesets starting at " +
1410 self.ui.note("adding new changesets starting at " +
1411 " ".join([short(f) for f in fetch]) + "\n")
1411 " ".join([short(f) for f in fetch]) + "\n")
1412
1412
1413 self.ui.debug("%d total queries\n" % reqcnt)
1413 self.ui.debug("%d total queries\n" % reqcnt)
1414
1414
1415 return fetch
1415 return fetch
1416
1416
1417 def findoutgoing(self, remote, base=None, heads=None):
1417 def findoutgoing(self, remote, base=None, heads=None):
1418 if base == None:
1418 if base == None:
1419 base = {}
1419 base = {}
1420 self.findincoming(remote, base, heads)
1420 self.findincoming(remote, base, heads)
1421
1421
1422 remain = dict.fromkeys(self.changelog.nodemap)
1422 remain = dict.fromkeys(self.changelog.nodemap)
1423
1423
1424 # prune everything remote has from the tree
1424 # prune everything remote has from the tree
1425 del remain[nullid]
1425 del remain[nullid]
1426 remove = base.keys()
1426 remove = base.keys()
1427 while remove:
1427 while remove:
1428 n = remove.pop(0)
1428 n = remove.pop(0)
1429 if n in remain:
1429 if n in remain:
1430 del remain[n]
1430 del remain[n]
1431 for p in self.changelog.parents(n):
1431 for p in self.changelog.parents(n):
1432 remove.append(p)
1432 remove.append(p)
1433
1433
1434 # find every node whose parents have been pruned
1434 # find every node whose parents have been pruned
1435 subset = []
1435 subset = []
1436 for n in remain:
1436 for n in remain:
1437 p1, p2 = self.changelog.parents(n)
1437 p1, p2 = self.changelog.parents(n)
1438 if p1 not in remain and p2 not in remain:
1438 if p1 not in remain and p2 not in remain:
1439 subset.append(n)
1439 subset.append(n)
1440
1440
1441 # this is the set of all roots we have to push
1441 # this is the set of all roots we have to push
1442 return subset
1442 return subset
1443
1443
1444 def pull(self, remote):
1444 def pull(self, remote):
1445 lock = self.lock()
1445 lock = self.lock()
1446
1446
1447 # if we have an empty repo, fetch everything
1447 # if we have an empty repo, fetch everything
1448 if self.changelog.tip() == nullid:
1448 if self.changelog.tip() == nullid:
1449 self.ui.status("requesting all changes\n")
1449 self.ui.status("requesting all changes\n")
1450 fetch = [nullid]
1450 fetch = [nullid]
1451 else:
1451 else:
1452 fetch = self.findincoming(remote)
1452 fetch = self.findincoming(remote)
1453
1453
1454 if not fetch:
1454 if not fetch:
1455 self.ui.status("no changes found\n")
1455 self.ui.status("no changes found\n")
1456 return 1
1456 return 1
1457
1457
1458 cg = remote.changegroup(fetch)
1458 cg = remote.changegroup(fetch)
1459 return self.addchangegroup(cg)
1459 return self.addchangegroup(cg)
1460
1460
1461 def push(self, remote, force=False):
1461 def push(self, remote, force=False):
1462 lock = remote.lock()
1462 lock = remote.lock()
1463
1463
1464 base = {}
1464 base = {}
1465 heads = remote.heads()
1465 heads = remote.heads()
1466 inc = self.findincoming(remote, base, heads)
1466 inc = self.findincoming(remote, base, heads)
1467 if not force and inc:
1467 if not force and inc:
1468 self.ui.warn("abort: unsynced remote changes!\n")
1468 self.ui.warn("abort: unsynced remote changes!\n")
1469 self.ui.status("(did you forget to sync? use push -f to force)\n")
1469 self.ui.status("(did you forget to sync? use push -f to force)\n")
1470 return 1
1470 return 1
1471
1471
1472 update = self.findoutgoing(remote, base)
1472 update = self.findoutgoing(remote, base)
1473 if not update:
1473 if not update:
1474 self.ui.status("no changes found\n")
1474 self.ui.status("no changes found\n")
1475 return 1
1475 return 1
1476 elif not force:
1476 elif not force:
1477 if len(heads) < len(self.changelog.heads()):
1477 if len(heads) < len(self.changelog.heads()):
1478 self.ui.warn("abort: push creates new remote branches!\n")
1478 self.ui.warn("abort: push creates new remote branches!\n")
1479 self.ui.status("(did you forget to merge?" +
1479 self.ui.status("(did you forget to merge?" +
1480 " use push -f to force)\n")
1480 " use push -f to force)\n")
1481 return 1
1481 return 1
1482
1482
1483 cg = self.changegroup(update)
1483 cg = self.changegroup(update)
1484 return remote.addchangegroup(cg)
1484 return remote.addchangegroup(cg)
1485
1485
1486 def changegroup(self, basenodes):
1486 def changegroup(self, basenodes):
1487 class genread:
1487 class genread:
1488 def __init__(self, generator):
1488 def __init__(self, generator):
1489 self.g = generator
1489 self.g = generator
1490 self.buf = ""
1490 self.buf = ""
1491 def fillbuf(self):
1491 def fillbuf(self):
1492 self.buf += "".join(self.g)
1492 self.buf += "".join(self.g)
1493
1493
1494 def read(self, l):
1494 def read(self, l):
1495 while l > len(self.buf):
1495 while l > len(self.buf):
1496 try:
1496 try:
1497 self.buf += self.g.next()
1497 self.buf += self.g.next()
1498 except StopIteration:
1498 except StopIteration:
1499 break
1499 break
1500 d, self.buf = self.buf[:l], self.buf[l:]
1500 d, self.buf = self.buf[:l], self.buf[l:]
1501 return d
1501 return d
1502
1502
1503 def gengroup():
1503 def gengroup():
1504 nodes = self.newer(basenodes)
1504 nodes = self.newer(basenodes)
1505
1505
1506 # construct the link map
1506 # construct the link map
1507 linkmap = {}
1507 linkmap = {}
1508 for n in nodes:
1508 for n in nodes:
1509 linkmap[self.changelog.rev(n)] = n
1509 linkmap[self.changelog.rev(n)] = n
1510
1510
1511 # construct a list of all changed files
1511 # construct a list of all changed files
1512 changed = {}
1512 changed = {}
1513 for n in nodes:
1513 for n in nodes:
1514 c = self.changelog.read(n)
1514 c = self.changelog.read(n)
1515 for f in c[3]:
1515 for f in c[3]:
1516 changed[f] = 1
1516 changed[f] = 1
1517 changed = changed.keys()
1517 changed = changed.keys()
1518 changed.sort()
1518 changed.sort()
1519
1519
1520 # the changegroup is changesets + manifests + all file revs
1520 # the changegroup is changesets + manifests + all file revs
1521 revs = [ self.changelog.rev(n) for n in nodes ]
1521 revs = [ self.changelog.rev(n) for n in nodes ]
1522
1522
1523 for y in self.changelog.group(linkmap): yield y
1523 for y in self.changelog.group(linkmap): yield y
1524 for y in self.manifest.group(linkmap): yield y
1524 for y in self.manifest.group(linkmap): yield y
1525 for f in changed:
1525 for f in changed:
1526 yield struct.pack(">l", len(f) + 4) + f
1526 yield struct.pack(">l", len(f) + 4) + f
1527 g = self.file(f).group(linkmap)
1527 g = self.file(f).group(linkmap)
1528 for y in g:
1528 for y in g:
1529 yield y
1529 yield y
1530
1530
1531 yield struct.pack(">l", 0)
1531 yield struct.pack(">l", 0)
1532
1532
1533 return genread(gengroup())
1533 return genread(gengroup())
1534
1534
1535 def addchangegroup(self, source):
1535 def addchangegroup(self, source):
1536
1536
1537 def getchunk():
1537 def getchunk():
1538 d = source.read(4)
1538 d = source.read(4)
1539 if not d: return ""
1539 if not d: return ""
1540 l = struct.unpack(">l", d)[0]
1540 l = struct.unpack(">l", d)[0]
1541 if l <= 4: return ""
1541 if l <= 4: return ""
1542 return source.read(l - 4)
1542 return source.read(l - 4)
1543
1543
1544 def getgroup():
1544 def getgroup():
1545 while 1:
1545 while 1:
1546 c = getchunk()
1546 c = getchunk()
1547 if not c: break
1547 if not c: break
1548 yield c
1548 yield c
1549
1549
1550 def csmap(x):
1550 def csmap(x):
1551 self.ui.debug("add changeset %s\n" % short(x))
1551 self.ui.debug("add changeset %s\n" % short(x))
1552 return self.changelog.count()
1552 return self.changelog.count()
1553
1553
1554 def revmap(x):
1554 def revmap(x):
1555 return self.changelog.rev(x)
1555 return self.changelog.rev(x)
1556
1556
1557 if not source: return
1557 if not source: return
1558 changesets = files = revisions = 0
1558 changesets = files = revisions = 0
1559
1559
1560 tr = self.transaction()
1560 tr = self.transaction()
1561
1561
1562 # pull off the changeset group
1562 # pull off the changeset group
1563 self.ui.status("adding changesets\n")
1563 self.ui.status("adding changesets\n")
1564 co = self.changelog.tip()
1564 co = self.changelog.tip()
1565 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1565 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1566 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1566 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1567
1567
1568 # pull off the manifest group
1568 # pull off the manifest group
1569 self.ui.status("adding manifests\n")
1569 self.ui.status("adding manifests\n")
1570 mm = self.manifest.tip()
1570 mm = self.manifest.tip()
1571 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1571 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1572
1572
1573 # process the files
1573 # process the files
1574 self.ui.status("adding file changes\n")
1574 self.ui.status("adding file changes\n")
1575 while 1:
1575 while 1:
1576 f = getchunk()
1576 f = getchunk()
1577 if not f: break
1577 if not f: break
1578 self.ui.debug("adding %s revisions\n" % f)
1578 self.ui.debug("adding %s revisions\n" % f)
1579 fl = self.file(f)
1579 fl = self.file(f)
1580 o = fl.count()
1580 o = fl.count()
1581 n = fl.addgroup(getgroup(), revmap, tr)
1581 n = fl.addgroup(getgroup(), revmap, tr)
1582 revisions += fl.count() - o
1582 revisions += fl.count() - o
1583 files += 1
1583 files += 1
1584
1584
1585 self.ui.status(("added %d changesets" +
1585 self.ui.status(("added %d changesets" +
1586 " with %d changes to %d files\n")
1586 " with %d changes to %d files\n")
1587 % (changesets, revisions, files))
1587 % (changesets, revisions, files))
1588
1588
1589 tr.close()
1589 tr.close()
1590
1590
1591 if not self.hook("changegroup"):
1591 if not self.hook("changegroup"):
1592 return 1
1592 return 1
1593
1593
1594 return
1594 return
1595
1595
1596 def update(self, node, allow=False, force=False, choose=None,
1596 def update(self, node, allow=False, force=False, choose=None,
1597 moddirstate=True):
1597 moddirstate=True):
1598 pl = self.dirstate.parents()
1598 pl = self.dirstate.parents()
1599 if not force and pl[1] != nullid:
1599 if not force and pl[1] != nullid:
1600 self.ui.warn("aborting: outstanding uncommitted merges\n")
1600 self.ui.warn("aborting: outstanding uncommitted merges\n")
1601 return 1
1601 return 1
1602
1602
1603 p1, p2 = pl[0], node
1603 p1, p2 = pl[0], node
1604 pa = self.changelog.ancestor(p1, p2)
1604 pa = self.changelog.ancestor(p1, p2)
1605 m1n = self.changelog.read(p1)[0]
1605 m1n = self.changelog.read(p1)[0]
1606 m2n = self.changelog.read(p2)[0]
1606 m2n = self.changelog.read(p2)[0]
1607 man = self.manifest.ancestor(m1n, m2n)
1607 man = self.manifest.ancestor(m1n, m2n)
1608 m1 = self.manifest.read(m1n)
1608 m1 = self.manifest.read(m1n)
1609 mf1 = self.manifest.readflags(m1n)
1609 mf1 = self.manifest.readflags(m1n)
1610 m2 = self.manifest.read(m2n)
1610 m2 = self.manifest.read(m2n)
1611 mf2 = self.manifest.readflags(m2n)
1611 mf2 = self.manifest.readflags(m2n)
1612 ma = self.manifest.read(man)
1612 ma = self.manifest.read(man)
1613 mfa = self.manifest.readflags(man)
1613 mfa = self.manifest.readflags(man)
1614
1614
1615 (c, a, d, u) = self.changes()
1615 (c, a, d, u) = self.changes()
1616
1616
1617 # is this a jump, or a merge? i.e. is there a linear path
1617 # is this a jump, or a merge? i.e. is there a linear path
1618 # from p1 to p2?
1618 # from p1 to p2?
1619 linear_path = (pa == p1 or pa == p2)
1619 linear_path = (pa == p1 or pa == p2)
1620
1620
1621 # resolve the manifest to determine which files
1621 # resolve the manifest to determine which files
1622 # we care about merging
1622 # we care about merging
1623 self.ui.note("resolving manifests\n")
1623 self.ui.note("resolving manifests\n")
1624 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1624 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1625 (force, allow, moddirstate, linear_path))
1625 (force, allow, moddirstate, linear_path))
1626 self.ui.debug(" ancestor %s local %s remote %s\n" %
1626 self.ui.debug(" ancestor %s local %s remote %s\n" %
1627 (short(man), short(m1n), short(m2n)))
1627 (short(man), short(m1n), short(m2n)))
1628
1628
1629 merge = {}
1629 merge = {}
1630 get = {}
1630 get = {}
1631 remove = []
1631 remove = []
1632
1632
1633 # construct a working dir manifest
1633 # construct a working dir manifest
1634 mw = m1.copy()
1634 mw = m1.copy()
1635 mfw = mf1.copy()
1635 mfw = mf1.copy()
1636 umap = dict.fromkeys(u)
1636 umap = dict.fromkeys(u)
1637
1637
1638 for f in a + c + u:
1638 for f in a + c + u:
1639 mw[f] = ""
1639 mw[f] = ""
1640 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1640 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1641
1641
1642 for f in d:
1642 for f in d:
1643 if f in mw: del mw[f]
1643 if f in mw: del mw[f]
1644
1644
1645 # If we're jumping between revisions (as opposed to merging),
1645 # If we're jumping between revisions (as opposed to merging),
1646 # and if neither the working directory nor the target rev has
1646 # and if neither the working directory nor the target rev has
1647 # the file, then we need to remove it from the dirstate, to
1647 # the file, then we need to remove it from the dirstate, to
1648 # prevent the dirstate from listing the file when it is no
1648 # prevent the dirstate from listing the file when it is no
1649 # longer in the manifest.
1649 # longer in the manifest.
1650 if moddirstate and linear_path and f not in m2:
1650 if moddirstate and linear_path and f not in m2:
1651 self.dirstate.forget((f,))
1651 self.dirstate.forget((f,))
1652
1652
1653 # Compare manifests
1653 # Compare manifests
1654 for f, n in mw.iteritems():
1654 for f, n in mw.iteritems():
1655 if choose and not choose(f): continue
1655 if choose and not choose(f): continue
1656 if f in m2:
1656 if f in m2:
1657 s = 0
1657 s = 0
1658
1658
1659 # is the wfile new since m1, and match m2?
1659 # is the wfile new since m1, and match m2?
1660 if f not in m1:
1660 if f not in m1:
1661 t1 = self.wfile(f).read()
1661 t1 = self.wfile(f).read()
1662 t2 = self.file(f).revision(m2[f])
1662 t2 = self.file(f).revision(m2[f])
1663 if cmp(t1, t2) == 0:
1663 if cmp(t1, t2) == 0:
1664 n = m2[f]
1664 n = m2[f]
1665 del t1, t2
1665 del t1, t2
1666
1666
1667 # are files different?
1667 # are files different?
1668 if n != m2[f]:
1668 if n != m2[f]:
1669 a = ma.get(f, nullid)
1669 a = ma.get(f, nullid)
1670 # are both different from the ancestor?
1670 # are both different from the ancestor?
1671 if n != a and m2[f] != a:
1671 if n != a and m2[f] != a:
1672 self.ui.debug(" %s versions differ, resolve\n" % f)
1672 self.ui.debug(" %s versions differ, resolve\n" % f)
1673 # merge executable bits
1673 # merge executable bits
1674 # "if we changed or they changed, change in merge"
1674 # "if we changed or they changed, change in merge"
1675 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1675 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1676 mode = ((a^b) | (a^c)) ^ a
1676 mode = ((a^b) | (a^c)) ^ a
1677 merge[f] = (m1.get(f, nullid), m2[f], mode)
1677 merge[f] = (m1.get(f, nullid), m2[f], mode)
1678 s = 1
1678 s = 1
1679 # are we clobbering?
1679 # are we clobbering?
1680 # is remote's version newer?
1680 # is remote's version newer?
1681 # or are we going back in time?
1681 # or are we going back in time?
1682 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1682 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1683 self.ui.debug(" remote %s is newer, get\n" % f)
1683 self.ui.debug(" remote %s is newer, get\n" % f)
1684 get[f] = m2[f]
1684 get[f] = m2[f]
1685 s = 1
1685 s = 1
1686 elif f in umap:
1686 elif f in umap:
1687 # this unknown file is the same as the checkout
1687 # this unknown file is the same as the checkout
1688 get[f] = m2[f]
1688 get[f] = m2[f]
1689
1689
1690 if not s and mfw[f] != mf2[f]:
1690 if not s and mfw[f] != mf2[f]:
1691 if force:
1691 if force:
1692 self.ui.debug(" updating permissions for %s\n" % f)
1692 self.ui.debug(" updating permissions for %s\n" % f)
1693 util.set_exec(self.wjoin(f), mf2[f])
1693 util.set_exec(self.wjoin(f), mf2[f])
1694 else:
1694 else:
1695 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1695 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1696 mode = ((a^b) | (a^c)) ^ a
1696 mode = ((a^b) | (a^c)) ^ a
1697 if mode != b:
1697 if mode != b:
1698 self.ui.debug(" updating permissions for %s\n" % f)
1698 self.ui.debug(" updating permissions for %s\n" % f)
1699 util.set_exec(self.wjoin(f), mode)
1699 util.set_exec(self.wjoin(f), mode)
1700 del m2[f]
1700 del m2[f]
1701 elif f in ma:
1701 elif f in ma:
1702 if n != ma[f]:
1702 if n != ma[f]:
1703 r = "d"
1703 r = "d"
1704 if not force and (linear_path or allow):
1704 if not force and (linear_path or allow):
1705 r = self.ui.prompt(
1705 r = self.ui.prompt(
1706 (" local changed %s which remote deleted\n" % f) +
1706 (" local changed %s which remote deleted\n" % f) +
1707 "(k)eep or (d)elete?", "[kd]", "k")
1707 "(k)eep or (d)elete?", "[kd]", "k")
1708 if r == "d":
1708 if r == "d":
1709 remove.append(f)
1709 remove.append(f)
1710 else:
1710 else:
1711 self.ui.debug("other deleted %s\n" % f)
1711 self.ui.debug("other deleted %s\n" % f)
1712 remove.append(f) # other deleted it
1712 remove.append(f) # other deleted it
1713 else:
1713 else:
1714 if n == m1.get(f, nullid): # same as parent
1714 if n == m1.get(f, nullid): # same as parent
1715 if p2 == pa: # going backwards?
1715 if p2 == pa: # going backwards?
1716 self.ui.debug("remote deleted %s\n" % f)
1716 self.ui.debug("remote deleted %s\n" % f)
1717 remove.append(f)
1717 remove.append(f)
1718 else:
1718 else:
1719 self.ui.debug("local created %s, keeping\n" % f)
1719 self.ui.debug("local created %s, keeping\n" % f)
1720 else:
1720 else:
1721 self.ui.debug("working dir created %s, keeping\n" % f)
1721 self.ui.debug("working dir created %s, keeping\n" % f)
1722
1722
1723 for f, n in m2.iteritems():
1723 for f, n in m2.iteritems():
1724 if choose and not choose(f): continue
1724 if choose and not choose(f): continue
1725 if f[0] == "/": continue
1725 if f[0] == "/": continue
1726 if f in ma and n != ma[f]:
1726 if f in ma and n != ma[f]:
1727 r = "k"
1727 r = "k"
1728 if not force and (linear_path or allow):
1728 if not force and (linear_path or allow):
1729 r = self.ui.prompt(
1729 r = self.ui.prompt(
1730 ("remote changed %s which local deleted\n" % f) +
1730 ("remote changed %s which local deleted\n" % f) +
1731 "(k)eep or (d)elete?", "[kd]", "k")
1731 "(k)eep or (d)elete?", "[kd]", "k")
1732 if r == "k": get[f] = n
1732 if r == "k": get[f] = n
1733 elif f not in ma:
1733 elif f not in ma:
1734 self.ui.debug("remote created %s\n" % f)
1734 self.ui.debug("remote created %s\n" % f)
1735 get[f] = n
1735 get[f] = n
1736 else:
1736 else:
1737 if force or p2 == pa: # going backwards?
1737 if force or p2 == pa: # going backwards?
1738 self.ui.debug("local deleted %s, recreating\n" % f)
1738 self.ui.debug("local deleted %s, recreating\n" % f)
1739 get[f] = n
1739 get[f] = n
1740 else:
1740 else:
1741 self.ui.debug("local deleted %s\n" % f)
1741 self.ui.debug("local deleted %s\n" % f)
1742
1742
1743 del mw, m1, m2, ma
1743 del mw, m1, m2, ma
1744
1744
1745 if force:
1745 if force:
1746 for f in merge:
1746 for f in merge:
1747 get[f] = merge[f][1]
1747 get[f] = merge[f][1]
1748 merge = {}
1748 merge = {}
1749
1749
1750 if linear_path or force:
1750 if linear_path or force:
1751 # we don't need to do any magic, just jump to the new rev
1751 # we don't need to do any magic, just jump to the new rev
1752 mode = 'n'
1752 branch_merge = False
1753 p1, p2 = p2, nullid
1753 p1, p2 = p2, nullid
1754 else:
1754 else:
1755 if not allow:
1755 if not allow:
1756 self.ui.status("this update spans a branch" +
1756 self.ui.status("this update spans a branch" +
1757 " affecting the following files:\n")
1757 " affecting the following files:\n")
1758 fl = merge.keys() + get.keys()
1758 fl = merge.keys() + get.keys()
1759 fl.sort()
1759 fl.sort()
1760 for f in fl:
1760 for f in fl:
1761 cf = ""
1761 cf = ""
1762 if f in merge: cf = " (resolve)"
1762 if f in merge: cf = " (resolve)"
1763 self.ui.status(" %s%s\n" % (f, cf))
1763 self.ui.status(" %s%s\n" % (f, cf))
1764 self.ui.warn("aborting update spanning branches!\n")
1764 self.ui.warn("aborting update spanning branches!\n")
1765 self.ui.status("(use update -m to merge across branches" +
1765 self.ui.status("(use update -m to merge across branches" +
1766 " or -C to lose changes)\n")
1766 " or -C to lose changes)\n")
1767 return 1
1767 return 1
1768 mode = 'm'
1768 branch_merge = True
1769
1769
1770 if moddirstate:
1770 if moddirstate:
1771 self.dirstate.setparents(p1, p2)
1771 self.dirstate.setparents(p1, p2)
1772
1772
1773 # get the files we don't need to change
1773 # get the files we don't need to change
1774 files = get.keys()
1774 files = get.keys()
1775 files.sort()
1775 files.sort()
1776 for f in files:
1776 for f in files:
1777 if f[0] == "/": continue
1777 if f[0] == "/": continue
1778 self.ui.note("getting %s\n" % f)
1778 self.ui.note("getting %s\n" % f)
1779 t = self.file(f).read(get[f])
1779 t = self.file(f).read(get[f])
1780 try:
1780 try:
1781 self.wfile(f, "w").write(t)
1781 self.wfile(f, "w").write(t)
1782 except IOError:
1782 except IOError:
1783 os.makedirs(os.path.dirname(self.wjoin(f)))
1783 os.makedirs(os.path.dirname(self.wjoin(f)))
1784 self.wfile(f, "w").write(t)
1784 self.wfile(f, "w").write(t)
1785 util.set_exec(self.wjoin(f), mf2[f])
1785 util.set_exec(self.wjoin(f), mf2[f])
1786 if moddirstate:
1786 if moddirstate:
1787 if mode == 'm':
1787 if branch_merge:
1788 self.dirstate.update([f], 'n', st_mtime=0)
1788 self.dirstate.update([f], 'n', st_mtime=-1)
1789 else:
1789 else:
1790 self.dirstate.update([f], 'n')
1790 self.dirstate.update([f], 'n')
1791
1791
1792 # merge the tricky bits
1792 # merge the tricky bits
1793 files = merge.keys()
1793 files = merge.keys()
1794 files.sort()
1794 files.sort()
1795 for f in files:
1795 for f in files:
1796 self.ui.status("merging %s\n" % f)
1796 self.ui.status("merging %s\n" % f)
1797 m, o, flag = merge[f]
1797 my, other, flag = merge[f]
1798 self.merge3(f, m, o)
1798 self.merge3(f, my, other)
1799 util.set_exec(self.wjoin(f), flag)
1799 util.set_exec(self.wjoin(f), flag)
1800 if moddirstate:
1800 if moddirstate:
1801 if mode == 'm':
1801 if branch_merge:
1802 # only update dirstate on branch merge, otherwise we
1802 # We've done a branch merge, mark this file as merged
1803 # could mark files with changes as unchanged
1803 # so that we properly record the merger later
1804 self.dirstate.update([f], mode)
1804 self.dirstate.update([f], 'm')
1805 elif p2 == nullid:
1806 # update dirstate from parent1's manifest
1807 m1n = self.changelog.read(p1)[0]
1808 m1 = self.manifest.read(m1n)
1809 f_len = len(self.file(f).read(m1[f]))
1810 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1811 else:
1805 else:
1812 self.ui.warn("Second parent without branch merge!?\n"
1806 # We've update-merged a locally modified file, so
1813 "Dirstate for file %s may be wrong.\n" % f)
1807 # we set the dirstate to emulate a normal checkout
1808 # of that file some time in the past. Thus our
1809 # merge will appear as a normal local file
1810 # modification.
1811 f_len = len(self.file(f).read(other))
1812 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1814
1813
1815 remove.sort()
1814 remove.sort()
1816 for f in remove:
1815 for f in remove:
1817 self.ui.note("removing %s\n" % f)
1816 self.ui.note("removing %s\n" % f)
1818 try:
1817 try:
1819 os.unlink(self.wjoin(f))
1818 os.unlink(self.wjoin(f))
1820 except OSError, inst:
1819 except OSError, inst:
1821 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1820 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1822 # try removing directories that might now be empty
1821 # try removing directories that might now be empty
1823 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1822 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1824 except: pass
1823 except: pass
1825 if moddirstate:
1824 if moddirstate:
1826 if mode == 'n':
1825 if branch_merge:
1826 self.dirstate.update(remove, 'r')
1827 else:
1827 self.dirstate.forget(remove)
1828 self.dirstate.forget(remove)
1828 else:
1829 self.dirstate.update(remove, 'r')
1830
1829
1831 def merge3(self, fn, my, other):
1830 def merge3(self, fn, my, other):
1832 """perform a 3-way merge in the working directory"""
1831 """perform a 3-way merge in the working directory"""
1833
1832
1834 def temp(prefix, node):
1833 def temp(prefix, node):
1835 pre = "%s~%s." % (os.path.basename(fn), prefix)
1834 pre = "%s~%s." % (os.path.basename(fn), prefix)
1836 (fd, name) = tempfile.mkstemp("", pre)
1835 (fd, name) = tempfile.mkstemp("", pre)
1837 f = os.fdopen(fd, "wb")
1836 f = os.fdopen(fd, "wb")
1838 f.write(fl.revision(node))
1837 f.write(fl.revision(node))
1839 f.close()
1838 f.close()
1840 return name
1839 return name
1841
1840
1842 fl = self.file(fn)
1841 fl = self.file(fn)
1843 base = fl.ancestor(my, other)
1842 base = fl.ancestor(my, other)
1844 a = self.wjoin(fn)
1843 a = self.wjoin(fn)
1845 b = temp("base", base)
1844 b = temp("base", base)
1846 c = temp("other", other)
1845 c = temp("other", other)
1847
1846
1848 self.ui.note("resolving %s\n" % fn)
1847 self.ui.note("resolving %s\n" % fn)
1849 self.ui.debug("file %s: other %s ancestor %s\n" %
1848 self.ui.debug("file %s: other %s ancestor %s\n" %
1850 (fn, short(other), short(base)))
1849 (fn, short(other), short(base)))
1851
1850
1852 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1851 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1853 or "hgmerge")
1852 or "hgmerge")
1854 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1853 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1855 if r:
1854 if r:
1856 self.ui.warn("merging %s failed!\n" % fn)
1855 self.ui.warn("merging %s failed!\n" % fn)
1857
1856
1858 os.unlink(b)
1857 os.unlink(b)
1859 os.unlink(c)
1858 os.unlink(c)
1860
1859
1861 def verify(self):
1860 def verify(self):
1862 filelinkrevs = {}
1861 filelinkrevs = {}
1863 filenodes = {}
1862 filenodes = {}
1864 changesets = revisions = files = 0
1863 changesets = revisions = files = 0
1865 errors = 0
1864 errors = 0
1866
1865
1867 seen = {}
1866 seen = {}
1868 self.ui.status("checking changesets\n")
1867 self.ui.status("checking changesets\n")
1869 for i in range(self.changelog.count()):
1868 for i in range(self.changelog.count()):
1870 changesets += 1
1869 changesets += 1
1871 n = self.changelog.node(i)
1870 n = self.changelog.node(i)
1872 if n in seen:
1871 if n in seen:
1873 self.ui.warn("duplicate changeset at revision %d\n" % i)
1872 self.ui.warn("duplicate changeset at revision %d\n" % i)
1874 errors += 1
1873 errors += 1
1875 seen[n] = 1
1874 seen[n] = 1
1876
1875
1877 for p in self.changelog.parents(n):
1876 for p in self.changelog.parents(n):
1878 if p not in self.changelog.nodemap:
1877 if p not in self.changelog.nodemap:
1879 self.ui.warn("changeset %s has unknown parent %s\n" %
1878 self.ui.warn("changeset %s has unknown parent %s\n" %
1880 (short(n), short(p)))
1879 (short(n), short(p)))
1881 errors += 1
1880 errors += 1
1882 try:
1881 try:
1883 changes = self.changelog.read(n)
1882 changes = self.changelog.read(n)
1884 except Exception, inst:
1883 except Exception, inst:
1885 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1884 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1886 errors += 1
1885 errors += 1
1887
1886
1888 for f in changes[3]:
1887 for f in changes[3]:
1889 filelinkrevs.setdefault(f, []).append(i)
1888 filelinkrevs.setdefault(f, []).append(i)
1890
1889
1891 seen = {}
1890 seen = {}
1892 self.ui.status("checking manifests\n")
1891 self.ui.status("checking manifests\n")
1893 for i in range(self.manifest.count()):
1892 for i in range(self.manifest.count()):
1894 n = self.manifest.node(i)
1893 n = self.manifest.node(i)
1895 if n in seen:
1894 if n in seen:
1896 self.ui.warn("duplicate manifest at revision %d\n" % i)
1895 self.ui.warn("duplicate manifest at revision %d\n" % i)
1897 errors += 1
1896 errors += 1
1898 seen[n] = 1
1897 seen[n] = 1
1899
1898
1900 for p in self.manifest.parents(n):
1899 for p in self.manifest.parents(n):
1901 if p not in self.manifest.nodemap:
1900 if p not in self.manifest.nodemap:
1902 self.ui.warn("manifest %s has unknown parent %s\n" %
1901 self.ui.warn("manifest %s has unknown parent %s\n" %
1903 (short(n), short(p)))
1902 (short(n), short(p)))
1904 errors += 1
1903 errors += 1
1905
1904
1906 try:
1905 try:
1907 delta = mdiff.patchtext(self.manifest.delta(n))
1906 delta = mdiff.patchtext(self.manifest.delta(n))
1908 except KeyboardInterrupt:
1907 except KeyboardInterrupt:
1909 self.ui.warn("aborted")
1908 self.ui.warn("aborted")
1910 sys.exit(0)
1909 sys.exit(0)
1911 except Exception, inst:
1910 except Exception, inst:
1912 self.ui.warn("unpacking manifest %s: %s\n"
1911 self.ui.warn("unpacking manifest %s: %s\n"
1913 % (short(n), inst))
1912 % (short(n), inst))
1914 errors += 1
1913 errors += 1
1915
1914
1916 ff = [ l.split('\0') for l in delta.splitlines() ]
1915 ff = [ l.split('\0') for l in delta.splitlines() ]
1917 for f, fn in ff:
1916 for f, fn in ff:
1918 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1917 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1919
1918
1920 self.ui.status("crosschecking files in changesets and manifests\n")
1919 self.ui.status("crosschecking files in changesets and manifests\n")
1921 for f in filenodes:
1920 for f in filenodes:
1922 if f not in filelinkrevs:
1921 if f not in filelinkrevs:
1923 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1922 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1924 errors += 1
1923 errors += 1
1925
1924
1926 for f in filelinkrevs:
1925 for f in filelinkrevs:
1927 if f not in filenodes:
1926 if f not in filenodes:
1928 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1927 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1929 errors += 1
1928 errors += 1
1930
1929
1931 self.ui.status("checking files\n")
1930 self.ui.status("checking files\n")
1932 ff = filenodes.keys()
1931 ff = filenodes.keys()
1933 ff.sort()
1932 ff.sort()
1934 for f in ff:
1933 for f in ff:
1935 if f == "/dev/null": continue
1934 if f == "/dev/null": continue
1936 files += 1
1935 files += 1
1937 fl = self.file(f)
1936 fl = self.file(f)
1938 nodes = { nullid: 1 }
1937 nodes = { nullid: 1 }
1939 seen = {}
1938 seen = {}
1940 for i in range(fl.count()):
1939 for i in range(fl.count()):
1941 revisions += 1
1940 revisions += 1
1942 n = fl.node(i)
1941 n = fl.node(i)
1943
1942
1944 if n in seen:
1943 if n in seen:
1945 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1944 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1946 errors += 1
1945 errors += 1
1947
1946
1948 if n not in filenodes[f]:
1947 if n not in filenodes[f]:
1949 self.ui.warn("%s: %d:%s not in manifests\n"
1948 self.ui.warn("%s: %d:%s not in manifests\n"
1950 % (f, i, short(n)))
1949 % (f, i, short(n)))
1951 errors += 1
1950 errors += 1
1952 else:
1951 else:
1953 del filenodes[f][n]
1952 del filenodes[f][n]
1954
1953
1955 flr = fl.linkrev(n)
1954 flr = fl.linkrev(n)
1956 if flr not in filelinkrevs[f]:
1955 if flr not in filelinkrevs[f]:
1957 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1956 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1958 % (f, short(n), fl.linkrev(n)))
1957 % (f, short(n), fl.linkrev(n)))
1959 errors += 1
1958 errors += 1
1960 else:
1959 else:
1961 filelinkrevs[f].remove(flr)
1960 filelinkrevs[f].remove(flr)
1962
1961
1963 # verify contents
1962 # verify contents
1964 try:
1963 try:
1965 t = fl.read(n)
1964 t = fl.read(n)
1966 except Exception, inst:
1965 except Exception, inst:
1967 self.ui.warn("unpacking file %s %s: %s\n"
1966 self.ui.warn("unpacking file %s %s: %s\n"
1968 % (f, short(n), inst))
1967 % (f, short(n), inst))
1969 errors += 1
1968 errors += 1
1970
1969
1971 # verify parents
1970 # verify parents
1972 (p1, p2) = fl.parents(n)
1971 (p1, p2) = fl.parents(n)
1973 if p1 not in nodes:
1972 if p1 not in nodes:
1974 self.ui.warn("file %s:%s unknown parent 1 %s" %
1973 self.ui.warn("file %s:%s unknown parent 1 %s" %
1975 (f, short(n), short(p1)))
1974 (f, short(n), short(p1)))
1976 errors += 1
1975 errors += 1
1977 if p2 not in nodes:
1976 if p2 not in nodes:
1978 self.ui.warn("file %s:%s unknown parent 2 %s" %
1977 self.ui.warn("file %s:%s unknown parent 2 %s" %
1979 (f, short(n), short(p1)))
1978 (f, short(n), short(p1)))
1980 errors += 1
1979 errors += 1
1981 nodes[n] = 1
1980 nodes[n] = 1
1982
1981
1983 # cross-check
1982 # cross-check
1984 for node in filenodes[f]:
1983 for node in filenodes[f]:
1985 self.ui.warn("node %s in manifests not in %s\n"
1984 self.ui.warn("node %s in manifests not in %s\n"
1986 % (hex(node), f))
1985 % (hex(node), f))
1987 errors += 1
1986 errors += 1
1988
1987
1989 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1988 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1990 (files, changesets, revisions))
1989 (files, changesets, revisions))
1991
1990
1992 if errors:
1991 if errors:
1993 self.ui.warn("%d integrity errors encountered!\n" % errors)
1992 self.ui.warn("%d integrity errors encountered!\n" % errors)
1994 return 1
1993 return 1
1995
1994
1996 class remoterepository:
1995 class remoterepository:
1997 def local(self):
1996 def local(self):
1998 return False
1997 return False
1999
1998
2000 class httprepository(remoterepository):
1999 class httprepository(remoterepository):
2001 def __init__(self, ui, path):
2000 def __init__(self, ui, path):
2002 # fix missing / after hostname
2001 # fix missing / after hostname
2003 s = urlparse.urlsplit(path)
2002 s = urlparse.urlsplit(path)
2004 partial = s[2]
2003 partial = s[2]
2005 if not partial: partial = "/"
2004 if not partial: partial = "/"
2006 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
2005 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
2007 self.ui = ui
2006 self.ui = ui
2008 no_list = [ "localhost", "127.0.0.1" ]
2007 no_list = [ "localhost", "127.0.0.1" ]
2009 host = ui.config("http_proxy", "host")
2008 host = ui.config("http_proxy", "host")
2010 if host is None:
2009 if host is None:
2011 host = os.environ.get("http_proxy")
2010 host = os.environ.get("http_proxy")
2012 if host and host.startswith('http://'):
2011 if host and host.startswith('http://'):
2013 host = host[7:]
2012 host = host[7:]
2014 user = ui.config("http_proxy", "user")
2013 user = ui.config("http_proxy", "user")
2015 passwd = ui.config("http_proxy", "passwd")
2014 passwd = ui.config("http_proxy", "passwd")
2016 no = ui.config("http_proxy", "no")
2015 no = ui.config("http_proxy", "no")
2017 if no is None:
2016 if no is None:
2018 no = os.environ.get("no_proxy")
2017 no = os.environ.get("no_proxy")
2019 if no:
2018 if no:
2020 no_list = no_list + no.split(",")
2019 no_list = no_list + no.split(",")
2021
2020
2022 no_proxy = 0
2021 no_proxy = 0
2023 for h in no_list:
2022 for h in no_list:
2024 if (path.startswith("http://" + h + "/") or
2023 if (path.startswith("http://" + h + "/") or
2025 path.startswith("http://" + h + ":") or
2024 path.startswith("http://" + h + ":") or
2026 path == "http://" + h):
2025 path == "http://" + h):
2027 no_proxy = 1
2026 no_proxy = 1
2028
2027
2029 # Note: urllib2 takes proxy values from the environment and those will
2028 # Note: urllib2 takes proxy values from the environment and those will
2030 # take precedence
2029 # take precedence
2031 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2030 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2032 try:
2031 try:
2033 if os.environ.has_key(env):
2032 if os.environ.has_key(env):
2034 del os.environ[env]
2033 del os.environ[env]
2035 except OSError:
2034 except OSError:
2036 pass
2035 pass
2037
2036
2038 proxy_handler = urllib2.BaseHandler()
2037 proxy_handler = urllib2.BaseHandler()
2039 if host and not no_proxy:
2038 if host and not no_proxy:
2040 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2039 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2041
2040
2042 authinfo = None
2041 authinfo = None
2043 if user and passwd:
2042 if user and passwd:
2044 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2043 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2045 passmgr.add_password(None, host, user, passwd)
2044 passmgr.add_password(None, host, user, passwd)
2046 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2045 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2047
2046
2048 opener = urllib2.build_opener(proxy_handler, authinfo)
2047 opener = urllib2.build_opener(proxy_handler, authinfo)
2049 urllib2.install_opener(opener)
2048 urllib2.install_opener(opener)
2050
2049
2051 def dev(self):
2050 def dev(self):
2052 return -1
2051 return -1
2053
2052
2054 def do_cmd(self, cmd, **args):
2053 def do_cmd(self, cmd, **args):
2055 self.ui.debug("sending %s command\n" % cmd)
2054 self.ui.debug("sending %s command\n" % cmd)
2056 q = {"cmd": cmd}
2055 q = {"cmd": cmd}
2057 q.update(args)
2056 q.update(args)
2058 qs = urllib.urlencode(q)
2057 qs = urllib.urlencode(q)
2059 cu = "%s?%s" % (self.url, qs)
2058 cu = "%s?%s" % (self.url, qs)
2060 resp = urllib2.urlopen(cu)
2059 resp = urllib2.urlopen(cu)
2061 proto = resp.headers['content-type']
2060 proto = resp.headers['content-type']
2062
2061
2063 # accept old "text/plain" and "application/hg-changegroup" for now
2062 # accept old "text/plain" and "application/hg-changegroup" for now
2064 if not proto.startswith('application/mercurial') and \
2063 if not proto.startswith('application/mercurial') and \
2065 not proto.startswith('text/plain') and \
2064 not proto.startswith('text/plain') and \
2066 not proto.startswith('application/hg-changegroup'):
2065 not proto.startswith('application/hg-changegroup'):
2067 raise RepoError("'%s' does not appear to be an hg repository"
2066 raise RepoError("'%s' does not appear to be an hg repository"
2068 % self.url)
2067 % self.url)
2069
2068
2070 if proto.startswith('application/mercurial'):
2069 if proto.startswith('application/mercurial'):
2071 version = proto[22:]
2070 version = proto[22:]
2072 if float(version) > 0.1:
2071 if float(version) > 0.1:
2073 raise RepoError("'%s' uses newer protocol %s" %
2072 raise RepoError("'%s' uses newer protocol %s" %
2074 (self.url, version))
2073 (self.url, version))
2075
2074
2076 return resp
2075 return resp
2077
2076
2078 def heads(self):
2077 def heads(self):
2079 d = self.do_cmd("heads").read()
2078 d = self.do_cmd("heads").read()
2080 try:
2079 try:
2081 return map(bin, d[:-1].split(" "))
2080 return map(bin, d[:-1].split(" "))
2082 except:
2081 except:
2083 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2082 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2084 raise
2083 raise
2085
2084
2086 def branches(self, nodes):
2085 def branches(self, nodes):
2087 n = " ".join(map(hex, nodes))
2086 n = " ".join(map(hex, nodes))
2088 d = self.do_cmd("branches", nodes=n).read()
2087 d = self.do_cmd("branches", nodes=n).read()
2089 try:
2088 try:
2090 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2089 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2091 return br
2090 return br
2092 except:
2091 except:
2093 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2092 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2094 raise
2093 raise
2095
2094
2096 def between(self, pairs):
2095 def between(self, pairs):
2097 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2096 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2098 d = self.do_cmd("between", pairs=n).read()
2097 d = self.do_cmd("between", pairs=n).read()
2099 try:
2098 try:
2100 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2099 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2101 return p
2100 return p
2102 except:
2101 except:
2103 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2102 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2104 raise
2103 raise
2105
2104
2106 def changegroup(self, nodes):
2105 def changegroup(self, nodes):
2107 n = " ".join(map(hex, nodes))
2106 n = " ".join(map(hex, nodes))
2108 f = self.do_cmd("changegroup", roots=n)
2107 f = self.do_cmd("changegroup", roots=n)
2109 bytes = 0
2108 bytes = 0
2110
2109
2111 class zread:
2110 class zread:
2112 def __init__(self, f):
2111 def __init__(self, f):
2113 self.zd = zlib.decompressobj()
2112 self.zd = zlib.decompressobj()
2114 self.f = f
2113 self.f = f
2115 self.buf = ""
2114 self.buf = ""
2116 def read(self, l):
2115 def read(self, l):
2117 while l > len(self.buf):
2116 while l > len(self.buf):
2118 r = self.f.read(4096)
2117 r = self.f.read(4096)
2119 if r:
2118 if r:
2120 self.buf += self.zd.decompress(r)
2119 self.buf += self.zd.decompress(r)
2121 else:
2120 else:
2122 self.buf += self.zd.flush()
2121 self.buf += self.zd.flush()
2123 break
2122 break
2124 d, self.buf = self.buf[:l], self.buf[l:]
2123 d, self.buf = self.buf[:l], self.buf[l:]
2125 return d
2124 return d
2126
2125
2127 return zread(f)
2126 return zread(f)
2128
2127
2129 class remotelock:
2128 class remotelock:
2130 def __init__(self, repo):
2129 def __init__(self, repo):
2131 self.repo = repo
2130 self.repo = repo
2132 def release(self):
2131 def release(self):
2133 self.repo.unlock()
2132 self.repo.unlock()
2134 self.repo = None
2133 self.repo = None
2135 def __del__(self):
2134 def __del__(self):
2136 if self.repo:
2135 if self.repo:
2137 self.release()
2136 self.release()
2138
2137
2139 class sshrepository(remoterepository):
2138 class sshrepository(remoterepository):
2140 def __init__(self, ui, path):
2139 def __init__(self, ui, path):
2141 self.url = path
2140 self.url = path
2142 self.ui = ui
2141 self.ui = ui
2143
2142
2144 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2143 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2145 if not m:
2144 if not m:
2146 raise RepoError("couldn't parse destination %s" % path)
2145 raise RepoError("couldn't parse destination %s" % path)
2147
2146
2148 self.user = m.group(2)
2147 self.user = m.group(2)
2149 self.host = m.group(3)
2148 self.host = m.group(3)
2150 self.port = m.group(5)
2149 self.port = m.group(5)
2151 self.path = m.group(7)
2150 self.path = m.group(7)
2152
2151
2153 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2152 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2154 args = self.port and ("%s -p %s") % (args, self.port) or args
2153 args = self.port and ("%s -p %s") % (args, self.port) or args
2155 path = self.path or ""
2154 path = self.path or ""
2156
2155
2157 if not path:
2156 if not path:
2158 raise RepoError("no remote repository path specified")
2157 raise RepoError("no remote repository path specified")
2159
2158
2160 sshcmd = self.ui.config("ui", "ssh", "ssh")
2159 sshcmd = self.ui.config("ui", "ssh", "ssh")
2161 remotecmd = self.ui.config("ui", "remotecmd", "hg")
2160 remotecmd = self.ui.config("ui", "remotecmd", "hg")
2162 cmd = "%s %s '%s -R %s serve --stdio'"
2161 cmd = "%s %s '%s -R %s serve --stdio'"
2163 cmd = cmd % (sshcmd, args, remotecmd, path)
2162 cmd = cmd % (sshcmd, args, remotecmd, path)
2164
2163
2165 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2164 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2166
2165
2167 def readerr(self):
2166 def readerr(self):
2168 while 1:
2167 while 1:
2169 r,w,x = select.select([self.pipee], [], [], 0)
2168 r,w,x = select.select([self.pipee], [], [], 0)
2170 if not r: break
2169 if not r: break
2171 l = self.pipee.readline()
2170 l = self.pipee.readline()
2172 if not l: break
2171 if not l: break
2173 self.ui.status("remote: ", l)
2172 self.ui.status("remote: ", l)
2174
2173
2175 def __del__(self):
2174 def __del__(self):
2176 try:
2175 try:
2177 self.pipeo.close()
2176 self.pipeo.close()
2178 self.pipei.close()
2177 self.pipei.close()
2179 for l in self.pipee:
2178 for l in self.pipee:
2180 self.ui.status("remote: ", l)
2179 self.ui.status("remote: ", l)
2181 self.pipee.close()
2180 self.pipee.close()
2182 except:
2181 except:
2183 pass
2182 pass
2184
2183
2185 def dev(self):
2184 def dev(self):
2186 return -1
2185 return -1
2187
2186
2188 def do_cmd(self, cmd, **args):
2187 def do_cmd(self, cmd, **args):
2189 self.ui.debug("sending %s command\n" % cmd)
2188 self.ui.debug("sending %s command\n" % cmd)
2190 self.pipeo.write("%s\n" % cmd)
2189 self.pipeo.write("%s\n" % cmd)
2191 for k, v in args.items():
2190 for k, v in args.items():
2192 self.pipeo.write("%s %d\n" % (k, len(v)))
2191 self.pipeo.write("%s %d\n" % (k, len(v)))
2193 self.pipeo.write(v)
2192 self.pipeo.write(v)
2194 self.pipeo.flush()
2193 self.pipeo.flush()
2195
2194
2196 return self.pipei
2195 return self.pipei
2197
2196
2198 def call(self, cmd, **args):
2197 def call(self, cmd, **args):
2199 r = self.do_cmd(cmd, **args)
2198 r = self.do_cmd(cmd, **args)
2200 l = r.readline()
2199 l = r.readline()
2201 self.readerr()
2200 self.readerr()
2202 try:
2201 try:
2203 l = int(l)
2202 l = int(l)
2204 except:
2203 except:
2205 raise RepoError("unexpected response '%s'" % l)
2204 raise RepoError("unexpected response '%s'" % l)
2206 return r.read(l)
2205 return r.read(l)
2207
2206
2208 def lock(self):
2207 def lock(self):
2209 self.call("lock")
2208 self.call("lock")
2210 return remotelock(self)
2209 return remotelock(self)
2211
2210
2212 def unlock(self):
2211 def unlock(self):
2213 self.call("unlock")
2212 self.call("unlock")
2214
2213
2215 def heads(self):
2214 def heads(self):
2216 d = self.call("heads")
2215 d = self.call("heads")
2217 try:
2216 try:
2218 return map(bin, d[:-1].split(" "))
2217 return map(bin, d[:-1].split(" "))
2219 except:
2218 except:
2220 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2219 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2221
2220
2222 def branches(self, nodes):
2221 def branches(self, nodes):
2223 n = " ".join(map(hex, nodes))
2222 n = " ".join(map(hex, nodes))
2224 d = self.call("branches", nodes=n)
2223 d = self.call("branches", nodes=n)
2225 try:
2224 try:
2226 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2225 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2227 return br
2226 return br
2228 except:
2227 except:
2229 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2228 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2230
2229
2231 def between(self, pairs):
2230 def between(self, pairs):
2232 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2231 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2233 d = self.call("between", pairs=n)
2232 d = self.call("between", pairs=n)
2234 try:
2233 try:
2235 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2234 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2236 return p
2235 return p
2237 except:
2236 except:
2238 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2237 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2239
2238
2240 def changegroup(self, nodes):
2239 def changegroup(self, nodes):
2241 n = " ".join(map(hex, nodes))
2240 n = " ".join(map(hex, nodes))
2242 f = self.do_cmd("changegroup", roots=n)
2241 f = self.do_cmd("changegroup", roots=n)
2243 return self.pipei
2242 return self.pipei
2244
2243
2245 def addchangegroup(self, cg):
2244 def addchangegroup(self, cg):
2246 d = self.call("addchangegroup")
2245 d = self.call("addchangegroup")
2247 if d:
2246 if d:
2248 raise RepoError("push refused: %s", d)
2247 raise RepoError("push refused: %s", d)
2249
2248
2250 while 1:
2249 while 1:
2251 d = cg.read(4096)
2250 d = cg.read(4096)
2252 if not d: break
2251 if not d: break
2253 self.pipeo.write(d)
2252 self.pipeo.write(d)
2254 self.readerr()
2253 self.readerr()
2255
2254
2256 self.pipeo.flush()
2255 self.pipeo.flush()
2257
2256
2258 self.readerr()
2257 self.readerr()
2259 l = int(self.pipei.readline())
2258 l = int(self.pipei.readline())
2260 return self.pipei.read(l) != ""
2259 return self.pipei.read(l) != ""
2261
2260
2262 class httpsrepository(httprepository):
2261 class httpsrepository(httprepository):
2263 pass
2262 pass
2264
2263
2265 def repository(ui, path=None, create=0):
2264 def repository(ui, path=None, create=0):
2266 if path:
2265 if path:
2267 if path.startswith("http://"):
2266 if path.startswith("http://"):
2268 return httprepository(ui, path)
2267 return httprepository(ui, path)
2269 if path.startswith("https://"):
2268 if path.startswith("https://"):
2270 return httpsrepository(ui, path)
2269 return httpsrepository(ui, path)
2271 if path.startswith("hg://"):
2270 if path.startswith("hg://"):
2272 return httprepository(ui, path.replace("hg://", "http://"))
2271 return httprepository(ui, path.replace("hg://", "http://"))
2273 if path.startswith("old-http://"):
2272 if path.startswith("old-http://"):
2274 return localrepository(ui, path.replace("old-http://", "http://"))
2273 return localrepository(ui, path.replace("old-http://", "http://"))
2275 if path.startswith("ssh://"):
2274 if path.startswith("ssh://"):
2276 return sshrepository(ui, path)
2275 return sshrepository(ui, path)
2277
2276
2278 return localrepository(ui, path, create)
2277 return localrepository(ui, path, create)
@@ -1,73 +1,79 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 # This test makes sure that we don't mark a file as merged with its ancestor
3 # This test makes sure that we don't mark a file as merged with its ancestor
4 # when we do a merge.
4 # when we do a merge.
5
5
6 cat <<'EOF' > merge
6 cat <<'EOF' > merge
7 #!/bin/sh
7 #!/bin/sh
8 echo merging for `basename $1`
8 echo merging for `basename $1`
9 EOF
9 EOF
10 chmod +x merge
10 chmod +x merge
11
11
12 echo creating base
12 echo creating base
13 hg init a
13 hg init a
14 cd a
14 cd a
15 echo 1 > foo
15 echo 1 > foo
16 echo 1 > bar
16 echo 1 > bar
17 echo 1 > baz
17 echo 1 > baz
18 echo 1 > quux
18 echo 1 > quux
19 hg add foo bar baz quux
19 hg add foo bar baz quux
20 hg commit -m "base" -d "0 0"
20 hg commit -m "base" -d "0 0"
21
21
22 cd ..
22 cd ..
23 hg clone a b
23 hg clone a b
24
24
25 echo creating branch a
25 echo creating branch a
26 cd a
26 cd a
27 echo 2a > foo
27 echo 2a > foo
28 echo 2a > bar
28 echo 2a > bar
29 hg commit -m "branch a" -d "0 0"
29 hg commit -m "branch a" -d "0 0"
30
30
31 echo creating branch b
31 echo creating branch b
32
32
33 cd ..
33 cd ..
34 cd b
34 cd b
35 echo 2b > foo
35 echo 2b > foo
36 echo 2b > baz
36 echo 2b > baz
37 hg commit -m "branch b" -d "0 0"
37 hg commit -m "branch b" -d "0 0"
38
38
39 echo "we shouldn't have anything but n state here"
39 echo "we shouldn't have anything but n state here"
40 hg debugstate | cut -b 1-16,35-
40 hg debugstate | cut -b 1-16,35-
41
41
42 echo merging
42 echo merging
43 hg pull ../a
43 hg pull ../a
44 env HGMERGE=../merge hg update -vm --debug
44 env HGMERGE=../merge hg update -vm
45
45
46 echo 2m > foo
46 echo 2m > foo
47 echo 2b > baz
47 echo 2b > baz
48 echo new > quux
48 echo new > quux
49
49
50 echo "we shouldn't have anything but foo in merge state here"
50 echo "we shouldn't have anything but foo in merge state here"
51 hg debugstate | cut -b 1-16,35- | grep "^m"
51 hg debugstate | cut -b 1-16,35- | grep "^m"
52
52
53 hg ci -m "merge" -d "0 0"
53 hg ci -m "merge" -d "0 0"
54
54
55 echo "main: we should have a merge here"
55 echo "main: we should have a merge here"
56 hg debugindex .hg/00changelog.i
56 hg debugindex .hg/00changelog.i
57
57
58 echo "log should show foo and quux changed"
59 hg log -v -r tip
60
58 echo "foo: we should have a merge here"
61 echo "foo: we should have a merge here"
59 hg debugindex .hg/data/foo.i
62 hg debugindex .hg/data/foo.i
60
63
61 echo "bar: we shouldn't have a merge here"
64 echo "bar: we shouldn't have a merge here"
62 hg debugindex .hg/data/bar.i
65 hg debugindex .hg/data/bar.i
63
66
64 echo "baz: we shouldn't have a merge here"
67 echo "baz: we shouldn't have a merge here"
65 hg debugindex .hg/data/baz.i
68 hg debugindex .hg/data/baz.i
66
69
67 echo "quux: we shouldn't have a merge here"
70 echo "quux: we shouldn't have a merge here"
68 hg debugindex .hg/data/quux.i
71 hg debugindex .hg/data/quux.i
69
72
73 echo "manifest entries should match tips of all files"
74 hg manifest
75
70 echo "everything should be clean now"
76 echo "everything should be clean now"
71 hg status
77 hg status
72
78
73 hg verify
79 hg verify
@@ -1,58 +1,70 b''
1 creating base
1 creating base
2 creating branch a
2 creating branch a
3 creating branch b
3 creating branch b
4 we shouldn't have anything but n state here
4 we shouldn't have anything but n state here
5 n 644 2 bar
5 n 644 2 bar
6 n 644 3 baz
6 n 644 3 baz
7 n 644 3 foo
7 n 644 3 foo
8 n 644 2 quux
8 n 644 2 quux
9 merging
9 merging
10 pulling from ../a
10 pulling from ../a
11 searching for changes
11 searching for changes
12 adding changesets
12 adding changesets
13 adding manifests
13 adding manifests
14 adding file changes
14 adding file changes
15 added 1 changesets with 2 changes to 2 files
15 added 1 changesets with 2 changes to 2 files
16 (run 'hg update' to get a working copy)
16 (run 'hg update' to get a working copy)
17 merging for foo
17 merging for foo
18 resolving manifests
18 resolving manifests
19 force None allow 1 moddirstate True linear False
20 ancestor a0486579db29 local ef1b4dbe2193 remote 336d8406d617
21 remote bar is newer, get
22 foo versions differ, resolve
23 getting bar
19 getting bar
24 merging foo
20 merging foo
25 resolving foo
21 resolving foo
26 file foo: other 33d1fb69067a ancestor b8e02f643373
27 we shouldn't have anything but foo in merge state here
22 we shouldn't have anything but foo in merge state here
28 m 644 3 foo
23 m 644 3 foo
29 main: we should have a merge here
24 main: we should have a merge here
30 rev offset length base linkrev nodeid p1 p2
25 rev offset length base linkrev nodeid p1 p2
31 0 0 73 0 0 cdca01651b96 000000000000 000000000000
26 0 0 73 0 0 cdca01651b96 000000000000 000000000000
32 1 73 68 1 1 f6718a9cb7f3 cdca01651b96 000000000000
27 1 73 68 1 1 f6718a9cb7f3 cdca01651b96 000000000000
33 2 141 68 2 2 bdd988058d16 cdca01651b96 000000000000
28 2 141 68 2 2 bdd988058d16 cdca01651b96 000000000000
34 3 209 66 3 3 9da9fbd62226 f6718a9cb7f3 bdd988058d16
29 3 209 66 3 3 d8a521142a3c f6718a9cb7f3 bdd988058d16
30 log should show foo and quux changed
31 changeset: 3:d8a521142a3c02186ee6c7254738a7e6427ed4c8
32 tag: tip
33 parent: 1:f6718a9cb7f31f1a92d27bd6544c71617d6d4e4f
34 parent: 2:bdd988058d16e2d7392958eace7b64817e44a54e
35 user: test
36 date: Thu Jan 1 00:00:00 1970 +0000
37 files: foo quux
38 description:
39 merge
40
41
35 foo: we should have a merge here
42 foo: we should have a merge here
36 rev offset length base linkrev nodeid p1 p2
43 rev offset length base linkrev nodeid p1 p2
37 0 0 3 0 0 b8e02f643373 000000000000 000000000000
44 0 0 3 0 0 b8e02f643373 000000000000 000000000000
38 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
45 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
39 2 7 4 2 2 33d1fb69067a b8e02f643373 000000000000
46 2 7 4 2 2 33d1fb69067a b8e02f643373 000000000000
40 3 11 4 3 3 aa27919ee430 2ffeddde1b65 33d1fb69067a
47 3 11 4 3 3 aa27919ee430 2ffeddde1b65 33d1fb69067a
41 bar: we shouldn't have a merge here
48 bar: we shouldn't have a merge here
42 rev offset length base linkrev nodeid p1 p2
49 rev offset length base linkrev nodeid p1 p2
43 0 0 3 0 0 b8e02f643373 000000000000 000000000000
50 0 0 3 0 0 b8e02f643373 000000000000 000000000000
44 1 3 4 1 2 33d1fb69067a b8e02f643373 000000000000
51 1 3 4 1 2 33d1fb69067a b8e02f643373 000000000000
45 baz: we shouldn't have a merge here
52 baz: we shouldn't have a merge here
46 rev offset length base linkrev nodeid p1 p2
53 rev offset length base linkrev nodeid p1 p2
47 0 0 3 0 0 b8e02f643373 000000000000 000000000000
54 0 0 3 0 0 b8e02f643373 000000000000 000000000000
48 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
55 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
49 quux: we shouldn't have a merge here
56 quux: we shouldn't have a merge here
50 rev offset length base linkrev nodeid p1 p2
57 rev offset length base linkrev nodeid p1 p2
51 0 0 3 0 0 b8e02f643373 000000000000 000000000000
58 0 0 3 0 0 b8e02f643373 000000000000 000000000000
52 1 3 5 1 3 6128c0f33108 b8e02f643373 000000000000
59 1 3 5 1 3 6128c0f33108 b8e02f643373 000000000000
60 manifest entries should match tips of all files
61 33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644 bar
62 2ffeddde1b65b4827f6746174a145474129fa2ce 644 baz
63 aa27919ee4303cfd575e1fb932dd64d75aa08be4 644 foo
64 6128c0f33108e8cfbb4e0824d13ae48b466d7280 644 quux
53 everything should be clean now
65 everything should be clean now
54 checking changesets
66 checking changesets
55 checking manifests
67 checking manifests
56 crosschecking files in changesets and manifests
68 crosschecking files in changesets and manifests
57 checking files
69 checking files
58 4 files, 4 changesets, 10 total revisions
70 4 files, 4 changesets, 10 total revisions
@@ -1,18 +1,18 b''
1 pulling from ../B1
1 pulling from ../B1
2 searching for changes
2 searching for changes
3 adding changesets
3 adding changesets
4 adding manifests
4 adding manifests
5 adding file changes
5 adding file changes
6 added 1 changesets with 1 changes to 1 files
6 added 1 changesets with 1 changes to 1 files
7 (run 'hg update' to get a working copy)
7 (run 'hg update' to get a working copy)
8 bar should remain deleted.
8 bar should remain deleted.
9 f405ac83a5611071d6b54dd5eb26943b1fdc4460 644 foo
9 f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
10 pulling from ../A2
10 pulling from ../A2
11 searching for changes
11 searching for changes
12 adding changesets
12 adding changesets
13 adding manifests
13 adding manifests
14 adding file changes
14 adding file changes
15 added 1 changesets with 0 changes to 0 files
15 added 1 changesets with 0 changes to 0 files
16 (run 'hg update' to get a working copy)
16 (run 'hg update' to get a working copy)
17 bar should remain deleted.
17 bar should remain deleted.
18 f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
18 f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
@@ -1,53 +1,53 b''
1 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
1 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
2 54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
2 54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
3 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
3 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
4 54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
4 54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
5 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
5 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
6 changeset: 2:9f827976dae4
6 changeset: 2:9f827976dae4
7 tag: tip
7 tag: tip
8 user: test
8 user: test
9 date: Thu Jan 1 00:00:00 1970 +0000
9 date: Thu Jan 1 00:00:00 1970 +0000
10 summary: 2
10 summary: 2
11
11
12 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
12 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
13 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
13 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
14 changeset: 3:c8225a106186
14 changeset: 3:142428fbbcc5
15 tag: tip
15 tag: tip
16 user: test
16 user: test
17 date: Thu Jan 1 00:00:00 1970 +0000
17 date: Thu Jan 1 00:00:00 1970 +0000
18 summary: 3
18 summary: 3
19
19
20 d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
20 d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
21 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
21 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
22 changeset: 4:8dfeee82a94b
22 changeset: 4:4d450f9aa680
23 tag: tip
23 tag: tip
24 user: test
24 user: test
25 date: Thu Jan 1 00:00:00 1970 +0000
25 date: Thu Jan 1 00:00:00 1970 +0000
26 summary: 4
26 summary: 4
27
27
28 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
28 05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
29 54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
29 54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
30 3570202ceac2b52517df64ebd0a062cb0d8fe33a 644 c
30 3570202ceac2b52517df64ebd0a062cb0d8fe33a 644 c
31 changeset: 4:8dfeee82a94b
31 changeset: 4:4d450f9aa680
32 user: test
32 user: test
33 date: Thu Jan 1 00:00:00 1970 +0000
33 date: Thu Jan 1 00:00:00 1970 +0000
34 summary: 4
34 summary: 4
35
35
36 d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
36 d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
37 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
37 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
38 changeset: 6:c0e932ecae5e
38 changeset: 6:b4b8b9afa8cc
39 tag: tip
39 tag: tip
40 parent: 4:8dfeee82a94b
40 parent: 4:4d450f9aa680
41 parent: 5:a7925a42d0df
41 parent: 5:a7925a42d0df
42 user: test
42 user: test
43 date: Thu Jan 1 00:00:00 1970 +0000
43 date: Thu Jan 1 00:00:00 1970 +0000
44 summary: 6
44 summary: 6
45
45
46 d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
46 d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
47 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
47 76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
48 changeset: 7:3a157da4365d
48 changeset: 7:f84d0b1b024e
49 tag: tip
49 tag: tip
50 user: test
50 user: test
51 date: Thu Jan 1 00:00:00 1970 +0000
51 date: Thu Jan 1 00:00:00 1970 +0000
52 summary: 7
52 summary: 7
53
53
@@ -1,13 +1,14 b''
1 unknown
1 unknown
2 acb14030fe0a tip
2 acb14030fe0a tip
3 acb14030fe0a21b60322c440ad2d20cf7685a376 first
3 acb14030fe0a21b60322c440ad2d20cf7685a376 first
4 tip 1:b9154636be938d3d431e75a7c906504a079bfe07
4 tip 1:b9154636be938d3d431e75a7c906504a079bfe07
5 first 0:acb14030fe0a21b60322c440ad2d20cf7685a376
5 first 0:acb14030fe0a21b60322c440ad2d20cf7685a376
6 b9154636be93 tip
6 b9154636be93 tip
7 M a
7 M a
8 b9154636be93+ tip
8 b9154636be93+ tip
9 acb14030fe0a+ first
9 acb14030fe0a+ first
10 acb14030fe0a21b60322c440ad2d20cf7685a376+ first
10 acb14030fe0a21b60322c440ad2d20cf7685a376+ first
11 M a
11 M a
12 c8edf04160c7 tip
12 c8edf04160c7 tip
13 c8edf04160c7+b9154636be93 tip
13 c8edf04160c7+b9154636be93+ tip
14 M .hgtags
General Comments 0
You need to be logged in to leave comments. Login now