##// END OF EJS Templates
Fix long-standing excessive file merges...
mpm@selenic.com -
r990:5007e0bd default
parent child Browse files
Show More
@@ -0,0 +1,73 b''
1 #!/bin/sh
2
3 # This test makes sure that we don't mark a file as merged with its ancestor
4 # when we do a merge.
5
6 cat <<'EOF' > merge
7 #!/bin/sh
8 echo merging for `basename $1`
9 EOF
10 chmod +x merge
11
12 echo creating base
13 hg init a
14 cd a
15 echo 1 > foo
16 echo 1 > bar
17 echo 1 > baz
18 echo 1 > quux
19 hg add foo bar baz quux
20 hg commit -m "base" -d "0 0"
21
22 cd ..
23 hg clone a b
24
25 echo creating branch a
26 cd a
27 echo 2a > foo
28 echo 2a > bar
29 hg commit -m "branch a" -d "0 0"
30
31 echo creating branch b
32
33 cd ..
34 cd b
35 echo 2b > foo
36 echo 2b > baz
37 hg commit -m "branch b" -d "0 0"
38
39 echo "we shouldn't have anything but n state here"
40 hg debugstate | cut -b 1-16,35-
41
42 echo merging
43 hg pull ../a
44 env HGMERGE=../merge hg update -vm --debug
45
46 echo 2m > foo
47 echo 2b > baz
48 echo new > quux
49
50 echo "we shouldn't have anything but foo in merge state here"
51 hg debugstate | cut -b 1-16,35- | grep "^m"
52
53 hg ci -m "merge" -d "0 0"
54
55 echo "main: we should have a merge here"
56 hg debugindex .hg/00changelog.i
57
58 echo "foo: we should have a merge here"
59 hg debugindex .hg/data/foo.i
60
61 echo "bar: we shouldn't have a merge here"
62 hg debugindex .hg/data/bar.i
63
64 echo "baz: we shouldn't have a merge here"
65 hg debugindex .hg/data/baz.i
66
67 echo "quux: we shouldn't have a merge here"
68 hg debugindex .hg/data/quux.i
69
70 echo "everything should be clean now"
71 hg status
72
73 hg verify
@@ -0,0 +1,58 b''
1 creating base
2 creating branch a
3 creating branch b
4 we shouldn't have anything but n state here
5 n 644 2 bar
6 n 644 3 baz
7 n 644 3 foo
8 n 644 2 quux
9 merging
10 pulling from ../a
11 searching for changes
12 adding changesets
13 adding manifests
14 adding file changes
15 added 1 changesets with 2 changes to 2 files
16 (run 'hg update' to get a working copy)
17 merging for foo
18 resolving manifests
19 force None allow 1 moddirstate True linear False
20 ancestor a0486579db29 local ef1b4dbe2193 remote 336d8406d617
21 remote bar is newer, get
22 foo versions differ, resolve
23 getting bar
24 merging foo
25 resolving foo
26 file foo: other 33d1fb69067a ancestor b8e02f643373
27 we shouldn't have anything but foo in merge state here
28 m 644 3 foo
29 main: we should have a merge here
30 rev offset length base linkrev nodeid p1 p2
31 0 0 73 0 0 cdca01651b96 000000000000 000000000000
32 1 73 68 1 1 f6718a9cb7f3 cdca01651b96 000000000000
33 2 141 68 2 2 bdd988058d16 cdca01651b96 000000000000
34 3 209 66 3 3 9da9fbd62226 f6718a9cb7f3 bdd988058d16
35 foo: we should have a merge here
36 rev offset length base linkrev nodeid p1 p2
37 0 0 3 0 0 b8e02f643373 000000000000 000000000000
38 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
39 2 7 4 2 2 33d1fb69067a b8e02f643373 000000000000
40 3 11 4 3 3 aa27919ee430 2ffeddde1b65 33d1fb69067a
41 bar: we shouldn't have a merge here
42 rev offset length base linkrev nodeid p1 p2
43 0 0 3 0 0 b8e02f643373 000000000000 000000000000
44 1 3 4 1 2 33d1fb69067a b8e02f643373 000000000000
45 baz: we shouldn't have a merge here
46 rev offset length base linkrev nodeid p1 p2
47 0 0 3 0 0 b8e02f643373 000000000000 000000000000
48 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
49 quux: we shouldn't have a merge here
50 rev offset length base linkrev nodeid p1 p2
51 0 0 3 0 0 b8e02f643373 000000000000 000000000000
52 1 3 5 1 3 6128c0f33108 b8e02f643373 000000000000
53 everything should be clean now
54 checking changesets
55 checking manifests
56 crosschecking files in changesets and manifests
57 checking files
58 4 files, 4 changesets, 10 total revisions
@@ -1,2230 +1,2268 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect errno select stat")
14 demandload(globals(), "bisect errno select stat")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 if not date:
283 if not date:
284 if time.daylight: offset = time.altzone
284 if time.daylight: offset = time.altzone
285 else: offset = time.timezone
285 else: offset = time.timezone
286 date = "%d %d" % (time.time(), offset)
286 date = "%d %d" % (time.time(), offset)
287 list.sort()
287 list.sort()
288 l = [hex(manifest), user, date] + list + ["", desc]
288 l = [hex(manifest), user, date] + list + ["", desc]
289 text = "\n".join(l)
289 text = "\n".join(l)
290 return self.addrevision(text, transaction, self.count(), p1, p2)
290 return self.addrevision(text, transaction, self.count(), p1, p2)
291
291
292 class dirstate:
292 class dirstate:
293 def __init__(self, opener, ui, root):
293 def __init__(self, opener, ui, root):
294 self.opener = opener
294 self.opener = opener
295 self.root = root
295 self.root = root
296 self.dirty = 0
296 self.dirty = 0
297 self.ui = ui
297 self.ui = ui
298 self.map = None
298 self.map = None
299 self.pl = None
299 self.pl = None
300 self.copies = {}
300 self.copies = {}
301 self.ignorefunc = None
301 self.ignorefunc = None
302
302
303 def wjoin(self, f):
303 def wjoin(self, f):
304 return os.path.join(self.root, f)
304 return os.path.join(self.root, f)
305
305
306 def getcwd(self):
306 def getcwd(self):
307 cwd = os.getcwd()
307 cwd = os.getcwd()
308 if cwd == self.root: return ''
308 if cwd == self.root: return ''
309 return cwd[len(self.root) + 1:]
309 return cwd[len(self.root) + 1:]
310
310
311 def ignore(self, f):
311 def ignore(self, f):
312 if not self.ignorefunc:
312 if not self.ignorefunc:
313 bigpat = []
313 bigpat = []
314 try:
314 try:
315 l = file(self.wjoin(".hgignore"))
315 l = file(self.wjoin(".hgignore"))
316 for pat in l:
316 for pat in l:
317 p = pat.rstrip()
317 p = pat.rstrip()
318 if p:
318 if p:
319 try:
319 try:
320 re.compile(p)
320 re.compile(p)
321 except:
321 except:
322 self.ui.warn("ignoring invalid ignore"
322 self.ui.warn("ignoring invalid ignore"
323 + " regular expression '%s'\n" % p)
323 + " regular expression '%s'\n" % p)
324 else:
324 else:
325 bigpat.append(p)
325 bigpat.append(p)
326 except IOError: pass
326 except IOError: pass
327
327
328 if bigpat:
328 if bigpat:
329 s = "(?:%s)" % (")|(?:".join(bigpat))
329 s = "(?:%s)" % (")|(?:".join(bigpat))
330 r = re.compile(s)
330 r = re.compile(s)
331 self.ignorefunc = r.search
331 self.ignorefunc = r.search
332 else:
332 else:
333 self.ignorefunc = util.never
333 self.ignorefunc = util.never
334
334
335 return self.ignorefunc(f)
335 return self.ignorefunc(f)
336
336
337 def __del__(self):
337 def __del__(self):
338 if self.dirty:
338 if self.dirty:
339 self.write()
339 self.write()
340
340
341 def __getitem__(self, key):
341 def __getitem__(self, key):
342 try:
342 try:
343 return self.map[key]
343 return self.map[key]
344 except TypeError:
344 except TypeError:
345 self.read()
345 self.read()
346 return self[key]
346 return self[key]
347
347
348 def __contains__(self, key):
348 def __contains__(self, key):
349 if not self.map: self.read()
349 if not self.map: self.read()
350 return key in self.map
350 return key in self.map
351
351
352 def parents(self):
352 def parents(self):
353 if not self.pl:
353 if not self.pl:
354 self.read()
354 self.read()
355 return self.pl
355 return self.pl
356
356
357 def markdirty(self):
357 def markdirty(self):
358 if not self.dirty:
358 if not self.dirty:
359 self.dirty = 1
359 self.dirty = 1
360
360
361 def setparents(self, p1, p2 = nullid):
361 def setparents(self, p1, p2 = nullid):
362 self.markdirty()
362 self.markdirty()
363 self.pl = p1, p2
363 self.pl = p1, p2
364
364
365 def state(self, key):
365 def state(self, key):
366 try:
366 try:
367 return self[key][0]
367 return self[key][0]
368 except KeyError:
368 except KeyError:
369 return "?"
369 return "?"
370
370
371 def read(self):
371 def read(self):
372 if self.map is not None: return self.map
372 if self.map is not None: return self.map
373
373
374 self.map = {}
374 self.map = {}
375 self.pl = [nullid, nullid]
375 self.pl = [nullid, nullid]
376 try:
376 try:
377 st = self.opener("dirstate").read()
377 st = self.opener("dirstate").read()
378 if not st: return
378 if not st: return
379 except: return
379 except: return
380
380
381 self.pl = [st[:20], st[20: 40]]
381 self.pl = [st[:20], st[20: 40]]
382
382
383 pos = 40
383 pos = 40
384 while pos < len(st):
384 while pos < len(st):
385 e = struct.unpack(">cllll", st[pos:pos+17])
385 e = struct.unpack(">cllll", st[pos:pos+17])
386 l = e[4]
386 l = e[4]
387 pos += 17
387 pos += 17
388 f = st[pos:pos + l]
388 f = st[pos:pos + l]
389 if '\0' in f:
389 if '\0' in f:
390 f, c = f.split('\0')
390 f, c = f.split('\0')
391 self.copies[f] = c
391 self.copies[f] = c
392 self.map[f] = e[:4]
392 self.map[f] = e[:4]
393 pos += l
393 pos += l
394
394
395 def copy(self, source, dest):
395 def copy(self, source, dest):
396 self.read()
396 self.read()
397 self.markdirty()
397 self.markdirty()
398 self.copies[dest] = source
398 self.copies[dest] = source
399
399
400 def copied(self, file):
400 def copied(self, file):
401 return self.copies.get(file, None)
401 return self.copies.get(file, None)
402
402
403 def update(self, files, state, **kw):
403 def update(self, files, state, **kw):
404 ''' current states:
404 ''' current states:
405 n normal
405 n normal
406 m needs merging
406 m needs merging
407 r marked for removal
407 r marked for removal
408 a marked for addition'''
408 a marked for addition'''
409
409
410 if not files: return
410 if not files: return
411 self.read()
411 self.read()
412 self.markdirty()
412 self.markdirty()
413 for f in files:
413 for f in files:
414 if state == "r":
414 if state == "r":
415 self.map[f] = ('r', 0, 0, 0)
415 self.map[f] = ('r', 0, 0, 0)
416 else:
416 else:
417 s = os.stat(os.path.join(self.root, f))
417 s = os.stat(os.path.join(self.root, f))
418 st_size = kw.get('st_size', s.st_size)
418 st_size = kw.get('st_size', s.st_size)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
421
421
422 def forget(self, files):
422 def forget(self, files):
423 if not files: return
423 if not files: return
424 self.read()
424 self.read()
425 self.markdirty()
425 self.markdirty()
426 for f in files:
426 for f in files:
427 try:
427 try:
428 del self.map[f]
428 del self.map[f]
429 except KeyError:
429 except KeyError:
430 self.ui.warn("not in dirstate: %s!\n" % f)
430 self.ui.warn("not in dirstate: %s!\n" % f)
431 pass
431 pass
432
432
433 def clear(self):
433 def clear(self):
434 self.map = {}
434 self.map = {}
435 self.markdirty()
435 self.markdirty()
436
436
437 def write(self):
437 def write(self):
438 st = self.opener("dirstate", "w")
438 st = self.opener("dirstate", "w")
439 st.write("".join(self.pl))
439 st.write("".join(self.pl))
440 for f, e in self.map.items():
440 for f, e in self.map.items():
441 c = self.copied(f)
441 c = self.copied(f)
442 if c:
442 if c:
443 f = f + "\0" + c
443 f = f + "\0" + c
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
445 st.write(e + f)
445 st.write(e + f)
446 self.dirty = 0
446 self.dirty = 0
447
447
448 def filterfiles(self, files):
448 def filterfiles(self, files):
449 ret = {}
449 ret = {}
450 unknown = []
450 unknown = []
451
451
452 for x in files:
452 for x in files:
453 if x is '.':
453 if x is '.':
454 return self.map.copy()
454 return self.map.copy()
455 if x not in self.map:
455 if x not in self.map:
456 unknown.append(x)
456 unknown.append(x)
457 else:
457 else:
458 ret[x] = self.map[x]
458 ret[x] = self.map[x]
459
459
460 if not unknown:
460 if not unknown:
461 return ret
461 return ret
462
462
463 b = self.map.keys()
463 b = self.map.keys()
464 b.sort()
464 b.sort()
465 blen = len(b)
465 blen = len(b)
466
466
467 for x in unknown:
467 for x in unknown:
468 bs = bisect.bisect(b, x)
468 bs = bisect.bisect(b, x)
469 if bs != 0 and b[bs-1] == x:
469 if bs != 0 and b[bs-1] == x:
470 ret[x] = self.map[x]
470 ret[x] = self.map[x]
471 continue
471 continue
472 while bs < blen:
472 while bs < blen:
473 s = b[bs]
473 s = b[bs]
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
475 ret[s] = self.map[s]
475 ret[s] = self.map[s]
476 else:
476 else:
477 break
477 break
478 bs += 1
478 bs += 1
479 return ret
479 return ret
480
480
481 def walk(self, files = None, match = util.always, dc=None):
481 def walk(self, files = None, match = util.always, dc=None):
482 self.read()
482 self.read()
483
483
484 # walk all files by default
484 # walk all files by default
485 if not files:
485 if not files:
486 files = [self.root]
486 files = [self.root]
487 if not dc:
487 if not dc:
488 dc = self.map.copy()
488 dc = self.map.copy()
489 elif not dc:
489 elif not dc:
490 dc = self.filterfiles(files)
490 dc = self.filterfiles(files)
491
491
492 known = {'.hg': 1}
492 known = {'.hg': 1}
493 def seen(fn):
493 def seen(fn):
494 if fn in known: return True
494 if fn in known: return True
495 known[fn] = 1
495 known[fn] = 1
496 def traverse():
496 def traverse():
497 for ff in util.unique(files):
497 for ff in util.unique(files):
498 f = os.path.join(self.root, ff)
498 f = os.path.join(self.root, ff)
499 try:
499 try:
500 st = os.stat(f)
500 st = os.stat(f)
501 except OSError, inst:
501 except OSError, inst:
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
503 util.pathto(self.getcwd(), ff),
503 util.pathto(self.getcwd(), ff),
504 inst.strerror))
504 inst.strerror))
505 continue
505 continue
506 if stat.S_ISDIR(st.st_mode):
506 if stat.S_ISDIR(st.st_mode):
507 for dir, subdirs, fl in os.walk(f):
507 for dir, subdirs, fl in os.walk(f):
508 d = dir[len(self.root) + 1:]
508 d = dir[len(self.root) + 1:]
509 nd = util.normpath(d)
509 nd = util.normpath(d)
510 if nd == '.': nd = ''
510 if nd == '.': nd = ''
511 if seen(nd):
511 if seen(nd):
512 subdirs[:] = []
512 subdirs[:] = []
513 continue
513 continue
514 for sd in subdirs:
514 for sd in subdirs:
515 ds = os.path.join(nd, sd +'/')
515 ds = os.path.join(nd, sd +'/')
516 if self.ignore(ds) or not match(ds):
516 if self.ignore(ds) or not match(ds):
517 subdirs.remove(sd)
517 subdirs.remove(sd)
518 subdirs.sort()
518 subdirs.sort()
519 fl.sort()
519 fl.sort()
520 for fn in fl:
520 for fn in fl:
521 fn = util.pconvert(os.path.join(d, fn))
521 fn = util.pconvert(os.path.join(d, fn))
522 yield 'f', fn
522 yield 'f', fn
523 elif stat.S_ISREG(st.st_mode):
523 elif stat.S_ISREG(st.st_mode):
524 yield 'f', ff
524 yield 'f', ff
525 else:
525 else:
526 kind = 'unknown'
526 kind = 'unknown'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
533 util.pathto(self.getcwd(), ff),
533 util.pathto(self.getcwd(), ff),
534 kind))
534 kind))
535
535
536 ks = dc.keys()
536 ks = dc.keys()
537 ks.sort()
537 ks.sort()
538 for k in ks:
538 for k in ks:
539 yield 'm', k
539 yield 'm', k
540
540
541 # yield only files that match: all in dirstate, others only if
541 # yield only files that match: all in dirstate, others only if
542 # not in .hgignore
542 # not in .hgignore
543
543
544 for src, fn in util.unique(traverse()):
544 for src, fn in util.unique(traverse()):
545 fn = util.normpath(fn)
545 fn = util.normpath(fn)
546 if seen(fn): continue
546 if seen(fn): continue
547 if fn not in dc and self.ignore(fn):
547 if fn not in dc and self.ignore(fn):
548 continue
548 continue
549 if match(fn):
549 if match(fn):
550 yield src, fn
550 yield src, fn
551
551
def changes(self, files=None, match=util.always):
    """Classify working-directory files against the dirstate.

    Returns a 5-tuple of filename lists:
    (lookup, modified, added, removed + deleted, unknown)
    where 'lookup' holds files whose size/mode match but whose mtime
    differs, so only a content compare can decide if they changed.
    """
    self.read()
    if not files:
        dc = self.map.copy()
    else:
        # restrict the candidate map to the requested files
        dc = self.filterfiles(files)
    lookup, modified, added, unknown = [], [], [], []
    removed, deleted = [], []

    for src, fn in self.walk(files, match, dc=dc):
        try:
            s = os.stat(os.path.join(self.root, fn))
        except OSError:
            # vanished between walk and stat; the dc leftover pass
            # below will report it as removed/deleted
            continue
        if not stat.S_ISREG(s.st_mode):
            continue
        c = dc.get(fn)
        if c:
            # remove from dc so the leftover pass only sees files
            # that are tracked but absent from the working dir
            del dc[fn]
            if c[0] == 'm':
                modified.append(fn)
            elif c[0] == 'a':
                added.append(fn)
            elif c[0] == 'r':
                # marked removed but still on disk
                unknown.append(fn)
            # size change or executable-bit flip means modified
            # (0100 is the octal exec bit; this is Python 2 syntax)
            elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                modified.append(fn)
            elif c[3] != s.st_mtime:
                # same size/mode, different mtime: needs content check
                lookup.append(fn)
        else:
            unknown.append(fn)

    # anything left in dc is tracked but missing from the working dir
    for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
        if c[0] == 'r':
            removed.append(fn)
        else:
            deleted.append(fn)
    return (lookup, modified, added, removed + deleted, unknown)
590
590
# used to avoid circular references so destructors work
def opener(base):
    """Build a file-opening callable rooted at *base*.

    The returned function opens *path* relative to *base* in binary
    mode.  For http:// bases it returns a range-reading proxy instead
    of a local file.  On writes it creates missing parent directories
    and breaks up hardlinked files by copying them first.
    """
    root = base

    def o(path, mode="r"):
        # remote bases are read via HTTP range requests, never opened locally
        if root.startswith("http://"):
            url = os.path.join(root, urllib.quote(path))
            return httprangereader.httprangereader(url)

        name = os.path.join(root, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                st = os.stat(name)
            except OSError:
                # new file: make sure its directory exists first
                parent = os.path.dirname(name)
                if not os.path.isdir(parent):
                    os.makedirs(parent)
            else:
                # copy-before-write so hardlinked clones are not clobbered
                if st.st_nlink > 1:
                    file(name + ".tmp", "wb").write(file(name, "rb").read())
                    util.rename(name + ".tmp", name)

        return file(name, mode)

    return o
618
618
class RepoError(Exception):
    """Raised for repository-level failures (no repo found, unknown revision)."""
620
620
621 class localrepository:
621 class localrepository:
def __init__(self, ui, path=None, create=0):
    """Open (or with create=1, initialize) a repository.

    path may be an http:// URL (remote, read-only access), an explicit
    local path, or None, in which case the repo root is found by
    walking up from the current directory looking for a .hg dir.
    Raises RepoError if no repository can be located.
    """
    self.remote = 0
    if path and path.startswith("http://"):
        self.remote = 1
        self.path = path
    else:
        if not path:
            # search upward from cwd for a directory containing .hg
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                # dirname is a fixpoint at the filesystem root
                if p == oldp: raise RepoError("no repo found")
            path = p
        self.path = os.path.join(path, ".hg")

    if not create and not os.path.isdir(self.path):
        raise RepoError("repository %s not found" % self.path)

    self.root = os.path.abspath(path)
    self.ui = ui

    if create:
        os.mkdir(self.path)
        os.mkdir(self.join("data"))

    # opener reads/writes under .hg, wopener under the working dir
    self.opener = opener(self.path)
    self.wopener = opener(self.root)
    self.manifest = manifest(self.opener)
    self.changelog = changelog(self.opener)
    # lazily filled by tags()/nodetags()
    self.tagscache = None
    self.nodetagscache = None

    if not self.remote:
        self.dirstate = dirstate(self.opener, ui, self.root)
        try:
            # per-repository configuration is optional
            self.ui.readconfig(self.opener("hgrc"))
        except IOError: pass
659
659
def hook(self, name, **args):
    """Run the hook configured under [hooks] *name*, if any.

    Keyword arguments are exported to the hook as uppercased
    environment variables for the duration of the call.  Returns True
    when no hook is configured or the hook exits with status 0, False
    (after warning) on a nonzero exit status.
    """
    s = self.ui.config("hooks", name)
    if s:
        self.ui.note("running hook %s: %s\n" % (name, s))
        old = {}
        for k, v in args.items():
            k = k.upper()
            old[k] = os.environ.get(k, None)
            os.environ[k] = v

        # restore the environment even if os.system is interrupted
        try:
            r = os.system(s)
        finally:
            for k, v in old.items():
                if v is not None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

        if r:
            self.ui.warn("abort: %s hook failed with status %d!\n" %
                         (name, r))
            return False
    return True
683
683
def tags(self):
    '''return a mapping of tag to node'''
    # the map is computed once and cached on the instance
    if not self.tagscache:
        self.tagscache = {}
        def addtag(self, k, n):
            # n is a hex node string; unparseable values map to ''
            try:
                bin_n = bin(n)
            except TypeError:
                bin_n = ''
            self.tagscache[k.strip()] = bin_n

        try:
            # read each head of the tags file, ending with the tip
            # and add each tag found to the map, with "newer" ones
            # taking precedence
            fl = self.file(".hgtags")
            h = fl.heads()
            h.reverse()
            for r in h:
                for l in fl.revision(r).splitlines():
                    if l:
                        # each line is "<hex node> <tag name>"
                        n, k = l.split(" ", 1)
                        addtag(self, k, n)
        except KeyError:
            # no .hgtags file in this repository
            pass

        try:
            # repo-local tags live in .hg/localtags, same line format
            f = self.opener("localtags")
            for l in f:
                n, k = l.split(" ", 1)
                addtag(self, k, n)
        except IOError:
            pass

        # 'tip' is always defined, and always wins
        self.tagscache['tip'] = self.changelog.tip()

    return self.tagscache
721
721
def tagslist(self):
    '''return a list of tags ordered by revision'''
    entries = []
    for tag, node in self.tags().items():
        try:
            rev = self.changelog.rev(node)
        except:
            rev = -2 # sort to the beginning of the list if unknown
        entries.append((rev, tag, node))
    entries.sort()
    return [(tag, node) for rev, tag, node in entries]
733
733
def nodetags(self, node):
    '''return the tags associated with a node'''
    if not self.nodetagscache:
        # invert the tag -> node mapping once, on first use
        cache = {}
        for tag, n in self.tags().items():
            cache.setdefault(n, []).append(tag)
        self.nodetagscache = cache
    return self.nodetagscache.get(node, [])
741
741
def lookup(self, key):
    """Resolve *key* (tag name or changelog id) to a binary node.

    Tags take precedence over changelog lookup; raises RepoError when
    neither resolves the key.
    """
    tags = self.tags()
    if key in tags:
        return tags[key]
    try:
        return self.changelog.lookup(key)
    except:
        raise RepoError("unknown revision '%s'" % key)
750
750
def dev(self):
    # device number of the .hg directory; -1 for remote repos
    if self.remote: return -1
    return os.stat(self.path).st_dev
754
754
def local(self):
    # True when this repository is on the local filesystem
    return not self.remote
757
757
def join(self, f):
    # path of f inside the .hg directory
    return os.path.join(self.path, f)
760
760
def wjoin(self, f):
    # path of f inside the working directory
    return os.path.join(self.root, f)
763
763
def file(self, f):
    """Return the filelog for tracked file *f*.

    A single leading '/' is tolerated and stripped.
    """
    # startswith avoids the IndexError f[0] would raise on an empty name
    if f.startswith('/'): f = f[1:]
    return filelog(self.opener, f)
767
767
def getcwd(self):
    # current directory relative to the repo root, per the dirstate
    return self.dirstate.getcwd()
770
770
def wfile(self, f, mode='r'):
    # open file f from the working directory
    return self.wopener(f, mode)
773
773
def transaction(self):
    """Start a journaled transaction; returns the transaction object.

    Snapshots the dirstate to journal.dirstate first so 'undo' can
    restore it; on commit the journal files are renamed to undo files.
    """
    # save dirstate for undo
    try:
        ds = self.opener("dirstate").read()
    except IOError:
        # no dirstate yet (fresh repository)
        ds = ""
    self.opener("journal.dirstate", "w").write(ds)

    def after():
        # on successful close, keep the journal around as undo data
        util.rename(self.join("journal"), self.join("undo"))
        util.rename(self.join("journal.dirstate"),
                    self.join("undo.dirstate"))

    return transaction.transaction(self.ui.warn, self.opener,
                                   self.join("journal"), after)
789
789
def recover(self):
    """Roll back an interrupted transaction, if a journal exists."""
    lock = self.lock()
    if os.path.exists(self.join("journal")):
        self.ui.status("rolling back interrupted transaction\n")
        return transaction.rollback(self.opener, self.join("journal"))
    else:
        self.ui.warn("no interrupted transaction available\n")
797
797
def undo(self):
    """Roll back the last committed transaction and restore the dirstate."""
    lock = self.lock()
    if os.path.exists(self.join("undo")):
        self.ui.status("rolling back last transaction\n")
        transaction.rollback(self.opener, self.join("undo"))
        # drop the stale in-memory dirstate before replacing the file
        self.dirstate = None
        util.rename(self.join("undo.dirstate"), self.join("dirstate"))
        self.dirstate = dirstate(self.opener, self.ui, self.root)
    else:
        self.ui.warn("no undo information available\n")
808
808
def lock(self, wait = 1):
    """Acquire the repository lock.

    First tries without blocking; if the lock is held and wait is
    true, warns and retries blocking.  Raises lock.LockHeld otherwise.
    """
    try:
        return lock.lock(self.join("lock"), 0)
    except lock.LockHeld, inst:
        if wait:
            # inst.args[0] identifies the current lock holder
            self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
            return lock.lock(self.join("lock"), wait)
        raise inst
817
817
def rawcommit(self, files, text, user, date, p1=None, p2=None):
    """Record a changeset with explicitly supplied metadata/parents.

    Used by import-style operations.  The dirstate is only updated
    when p1 is the current working-directory parent.
    """
    orig_parent = self.dirstate.parents()[0] or nullid
    p1 = p1 or self.dirstate.parents()[0] or nullid
    p2 = p2 or self.dirstate.parents()[1] or nullid
    c1 = self.changelog.read(p1)
    c2 = self.changelog.read(p2)
    m1 = self.manifest.read(c1[0])
    mf1 = self.manifest.readflags(c1[0])
    m2 = self.manifest.read(c2[0])

    if orig_parent == p1:
        update_dirstate = 1
    else:
        update_dirstate = 0

    tr = self.transaction()
    mm = m1.copy()
    mfm = mf1.copy()
    linkrev = self.changelog.count()
    for f in files:
        try:
            t = self.wfile(f).read()
            tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
            r = self.file(f)
            mfm[f] = tm

            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)

            # is the same revision on two branches of a merge?
            if fp2 == fp1:
                fp2 = nullid

            if fp2 != nullid:
                # is one parent an ancestor of the other?
                fpa = r.ancestor(fp1, fp2)
                if fpa == fp1:
                    fp1, fp2 = fp2, nullid
                elif fpa == fp2:
                    fp2 = nullid

                # is the file unmodified from the parent?
                if t == r.read(fp1):
                    # record the proper existing parent in manifest
                    # no need to add a revision
                    mm[f] = fp1
                    continue

            mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
            if update_dirstate:
                self.dirstate.update([f], "n")
        except IOError:
            # file unreadable in the working dir: treat as removed
            try:
                del mm[f]
                del mfm[f]
                if update_dirstate:
                    self.dirstate.forget([f])
            except:
                # deleted from p2?
                pass

    mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
    user = user or self.ui.username()
    n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
    tr.close()
    if update_dirstate:
        self.dirstate.setparents(n, nullid)
863
885
def commit(self, files = None, text = "", user = None, date = None,
           match = util.always, force=False):
    """Commit working-directory changes as a new changeset.

    files: explicit list to commit, or None to commit everything
    changed (filtered by match).  Returns the new changeset node, or
    None when nothing changed, a hook vetoed, or the edited message
    was empty.
    """
    commit = []
    remove = []
    if files:
        for f in files:
            s = self.dirstate.state(f)
            if s in 'nmai':
                commit.append(f)
            elif s == 'r':
                remove.append(f)
            else:
                self.ui.warn("%s not tracked!\n" % f)
    else:
        (c, a, d, u) = self.changes(match = match)
        commit = c + a
        remove = d

    p1, p2 = self.dirstate.parents()
    c1 = self.changelog.read(p1)
    c2 = self.changelog.read(p2)
    m1 = self.manifest.read(c1[0])
    mf1 = self.manifest.readflags(c1[0])
    m2 = self.manifest.read(c2[0])

    # a merge (p2 set) must be committed even with no file changes
    if not commit and not remove and not force and p2 == nullid:
        self.ui.status("nothing changed\n")
        return None

    if not self.hook("precommit"):
        return None

    lock = self.lock()
    tr = self.transaction()

    # check in files
    new = {}
    linkrev = self.changelog.count()
    commit.sort()
    for f in commit:
        self.ui.note(f + "\n")
        try:
            mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
            t = self.wfile(f).read()
        except IOError:
            self.ui.warn("trouble committing %s!\n" % f)
            raise

        # record copy/rename metadata from the dirstate
        meta = {}
        cp = self.dirstate.copied(f)
        if cp:
            meta["copy"] = cp
            meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
            self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

        r = self.file(f)
        fp1 = m1.get(f, nullid)
        fp2 = m2.get(f, nullid)

        # is the same revision on two branches of a merge?
        if fp2 == fp1:
            fp2 = nullid

        if fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = r.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

            # is the file unmodified from the parent?
            if not meta and t == r.read(fp1):
                # record the proper existing parent in manifest
                # no need to add a revision
                new[f] = fp1
                continue

        new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

    # update manifest
    m1.update(new)
    for f in remove:
        if f in m1:
            del m1[f]
    mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                           (new, remove))

    # add changeset
    new = new.keys()
    new.sort()

    if not text:
        # build a template and hand it to the user's editor
        edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
        edittext += "".join(["HG: changed %s\n" % f for f in new])
        edittext += "".join(["HG: removed %s\n" % f for f in remove])
        edittext = self.ui.edit(edittext)
        if not edittext.rstrip():
            # empty message aborts the commit
            return None
        text = edittext

    user = user or self.ui.username()
    n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
    tr.close()

    self.dirstate.setparents(n)
    self.dirstate.update(new, "n")
    self.dirstate.forget(remove)

    if not self.hook("commit", node=hex(n)):
        return None
    return n
955
998
def walk(self, node=None, files=None, match=util.always):
    """Yield (source, filename) pairs for matching files.

    With a node, walks that revision's manifest (source 'm');
    otherwise walks the working directory via the dirstate.
    """
    # None replaces the old mutable-default [] to avoid a shared list
    if files is None:
        files = []
    if node:
        for fn in self.manifest.read(self.changelog.read(node)[0]):
            if match(fn): yield 'm', fn
    else:
        for src, fn in self.dirstate.walk(files, match):
            yield src, fn
963
1006
def changes(self, node1 = None, node2 = None, files = [],
            match = util.always):
    """Compare two repository states; returns (changed, added,
    deleted, unknown) filename lists.

    node1/node2 default to the working directory's parent and the
    working directory itself, respectively.
    NOTE(review): 'files' has a mutable default [] — harmless only as
    long as nothing mutates it; confirm before changing.
    """
    mf2, u = None, []

    def fcmp(fn, mf):
        # compare working-dir contents against the manifest revision
        t1 = self.wfile(fn).read()
        t2 = self.file(fn).revision(mf[fn])
        return cmp(t1, t2)

    def mfmatches(node):
        # manifest of `node` restricted to files accepted by match
        mf = dict(self.manifest.read(node))
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    # are we comparing the working directory?
    if not node2:
        l, c, a, d, u = self.dirstate.changes(files, match)

        # are we comparing working dir against its parent?
        if not node1:
            if l:
                # do a full compare of any files that might have changed
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
                for f in l:
                    if fcmp(f, mf2):
                        c.append(f)

            for l in c, a, d, u:
                l.sort()

            return (c, a, d, u)

    # are we comparing working dir against non-tip?
    # generate a pseudo-manifest for the working dir
    if not node2:
        if not mf2:
            change = self.changelog.read(self.dirstate.parents()[0])
            mf2 = mfmatches(change[0])
        # "" marks entries whose hash is unknown (working-dir state)
        for f in a + c + l:
            mf2[f] = ""
        for f in d:
            if f in mf2: del mf2[f]
    else:
        change = self.changelog.read(node2)
        mf2 = mfmatches(change[0])

    # flush lists from dirstate before comparing manifests
    c, a = [], []

    change = self.changelog.read(node1)
    mf1 = mfmatches(change[0])

    for fn in mf2:
        if mf1.has_key(fn):
            if mf1[fn] != mf2[fn]:
                # a "" hash needs a content compare to decide
                if mf2[fn] != "" or fcmp(fn, mf1):
                    c.append(fn)
            del mf1[fn]
        else:
            a.append(fn)

    # whatever is left in mf1 was deleted relative to node2
    d = mf1.keys()

    for l in c, a, d, u:
        l.sort()

    return (c, a, d, u)
1034
1077
def add(self, list):
    """Schedule the named files for addition at the next commit."""
    for name in list:
        path = self.wjoin(name)
        if not os.path.exists(path):
            self.ui.warn("%s does not exist!\n" % name)
            continue
        if not os.path.isfile(path):
            self.ui.warn("%s not added: only files supported currently\n" % name)
            continue
        if self.dirstate.state(name) in 'an':
            self.ui.warn("%s already tracked!\n" % name)
            continue
        self.dirstate.update([name], "a")
1046
1089
def forget(self, list):
    """Undo a pending add for each named file; warn if it wasn't added."""
    for name in list:
        if self.dirstate.state(name) in 'ai':
            self.dirstate.forget([name])
        else:
            self.ui.warn("%s not added!\n" % name)
1053
1096
def remove(self, list):
    """Schedule the named files for removal at the next commit.

    Files must already be deleted from the working directory; a file
    that was only ever added is simply forgotten.
    """
    for f in list:
        p = self.wjoin(f)
        if os.path.exists(p):
            # refuse: the file must be deleted from disk first
            self.ui.warn("%s still exists!\n" % f)
        elif self.dirstate.state(f) == 'a':
            self.ui.warn("%s never committed!\n" % f)
            self.dirstate.forget([f])
        elif f not in self.dirstate:
            self.ui.warn("%s not tracked!\n" % f)
        else:
            self.dirstate.update([f], "r")
1066
1109
def copy(self, source, dest):
    """Record that dest is a copy of source in the dirstate.

    dest must already exist in the working directory; it is marked
    added if not yet tracked.
    """
    p = self.wjoin(dest)
    if not os.path.exists(p):
        self.ui.warn("%s does not exist!\n" % dest)
    elif not os.path.isfile(p):
        self.ui.warn("copy failed: %s is not a file\n" % dest)
    else:
        if self.dirstate.state(dest) == '?':
            self.dirstate.update([dest], "a")
        self.dirstate.copy(source, dest)
1077
1120
def heads(self):
    # repository heads are exactly the changelog heads
    return self.changelog.heads()
1080
1123
1081 # branchlookup returns a dict giving a list of branches for
1124 # branchlookup returns a dict giving a list of branches for
1082 # each head. A branch is defined as the tag of a node or
1125 # each head. A branch is defined as the tag of a node or
1083 # the branch of the node's parents. If a node has multiple
1126 # the branch of the node's parents. If a node has multiple
1084 # branch tags, tags are eliminated if they are visible from other
1127 # branch tags, tags are eliminated if they are visible from other
1085 # branch tags.
1128 # branch tags.
1086 #
1129 #
1087 # So, for this graph: a->b->c->d->e
1130 # So, for this graph: a->b->c->d->e
1088 # \ /
1131 # \ /
1089 # aa -----/
1132 # aa -----/
1090 # a has tag 2.6.12
1133 # a has tag 2.6.12
1091 # d has tag 2.6.13
1134 # d has tag 2.6.13
1092 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1135 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1093 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1136 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1094 # from the list.
1137 # from the list.
1095 #
1138 #
1096 # It is possible that more than one head will have the same branch tag.
1139 # It is possible that more than one head will have the same branch tag.
1097 # callers need to check the result for multiple heads under the same
1140 # callers need to check the result for multiple heads under the same
1098 # branch tag if that is a problem for them (ie checkout of a specific
1141 # branch tag if that is a problem for them (ie checkout of a specific
1099 # branch).
1142 # branch).
1100 #
1143 #
1101 # passing in a specific branch will limit the depth of the search
1144 # passing in a specific branch will limit the depth of the search
1102 # through the parents. It won't limit the branches returned in the
1145 # through the parents. It won't limit the branches returned in the
1103 # result though.
1146 # result though.
1104 def branchlookup(self, heads=None, branch=None):
1147 def branchlookup(self, heads=None, branch=None):
1105 if not heads:
1148 if not heads:
1106 heads = self.heads()
1149 heads = self.heads()
1107 headt = [ h for h in heads ]
1150 headt = [ h for h in heads ]
1108 chlog = self.changelog
1151 chlog = self.changelog
1109 branches = {}
1152 branches = {}
1110 merges = []
1153 merges = []
1111 seenmerge = {}
1154 seenmerge = {}
1112
1155
1113 # traverse the tree once for each head, recording in the branches
1156 # traverse the tree once for each head, recording in the branches
1114 # dict which tags are visible from this head. The branches
1157 # dict which tags are visible from this head. The branches
1115 # dict also records which tags are visible from each tag
1158 # dict also records which tags are visible from each tag
1116 # while we traverse.
1159 # while we traverse.
1117 while headt or merges:
1160 while headt or merges:
1118 if merges:
1161 if merges:
1119 n, found = merges.pop()
1162 n, found = merges.pop()
1120 visit = [n]
1163 visit = [n]
1121 else:
1164 else:
1122 h = headt.pop()
1165 h = headt.pop()
1123 visit = [h]
1166 visit = [h]
1124 found = [h]
1167 found = [h]
1125 seen = {}
1168 seen = {}
1126 while visit:
1169 while visit:
1127 n = visit.pop()
1170 n = visit.pop()
1128 if n in seen:
1171 if n in seen:
1129 continue
1172 continue
1130 pp = chlog.parents(n)
1173 pp = chlog.parents(n)
1131 tags = self.nodetags(n)
1174 tags = self.nodetags(n)
1132 if tags:
1175 if tags:
1133 for x in tags:
1176 for x in tags:
1134 if x == 'tip':
1177 if x == 'tip':
1135 continue
1178 continue
1136 for f in found:
1179 for f in found:
1137 branches.setdefault(f, {})[n] = 1
1180 branches.setdefault(f, {})[n] = 1
1138 branches.setdefault(n, {})[n] = 1
1181 branches.setdefault(n, {})[n] = 1
1139 break
1182 break
1140 if n not in found:
1183 if n not in found:
1141 found.append(n)
1184 found.append(n)
1142 if branch in tags:
1185 if branch in tags:
1143 continue
1186 continue
1144 seen[n] = 1
1187 seen[n] = 1
1145 if pp[1] != nullid and n not in seenmerge:
1188 if pp[1] != nullid and n not in seenmerge:
1146 merges.append((pp[1], [x for x in found]))
1189 merges.append((pp[1], [x for x in found]))
1147 seenmerge[n] = 1
1190 seenmerge[n] = 1
1148 if pp[0] != nullid:
1191 if pp[0] != nullid:
1149 visit.append(pp[0])
1192 visit.append(pp[0])
1150 # traverse the branches dict, eliminating branch tags from each
1193 # traverse the branches dict, eliminating branch tags from each
1151 # head that are visible from another branch tag for that head.
1194 # head that are visible from another branch tag for that head.
1152 out = {}
1195 out = {}
1153 viscache = {}
1196 viscache = {}
1154 for h in heads:
1197 for h in heads:
1155 def visible(node):
1198 def visible(node):
1156 if node in viscache:
1199 if node in viscache:
1157 return viscache[node]
1200 return viscache[node]
1158 ret = {}
1201 ret = {}
1159 visit = [node]
1202 visit = [node]
1160 while visit:
1203 while visit:
1161 x = visit.pop()
1204 x = visit.pop()
1162 if x in viscache:
1205 if x in viscache:
1163 ret.update(viscache[x])
1206 ret.update(viscache[x])
1164 elif x not in ret:
1207 elif x not in ret:
1165 ret[x] = 1
1208 ret[x] = 1
1166 if x in branches:
1209 if x in branches:
1167 visit[len(visit):] = branches[x].keys()
1210 visit[len(visit):] = branches[x].keys()
1168 viscache[node] = ret
1211 viscache[node] = ret
1169 return ret
1212 return ret
1170 if h not in branches:
1213 if h not in branches:
1171 continue
1214 continue
1172 # O(n^2), but somewhat limited. This only searches the
1215 # O(n^2), but somewhat limited. This only searches the
1173 # tags visible from a specific head, not all the tags in the
1216 # tags visible from a specific head, not all the tags in the
1174 # whole repo.
1217 # whole repo.
1175 for b in branches[h]:
1218 for b in branches[h]:
1176 vis = False
1219 vis = False
1177 for bb in branches[h].keys():
1220 for bb in branches[h].keys():
1178 if b != bb:
1221 if b != bb:
1179 if b in visible(bb):
1222 if b in visible(bb):
1180 vis = True
1223 vis = True
1181 break
1224 break
1182 if not vis:
1225 if not vis:
1183 l = out.setdefault(h, [])
1226 l = out.setdefault(h, [])
1184 l[len(l):] = self.nodetags(b)
1227 l[len(l):] = self.nodetags(b)
1185 return out
1228 return out
1186
1229
1187 def branches(self, nodes):
1230 def branches(self, nodes):
1188 if not nodes: nodes = [self.changelog.tip()]
1231 if not nodes: nodes = [self.changelog.tip()]
1189 b = []
1232 b = []
1190 for n in nodes:
1233 for n in nodes:
1191 t = n
1234 t = n
1192 while n:
1235 while n:
1193 p = self.changelog.parents(n)
1236 p = self.changelog.parents(n)
1194 if p[1] != nullid or p[0] == nullid:
1237 if p[1] != nullid or p[0] == nullid:
1195 b.append((t, n, p[0], p[1]))
1238 b.append((t, n, p[0], p[1]))
1196 break
1239 break
1197 n = p[0]
1240 n = p[0]
1198 return b
1241 return b
1199
1242
1200 def between(self, pairs):
1243 def between(self, pairs):
1201 r = []
1244 r = []
1202
1245
1203 for top, bottom in pairs:
1246 for top, bottom in pairs:
1204 n, l, i = top, [], 0
1247 n, l, i = top, [], 0
1205 f = 1
1248 f = 1
1206
1249
1207 while n != bottom:
1250 while n != bottom:
1208 p = self.changelog.parents(n)[0]
1251 p = self.changelog.parents(n)[0]
1209 if i == f:
1252 if i == f:
1210 l.append(n)
1253 l.append(n)
1211 f = f * 2
1254 f = f * 2
1212 n = p
1255 n = p
1213 i += 1
1256 i += 1
1214
1257
1215 r.append(l)
1258 r.append(l)
1216
1259
1217 return r
1260 return r
1218
1261
1219 def newer(self, nodes):
1262 def newer(self, nodes):
1220 m = {}
1263 m = {}
1221 nl = []
1264 nl = []
1222 pm = {}
1265 pm = {}
1223 cl = self.changelog
1266 cl = self.changelog
1224 t = l = cl.count()
1267 t = l = cl.count()
1225
1268
1226 # find the lowest numbered node
1269 # find the lowest numbered node
1227 for n in nodes:
1270 for n in nodes:
1228 l = min(l, cl.rev(n))
1271 l = min(l, cl.rev(n))
1229 m[n] = 1
1272 m[n] = 1
1230
1273
1231 for i in xrange(l, t):
1274 for i in xrange(l, t):
1232 n = cl.node(i)
1275 n = cl.node(i)
1233 if n in m: # explicitly listed
1276 if n in m: # explicitly listed
1234 pm[n] = 1
1277 pm[n] = 1
1235 nl.append(n)
1278 nl.append(n)
1236 continue
1279 continue
1237 for p in cl.parents(n):
1280 for p in cl.parents(n):
1238 if p in pm: # parent listed
1281 if p in pm: # parent listed
1239 pm[n] = 1
1282 pm[n] = 1
1240 nl.append(n)
1283 nl.append(n)
1241 break
1284 break
1242
1285
1243 return nl
1286 return nl
1244
1287
1245 def findincoming(self, remote, base=None, heads=None):
1288 def findincoming(self, remote, base=None, heads=None):
1246 m = self.changelog.nodemap
1289 m = self.changelog.nodemap
1247 search = []
1290 search = []
1248 fetch = []
1291 fetch = []
1249 seen = {}
1292 seen = {}
1250 seenbranch = {}
1293 seenbranch = {}
1251 if base == None:
1294 if base == None:
1252 base = {}
1295 base = {}
1253
1296
1254 # assume we're closer to the tip than the root
1297 # assume we're closer to the tip than the root
1255 # and start by examining the heads
1298 # and start by examining the heads
1256 self.ui.status("searching for changes\n")
1299 self.ui.status("searching for changes\n")
1257
1300
1258 if not heads:
1301 if not heads:
1259 heads = remote.heads()
1302 heads = remote.heads()
1260
1303
1261 unknown = []
1304 unknown = []
1262 for h in heads:
1305 for h in heads:
1263 if h not in m:
1306 if h not in m:
1264 unknown.append(h)
1307 unknown.append(h)
1265 else:
1308 else:
1266 base[h] = 1
1309 base[h] = 1
1267
1310
1268 if not unknown:
1311 if not unknown:
1269 return None
1312 return None
1270
1313
1271 rep = {}
1314 rep = {}
1272 reqcnt = 0
1315 reqcnt = 0
1273
1316
1274 # search through remote branches
1317 # search through remote branches
1275 # a 'branch' here is a linear segment of history, with four parts:
1318 # a 'branch' here is a linear segment of history, with four parts:
1276 # head, root, first parent, second parent
1319 # head, root, first parent, second parent
1277 # (a branch always has two parents (or none) by definition)
1320 # (a branch always has two parents (or none) by definition)
1278 unknown = remote.branches(unknown)
1321 unknown = remote.branches(unknown)
1279 while unknown:
1322 while unknown:
1280 r = []
1323 r = []
1281 while unknown:
1324 while unknown:
1282 n = unknown.pop(0)
1325 n = unknown.pop(0)
1283 if n[0] in seen:
1326 if n[0] in seen:
1284 continue
1327 continue
1285
1328
1286 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1329 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1287 if n[0] == nullid:
1330 if n[0] == nullid:
1288 break
1331 break
1289 if n in seenbranch:
1332 if n in seenbranch:
1290 self.ui.debug("branch already found\n")
1333 self.ui.debug("branch already found\n")
1291 continue
1334 continue
1292 if n[1] and n[1] in m: # do we know the base?
1335 if n[1] and n[1] in m: # do we know the base?
1293 self.ui.debug("found incomplete branch %s:%s\n"
1336 self.ui.debug("found incomplete branch %s:%s\n"
1294 % (short(n[0]), short(n[1])))
1337 % (short(n[0]), short(n[1])))
1295 search.append(n) # schedule branch range for scanning
1338 search.append(n) # schedule branch range for scanning
1296 seenbranch[n] = 1
1339 seenbranch[n] = 1
1297 else:
1340 else:
1298 if n[1] not in seen and n[1] not in fetch:
1341 if n[1] not in seen and n[1] not in fetch:
1299 if n[2] in m and n[3] in m:
1342 if n[2] in m and n[3] in m:
1300 self.ui.debug("found new changeset %s\n" %
1343 self.ui.debug("found new changeset %s\n" %
1301 short(n[1]))
1344 short(n[1]))
1302 fetch.append(n[1]) # earliest unknown
1345 fetch.append(n[1]) # earliest unknown
1303 base[n[2]] = 1 # latest known
1346 base[n[2]] = 1 # latest known
1304 continue
1347 continue
1305
1348
1306 for a in n[2:4]:
1349 for a in n[2:4]:
1307 if a not in rep:
1350 if a not in rep:
1308 r.append(a)
1351 r.append(a)
1309 rep[a] = 1
1352 rep[a] = 1
1310
1353
1311 seen[n[0]] = 1
1354 seen[n[0]] = 1
1312
1355
1313 if r:
1356 if r:
1314 reqcnt += 1
1357 reqcnt += 1
1315 self.ui.debug("request %d: %s\n" %
1358 self.ui.debug("request %d: %s\n" %
1316 (reqcnt, " ".join(map(short, r))))
1359 (reqcnt, " ".join(map(short, r))))
1317 for p in range(0, len(r), 10):
1360 for p in range(0, len(r), 10):
1318 for b in remote.branches(r[p:p+10]):
1361 for b in remote.branches(r[p:p+10]):
1319 self.ui.debug("received %s:%s\n" %
1362 self.ui.debug("received %s:%s\n" %
1320 (short(b[0]), short(b[1])))
1363 (short(b[0]), short(b[1])))
1321 if b[0] not in m and b[0] not in seen:
1364 if b[0] not in m and b[0] not in seen:
1322 unknown.append(b)
1365 unknown.append(b)
1323
1366
1324 # do binary search on the branches we found
1367 # do binary search on the branches we found
1325 while search:
1368 while search:
1326 n = search.pop(0)
1369 n = search.pop(0)
1327 reqcnt += 1
1370 reqcnt += 1
1328 l = remote.between([(n[0], n[1])])[0]
1371 l = remote.between([(n[0], n[1])])[0]
1329 l.append(n[1])
1372 l.append(n[1])
1330 p = n[0]
1373 p = n[0]
1331 f = 1
1374 f = 1
1332 for i in l:
1375 for i in l:
1333 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1376 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1334 if i in m:
1377 if i in m:
1335 if f <= 2:
1378 if f <= 2:
1336 self.ui.debug("found new branch changeset %s\n" %
1379 self.ui.debug("found new branch changeset %s\n" %
1337 short(p))
1380 short(p))
1338 fetch.append(p)
1381 fetch.append(p)
1339 base[i] = 1
1382 base[i] = 1
1340 else:
1383 else:
1341 self.ui.debug("narrowed branch search to %s:%s\n"
1384 self.ui.debug("narrowed branch search to %s:%s\n"
1342 % (short(p), short(i)))
1385 % (short(p), short(i)))
1343 search.append((p, i))
1386 search.append((p, i))
1344 break
1387 break
1345 p, f = i, f * 2
1388 p, f = i, f * 2
1346
1389
1347 # sanity check our fetch list
1390 # sanity check our fetch list
1348 for f in fetch:
1391 for f in fetch:
1349 if f in m:
1392 if f in m:
1350 raise RepoError("already have changeset " + short(f[:4]))
1393 raise RepoError("already have changeset " + short(f[:4]))
1351
1394
1352 if base.keys() == [nullid]:
1395 if base.keys() == [nullid]:
1353 self.ui.warn("warning: pulling from an unrelated repository!\n")
1396 self.ui.warn("warning: pulling from an unrelated repository!\n")
1354
1397
1355 self.ui.note("adding new changesets starting at " +
1398 self.ui.note("adding new changesets starting at " +
1356 " ".join([short(f) for f in fetch]) + "\n")
1399 " ".join([short(f) for f in fetch]) + "\n")
1357
1400
1358 self.ui.debug("%d total queries\n" % reqcnt)
1401 self.ui.debug("%d total queries\n" % reqcnt)
1359
1402
1360 return fetch
1403 return fetch
1361
1404
1362 def findoutgoing(self, remote, base=None, heads=None):
1405 def findoutgoing(self, remote, base=None, heads=None):
1363 if base == None:
1406 if base == None:
1364 base = {}
1407 base = {}
1365 self.findincoming(remote, base, heads)
1408 self.findincoming(remote, base, heads)
1366
1409
1367 remain = dict.fromkeys(self.changelog.nodemap)
1410 remain = dict.fromkeys(self.changelog.nodemap)
1368
1411
1369 # prune everything remote has from the tree
1412 # prune everything remote has from the tree
1370 del remain[nullid]
1413 del remain[nullid]
1371 remove = base.keys()
1414 remove = base.keys()
1372 while remove:
1415 while remove:
1373 n = remove.pop(0)
1416 n = remove.pop(0)
1374 if n in remain:
1417 if n in remain:
1375 del remain[n]
1418 del remain[n]
1376 for p in self.changelog.parents(n):
1419 for p in self.changelog.parents(n):
1377 remove.append(p)
1420 remove.append(p)
1378
1421
1379 # find every node whose parents have been pruned
1422 # find every node whose parents have been pruned
1380 subset = []
1423 subset = []
1381 for n in remain:
1424 for n in remain:
1382 p1, p2 = self.changelog.parents(n)
1425 p1, p2 = self.changelog.parents(n)
1383 if p1 not in remain and p2 not in remain:
1426 if p1 not in remain and p2 not in remain:
1384 subset.append(n)
1427 subset.append(n)
1385
1428
1386 # this is the set of all roots we have to push
1429 # this is the set of all roots we have to push
1387 return subset
1430 return subset
1388
1431
1389 def pull(self, remote):
1432 def pull(self, remote):
1390 lock = self.lock()
1433 lock = self.lock()
1391
1434
1392 # if we have an empty repo, fetch everything
1435 # if we have an empty repo, fetch everything
1393 if self.changelog.tip() == nullid:
1436 if self.changelog.tip() == nullid:
1394 self.ui.status("requesting all changes\n")
1437 self.ui.status("requesting all changes\n")
1395 fetch = [nullid]
1438 fetch = [nullid]
1396 else:
1439 else:
1397 fetch = self.findincoming(remote)
1440 fetch = self.findincoming(remote)
1398
1441
1399 if not fetch:
1442 if not fetch:
1400 self.ui.status("no changes found\n")
1443 self.ui.status("no changes found\n")
1401 return 1
1444 return 1
1402
1445
1403 cg = remote.changegroup(fetch)
1446 cg = remote.changegroup(fetch)
1404 return self.addchangegroup(cg)
1447 return self.addchangegroup(cg)
1405
1448
1406 def push(self, remote, force=False):
1449 def push(self, remote, force=False):
1407 lock = remote.lock()
1450 lock = remote.lock()
1408
1451
1409 base = {}
1452 base = {}
1410 heads = remote.heads()
1453 heads = remote.heads()
1411 inc = self.findincoming(remote, base, heads)
1454 inc = self.findincoming(remote, base, heads)
1412 if not force and inc:
1455 if not force and inc:
1413 self.ui.warn("abort: unsynced remote changes!\n")
1456 self.ui.warn("abort: unsynced remote changes!\n")
1414 self.ui.status("(did you forget to sync? use push -f to force)\n")
1457 self.ui.status("(did you forget to sync? use push -f to force)\n")
1415 return 1
1458 return 1
1416
1459
1417 update = self.findoutgoing(remote, base)
1460 update = self.findoutgoing(remote, base)
1418 if not update:
1461 if not update:
1419 self.ui.status("no changes found\n")
1462 self.ui.status("no changes found\n")
1420 return 1
1463 return 1
1421 elif not force:
1464 elif not force:
1422 if len(heads) < len(self.changelog.heads()):
1465 if len(heads) < len(self.changelog.heads()):
1423 self.ui.warn("abort: push creates new remote branches!\n")
1466 self.ui.warn("abort: push creates new remote branches!\n")
1424 self.ui.status("(did you forget to merge?" +
1467 self.ui.status("(did you forget to merge?" +
1425 " use push -f to force)\n")
1468 " use push -f to force)\n")
1426 return 1
1469 return 1
1427
1470
1428 cg = self.changegroup(update)
1471 cg = self.changegroup(update)
1429 return remote.addchangegroup(cg)
1472 return remote.addchangegroup(cg)
1430
1473
1431 def changegroup(self, basenodes):
1474 def changegroup(self, basenodes):
1432 class genread:
1475 class genread:
1433 def __init__(self, generator):
1476 def __init__(self, generator):
1434 self.g = generator
1477 self.g = generator
1435 self.buf = ""
1478 self.buf = ""
1436 def fillbuf(self):
1479 def fillbuf(self):
1437 self.buf += "".join(self.g)
1480 self.buf += "".join(self.g)
1438
1481
1439 def read(self, l):
1482 def read(self, l):
1440 while l > len(self.buf):
1483 while l > len(self.buf):
1441 try:
1484 try:
1442 self.buf += self.g.next()
1485 self.buf += self.g.next()
1443 except StopIteration:
1486 except StopIteration:
1444 break
1487 break
1445 d, self.buf = self.buf[:l], self.buf[l:]
1488 d, self.buf = self.buf[:l], self.buf[l:]
1446 return d
1489 return d
1447
1490
1448 def gengroup():
1491 def gengroup():
1449 nodes = self.newer(basenodes)
1492 nodes = self.newer(basenodes)
1450
1493
1451 # construct the link map
1494 # construct the link map
1452 linkmap = {}
1495 linkmap = {}
1453 for n in nodes:
1496 for n in nodes:
1454 linkmap[self.changelog.rev(n)] = n
1497 linkmap[self.changelog.rev(n)] = n
1455
1498
1456 # construct a list of all changed files
1499 # construct a list of all changed files
1457 changed = {}
1500 changed = {}
1458 for n in nodes:
1501 for n in nodes:
1459 c = self.changelog.read(n)
1502 c = self.changelog.read(n)
1460 for f in c[3]:
1503 for f in c[3]:
1461 changed[f] = 1
1504 changed[f] = 1
1462 changed = changed.keys()
1505 changed = changed.keys()
1463 changed.sort()
1506 changed.sort()
1464
1507
1465 # the changegroup is changesets + manifests + all file revs
1508 # the changegroup is changesets + manifests + all file revs
1466 revs = [ self.changelog.rev(n) for n in nodes ]
1509 revs = [ self.changelog.rev(n) for n in nodes ]
1467
1510
1468 for y in self.changelog.group(linkmap): yield y
1511 for y in self.changelog.group(linkmap): yield y
1469 for y in self.manifest.group(linkmap): yield y
1512 for y in self.manifest.group(linkmap): yield y
1470 for f in changed:
1513 for f in changed:
1471 yield struct.pack(">l", len(f) + 4) + f
1514 yield struct.pack(">l", len(f) + 4) + f
1472 g = self.file(f).group(linkmap)
1515 g = self.file(f).group(linkmap)
1473 for y in g:
1516 for y in g:
1474 yield y
1517 yield y
1475
1518
1476 yield struct.pack(">l", 0)
1519 yield struct.pack(">l", 0)
1477
1520
1478 return genread(gengroup())
1521 return genread(gengroup())
1479
1522
1480 def addchangegroup(self, source):
1523 def addchangegroup(self, source):
1481
1524
1482 def getchunk():
1525 def getchunk():
1483 d = source.read(4)
1526 d = source.read(4)
1484 if not d: return ""
1527 if not d: return ""
1485 l = struct.unpack(">l", d)[0]
1528 l = struct.unpack(">l", d)[0]
1486 if l <= 4: return ""
1529 if l <= 4: return ""
1487 return source.read(l - 4)
1530 return source.read(l - 4)
1488
1531
1489 def getgroup():
1532 def getgroup():
1490 while 1:
1533 while 1:
1491 c = getchunk()
1534 c = getchunk()
1492 if not c: break
1535 if not c: break
1493 yield c
1536 yield c
1494
1537
1495 def csmap(x):
1538 def csmap(x):
1496 self.ui.debug("add changeset %s\n" % short(x))
1539 self.ui.debug("add changeset %s\n" % short(x))
1497 return self.changelog.count()
1540 return self.changelog.count()
1498
1541
1499 def revmap(x):
1542 def revmap(x):
1500 return self.changelog.rev(x)
1543 return self.changelog.rev(x)
1501
1544
1502 if not source: return
1545 if not source: return
1503 changesets = files = revisions = 0
1546 changesets = files = revisions = 0
1504
1547
1505 tr = self.transaction()
1548 tr = self.transaction()
1506
1549
1507 # pull off the changeset group
1550 # pull off the changeset group
1508 self.ui.status("adding changesets\n")
1551 self.ui.status("adding changesets\n")
1509 co = self.changelog.tip()
1552 co = self.changelog.tip()
1510 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1553 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1511 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1554 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1512
1555
1513 # pull off the manifest group
1556 # pull off the manifest group
1514 self.ui.status("adding manifests\n")
1557 self.ui.status("adding manifests\n")
1515 mm = self.manifest.tip()
1558 mm = self.manifest.tip()
1516 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1559 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1517
1560
1518 # process the files
1561 # process the files
1519 self.ui.status("adding file changes\n")
1562 self.ui.status("adding file changes\n")
1520 while 1:
1563 while 1:
1521 f = getchunk()
1564 f = getchunk()
1522 if not f: break
1565 if not f: break
1523 self.ui.debug("adding %s revisions\n" % f)
1566 self.ui.debug("adding %s revisions\n" % f)
1524 fl = self.file(f)
1567 fl = self.file(f)
1525 o = fl.count()
1568 o = fl.count()
1526 n = fl.addgroup(getgroup(), revmap, tr)
1569 n = fl.addgroup(getgroup(), revmap, tr)
1527 revisions += fl.count() - o
1570 revisions += fl.count() - o
1528 files += 1
1571 files += 1
1529
1572
1530 self.ui.status(("added %d changesets" +
1573 self.ui.status(("added %d changesets" +
1531 " with %d changes to %d files\n")
1574 " with %d changes to %d files\n")
1532 % (changesets, revisions, files))
1575 % (changesets, revisions, files))
1533
1576
1534 tr.close()
1577 tr.close()
1535
1578
1536 if not self.hook("changegroup"):
1579 if not self.hook("changegroup"):
1537 return 1
1580 return 1
1538
1581
1539 return
1582 return
1540
1583
1541 def update(self, node, allow=False, force=False, choose=None,
1584 def update(self, node, allow=False, force=False, choose=None,
1542 moddirstate=True):
1585 moddirstate=True):
1543 pl = self.dirstate.parents()
1586 pl = self.dirstate.parents()
1544 if not force and pl[1] != nullid:
1587 if not force and pl[1] != nullid:
1545 self.ui.warn("aborting: outstanding uncommitted merges\n")
1588 self.ui.warn("aborting: outstanding uncommitted merges\n")
1546 return 1
1589 return 1
1547
1590
1548 p1, p2 = pl[0], node
1591 p1, p2 = pl[0], node
1549 pa = self.changelog.ancestor(p1, p2)
1592 pa = self.changelog.ancestor(p1, p2)
1550 m1n = self.changelog.read(p1)[0]
1593 m1n = self.changelog.read(p1)[0]
1551 m2n = self.changelog.read(p2)[0]
1594 m2n = self.changelog.read(p2)[0]
1552 man = self.manifest.ancestor(m1n, m2n)
1595 man = self.manifest.ancestor(m1n, m2n)
1553 m1 = self.manifest.read(m1n)
1596 m1 = self.manifest.read(m1n)
1554 mf1 = self.manifest.readflags(m1n)
1597 mf1 = self.manifest.readflags(m1n)
1555 m2 = self.manifest.read(m2n)
1598 m2 = self.manifest.read(m2n)
1556 mf2 = self.manifest.readflags(m2n)
1599 mf2 = self.manifest.readflags(m2n)
1557 ma = self.manifest.read(man)
1600 ma = self.manifest.read(man)
1558 mfa = self.manifest.readflags(man)
1601 mfa = self.manifest.readflags(man)
1559
1602
1560 (c, a, d, u) = self.changes()
1603 (c, a, d, u) = self.changes()
1561
1604
1562 # is this a jump, or a merge? i.e. is there a linear path
1605 # is this a jump, or a merge? i.e. is there a linear path
1563 # from p1 to p2?
1606 # from p1 to p2?
1564 linear_path = (pa == p1 or pa == p2)
1607 linear_path = (pa == p1 or pa == p2)
1565
1608
1566 # resolve the manifest to determine which files
1609 # resolve the manifest to determine which files
1567 # we care about merging
1610 # we care about merging
1568 self.ui.note("resolving manifests\n")
1611 self.ui.note("resolving manifests\n")
1569 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1612 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1570 (force, allow, moddirstate, linear_path))
1613 (force, allow, moddirstate, linear_path))
1571 self.ui.debug(" ancestor %s local %s remote %s\n" %
1614 self.ui.debug(" ancestor %s local %s remote %s\n" %
1572 (short(man), short(m1n), short(m2n)))
1615 (short(man), short(m1n), short(m2n)))
1573
1616
1574 merge = {}
1617 merge = {}
1575 get = {}
1618 get = {}
1576 remove = []
1619 remove = []
1577 mark = {}
1620 mark = {}
1578
1621
1579 # construct a working dir manifest
1622 # construct a working dir manifest
1580 mw = m1.copy()
1623 mw = m1.copy()
1581 mfw = mf1.copy()
1624 mfw = mf1.copy()
1582 umap = dict.fromkeys(u)
1625 umap = dict.fromkeys(u)
1583
1626
1584 for f in a + c + u:
1627 for f in a + c + u:
1585 mw[f] = ""
1628 mw[f] = ""
1586 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1629 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1587
1630
1588 for f in d:
1631 for f in d:
1589 if f in mw: del mw[f]
1632 if f in mw: del mw[f]
1590
1633
1591 # If we're jumping between revisions (as opposed to merging),
1634 # If we're jumping between revisions (as opposed to merging),
1592 # and if neither the working directory nor the target rev has
1635 # and if neither the working directory nor the target rev has
1593 # the file, then we need to remove it from the dirstate, to
1636 # the file, then we need to remove it from the dirstate, to
1594 # prevent the dirstate from listing the file when it is no
1637 # prevent the dirstate from listing the file when it is no
1595 # longer in the manifest.
1638 # longer in the manifest.
1596 if moddirstate and linear_path and f not in m2:
1639 if moddirstate and linear_path and f not in m2:
1597 self.dirstate.forget((f,))
1640 self.dirstate.forget((f,))
1598
1641
1599 # Compare manifests
1642 # Compare manifests
1600 for f, n in mw.iteritems():
1643 for f, n in mw.iteritems():
1601 if choose and not choose(f): continue
1644 if choose and not choose(f): continue
1602 if f in m2:
1645 if f in m2:
1603 s = 0
1646 s = 0
1604
1647
1605 # is the wfile new since m1, and match m2?
1648 # is the wfile new since m1, and match m2?
1606 if f not in m1:
1649 if f not in m1:
1607 t1 = self.wfile(f).read()
1650 t1 = self.wfile(f).read()
1608 t2 = self.file(f).revision(m2[f])
1651 t2 = self.file(f).revision(m2[f])
1609 if cmp(t1, t2) == 0:
1652 if cmp(t1, t2) == 0:
1610 mark[f] = 1
1653 mark[f] = 1
1611 n = m2[f]
1654 n = m2[f]
1612 del t1, t2
1655 del t1, t2
1613
1656
1614 # are files different?
1657 # are files different?
1615 if n != m2[f]:
1658 if n != m2[f]:
1616 a = ma.get(f, nullid)
1659 a = ma.get(f, nullid)
1617 # are both different from the ancestor?
1660 # are both different from the ancestor?
1618 if n != a and m2[f] != a:
1661 if n != a and m2[f] != a:
1619 self.ui.debug(" %s versions differ, resolve\n" % f)
1662 self.ui.debug(" %s versions differ, resolve\n" % f)
1620 # merge executable bits
1663 # merge executable bits
1621 # "if we changed or they changed, change in merge"
1664 # "if we changed or they changed, change in merge"
1622 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1665 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1623 mode = ((a^b) | (a^c)) ^ a
1666 mode = ((a^b) | (a^c)) ^ a
1624 merge[f] = (m1.get(f, nullid), m2[f], mode)
1667 merge[f] = (m1.get(f, nullid), m2[f], mode)
1625 s = 1
1668 s = 1
1626 # are we clobbering?
1669 # are we clobbering?
1627 # is remote's version newer?
1670 # is remote's version newer?
1628 # or are we going back in time?
1671 # or are we going back in time?
1629 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1672 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1630 self.ui.debug(" remote %s is newer, get\n" % f)
1673 self.ui.debug(" remote %s is newer, get\n" % f)
1631 get[f] = m2[f]
1674 get[f] = m2[f]
1632 s = 1
1675 s = 1
1633 else:
1676 else:
1634 mark[f] = 1
1677 mark[f] = 1
1635 elif f in umap:
1678 elif f in umap:
1636 # this unknown file is the same as the checkout
1679 # this unknown file is the same as the checkout
1637 get[f] = m2[f]
1680 get[f] = m2[f]
1638
1681
1639 if not s and mfw[f] != mf2[f]:
1682 if not s and mfw[f] != mf2[f]:
1640 if force:
1683 if force:
1641 self.ui.debug(" updating permissions for %s\n" % f)
1684 self.ui.debug(" updating permissions for %s\n" % f)
1642 util.set_exec(self.wjoin(f), mf2[f])
1685 util.set_exec(self.wjoin(f), mf2[f])
1643 else:
1686 else:
1644 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1687 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1645 mode = ((a^b) | (a^c)) ^ a
1688 mode = ((a^b) | (a^c)) ^ a
1646 if mode != b:
1689 if mode != b:
1647 self.ui.debug(" updating permissions for %s\n" % f)
1690 self.ui.debug(" updating permissions for %s\n" % f)
1648 util.set_exec(self.wjoin(f), mode)
1691 util.set_exec(self.wjoin(f), mode)
1649 mark[f] = 1
1692 mark[f] = 1
1650 del m2[f]
1693 del m2[f]
1651 elif f in ma:
1694 elif f in ma:
1652 if n != ma[f]:
1695 if n != ma[f]:
1653 r = "d"
1696 r = "d"
1654 if not force and (linear_path or allow):
1697 if not force and (linear_path or allow):
1655 r = self.ui.prompt(
1698 r = self.ui.prompt(
1656 (" local changed %s which remote deleted\n" % f) +
1699 (" local changed %s which remote deleted\n" % f) +
1657 "(k)eep or (d)elete?", "[kd]", "k")
1700 "(k)eep or (d)elete?", "[kd]", "k")
1658 if r == "d":
1701 if r == "d":
1659 remove.append(f)
1702 remove.append(f)
1660 else:
1703 else:
1661 self.ui.debug("other deleted %s\n" % f)
1704 self.ui.debug("other deleted %s\n" % f)
1662 remove.append(f) # other deleted it
1705 remove.append(f) # other deleted it
1663 else:
1706 else:
1664 if n == m1.get(f, nullid): # same as parent
1707 if n == m1.get(f, nullid): # same as parent
1665 if p2 == pa: # going backwards?
1708 if p2 == pa: # going backwards?
1666 self.ui.debug("remote deleted %s\n" % f)
1709 self.ui.debug("remote deleted %s\n" % f)
1667 remove.append(f)
1710 remove.append(f)
1668 else:
1711 else:
1669 self.ui.debug("local created %s, keeping\n" % f)
1712 self.ui.debug("local created %s, keeping\n" % f)
1670 else:
1713 else:
1671 self.ui.debug("working dir created %s, keeping\n" % f)
1714 self.ui.debug("working dir created %s, keeping\n" % f)
1672
1715
1673 for f, n in m2.iteritems():
1716 for f, n in m2.iteritems():
1674 if choose and not choose(f): continue
1717 if choose and not choose(f): continue
1675 if f[0] == "/": continue
1718 if f[0] == "/": continue
1676 if f in ma and n != ma[f]:
1719 if f in ma and n != ma[f]:
1677 r = "k"
1720 r = "k"
1678 if not force and (linear_path or allow):
1721 if not force and (linear_path or allow):
1679 r = self.ui.prompt(
1722 r = self.ui.prompt(
1680 ("remote changed %s which local deleted\n" % f) +
1723 ("remote changed %s which local deleted\n" % f) +
1681 "(k)eep or (d)elete?", "[kd]", "k")
1724 "(k)eep or (d)elete?", "[kd]", "k")
1682 if r == "k": get[f] = n
1725 if r == "k": get[f] = n
1683 elif f not in ma:
1726 elif f not in ma:
1684 self.ui.debug("remote created %s\n" % f)
1727 self.ui.debug("remote created %s\n" % f)
1685 get[f] = n
1728 get[f] = n
1686 else:
1729 else:
1687 if force or p2 == pa: # going backwards?
1730 if force or p2 == pa: # going backwards?
1688 self.ui.debug("local deleted %s, recreating\n" % f)
1731 self.ui.debug("local deleted %s, recreating\n" % f)
1689 get[f] = n
1732 get[f] = n
1690 else:
1733 else:
1691 self.ui.debug("local deleted %s\n" % f)
1734 self.ui.debug("local deleted %s\n" % f)
1692
1735
1693 del mw, m1, m2, ma
1736 del mw, m1, m2, ma
1694
1737
1695 if force:
1738 if force:
1696 for f in merge:
1739 for f in merge:
1697 get[f] = merge[f][1]
1740 get[f] = merge[f][1]
1698 merge = {}
1741 merge = {}
1699
1742
1700 if linear_path or force:
1743 if linear_path or force:
1701 # we don't need to do any magic, just jump to the new rev
1744 # we don't need to do any magic, just jump to the new rev
1702 mode = 'n'
1745 mode = 'n'
1703 p1, p2 = p2, nullid
1746 p1, p2 = p2, nullid
1704 else:
1747 else:
1705 if not allow:
1748 if not allow:
1706 self.ui.status("this update spans a branch" +
1749 self.ui.status("this update spans a branch" +
1707 " affecting the following files:\n")
1750 " affecting the following files:\n")
1708 fl = merge.keys() + get.keys()
1751 fl = merge.keys() + get.keys()
1709 fl.sort()
1752 fl.sort()
1710 for f in fl:
1753 for f in fl:
1711 cf = ""
1754 cf = ""
1712 if f in merge: cf = " (resolve)"
1755 if f in merge: cf = " (resolve)"
1713 self.ui.status(" %s%s\n" % (f, cf))
1756 self.ui.status(" %s%s\n" % (f, cf))
1714 self.ui.warn("aborting update spanning branches!\n")
1757 self.ui.warn("aborting update spanning branches!\n")
1715 self.ui.status("(use update -m to merge across branches" +
1758 self.ui.status("(use update -m to merge across branches" +
1716 " or -C to lose changes)\n")
1759 " or -C to lose changes)\n")
1717 return 1
1760 return 1
1718 # we have to remember what files we needed to get/change
1719 # because any file that's different from either one of its
1720 # parents must be in the changeset
1721 mode = 'm'
1761 mode = 'm'
1722 if moddirstate:
1723 self.dirstate.update(mark.keys(), "m")
1724
1762
1725 if moddirstate:
1763 if moddirstate:
1726 self.dirstate.setparents(p1, p2)
1764 self.dirstate.setparents(p1, p2)
1727
1765
1728 # get the files we don't need to change
1766 # get the files we don't need to change
1729 files = get.keys()
1767 files = get.keys()
1730 files.sort()
1768 files.sort()
1731 for f in files:
1769 for f in files:
1732 if f[0] == "/": continue
1770 if f[0] == "/": continue
1733 self.ui.note("getting %s\n" % f)
1771 self.ui.note("getting %s\n" % f)
1734 t = self.file(f).read(get[f])
1772 t = self.file(f).read(get[f])
1735 try:
1773 try:
1736 self.wfile(f, "w").write(t)
1774 self.wfile(f, "w").write(t)
1737 except IOError:
1775 except IOError:
1738 os.makedirs(os.path.dirname(self.wjoin(f)))
1776 os.makedirs(os.path.dirname(self.wjoin(f)))
1739 self.wfile(f, "w").write(t)
1777 self.wfile(f, "w").write(t)
1740 util.set_exec(self.wjoin(f), mf2[f])
1778 util.set_exec(self.wjoin(f), mf2[f])
1741 if moddirstate:
1779 if moddirstate:
1742 self.dirstate.update([f], mode)
1780 self.dirstate.update([f], 'n')
1743
1781
1744 # merge the tricky bits
1782 # merge the tricky bits
1745 files = merge.keys()
1783 files = merge.keys()
1746 files.sort()
1784 files.sort()
1747 for f in files:
1785 for f in files:
1748 self.ui.status("merging %s\n" % f)
1786 self.ui.status("merging %s\n" % f)
1749 m, o, flag = merge[f]
1787 m, o, flag = merge[f]
1750 self.merge3(f, m, o)
1788 self.merge3(f, m, o)
1751 util.set_exec(self.wjoin(f), flag)
1789 util.set_exec(self.wjoin(f), flag)
1752 if moddirstate:
1790 if moddirstate:
1753 if mode == 'm':
1791 if mode == 'm':
1754 # only update dirstate on branch merge, otherwise we
1792 # only update dirstate on branch merge, otherwise we
1755 # could mark files with changes as unchanged
1793 # could mark files with changes as unchanged
1756 self.dirstate.update([f], mode)
1794 self.dirstate.update([f], mode)
1757 elif p2 == nullid:
1795 elif p2 == nullid:
1758 # update dirstate from parent1's manifest
1796 # update dirstate from parent1's manifest
1759 m1n = self.changelog.read(p1)[0]
1797 m1n = self.changelog.read(p1)[0]
1760 m1 = self.manifest.read(m1n)
1798 m1 = self.manifest.read(m1n)
1761 f_len = len(self.file(f).read(m1[f]))
1799 f_len = len(self.file(f).read(m1[f]))
1762 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1800 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1763 else:
1801 else:
1764 self.ui.warn("Second parent without branch merge!?\n"
1802 self.ui.warn("Second parent without branch merge!?\n"
1765 "Dirstate for file %s may be wrong.\n" % f)
1803 "Dirstate for file %s may be wrong.\n" % f)
1766
1804
1767 remove.sort()
1805 remove.sort()
1768 for f in remove:
1806 for f in remove:
1769 self.ui.note("removing %s\n" % f)
1807 self.ui.note("removing %s\n" % f)
1770 try:
1808 try:
1771 os.unlink(self.wjoin(f))
1809 os.unlink(self.wjoin(f))
1772 except OSError, inst:
1810 except OSError, inst:
1773 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1811 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1774 # try removing directories that might now be empty
1812 # try removing directories that might now be empty
1775 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1813 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1776 except: pass
1814 except: pass
1777 if moddirstate:
1815 if moddirstate:
1778 if mode == 'n':
1816 if mode == 'n':
1779 self.dirstate.forget(remove)
1817 self.dirstate.forget(remove)
1780 else:
1818 else:
1781 self.dirstate.update(remove, 'r')
1819 self.dirstate.update(remove, 'r')
1782
1820
1783 def merge3(self, fn, my, other):
1821 def merge3(self, fn, my, other):
1784 """perform a 3-way merge in the working directory"""
1822 """perform a 3-way merge in the working directory"""
1785
1823
1786 def temp(prefix, node):
1824 def temp(prefix, node):
1787 pre = "%s~%s." % (os.path.basename(fn), prefix)
1825 pre = "%s~%s." % (os.path.basename(fn), prefix)
1788 (fd, name) = tempfile.mkstemp("", pre)
1826 (fd, name) = tempfile.mkstemp("", pre)
1789 f = os.fdopen(fd, "wb")
1827 f = os.fdopen(fd, "wb")
1790 f.write(fl.revision(node))
1828 f.write(fl.revision(node))
1791 f.close()
1829 f.close()
1792 return name
1830 return name
1793
1831
1794 fl = self.file(fn)
1832 fl = self.file(fn)
1795 base = fl.ancestor(my, other)
1833 base = fl.ancestor(my, other)
1796 a = self.wjoin(fn)
1834 a = self.wjoin(fn)
1797 b = temp("base", base)
1835 b = temp("base", base)
1798 c = temp("other", other)
1836 c = temp("other", other)
1799
1837
1800 self.ui.note("resolving %s\n" % fn)
1838 self.ui.note("resolving %s\n" % fn)
1801 self.ui.debug("file %s: other %s ancestor %s\n" %
1839 self.ui.debug("file %s: other %s ancestor %s\n" %
1802 (fn, short(other), short(base)))
1840 (fn, short(other), short(base)))
1803
1841
1804 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1842 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1805 or "hgmerge")
1843 or "hgmerge")
1806 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1844 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1807 if r:
1845 if r:
1808 self.ui.warn("merging %s failed!\n" % fn)
1846 self.ui.warn("merging %s failed!\n" % fn)
1809
1847
1810 os.unlink(b)
1848 os.unlink(b)
1811 os.unlink(c)
1849 os.unlink(c)
1812
1850
1813 def verify(self):
1851 def verify(self):
1814 filelinkrevs = {}
1852 filelinkrevs = {}
1815 filenodes = {}
1853 filenodes = {}
1816 changesets = revisions = files = 0
1854 changesets = revisions = files = 0
1817 errors = 0
1855 errors = 0
1818
1856
1819 seen = {}
1857 seen = {}
1820 self.ui.status("checking changesets\n")
1858 self.ui.status("checking changesets\n")
1821 for i in range(self.changelog.count()):
1859 for i in range(self.changelog.count()):
1822 changesets += 1
1860 changesets += 1
1823 n = self.changelog.node(i)
1861 n = self.changelog.node(i)
1824 if n in seen:
1862 if n in seen:
1825 self.ui.warn("duplicate changeset at revision %d\n" % i)
1863 self.ui.warn("duplicate changeset at revision %d\n" % i)
1826 errors += 1
1864 errors += 1
1827 seen[n] = 1
1865 seen[n] = 1
1828
1866
1829 for p in self.changelog.parents(n):
1867 for p in self.changelog.parents(n):
1830 if p not in self.changelog.nodemap:
1868 if p not in self.changelog.nodemap:
1831 self.ui.warn("changeset %s has unknown parent %s\n" %
1869 self.ui.warn("changeset %s has unknown parent %s\n" %
1832 (short(n), short(p)))
1870 (short(n), short(p)))
1833 errors += 1
1871 errors += 1
1834 try:
1872 try:
1835 changes = self.changelog.read(n)
1873 changes = self.changelog.read(n)
1836 except Exception, inst:
1874 except Exception, inst:
1837 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1875 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1838 errors += 1
1876 errors += 1
1839
1877
1840 for f in changes[3]:
1878 for f in changes[3]:
1841 filelinkrevs.setdefault(f, []).append(i)
1879 filelinkrevs.setdefault(f, []).append(i)
1842
1880
1843 seen = {}
1881 seen = {}
1844 self.ui.status("checking manifests\n")
1882 self.ui.status("checking manifests\n")
1845 for i in range(self.manifest.count()):
1883 for i in range(self.manifest.count()):
1846 n = self.manifest.node(i)
1884 n = self.manifest.node(i)
1847 if n in seen:
1885 if n in seen:
1848 self.ui.warn("duplicate manifest at revision %d\n" % i)
1886 self.ui.warn("duplicate manifest at revision %d\n" % i)
1849 errors += 1
1887 errors += 1
1850 seen[n] = 1
1888 seen[n] = 1
1851
1889
1852 for p in self.manifest.parents(n):
1890 for p in self.manifest.parents(n):
1853 if p not in self.manifest.nodemap:
1891 if p not in self.manifest.nodemap:
1854 self.ui.warn("manifest %s has unknown parent %s\n" %
1892 self.ui.warn("manifest %s has unknown parent %s\n" %
1855 (short(n), short(p)))
1893 (short(n), short(p)))
1856 errors += 1
1894 errors += 1
1857
1895
1858 try:
1896 try:
1859 delta = mdiff.patchtext(self.manifest.delta(n))
1897 delta = mdiff.patchtext(self.manifest.delta(n))
1860 except KeyboardInterrupt:
1898 except KeyboardInterrupt:
1861 self.ui.warn("aborted")
1899 self.ui.warn("aborted")
1862 sys.exit(0)
1900 sys.exit(0)
1863 except Exception, inst:
1901 except Exception, inst:
1864 self.ui.warn("unpacking manifest %s: %s\n"
1902 self.ui.warn("unpacking manifest %s: %s\n"
1865 % (short(n), inst))
1903 % (short(n), inst))
1866 errors += 1
1904 errors += 1
1867
1905
1868 ff = [ l.split('\0') for l in delta.splitlines() ]
1906 ff = [ l.split('\0') for l in delta.splitlines() ]
1869 for f, fn in ff:
1907 for f, fn in ff:
1870 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1908 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1871
1909
1872 self.ui.status("crosschecking files in changesets and manifests\n")
1910 self.ui.status("crosschecking files in changesets and manifests\n")
1873 for f in filenodes:
1911 for f in filenodes:
1874 if f not in filelinkrevs:
1912 if f not in filelinkrevs:
1875 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1913 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1876 errors += 1
1914 errors += 1
1877
1915
1878 for f in filelinkrevs:
1916 for f in filelinkrevs:
1879 if f not in filenodes:
1917 if f not in filenodes:
1880 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1918 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1881 errors += 1
1919 errors += 1
1882
1920
1883 self.ui.status("checking files\n")
1921 self.ui.status("checking files\n")
1884 ff = filenodes.keys()
1922 ff = filenodes.keys()
1885 ff.sort()
1923 ff.sort()
1886 for f in ff:
1924 for f in ff:
1887 if f == "/dev/null": continue
1925 if f == "/dev/null": continue
1888 files += 1
1926 files += 1
1889 fl = self.file(f)
1927 fl = self.file(f)
1890 nodes = { nullid: 1 }
1928 nodes = { nullid: 1 }
1891 seen = {}
1929 seen = {}
1892 for i in range(fl.count()):
1930 for i in range(fl.count()):
1893 revisions += 1
1931 revisions += 1
1894 n = fl.node(i)
1932 n = fl.node(i)
1895
1933
1896 if n in seen:
1934 if n in seen:
1897 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1935 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1898 errors += 1
1936 errors += 1
1899
1937
1900 if n not in filenodes[f]:
1938 if n not in filenodes[f]:
1901 self.ui.warn("%s: %d:%s not in manifests\n"
1939 self.ui.warn("%s: %d:%s not in manifests\n"
1902 % (f, i, short(n)))
1940 % (f, i, short(n)))
1903 errors += 1
1941 errors += 1
1904 else:
1942 else:
1905 del filenodes[f][n]
1943 del filenodes[f][n]
1906
1944
1907 flr = fl.linkrev(n)
1945 flr = fl.linkrev(n)
1908 if flr not in filelinkrevs[f]:
1946 if flr not in filelinkrevs[f]:
1909 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1947 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1910 % (f, short(n), fl.linkrev(n)))
1948 % (f, short(n), fl.linkrev(n)))
1911 errors += 1
1949 errors += 1
1912 else:
1950 else:
1913 filelinkrevs[f].remove(flr)
1951 filelinkrevs[f].remove(flr)
1914
1952
1915 # verify contents
1953 # verify contents
1916 try:
1954 try:
1917 t = fl.read(n)
1955 t = fl.read(n)
1918 except Exception, inst:
1956 except Exception, inst:
1919 self.ui.warn("unpacking file %s %s: %s\n"
1957 self.ui.warn("unpacking file %s %s: %s\n"
1920 % (f, short(n), inst))
1958 % (f, short(n), inst))
1921 errors += 1
1959 errors += 1
1922
1960
1923 # verify parents
1961 # verify parents
1924 (p1, p2) = fl.parents(n)
1962 (p1, p2) = fl.parents(n)
1925 if p1 not in nodes:
1963 if p1 not in nodes:
1926 self.ui.warn("file %s:%s unknown parent 1 %s" %
1964 self.ui.warn("file %s:%s unknown parent 1 %s" %
1927 (f, short(n), short(p1)))
1965 (f, short(n), short(p1)))
1928 errors += 1
1966 errors += 1
1929 if p2 not in nodes:
1967 if p2 not in nodes:
1930 self.ui.warn("file %s:%s unknown parent 2 %s" %
1968 self.ui.warn("file %s:%s unknown parent 2 %s" %
1931 (f, short(n), short(p1)))
1969 (f, short(n), short(p1)))
1932 errors += 1
1970 errors += 1
1933 nodes[n] = 1
1971 nodes[n] = 1
1934
1972
1935 # cross-check
1973 # cross-check
1936 for node in filenodes[f]:
1974 for node in filenodes[f]:
1937 self.ui.warn("node %s in manifests not in %s\n"
1975 self.ui.warn("node %s in manifests not in %s\n"
1938 % (hex(node), f))
1976 % (hex(node), f))
1939 errors += 1
1977 errors += 1
1940
1978
1941 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1979 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1942 (files, changesets, revisions))
1980 (files, changesets, revisions))
1943
1981
1944 if errors:
1982 if errors:
1945 self.ui.warn("%d integrity errors encountered!\n" % errors)
1983 self.ui.warn("%d integrity errors encountered!\n" % errors)
1946 return 1
1984 return 1
1947
1985
1948 class remoterepository:
1986 class remoterepository:
1949 def local(self):
1987 def local(self):
1950 return False
1988 return False
1951
1989
1952 class httprepository(remoterepository):
1990 class httprepository(remoterepository):
1953 def __init__(self, ui, path):
1991 def __init__(self, ui, path):
1954 # fix missing / after hostname
1992 # fix missing / after hostname
1955 s = urlparse.urlsplit(path)
1993 s = urlparse.urlsplit(path)
1956 partial = s[2]
1994 partial = s[2]
1957 if not partial: partial = "/"
1995 if not partial: partial = "/"
1958 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1996 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1959 self.ui = ui
1997 self.ui = ui
1960 no_list = [ "localhost", "127.0.0.1" ]
1998 no_list = [ "localhost", "127.0.0.1" ]
1961 host = ui.config("http_proxy", "host")
1999 host = ui.config("http_proxy", "host")
1962 if host is None:
2000 if host is None:
1963 host = os.environ.get("http_proxy")
2001 host = os.environ.get("http_proxy")
1964 if host and host.startswith('http://'):
2002 if host and host.startswith('http://'):
1965 host = host[7:]
2003 host = host[7:]
1966 user = ui.config("http_proxy", "user")
2004 user = ui.config("http_proxy", "user")
1967 passwd = ui.config("http_proxy", "passwd")
2005 passwd = ui.config("http_proxy", "passwd")
1968 no = ui.config("http_proxy", "no")
2006 no = ui.config("http_proxy", "no")
1969 if no is None:
2007 if no is None:
1970 no = os.environ.get("no_proxy")
2008 no = os.environ.get("no_proxy")
1971 if no:
2009 if no:
1972 no_list = no_list + no.split(",")
2010 no_list = no_list + no.split(",")
1973
2011
1974 no_proxy = 0
2012 no_proxy = 0
1975 for h in no_list:
2013 for h in no_list:
1976 if (path.startswith("http://" + h + "/") or
2014 if (path.startswith("http://" + h + "/") or
1977 path.startswith("http://" + h + ":") or
2015 path.startswith("http://" + h + ":") or
1978 path == "http://" + h):
2016 path == "http://" + h):
1979 no_proxy = 1
2017 no_proxy = 1
1980
2018
1981 # Note: urllib2 takes proxy values from the environment and those will
2019 # Note: urllib2 takes proxy values from the environment and those will
1982 # take precedence
2020 # take precedence
1983 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
2021 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1984 try:
2022 try:
1985 if os.environ.has_key(env):
2023 if os.environ.has_key(env):
1986 del os.environ[env]
2024 del os.environ[env]
1987 except OSError:
2025 except OSError:
1988 pass
2026 pass
1989
2027
1990 proxy_handler = urllib2.BaseHandler()
2028 proxy_handler = urllib2.BaseHandler()
1991 if host and not no_proxy:
2029 if host and not no_proxy:
1992 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
2030 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1993
2031
1994 authinfo = None
2032 authinfo = None
1995 if user and passwd:
2033 if user and passwd:
1996 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
2034 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1997 passmgr.add_password(None, host, user, passwd)
2035 passmgr.add_password(None, host, user, passwd)
1998 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
2036 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1999
2037
2000 opener = urllib2.build_opener(proxy_handler, authinfo)
2038 opener = urllib2.build_opener(proxy_handler, authinfo)
2001 urllib2.install_opener(opener)
2039 urllib2.install_opener(opener)
2002
2040
2003 def dev(self):
2041 def dev(self):
2004 return -1
2042 return -1
2005
2043
2006 def do_cmd(self, cmd, **args):
2044 def do_cmd(self, cmd, **args):
2007 self.ui.debug("sending %s command\n" % cmd)
2045 self.ui.debug("sending %s command\n" % cmd)
2008 q = {"cmd": cmd}
2046 q = {"cmd": cmd}
2009 q.update(args)
2047 q.update(args)
2010 qs = urllib.urlencode(q)
2048 qs = urllib.urlencode(q)
2011 cu = "%s?%s" % (self.url, qs)
2049 cu = "%s?%s" % (self.url, qs)
2012 resp = urllib2.urlopen(cu)
2050 resp = urllib2.urlopen(cu)
2013 proto = resp.headers['content-type']
2051 proto = resp.headers['content-type']
2014
2052
2015 # accept old "text/plain" and "application/hg-changegroup" for now
2053 # accept old "text/plain" and "application/hg-changegroup" for now
2016 if not proto.startswith('application/mercurial') and \
2054 if not proto.startswith('application/mercurial') and \
2017 not proto.startswith('text/plain') and \
2055 not proto.startswith('text/plain') and \
2018 not proto.startswith('application/hg-changegroup'):
2056 not proto.startswith('application/hg-changegroup'):
2019 raise RepoError("'%s' does not appear to be an hg repository"
2057 raise RepoError("'%s' does not appear to be an hg repository"
2020 % self.url)
2058 % self.url)
2021
2059
2022 if proto.startswith('application/mercurial'):
2060 if proto.startswith('application/mercurial'):
2023 version = proto[22:]
2061 version = proto[22:]
2024 if float(version) > 0.1:
2062 if float(version) > 0.1:
2025 raise RepoError("'%s' uses newer protocol %s" %
2063 raise RepoError("'%s' uses newer protocol %s" %
2026 (self.url, version))
2064 (self.url, version))
2027
2065
2028 return resp
2066 return resp
2029
2067
2030 def heads(self):
2068 def heads(self):
2031 d = self.do_cmd("heads").read()
2069 d = self.do_cmd("heads").read()
2032 try:
2070 try:
2033 return map(bin, d[:-1].split(" "))
2071 return map(bin, d[:-1].split(" "))
2034 except:
2072 except:
2035 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2073 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2036 raise
2074 raise
2037
2075
2038 def branches(self, nodes):
2076 def branches(self, nodes):
2039 n = " ".join(map(hex, nodes))
2077 n = " ".join(map(hex, nodes))
2040 d = self.do_cmd("branches", nodes=n).read()
2078 d = self.do_cmd("branches", nodes=n).read()
2041 try:
2079 try:
2042 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2080 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2043 return br
2081 return br
2044 except:
2082 except:
2045 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2083 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2046 raise
2084 raise
2047
2085
2048 def between(self, pairs):
2086 def between(self, pairs):
2049 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2087 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2050 d = self.do_cmd("between", pairs=n).read()
2088 d = self.do_cmd("between", pairs=n).read()
2051 try:
2089 try:
2052 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2090 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2053 return p
2091 return p
2054 except:
2092 except:
2055 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2093 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2056 raise
2094 raise
2057
2095
2058 def changegroup(self, nodes):
2096 def changegroup(self, nodes):
2059 n = " ".join(map(hex, nodes))
2097 n = " ".join(map(hex, nodes))
2060 f = self.do_cmd("changegroup", roots=n)
2098 f = self.do_cmd("changegroup", roots=n)
2061 bytes = 0
2099 bytes = 0
2062
2100
2063 class zread:
2101 class zread:
2064 def __init__(self, f):
2102 def __init__(self, f):
2065 self.zd = zlib.decompressobj()
2103 self.zd = zlib.decompressobj()
2066 self.f = f
2104 self.f = f
2067 self.buf = ""
2105 self.buf = ""
2068 def read(self, l):
2106 def read(self, l):
2069 while l > len(self.buf):
2107 while l > len(self.buf):
2070 r = self.f.read(4096)
2108 r = self.f.read(4096)
2071 if r:
2109 if r:
2072 self.buf += self.zd.decompress(r)
2110 self.buf += self.zd.decompress(r)
2073 else:
2111 else:
2074 self.buf += self.zd.flush()
2112 self.buf += self.zd.flush()
2075 break
2113 break
2076 d, self.buf = self.buf[:l], self.buf[l:]
2114 d, self.buf = self.buf[:l], self.buf[l:]
2077 return d
2115 return d
2078
2116
2079 return zread(f)
2117 return zread(f)
2080
2118
2081 class remotelock:
2119 class remotelock:
2082 def __init__(self, repo):
2120 def __init__(self, repo):
2083 self.repo = repo
2121 self.repo = repo
2084 def release(self):
2122 def release(self):
2085 self.repo.unlock()
2123 self.repo.unlock()
2086 self.repo = None
2124 self.repo = None
2087 def __del__(self):
2125 def __del__(self):
2088 if self.repo:
2126 if self.repo:
2089 self.release()
2127 self.release()
2090
2128
2091 class sshrepository(remoterepository):
2129 class sshrepository(remoterepository):
2092 def __init__(self, ui, path):
2130 def __init__(self, ui, path):
2093 self.url = path
2131 self.url = path
2094 self.ui = ui
2132 self.ui = ui
2095
2133
2096 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2134 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2097 if not m:
2135 if not m:
2098 raise RepoError("couldn't parse destination %s" % path)
2136 raise RepoError("couldn't parse destination %s" % path)
2099
2137
2100 self.user = m.group(2)
2138 self.user = m.group(2)
2101 self.host = m.group(3)
2139 self.host = m.group(3)
2102 self.port = m.group(5)
2140 self.port = m.group(5)
2103 self.path = m.group(7)
2141 self.path = m.group(7)
2104
2142
2105 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2143 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2106 args = self.port and ("%s -p %s") % (args, self.port) or args
2144 args = self.port and ("%s -p %s") % (args, self.port) or args
2107 path = self.path or ""
2145 path = self.path or ""
2108
2146
2109 if not path:
2147 if not path:
2110 raise RepoError("no remote repository path specified")
2148 raise RepoError("no remote repository path specified")
2111
2149
2112 sshcmd = self.ui.config("ui", "ssh", "ssh")
2150 sshcmd = self.ui.config("ui", "ssh", "ssh")
2113 remotecmd = self.ui.config("ui", "remotecmd", "hg")
2151 remotecmd = self.ui.config("ui", "remotecmd", "hg")
2114 cmd = "%s %s '%s -R %s serve --stdio'"
2152 cmd = "%s %s '%s -R %s serve --stdio'"
2115 cmd = cmd % (sshcmd, args, remotecmd, path)
2153 cmd = cmd % (sshcmd, args, remotecmd, path)
2116
2154
2117 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2155 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2118
2156
2119 def readerr(self):
2157 def readerr(self):
2120 while 1:
2158 while 1:
2121 r,w,x = select.select([self.pipee], [], [], 0)
2159 r,w,x = select.select([self.pipee], [], [], 0)
2122 if not r: break
2160 if not r: break
2123 l = self.pipee.readline()
2161 l = self.pipee.readline()
2124 if not l: break
2162 if not l: break
2125 self.ui.status("remote: ", l)
2163 self.ui.status("remote: ", l)
2126
2164
2127 def __del__(self):
2165 def __del__(self):
2128 try:
2166 try:
2129 self.pipeo.close()
2167 self.pipeo.close()
2130 self.pipei.close()
2168 self.pipei.close()
2131 for l in self.pipee:
2169 for l in self.pipee:
2132 self.ui.status("remote: ", l)
2170 self.ui.status("remote: ", l)
2133 self.pipee.close()
2171 self.pipee.close()
2134 except:
2172 except:
2135 pass
2173 pass
2136
2174
2137 def dev(self):
2175 def dev(self):
2138 return -1
2176 return -1
2139
2177
2140 def do_cmd(self, cmd, **args):
2178 def do_cmd(self, cmd, **args):
2141 self.ui.debug("sending %s command\n" % cmd)
2179 self.ui.debug("sending %s command\n" % cmd)
2142 self.pipeo.write("%s\n" % cmd)
2180 self.pipeo.write("%s\n" % cmd)
2143 for k, v in args.items():
2181 for k, v in args.items():
2144 self.pipeo.write("%s %d\n" % (k, len(v)))
2182 self.pipeo.write("%s %d\n" % (k, len(v)))
2145 self.pipeo.write(v)
2183 self.pipeo.write(v)
2146 self.pipeo.flush()
2184 self.pipeo.flush()
2147
2185
2148 return self.pipei
2186 return self.pipei
2149
2187
2150 def call(self, cmd, **args):
2188 def call(self, cmd, **args):
2151 r = self.do_cmd(cmd, **args)
2189 r = self.do_cmd(cmd, **args)
2152 l = r.readline()
2190 l = r.readline()
2153 self.readerr()
2191 self.readerr()
2154 try:
2192 try:
2155 l = int(l)
2193 l = int(l)
2156 except:
2194 except:
2157 raise RepoError("unexpected response '%s'" % l)
2195 raise RepoError("unexpected response '%s'" % l)
2158 return r.read(l)
2196 return r.read(l)
2159
2197
2160 def lock(self):
2198 def lock(self):
2161 self.call("lock")
2199 self.call("lock")
2162 return remotelock(self)
2200 return remotelock(self)
2163
2201
2164 def unlock(self):
2202 def unlock(self):
2165 self.call("unlock")
2203 self.call("unlock")
2166
2204
2167 def heads(self):
2205 def heads(self):
2168 d = self.call("heads")
2206 d = self.call("heads")
2169 try:
2207 try:
2170 return map(bin, d[:-1].split(" "))
2208 return map(bin, d[:-1].split(" "))
2171 except:
2209 except:
2172 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2210 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2173
2211
2174 def branches(self, nodes):
2212 def branches(self, nodes):
2175 n = " ".join(map(hex, nodes))
2213 n = " ".join(map(hex, nodes))
2176 d = self.call("branches", nodes=n)
2214 d = self.call("branches", nodes=n)
2177 try:
2215 try:
2178 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2216 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2179 return br
2217 return br
2180 except:
2218 except:
2181 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2219 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2182
2220
2183 def between(self, pairs):
2221 def between(self, pairs):
2184 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2222 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2185 d = self.call("between", pairs=n)
2223 d = self.call("between", pairs=n)
2186 try:
2224 try:
2187 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2225 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2188 return p
2226 return p
2189 except:
2227 except:
2190 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2228 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2191
2229
2192 def changegroup(self, nodes):
2230 def changegroup(self, nodes):
2193 n = " ".join(map(hex, nodes))
2231 n = " ".join(map(hex, nodes))
2194 f = self.do_cmd("changegroup", roots=n)
2232 f = self.do_cmd("changegroup", roots=n)
2195 return self.pipei
2233 return self.pipei
2196
2234
2197 def addchangegroup(self, cg):
2235 def addchangegroup(self, cg):
2198 d = self.call("addchangegroup")
2236 d = self.call("addchangegroup")
2199 if d:
2237 if d:
2200 raise RepoError("push refused: %s", d)
2238 raise RepoError("push refused: %s", d)
2201
2239
2202 while 1:
2240 while 1:
2203 d = cg.read(4096)
2241 d = cg.read(4096)
2204 if not d: break
2242 if not d: break
2205 self.pipeo.write(d)
2243 self.pipeo.write(d)
2206 self.readerr()
2244 self.readerr()
2207
2245
2208 self.pipeo.flush()
2246 self.pipeo.flush()
2209
2247
2210 self.readerr()
2248 self.readerr()
2211 l = int(self.pipei.readline())
2249 l = int(self.pipei.readline())
2212 return self.pipei.read(l) != ""
2250 return self.pipei.read(l) != ""
2213
2251
2214 class httpsrepository(httprepository):
2252 class httpsrepository(httprepository):
2215 pass
2253 pass
2216
2254
2217 def repository(ui, path=None, create=0):
2255 def repository(ui, path=None, create=0):
2218 if path:
2256 if path:
2219 if path.startswith("http://"):
2257 if path.startswith("http://"):
2220 return httprepository(ui, path)
2258 return httprepository(ui, path)
2221 if path.startswith("https://"):
2259 if path.startswith("https://"):
2222 return httpsrepository(ui, path)
2260 return httpsrepository(ui, path)
2223 if path.startswith("hg://"):
2261 if path.startswith("hg://"):
2224 return httprepository(ui, path.replace("hg://", "http://"))
2262 return httprepository(ui, path.replace("hg://", "http://"))
2225 if path.startswith("old-http://"):
2263 if path.startswith("old-http://"):
2226 return localrepository(ui, path.replace("old-http://", "http://"))
2264 return localrepository(ui, path.replace("old-http://", "http://"))
2227 if path.startswith("ssh://"):
2265 if path.startswith("ssh://"):
2228 return sshrepository(ui, path)
2266 return sshrepository(ui, path)
2229
2267
2230 return localrepository(ui, path, create)
2268 return localrepository(ui, path, create)
@@ -1,18 +1,18 b''
1 pulling from ../B1
1 pulling from ../B1
2 searching for changes
2 searching for changes
3 adding changesets
3 adding changesets
4 adding manifests
4 adding manifests
5 adding file changes
5 adding file changes
6 added 1 changesets with 1 changes to 1 files
6 added 1 changesets with 1 changes to 1 files
7 (run 'hg update' to get a working copy)
7 (run 'hg update' to get a working copy)
8 bar should remain deleted.
8 bar should remain deleted.
9 6b70e9e451a5a33faad7bbebe627e46b937b7364 644 foo
9 f405ac83a5611071d6b54dd5eb26943b1fdc4460 644 foo
10 pulling from ../A2
10 pulling from ../A2
11 searching for changes
11 searching for changes
12 adding changesets
12 adding changesets
13 adding manifests
13 adding manifests
14 adding file changes
14 adding file changes
15 added 1 changesets with 0 changes to 0 files
15 added 1 changesets with 0 changes to 0 files
16 (run 'hg update' to get a working copy)
16 (run 'hg update' to get a working copy)
17 bar should remain deleted.
17 bar should remain deleted.
18 6b70e9e451a5a33faad7bbebe627e46b937b7364 644 foo
18 f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
@@ -1,21 +1,21 b''
1 pushing to ../a
1 pushing to ../a
2 searching for changes
2 searching for changes
3 abort: unsynced remote changes!
3 abort: unsynced remote changes!
4 (did you forget to sync? use push -f to force)
4 (did you forget to sync? use push -f to force)
5 pulling from ../a
5 pulling from ../a
6 searching for changes
6 searching for changes
7 adding changesets
7 adding changesets
8 adding manifests
8 adding manifests
9 adding file changes
9 adding file changes
10 added 1 changesets with 1 changes to 1 files
10 added 1 changesets with 1 changes to 1 files
11 (run 'hg update' to get a working copy)
11 (run 'hg update' to get a working copy)
12 pushing to ../a
12 pushing to ../a
13 searching for changes
13 searching for changes
14 abort: push creates new remote branches!
14 abort: push creates new remote branches!
15 (did you forget to merge? use push -f to force)
15 (did you forget to merge? use push -f to force)
16 pushing to ../a
16 pushing to ../a
17 searching for changes
17 searching for changes
18 adding changesets
18 adding changesets
19 adding manifests
19 adding manifests
20 adding file changes
20 adding file changes
21 added 2 changesets with 2 changes to 2 files
21 added 2 changesets with 1 changes to 1 files
@@ -1,15 +1,13 b''
1 unknown
1 unknown
2 acb14030fe0a tip
2 acb14030fe0a tip
3 acb14030fe0a21b60322c440ad2d20cf7685a376 first
3 acb14030fe0a21b60322c440ad2d20cf7685a376 first
4 tip 1:b9154636be938d3d431e75a7c906504a079bfe07
4 tip 1:b9154636be938d3d431e75a7c906504a079bfe07
5 first 0:acb14030fe0a21b60322c440ad2d20cf7685a376
5 first 0:acb14030fe0a21b60322c440ad2d20cf7685a376
6 b9154636be93 tip
6 b9154636be93 tip
7 M a
7 M a
8 b9154636be93+ tip
8 b9154636be93+ tip
9 acb14030fe0a+ first
9 acb14030fe0a+ first
10 acb14030fe0a21b60322c440ad2d20cf7685a376+ first
10 acb14030fe0a21b60322c440ad2d20cf7685a376+ first
11 M a
11 M a
12 c8edf04160c7 tip
12 c8edf04160c7 tip
13 c8edf04160c7+b9154636be93+ tip
13 c8edf04160c7+b9154636be93 tip
14 M .hgtags
15 M a
General Comments 0
You need to be logged in to leave comments. Login now