Warn on pushing unsynced repo or adding new heads...
mpm@selenic.com
r816:8674b780 default
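This changeset makes push abort in two cases, both exercised by the test below: the remote has changesets that have not been pulled yet ("unsynced remote changes"), and the push would add a new remote head ("push creates new remote branches"). The hints in the expected output note that push -f overrides either check. The hg.py hunk further down also extends findincoming() to take explicit base and heads arguments. What follows is a minimal sketch, not code from this changeset, of how a caller might use that signature to spot the unsynced case; repo and remote are assumed repository objects, and the actual push-side check lives outside the excerpt shown here.

# Sketch only: detect remote changesets that are unknown locally before pushing.
# Assumes `repo` is a localrepository and `remote` exposes heads()/branches()/
# between() as in the hg.py hunk below; a non-empty findincoming() result means
# the remote has history we lack.
remote_heads = remote.heads()      # heads may now be passed in explicitly
base = {}                          # findincoming() records common nodes here
fetch = repo.findincoming(remote, base=base, heads=remote_heads)
if fetch:
    # the remote has changesets we have not pulled; a push now would be unsynced
    repo.ui.warn("abort: unsynced remote changes!\n")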
@@ -0,0 +1,28 @@
1 #!/bin/sh
2
3 mkdir a
4 cd a
5 hg init
6 echo foo > t1
7 hg add t1
8 hg commit -m "1" -d "0 0"
9
10 cd ..
11 hg clone a b
12
13 cd a
14 echo foo > t2
15 hg add t2
16 hg commit -m "2" -d "0 0"
17
18 cd ../b
19 echo foo > t3
20 hg add t3
21 hg commit -m "3" -d "0 0"
22
23 hg push ../a
24 hg pull ../a
25 hg push ../a
26 hg up -m
27 hg commit -m "4" -d "0 0"
28 hg push ../a
@@ -0,0 +1,35 @@
1 + hg init
2 + hg add t1
3 + hg commit -m 1 -d 0 0
4 + hg clone a b
5 + hg add t2
6 + hg commit -m 2 -d 0 0
7 + hg add t3
8 + hg commit -m 3 -d 0 0
9 + hg push ../a
10 pushing to ../a
11 searching for changes
12 abort: unsynced remote changes!
13 (did you forget to sync? use push -f to force)
14 + hg pull ../a
15 pulling from ../a
16 searching for changes
17 adding changesets
18 adding manifests
19 adding file changes
20 added 1 changesets with 1 changes to 1 files
21 (run 'hg update' to get a working copy)
22 + hg push ../a
23 pushing to ../a
24 searching for changes
25 abort: push creates new remote branches!
26 (did you forget to merge? use push -f to force)
27 + hg up -m
28 + hg commit -m 4 -d 0 0
29 + hg push ../a
30 pushing to ../a
31 searching for changes
32 adding changesets
33 adding manifests
34 adding file changes
35 added 2 changesets with 2 changes to 2 files
@@ -1,1981 +1,2003 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 path.replace(".hg/", ".hg.hg/")
25 path.replace(".hg/", ".hg.hg/")
26 path.replace(".i/", ".i.hg/")
26 path.replace(".i/", ".i.hg/")
27 path.replace(".d/", ".i.hg/")
27 path.replace(".d/", ".i.hg/")
28 return path
28 return path
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 path.replace(".d.hg/", ".d/")
31 path.replace(".d.hg/", ".d/")
32 path.replace(".i.hg/", ".i/")
32 path.replace(".i.hg/", ".i/")
33 path.replace(".hg.hg/", ".hg/")
33 path.replace(".hg.hg/", ".hg/")
34 return path
34 return path
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
284 list.sort()
284 list.sort()
285 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
286 text = "\n".join(l)
286 text = "\n".join(l)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
288
288
289 class dirstate:
289 class dirstate:
290 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
291 self.opener = opener
291 self.opener = opener
292 self.root = root
292 self.root = root
293 self.dirty = 0
293 self.dirty = 0
294 self.ui = ui
294 self.ui = ui
295 self.map = None
295 self.map = None
296 self.pl = None
296 self.pl = None
297 self.copies = {}
297 self.copies = {}
298 self.ignorefunc = None
298 self.ignorefunc = None
299
299
300 def wjoin(self, f):
300 def wjoin(self, f):
301 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
302
302
303 def ignore(self, f):
303 def ignore(self, f):
304 if not self.ignorefunc:
304 if not self.ignorefunc:
305 bigpat = []
305 bigpat = []
306 try:
306 try:
307 l = file(self.wjoin(".hgignore"))
307 l = file(self.wjoin(".hgignore"))
308 for pat in l:
308 for pat in l:
309 if pat != "\n":
309 if pat != "\n":
310 p = util.pconvert(pat[:-1])
310 p = util.pconvert(pat[:-1])
311 try:
311 try:
312 r = re.compile(p)
312 r = re.compile(p)
313 except:
313 except:
314 self.ui.warn("ignoring invalid ignore"
314 self.ui.warn("ignoring invalid ignore"
315 + " regular expression '%s'\n" % p)
315 + " regular expression '%s'\n" % p)
316 else:
316 else:
317 bigpat.append(util.pconvert(pat[:-1]))
317 bigpat.append(util.pconvert(pat[:-1]))
318 except IOError: pass
318 except IOError: pass
319
319
320 if bigpat:
320 if bigpat:
321 s = "(?:%s)" % (")|(?:".join(bigpat))
321 s = "(?:%s)" % (")|(?:".join(bigpat))
322 r = re.compile(s)
322 r = re.compile(s)
323 self.ignorefunc = r.search
323 self.ignorefunc = r.search
324 else:
324 else:
325 self.ignorefunc = util.never
325 self.ignorefunc = util.never
326
326
327 return self.ignorefunc(f)
327 return self.ignorefunc(f)
328
328
329 def __del__(self):
329 def __del__(self):
330 if self.dirty:
330 if self.dirty:
331 self.write()
331 self.write()
332
332
333 def __getitem__(self, key):
333 def __getitem__(self, key):
334 try:
334 try:
335 return self.map[key]
335 return self.map[key]
336 except TypeError:
336 except TypeError:
337 self.read()
337 self.read()
338 return self[key]
338 return self[key]
339
339
340 def __contains__(self, key):
340 def __contains__(self, key):
341 if not self.map: self.read()
341 if not self.map: self.read()
342 return key in self.map
342 return key in self.map
343
343
344 def parents(self):
344 def parents(self):
345 if not self.pl:
345 if not self.pl:
346 self.read()
346 self.read()
347 return self.pl
347 return self.pl
348
348
349 def markdirty(self):
349 def markdirty(self):
350 if not self.dirty:
350 if not self.dirty:
351 self.dirty = 1
351 self.dirty = 1
352
352
353 def setparents(self, p1, p2 = nullid):
353 def setparents(self, p1, p2 = nullid):
354 self.markdirty()
354 self.markdirty()
355 self.pl = p1, p2
355 self.pl = p1, p2
356
356
357 def state(self, key):
357 def state(self, key):
358 try:
358 try:
359 return self[key][0]
359 return self[key][0]
360 except KeyError:
360 except KeyError:
361 return "?"
361 return "?"
362
362
363 def read(self):
363 def read(self):
364 if self.map is not None: return self.map
364 if self.map is not None: return self.map
365
365
366 self.map = {}
366 self.map = {}
367 self.pl = [nullid, nullid]
367 self.pl = [nullid, nullid]
368 try:
368 try:
369 st = self.opener("dirstate").read()
369 st = self.opener("dirstate").read()
370 if not st: return
370 if not st: return
371 except: return
371 except: return
372
372
373 self.pl = [st[:20], st[20: 40]]
373 self.pl = [st[:20], st[20: 40]]
374
374
375 pos = 40
375 pos = 40
376 while pos < len(st):
376 while pos < len(st):
377 e = struct.unpack(">cllll", st[pos:pos+17])
377 e = struct.unpack(">cllll", st[pos:pos+17])
378 l = e[4]
378 l = e[4]
379 pos += 17
379 pos += 17
380 f = st[pos:pos + l]
380 f = st[pos:pos + l]
381 if '\0' in f:
381 if '\0' in f:
382 f, c = f.split('\0')
382 f, c = f.split('\0')
383 self.copies[f] = c
383 self.copies[f] = c
384 self.map[f] = e[:4]
384 self.map[f] = e[:4]
385 pos += l
385 pos += l
386
386
387 def copy(self, source, dest):
387 def copy(self, source, dest):
388 self.read()
388 self.read()
389 self.markdirty()
389 self.markdirty()
390 self.copies[dest] = source
390 self.copies[dest] = source
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self.copies.get(file, None)
393 return self.copies.get(file, None)
394
394
395 def update(self, files, state):
395 def update(self, files, state):
396 ''' current states:
396 ''' current states:
397 n normal
397 n normal
398 m needs merging
398 m needs merging
399 r marked for removal
399 r marked for removal
400 a marked for addition'''
400 a marked for addition'''
401
401
402 if not files: return
402 if not files: return
403 self.read()
403 self.read()
404 self.markdirty()
404 self.markdirty()
405 for f in files:
405 for f in files:
406 if state == "r":
406 if state == "r":
407 self.map[f] = ('r', 0, 0, 0)
407 self.map[f] = ('r', 0, 0, 0)
408 else:
408 else:
409 s = os.stat(os.path.join(self.root, f))
409 s = os.stat(os.path.join(self.root, f))
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
411
411
412 def forget(self, files):
412 def forget(self, files):
413 if not files: return
413 if not files: return
414 self.read()
414 self.read()
415 self.markdirty()
415 self.markdirty()
416 for f in files:
416 for f in files:
417 try:
417 try:
418 del self.map[f]
418 del self.map[f]
419 except KeyError:
419 except KeyError:
420 self.ui.warn("not in dirstate: %s!\n" % f)
420 self.ui.warn("not in dirstate: %s!\n" % f)
421 pass
421 pass
422
422
423 def clear(self):
423 def clear(self):
424 self.map = {}
424 self.map = {}
425 self.markdirty()
425 self.markdirty()
426
426
427 def write(self):
427 def write(self):
428 st = self.opener("dirstate", "w")
428 st = self.opener("dirstate", "w")
429 st.write("".join(self.pl))
429 st.write("".join(self.pl))
430 for f, e in self.map.items():
430 for f, e in self.map.items():
431 c = self.copied(f)
431 c = self.copied(f)
432 if c:
432 if c:
433 f = f + "\0" + c
433 f = f + "\0" + c
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
435 st.write(e + f)
435 st.write(e + f)
436 self.dirty = 0
436 self.dirty = 0
437
437
438 def walk(self, files = None, match = util.always):
438 def walk(self, files = None, match = util.always):
439 self.read()
439 self.read()
440 dc = self.map.copy()
440 dc = self.map.copy()
441 # walk all files by default
441 # walk all files by default
442 if not files: files = [self.root]
442 if not files: files = [self.root]
443 def traverse():
443 def traverse():
444 for f in util.unique(files):
444 for f in util.unique(files):
445 f = os.path.join(self.root, f)
445 f = os.path.join(self.root, f)
446 if os.path.isdir(f):
446 if os.path.isdir(f):
447 for dir, subdirs, fl in os.walk(f):
447 for dir, subdirs, fl in os.walk(f):
448 d = dir[len(self.root) + 1:]
448 d = dir[len(self.root) + 1:]
449 if d == '.hg':
449 if d == '.hg':
450 subdirs[:] = []
450 subdirs[:] = []
451 continue
451 continue
452 for sd in subdirs:
452 for sd in subdirs:
453 ds = os.path.join(d, sd +'/')
453 ds = os.path.join(d, sd +'/')
454 if self.ignore(ds) or not match(ds):
454 if self.ignore(ds) or not match(ds):
455 subdirs.remove(sd)
455 subdirs.remove(sd)
456 for fn in fl:
456 for fn in fl:
457 fn = util.pconvert(os.path.join(d, fn))
457 fn = util.pconvert(os.path.join(d, fn))
458 yield 'f', fn
458 yield 'f', fn
459 else:
459 else:
460 yield 'f', f[len(self.root) + 1:]
460 yield 'f', f[len(self.root) + 1:]
461
461
462 for k in dc.keys():
462 for k in dc.keys():
463 yield 'm', k
463 yield 'm', k
464
464
465 # yield only files that match: all in dirstate, others only if
465 # yield only files that match: all in dirstate, others only if
466 # not in .hgignore
466 # not in .hgignore
467
467
468 for src, fn in util.unique(traverse()):
468 for src, fn in util.unique(traverse()):
469 if fn in dc:
469 if fn in dc:
470 del dc[fn]
470 del dc[fn]
471 elif self.ignore(fn):
471 elif self.ignore(fn):
472 continue
472 continue
473 if match(fn):
473 if match(fn):
474 yield src, fn
474 yield src, fn
475
475
476 def changes(self, files = None, match = util.always):
476 def changes(self, files = None, match = util.always):
477 self.read()
477 self.read()
478 dc = self.map.copy()
478 dc = self.map.copy()
479 lookup, changed, added, unknown = [], [], [], []
479 lookup, changed, added, unknown = [], [], [], []
480
480
481 for src, fn in self.walk(files, match):
481 for src, fn in self.walk(files, match):
482 try: s = os.stat(os.path.join(self.root, fn))
482 try: s = os.stat(os.path.join(self.root, fn))
483 except: continue
483 except: continue
484
484
485 if fn in dc:
485 if fn in dc:
486 c = dc[fn]
486 c = dc[fn]
487 del dc[fn]
487 del dc[fn]
488
488
489 if c[0] == 'm':
489 if c[0] == 'm':
490 changed.append(fn)
490 changed.append(fn)
491 elif c[0] == 'a':
491 elif c[0] == 'a':
492 added.append(fn)
492 added.append(fn)
493 elif c[0] == 'r':
493 elif c[0] == 'r':
494 unknown.append(fn)
494 unknown.append(fn)
495 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
495 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
496 changed.append(fn)
496 changed.append(fn)
497 elif c[1] != s.st_mode or c[3] != s.st_mtime:
497 elif c[1] != s.st_mode or c[3] != s.st_mtime:
498 lookup.append(fn)
498 lookup.append(fn)
499 else:
499 else:
500 if match(fn): unknown.append(fn)
500 if match(fn): unknown.append(fn)
501
501
502 return (lookup, changed, added, filter(match, dc.keys()), unknown)
502 return (lookup, changed, added, filter(match, dc.keys()), unknown)
503
503
504 # used to avoid circular references so destructors work
504 # used to avoid circular references so destructors work
505 def opener(base):
505 def opener(base):
506 p = base
506 p = base
507 def o(path, mode="r"):
507 def o(path, mode="r"):
508 if p.startswith("http://"):
508 if p.startswith("http://"):
509 f = os.path.join(p, urllib.quote(path))
509 f = os.path.join(p, urllib.quote(path))
510 return httprangereader.httprangereader(f)
510 return httprangereader.httprangereader(f)
511
511
512 f = os.path.join(p, path)
512 f = os.path.join(p, path)
513
513
514 mode += "b" # for that other OS
514 mode += "b" # for that other OS
515
515
516 if mode[0] != "r":
516 if mode[0] != "r":
517 try:
517 try:
518 s = os.stat(f)
518 s = os.stat(f)
519 except OSError:
519 except OSError:
520 d = os.path.dirname(f)
520 d = os.path.dirname(f)
521 if not os.path.isdir(d):
521 if not os.path.isdir(d):
522 os.makedirs(d)
522 os.makedirs(d)
523 else:
523 else:
524 if s.st_nlink > 1:
524 if s.st_nlink > 1:
525 file(f + ".tmp", "wb").write(file(f, "rb").read())
525 file(f + ".tmp", "wb").write(file(f, "rb").read())
526 util.rename(f+".tmp", f)
526 util.rename(f+".tmp", f)
527
527
528 return file(f, mode)
528 return file(f, mode)
529
529
530 return o
530 return o
531
531
532 class RepoError(Exception): pass
532 class RepoError(Exception): pass
533
533
534 class localrepository:
534 class localrepository:
535 def __init__(self, ui, path=None, create=0):
535 def __init__(self, ui, path=None, create=0):
536 self.remote = 0
536 self.remote = 0
537 if path and path.startswith("http://"):
537 if path and path.startswith("http://"):
538 self.remote = 1
538 self.remote = 1
539 self.path = path
539 self.path = path
540 else:
540 else:
541 if not path:
541 if not path:
542 p = os.getcwd()
542 p = os.getcwd()
543 while not os.path.isdir(os.path.join(p, ".hg")):
543 while not os.path.isdir(os.path.join(p, ".hg")):
544 oldp = p
544 oldp = p
545 p = os.path.dirname(p)
545 p = os.path.dirname(p)
546 if p == oldp: raise RepoError("no repo found")
546 if p == oldp: raise RepoError("no repo found")
547 path = p
547 path = p
548 self.path = os.path.join(path, ".hg")
548 self.path = os.path.join(path, ".hg")
549
549
550 if not create and not os.path.isdir(self.path):
550 if not create and not os.path.isdir(self.path):
551 raise RepoError("repository %s not found" % self.path)
551 raise RepoError("repository %s not found" % self.path)
552
552
553 self.root = path
553 self.root = path
554 self.ui = ui
554 self.ui = ui
555
555
556 if create:
556 if create:
557 os.mkdir(self.path)
557 os.mkdir(self.path)
558 os.mkdir(self.join("data"))
558 os.mkdir(self.join("data"))
559
559
560 self.opener = opener(self.path)
560 self.opener = opener(self.path)
561 self.wopener = opener(self.root)
561 self.wopener = opener(self.root)
562 self.manifest = manifest(self.opener)
562 self.manifest = manifest(self.opener)
563 self.changelog = changelog(self.opener)
563 self.changelog = changelog(self.opener)
564 self.tagscache = None
564 self.tagscache = None
565 self.nodetagscache = None
565 self.nodetagscache = None
566
566
567 if not self.remote:
567 if not self.remote:
568 self.dirstate = dirstate(self.opener, ui, self.root)
568 self.dirstate = dirstate(self.opener, ui, self.root)
569 try:
569 try:
570 self.ui.readconfig(self.opener("hgrc"))
570 self.ui.readconfig(self.opener("hgrc"))
571 except IOError: pass
571 except IOError: pass
572
572
573 def hook(self, name, **args):
573 def hook(self, name, **args):
574 s = self.ui.config("hooks", name)
574 s = self.ui.config("hooks", name)
575 if s:
575 if s:
576 self.ui.note("running hook %s: %s\n" % (name, s))
576 self.ui.note("running hook %s: %s\n" % (name, s))
577 old = {}
577 old = {}
578 for k, v in args.items():
578 for k, v in args.items():
579 k = k.upper()
579 k = k.upper()
580 old[k] = os.environ.get(k, None)
580 old[k] = os.environ.get(k, None)
581 os.environ[k] = v
581 os.environ[k] = v
582
582
583 r = os.system(s)
583 r = os.system(s)
584
584
585 for k, v in old.items():
585 for k, v in old.items():
586 if v != None:
586 if v != None:
587 os.environ[k] = v
587 os.environ[k] = v
588 else:
588 else:
589 del os.environ[k]
589 del os.environ[k]
590
590
591 if r:
591 if r:
592 self.ui.warn("abort: %s hook failed with status %d!\n" %
592 self.ui.warn("abort: %s hook failed with status %d!\n" %
593 (name, r))
593 (name, r))
594 return False
594 return False
595 return True
595 return True
596
596
597 def tags(self):
597 def tags(self):
598 '''return a mapping of tag to node'''
598 '''return a mapping of tag to node'''
599 if not self.tagscache:
599 if not self.tagscache:
600 self.tagscache = {}
600 self.tagscache = {}
601 def addtag(self, k, n):
601 def addtag(self, k, n):
602 try:
602 try:
603 bin_n = bin(n)
603 bin_n = bin(n)
604 except TypeError:
604 except TypeError:
605 bin_n = ''
605 bin_n = ''
606 self.tagscache[k.strip()] = bin_n
606 self.tagscache[k.strip()] = bin_n
607
607
608 try:
608 try:
609 # read each head of the tags file, ending with the tip
609 # read each head of the tags file, ending with the tip
610 # and add each tag found to the map, with "newer" ones
610 # and add each tag found to the map, with "newer" ones
611 # taking precedence
611 # taking precedence
612 fl = self.file(".hgtags")
612 fl = self.file(".hgtags")
613 h = fl.heads()
613 h = fl.heads()
614 h.reverse()
614 h.reverse()
615 for r in h:
615 for r in h:
616 for l in fl.revision(r).splitlines():
616 for l in fl.revision(r).splitlines():
617 if l:
617 if l:
618 n, k = l.split(" ", 1)
618 n, k = l.split(" ", 1)
619 addtag(self, k, n)
619 addtag(self, k, n)
620 except KeyError:
620 except KeyError:
621 pass
621 pass
622
622
623 try:
623 try:
624 f = self.opener("localtags")
624 f = self.opener("localtags")
625 for l in f:
625 for l in f:
626 n, k = l.split(" ", 1)
626 n, k = l.split(" ", 1)
627 addtag(self, k, n)
627 addtag(self, k, n)
628 except IOError:
628 except IOError:
629 pass
629 pass
630
630
631 self.tagscache['tip'] = self.changelog.tip()
631 self.tagscache['tip'] = self.changelog.tip()
632
632
633 return self.tagscache
633 return self.tagscache
634
634
635 def tagslist(self):
635 def tagslist(self):
636 '''return a list of tags ordered by revision'''
636 '''return a list of tags ordered by revision'''
637 l = []
637 l = []
638 for t, n in self.tags().items():
638 for t, n in self.tags().items():
639 try:
639 try:
640 r = self.changelog.rev(n)
640 r = self.changelog.rev(n)
641 except:
641 except:
642 r = -2 # sort to the beginning of the list if unknown
642 r = -2 # sort to the beginning of the list if unknown
643 l.append((r,t,n))
643 l.append((r,t,n))
644 l.sort()
644 l.sort()
645 return [(t,n) for r,t,n in l]
645 return [(t,n) for r,t,n in l]
646
646
647 def nodetags(self, node):
647 def nodetags(self, node):
648 '''return the tags associated with a node'''
648 '''return the tags associated with a node'''
649 if not self.nodetagscache:
649 if not self.nodetagscache:
650 self.nodetagscache = {}
650 self.nodetagscache = {}
651 for t,n in self.tags().items():
651 for t,n in self.tags().items():
652 self.nodetagscache.setdefault(n,[]).append(t)
652 self.nodetagscache.setdefault(n,[]).append(t)
653 return self.nodetagscache.get(node, [])
653 return self.nodetagscache.get(node, [])
654
654
655 def lookup(self, key):
655 def lookup(self, key):
656 try:
656 try:
657 return self.tags()[key]
657 return self.tags()[key]
658 except KeyError:
658 except KeyError:
659 try:
659 try:
660 return self.changelog.lookup(key)
660 return self.changelog.lookup(key)
661 except:
661 except:
662 raise RepoError("unknown revision '%s'" % key)
662 raise RepoError("unknown revision '%s'" % key)
663
663
664 def dev(self):
664 def dev(self):
665 if self.remote: return -1
665 if self.remote: return -1
666 return os.stat(self.path).st_dev
666 return os.stat(self.path).st_dev
667
667
668 def join(self, f):
668 def join(self, f):
669 return os.path.join(self.path, f)
669 return os.path.join(self.path, f)
670
670
671 def wjoin(self, f):
671 def wjoin(self, f):
672 return os.path.join(self.root, f)
672 return os.path.join(self.root, f)
673
673
674 def file(self, f):
674 def file(self, f):
675 if f[0] == '/': f = f[1:]
675 if f[0] == '/': f = f[1:]
676 return filelog(self.opener, f)
676 return filelog(self.opener, f)
677
677
678 def getcwd(self):
678 def getcwd(self):
679 cwd = os.getcwd()
679 cwd = os.getcwd()
680 if cwd == self.root: return ''
680 if cwd == self.root: return ''
681 return cwd[len(self.root) + 1:]
681 return cwd[len(self.root) + 1:]
682
682
683 def wfile(self, f, mode='r'):
683 def wfile(self, f, mode='r'):
684 return self.wopener(f, mode)
684 return self.wopener(f, mode)
685
685
686 def transaction(self):
686 def transaction(self):
687 # save dirstate for undo
687 # save dirstate for undo
688 try:
688 try:
689 ds = self.opener("dirstate").read()
689 ds = self.opener("dirstate").read()
690 except IOError:
690 except IOError:
691 ds = ""
691 ds = ""
692 self.opener("journal.dirstate", "w").write(ds)
692 self.opener("journal.dirstate", "w").write(ds)
693
693
694 def after():
694 def after():
695 util.rename(self.join("journal"), self.join("undo"))
695 util.rename(self.join("journal"), self.join("undo"))
696 util.rename(self.join("journal.dirstate"),
696 util.rename(self.join("journal.dirstate"),
697 self.join("undo.dirstate"))
697 self.join("undo.dirstate"))
698
698
699 return transaction.transaction(self.ui.warn, self.opener,
699 return transaction.transaction(self.ui.warn, self.opener,
700 self.join("journal"), after)
700 self.join("journal"), after)
701
701
702 def recover(self):
702 def recover(self):
703 lock = self.lock()
703 lock = self.lock()
704 if os.path.exists(self.join("journal")):
704 if os.path.exists(self.join("journal")):
705 self.ui.status("rolling back interrupted transaction\n")
705 self.ui.status("rolling back interrupted transaction\n")
706 return transaction.rollback(self.opener, self.join("journal"))
706 return transaction.rollback(self.opener, self.join("journal"))
707 else:
707 else:
708 self.ui.warn("no interrupted transaction available\n")
708 self.ui.warn("no interrupted transaction available\n")
709
709
710 def undo(self):
710 def undo(self):
711 lock = self.lock()
711 lock = self.lock()
712 if os.path.exists(self.join("undo")):
712 if os.path.exists(self.join("undo")):
713 self.ui.status("rolling back last transaction\n")
713 self.ui.status("rolling back last transaction\n")
714 transaction.rollback(self.opener, self.join("undo"))
714 transaction.rollback(self.opener, self.join("undo"))
715 self.dirstate = None
715 self.dirstate = None
716 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
716 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
717 self.dirstate = dirstate(self.opener, self.ui, self.root)
717 self.dirstate = dirstate(self.opener, self.ui, self.root)
718 else:
718 else:
719 self.ui.warn("no undo information available\n")
719 self.ui.warn("no undo information available\n")
720
720
721 def lock(self, wait = 1):
721 def lock(self, wait = 1):
722 try:
722 try:
723 return lock.lock(self.join("lock"), 0)
723 return lock.lock(self.join("lock"), 0)
724 except lock.LockHeld, inst:
724 except lock.LockHeld, inst:
725 if wait:
725 if wait:
726 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
726 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
727 return lock.lock(self.join("lock"), wait)
727 return lock.lock(self.join("lock"), wait)
728 raise inst
728 raise inst
729
729
730 def rawcommit(self, files, text, user, date, p1=None, p2=None):
730 def rawcommit(self, files, text, user, date, p1=None, p2=None):
731 orig_parent = self.dirstate.parents()[0] or nullid
731 orig_parent = self.dirstate.parents()[0] or nullid
732 p1 = p1 or self.dirstate.parents()[0] or nullid
732 p1 = p1 or self.dirstate.parents()[0] or nullid
733 p2 = p2 or self.dirstate.parents()[1] or nullid
733 p2 = p2 or self.dirstate.parents()[1] or nullid
734 c1 = self.changelog.read(p1)
734 c1 = self.changelog.read(p1)
735 c2 = self.changelog.read(p2)
735 c2 = self.changelog.read(p2)
736 m1 = self.manifest.read(c1[0])
736 m1 = self.manifest.read(c1[0])
737 mf1 = self.manifest.readflags(c1[0])
737 mf1 = self.manifest.readflags(c1[0])
738 m2 = self.manifest.read(c2[0])
738 m2 = self.manifest.read(c2[0])
739
739
740 if orig_parent == p1:
740 if orig_parent == p1:
741 update_dirstate = 1
741 update_dirstate = 1
742 else:
742 else:
743 update_dirstate = 0
743 update_dirstate = 0
744
744
745 tr = self.transaction()
745 tr = self.transaction()
746 mm = m1.copy()
746 mm = m1.copy()
747 mfm = mf1.copy()
747 mfm = mf1.copy()
748 linkrev = self.changelog.count()
748 linkrev = self.changelog.count()
749 for f in files:
749 for f in files:
750 try:
750 try:
751 t = self.wfile(f).read()
751 t = self.wfile(f).read()
752 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
752 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
753 r = self.file(f)
753 r = self.file(f)
754 mfm[f] = tm
754 mfm[f] = tm
755 mm[f] = r.add(t, {}, tr, linkrev,
755 mm[f] = r.add(t, {}, tr, linkrev,
756 m1.get(f, nullid), m2.get(f, nullid))
756 m1.get(f, nullid), m2.get(f, nullid))
757 if update_dirstate:
757 if update_dirstate:
758 self.dirstate.update([f], "n")
758 self.dirstate.update([f], "n")
759 except IOError:
759 except IOError:
760 try:
760 try:
761 del mm[f]
761 del mm[f]
762 del mfm[f]
762 del mfm[f]
763 if update_dirstate:
763 if update_dirstate:
764 self.dirstate.forget([f])
764 self.dirstate.forget([f])
765 except:
765 except:
766 # deleted from p2?
766 # deleted from p2?
767 pass
767 pass
768
768
769 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
769 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
770 user = user or self.ui.username()
770 user = user or self.ui.username()
771 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
771 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
772 tr.close()
772 tr.close()
773 if update_dirstate:
773 if update_dirstate:
774 self.dirstate.setparents(n, nullid)
774 self.dirstate.setparents(n, nullid)
775
775
776 def commit(self, files = None, text = "", user = None, date = None,
776 def commit(self, files = None, text = "", user = None, date = None,
777 match = util.always):
777 match = util.always):
778 commit = []
778 commit = []
779 remove = []
779 remove = []
780 if files:
780 if files:
781 for f in files:
781 for f in files:
782 s = self.dirstate.state(f)
782 s = self.dirstate.state(f)
783 if s in 'nmai':
783 if s in 'nmai':
784 commit.append(f)
784 commit.append(f)
785 elif s == 'r':
785 elif s == 'r':
786 remove.append(f)
786 remove.append(f)
787 else:
787 else:
788 self.ui.warn("%s not tracked!\n" % f)
788 self.ui.warn("%s not tracked!\n" % f)
789 else:
789 else:
790 (c, a, d, u) = self.changes(match = match)
790 (c, a, d, u) = self.changes(match = match)
791 commit = c + a
791 commit = c + a
792 remove = d
792 remove = d
793
793
794 if not commit and not remove:
794 if not commit and not remove:
795 self.ui.status("nothing changed\n")
795 self.ui.status("nothing changed\n")
796 return
796 return
797
797
798 if not self.hook("precommit"):
798 if not self.hook("precommit"):
799 return 1
799 return 1
800
800
801 p1, p2 = self.dirstate.parents()
801 p1, p2 = self.dirstate.parents()
802 c1 = self.changelog.read(p1)
802 c1 = self.changelog.read(p1)
803 c2 = self.changelog.read(p2)
803 c2 = self.changelog.read(p2)
804 m1 = self.manifest.read(c1[0])
804 m1 = self.manifest.read(c1[0])
805 mf1 = self.manifest.readflags(c1[0])
805 mf1 = self.manifest.readflags(c1[0])
806 m2 = self.manifest.read(c2[0])
806 m2 = self.manifest.read(c2[0])
807 lock = self.lock()
807 lock = self.lock()
808 tr = self.transaction()
808 tr = self.transaction()
809
809
810 # check in files
810 # check in files
811 new = {}
811 new = {}
812 linkrev = self.changelog.count()
812 linkrev = self.changelog.count()
813 commit.sort()
813 commit.sort()
814 for f in commit:
814 for f in commit:
815 self.ui.note(f + "\n")
815 self.ui.note(f + "\n")
816 try:
816 try:
817 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
817 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
818 t = self.wfile(f).read()
818 t = self.wfile(f).read()
819 except IOError:
819 except IOError:
820 self.ui.warn("trouble committing %s!\n" % f)
820 self.ui.warn("trouble committing %s!\n" % f)
821 raise
821 raise
822
822
823 meta = {}
823 meta = {}
824 cp = self.dirstate.copied(f)
824 cp = self.dirstate.copied(f)
825 if cp:
825 if cp:
826 meta["copy"] = cp
826 meta["copy"] = cp
827 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
827 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
828 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
828 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
829
829
830 r = self.file(f)
830 r = self.file(f)
831 fp1 = m1.get(f, nullid)
831 fp1 = m1.get(f, nullid)
832 fp2 = m2.get(f, nullid)
832 fp2 = m2.get(f, nullid)
833 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
833 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
834
834
835 # update manifest
835 # update manifest
836 m1.update(new)
836 m1.update(new)
837 for f in remove:
837 for f in remove:
838 if f in m1:
838 if f in m1:
839 del m1[f]
839 del m1[f]
840 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
840 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
841 (new, remove))
841 (new, remove))
842
842
843 # add changeset
843 # add changeset
844 new = new.keys()
844 new = new.keys()
845 new.sort()
845 new.sort()
846
846
847 if not text:
847 if not text:
848 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
848 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
849 edittext += "".join(["HG: changed %s\n" % f for f in new])
849 edittext += "".join(["HG: changed %s\n" % f for f in new])
850 edittext += "".join(["HG: removed %s\n" % f for f in remove])
850 edittext += "".join(["HG: removed %s\n" % f for f in remove])
851 edittext = self.ui.edit(edittext)
851 edittext = self.ui.edit(edittext)
852 if not edittext.rstrip():
852 if not edittext.rstrip():
853 return 1
853 return 1
854 text = edittext
854 text = edittext
855
855
856 user = user or self.ui.username()
856 user = user or self.ui.username()
857 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
857 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
858
858
859 tr.close()
859 tr.close()
860
860
861 self.dirstate.setparents(n)
861 self.dirstate.setparents(n)
862 self.dirstate.update(new, "n")
862 self.dirstate.update(new, "n")
863 self.dirstate.forget(remove)
863 self.dirstate.forget(remove)
864
864
865 if not self.hook("commit", node=hex(n)):
865 if not self.hook("commit", node=hex(n)):
866 return 1
866 return 1
867
867
868 def walk(self, node = None, files = [], match = util.always):
868 def walk(self, node = None, files = [], match = util.always):
869 if node:
869 if node:
870 for fn in self.manifest.read(self.changelog.read(node)[0]):
870 for fn in self.manifest.read(self.changelog.read(node)[0]):
871 yield 'm', fn
871 yield 'm', fn
872 else:
872 else:
873 for src, fn in self.dirstate.walk(files, match):
873 for src, fn in self.dirstate.walk(files, match):
874 yield src, fn
874 yield src, fn
875
875
876 def changes(self, node1 = None, node2 = None, files = [],
876 def changes(self, node1 = None, node2 = None, files = [],
877 match = util.always):
877 match = util.always):
878 mf2, u = None, []
878 mf2, u = None, []
879
879
880 def fcmp(fn, mf):
880 def fcmp(fn, mf):
881 t1 = self.wfile(fn).read()
881 t1 = self.wfile(fn).read()
882 t2 = self.file(fn).revision(mf[fn])
882 t2 = self.file(fn).revision(mf[fn])
883 return cmp(t1, t2)
883 return cmp(t1, t2)
884
884
885 def mfmatches(node):
885 def mfmatches(node):
886 mf = dict(self.manifest.read(node))
886 mf = dict(self.manifest.read(node))
887 for fn in mf.keys():
887 for fn in mf.keys():
888 if not match(fn):
888 if not match(fn):
889 del mf[fn]
889 del mf[fn]
890 return mf
890 return mf
891
891
892 # are we comparing the working directory?
892 # are we comparing the working directory?
893 if not node2:
893 if not node2:
894 l, c, a, d, u = self.dirstate.changes(files, match)
894 l, c, a, d, u = self.dirstate.changes(files, match)
895
895
896 # are we comparing working dir against its parent?
896 # are we comparing working dir against its parent?
897 if not node1:
897 if not node1:
898 if l:
898 if l:
899 # do a full compare of any files that might have changed
899 # do a full compare of any files that might have changed
900 change = self.changelog.read(self.dirstate.parents()[0])
900 change = self.changelog.read(self.dirstate.parents()[0])
901 mf2 = mfmatches(change[0])
901 mf2 = mfmatches(change[0])
902 for f in l:
902 for f in l:
903 if fcmp(f, mf2):
903 if fcmp(f, mf2):
904 c.append(f)
904 c.append(f)
905
905
906 for l in c, a, d, u:
906 for l in c, a, d, u:
907 l.sort()
907 l.sort()
908
908
909 return (c, a, d, u)
909 return (c, a, d, u)
910
910
911 # are we comparing working dir against non-tip?
911 # are we comparing working dir against non-tip?
912 # generate a pseudo-manifest for the working dir
912 # generate a pseudo-manifest for the working dir
913 if not node2:
913 if not node2:
914 if not mf2:
914 if not mf2:
915 change = self.changelog.read(self.dirstate.parents()[0])
915 change = self.changelog.read(self.dirstate.parents()[0])
916 mf2 = mfmatches(change[0])
916 mf2 = mfmatches(change[0])
917 for f in a + c + l:
917 for f in a + c + l:
918 mf2[f] = ""
918 mf2[f] = ""
919 for f in d:
919 for f in d:
920 if f in mf2: del mf2[f]
920 if f in mf2: del mf2[f]
921 else:
921 else:
922 change = self.changelog.read(node2)
922 change = self.changelog.read(node2)
923 mf2 = mfmatches(change[0])
923 mf2 = mfmatches(change[0])
924
924
925 # flush lists from dirstate before comparing manifests
925 # flush lists from dirstate before comparing manifests
926 c, a = [], []
926 c, a = [], []
927
927
928 change = self.changelog.read(node1)
928 change = self.changelog.read(node1)
929 mf1 = mfmatches(change[0])
929 mf1 = mfmatches(change[0])
930
930
931 for fn in mf2:
931 for fn in mf2:
932 if mf1.has_key(fn):
932 if mf1.has_key(fn):
933 if mf1[fn] != mf2[fn]:
933 if mf1[fn] != mf2[fn]:
934 if mf2[fn] != "" or fcmp(fn, mf1):
934 if mf2[fn] != "" or fcmp(fn, mf1):
935 c.append(fn)
935 c.append(fn)
936 del mf1[fn]
936 del mf1[fn]
937 else:
937 else:
938 a.append(fn)
938 a.append(fn)
939
939
940 d = mf1.keys()
940 d = mf1.keys()
941
941
942 for l in c, a, d, u:
942 for l in c, a, d, u:
943 l.sort()
943 l.sort()
944
944
945 return (c, a, d, u)
945 return (c, a, d, u)
946
946
947 def add(self, list):
947 def add(self, list):
948 for f in list:
948 for f in list:
949 p = self.wjoin(f)
949 p = self.wjoin(f)
950 if not os.path.exists(p):
950 if not os.path.exists(p):
951 self.ui.warn("%s does not exist!\n" % f)
951 self.ui.warn("%s does not exist!\n" % f)
952 elif not os.path.isfile(p):
952 elif not os.path.isfile(p):
953 self.ui.warn("%s not added: only files supported currently\n" % f)
953 self.ui.warn("%s not added: only files supported currently\n" % f)
954 elif self.dirstate.state(f) in 'an':
954 elif self.dirstate.state(f) in 'an':
955 self.ui.warn("%s already tracked!\n" % f)
955 self.ui.warn("%s already tracked!\n" % f)
956 else:
956 else:
957 self.dirstate.update([f], "a")
957 self.dirstate.update([f], "a")
958
958
959 def forget(self, list):
959 def forget(self, list):
960 for f in list:
960 for f in list:
961 if self.dirstate.state(f) not in 'ai':
961 if self.dirstate.state(f) not in 'ai':
962 self.ui.warn("%s not added!\n" % f)
962 self.ui.warn("%s not added!\n" % f)
963 else:
963 else:
964 self.dirstate.forget([f])
964 self.dirstate.forget([f])
965
965
966 def remove(self, list):
966 def remove(self, list):
967 for f in list:
967 for f in list:
968 p = self.wjoin(f)
968 p = self.wjoin(f)
969 if os.path.exists(p):
969 if os.path.exists(p):
970 self.ui.warn("%s still exists!\n" % f)
970 self.ui.warn("%s still exists!\n" % f)
971 elif self.dirstate.state(f) == 'a':
971 elif self.dirstate.state(f) == 'a':
972 self.ui.warn("%s never committed!\n" % f)
972 self.ui.warn("%s never committed!\n" % f)
973 self.dirstate.forget([f])
973 self.dirstate.forget([f])
974 elif f not in self.dirstate:
974 elif f not in self.dirstate:
975 self.ui.warn("%s not tracked!\n" % f)
975 self.ui.warn("%s not tracked!\n" % f)
976 else:
976 else:
977 self.dirstate.update([f], "r")
977 self.dirstate.update([f], "r")
978
978
979 def copy(self, source, dest):
979 def copy(self, source, dest):
980 p = self.wjoin(dest)
980 p = self.wjoin(dest)
981 if not os.path.exists(p):
981 if not os.path.exists(p):
982 self.ui.warn("%s does not exist!\n" % dest)
982 self.ui.warn("%s does not exist!\n" % dest)
983 elif not os.path.isfile(p):
983 elif not os.path.isfile(p):
984 self.ui.warn("copy failed: %s is not a file\n" % dest)
984 self.ui.warn("copy failed: %s is not a file\n" % dest)
985 else:
985 else:
986 if self.dirstate.state(dest) == '?':
986 if self.dirstate.state(dest) == '?':
987 self.dirstate.update([dest], "a")
987 self.dirstate.update([dest], "a")
988 self.dirstate.copy(source, dest)
988 self.dirstate.copy(source, dest)
989
989
990 def heads(self):
990 def heads(self):
991 return self.changelog.heads()
991 return self.changelog.heads()
992
992
993 def branches(self, nodes):
993 def branches(self, nodes):
994 if not nodes: nodes = [self.changelog.tip()]
994 if not nodes: nodes = [self.changelog.tip()]
995 b = []
995 b = []
996 for n in nodes:
996 for n in nodes:
997 t = n
997 t = n
998 while n:
998 while n:
999 p = self.changelog.parents(n)
999 p = self.changelog.parents(n)
1000 if p[1] != nullid or p[0] == nullid:
1000 if p[1] != nullid or p[0] == nullid:
1001 b.append((t, n, p[0], p[1]))
1001 b.append((t, n, p[0], p[1]))
1002 break
1002 break
1003 n = p[0]
1003 n = p[0]
1004 return b
1004 return b
1005
1005
1006 def between(self, pairs):
1006 def between(self, pairs):
1007 r = []
1007 r = []
1008
1008
1009 for top, bottom in pairs:
1009 for top, bottom in pairs:
1010 n, l, i = top, [], 0
1010 n, l, i = top, [], 0
1011 f = 1
1011 f = 1
1012
1012
1013 while n != bottom:
1013 while n != bottom:
1014 p = self.changelog.parents(n)[0]
1014 p = self.changelog.parents(n)[0]
1015 if i == f:
1015 if i == f:
1016 l.append(n)
1016 l.append(n)
1017 f = f * 2
1017 f = f * 2
1018 n = p
1018 n = p
1019 i += 1
1019 i += 1
1020
1020
1021 r.append(l)
1021 r.append(l)
1022
1022
1023 return r
1023 return r
1024
1024
1025 def newer(self, nodes):
1025 def newer(self, nodes):
1026 m = {}
1026 m = {}
1027 nl = []
1027 nl = []
1028 pm = {}
1028 pm = {}
1029 cl = self.changelog
1029 cl = self.changelog
1030 t = l = cl.count()
1030 t = l = cl.count()
1031
1031
1032 # find the lowest numbered node
1032 # find the lowest numbered node
1033 for n in nodes:
1033 for n in nodes:
1034 l = min(l, cl.rev(n))
1034 l = min(l, cl.rev(n))
1035 m[n] = 1
1035 m[n] = 1
1036
1036
1037 for i in xrange(l, t):
1037 for i in xrange(l, t):
1038 n = cl.node(i)
1038 n = cl.node(i)
1039 if n in m: # explicitly listed
1039 if n in m: # explicitly listed
1040 pm[n] = 1
1040 pm[n] = 1
1041 nl.append(n)
1041 nl.append(n)
1042 continue
1042 continue
1043 for p in cl.parents(n):
1043 for p in cl.parents(n):
1044 if p in pm: # parent listed
1044 if p in pm: # parent listed
1045 pm[n] = 1
1045 pm[n] = 1
1046 nl.append(n)
1046 nl.append(n)
1047 break
1047 break
1048
1048
1049 return nl
1049 return nl
1050
1050
1051 def findincoming(self, remote, base={}):
1051 def findincoming(self, remote, base=None, heads=None):
1052 m = self.changelog.nodemap
1052 m = self.changelog.nodemap
1053 search = []
1053 search = []
1054 fetch = []
1054 fetch = []
1055 seen = {}
1055 seen = {}
1056 seenbranch = {}
1056 seenbranch = {}
1057 if base == None:
1058 base = {}
1057
1059
1058 # assume we're closer to the tip than the root
1060 # assume we're closer to the tip than the root
1059 # and start by examining the heads
1061 # and start by examining the heads
1060 self.ui.status("searching for changes\n")
1062 self.ui.status("searching for changes\n")
1061 heads = remote.heads()
1063
1064 if not heads:
1065 heads = remote.heads()
1066
1062 unknown = []
1067 unknown = []
1063 for h in heads:
1068 for h in heads:
1064 if h not in m:
1069 if h not in m:
1065 unknown.append(h)
1070 unknown.append(h)
1066 else:
1071 else:
1067 base[h] = 1
1072 base[h] = 1
1068
1073
1069 if not unknown:
1074 if not unknown:
1070 return None
1075 return None
1071
1076
1072 rep = {}
1077 rep = {}
1073 reqcnt = 0
1078 reqcnt = 0
1074
1079
1075 # search through remote branches
1080 # search through remote branches
1076 # a 'branch' here is a linear segment of history, with four parts:
1081 # a 'branch' here is a linear segment of history, with four parts:
1077 # head, root, first parent, second parent
1082 # head, root, first parent, second parent
1078 # (a branch always has two parents (or none) by definition)
1083 # (a branch always has two parents (or none) by definition)
1079 unknown = remote.branches(unknown)
1084 unknown = remote.branches(unknown)
1080 while unknown:
1085 while unknown:
1081 r = []
1086 r = []
1082 while unknown:
1087 while unknown:
1083 n = unknown.pop(0)
1088 n = unknown.pop(0)
1084 if n[0] in seen:
1089 if n[0] in seen:
1085 continue
1090 continue
1086
1091
1087 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1092 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1088 if n[0] == nullid:
1093 if n[0] == nullid:
1089 break
1094 break
1090 if n in seenbranch:
1095 if n in seenbranch:
1091 self.ui.debug("branch already found\n")
1096 self.ui.debug("branch already found\n")
1092 continue
1097 continue
1093 if n[1] and n[1] in m: # do we know the base?
1098 if n[1] and n[1] in m: # do we know the base?
1094 self.ui.debug("found incomplete branch %s:%s\n"
1099 self.ui.debug("found incomplete branch %s:%s\n"
1095 % (short(n[0]), short(n[1])))
1100 % (short(n[0]), short(n[1])))
1096 search.append(n) # schedule branch range for scanning
1101 search.append(n) # schedule branch range for scanning
1097 seenbranch[n] = 1
1102 seenbranch[n] = 1
1098 else:
1103 else:
1099 if n[1] not in seen and n[1] not in fetch:
1104 if n[1] not in seen and n[1] not in fetch:
1100 if n[2] in m and n[3] in m:
1105 if n[2] in m and n[3] in m:
1101 self.ui.debug("found new changeset %s\n" %
1106 self.ui.debug("found new changeset %s\n" %
1102 short(n[1]))
1107 short(n[1]))
1103 fetch.append(n[1]) # earliest unknown
1108 fetch.append(n[1]) # earliest unknown
1104 base[n[2]] = 1 # latest known
1109 base[n[2]] = 1 # latest known
1105 continue
1110 continue
1106
1111
1107 for a in n[2:4]:
1112 for a in n[2:4]:
1108 if a not in rep:
1113 if a not in rep:
1109 r.append(a)
1114 r.append(a)
1110 rep[a] = 1
1115 rep[a] = 1
1111
1116
1112 seen[n[0]] = 1
1117 seen[n[0]] = 1
1113
1118
1114 if r:
1119 if r:
1115 reqcnt += 1
1120 reqcnt += 1
1116 self.ui.debug("request %d: %s\n" %
1121 self.ui.debug("request %d: %s\n" %
1117 (reqcnt, " ".join(map(short, r))))
1122 (reqcnt, " ".join(map(short, r))))
1118 for p in range(0, len(r), 10):
1123 for p in range(0, len(r), 10):
1119 for b in remote.branches(r[p:p+10]):
1124 for b in remote.branches(r[p:p+10]):
1120 self.ui.debug("received %s:%s\n" %
1125 self.ui.debug("received %s:%s\n" %
1121 (short(b[0]), short(b[1])))
1126 (short(b[0]), short(b[1])))
1122 if b[0] not in m and b[0] not in seen:
1127 if b[0] not in m and b[0] not in seen:
1123 unknown.append(b)
1128 unknown.append(b)
1124
1129
1125 # do binary search on the branches we found
1130 # do binary search on the branches we found
1126 while search:
1131 while search:
1127 n = search.pop(0)
1132 n = search.pop(0)
1128 reqcnt += 1
1133 reqcnt += 1
1129 l = remote.between([(n[0], n[1])])[0]
1134 l = remote.between([(n[0], n[1])])[0]
1130 l.append(n[1])
1135 l.append(n[1])
1131 p = n[0]
1136 p = n[0]
1132 f = 1
1137 f = 1
1133 for i in l:
1138 for i in l:
1134 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1139 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1135 if i in m:
1140 if i in m:
1136 if f <= 2:
1141 if f <= 2:
1137 self.ui.debug("found new branch changeset %s\n" %
1142 self.ui.debug("found new branch changeset %s\n" %
1138 short(p))
1143 short(p))
1139 fetch.append(p)
1144 fetch.append(p)
1140 base[i] = 1
1145 base[i] = 1
1141 else:
1146 else:
1142 self.ui.debug("narrowed branch search to %s:%s\n"
1147 self.ui.debug("narrowed branch search to %s:%s\n"
1143 % (short(p), short(i)))
1148 % (short(p), short(i)))
1144 search.append((p, i))
1149 search.append((p, i))
1145 break
1150 break
1146 p, f = i, f * 2
1151 p, f = i, f * 2
1147
1152
1148 # sanity check our fetch list
1153 # sanity check our fetch list
1149 for f in fetch:
1154 for f in fetch:
1150 if f in m:
1155 if f in m:
1151 raise RepoError("already have changeset " + short(f[:4]))
1156 raise RepoError("already have changeset " + short(f[:4]))
1152
1157
1153 if base.keys() == [nullid]:
1158 if base.keys() == [nullid]:
1154 self.ui.warn("warning: pulling from an unrelated repository!\n")
1159 self.ui.warn("warning: pulling from an unrelated repository!\n")
1155
1160
1156 self.ui.note("adding new changesets starting at " +
1161 self.ui.note("adding new changesets starting at " +
1157 " ".join([short(f) for f in fetch]) + "\n")
1162 " ".join([short(f) for f in fetch]) + "\n")
1158
1163
1159 self.ui.debug("%d total queries\n" % reqcnt)
1164 self.ui.debug("%d total queries\n" % reqcnt)
1160
1165
1161 return fetch
1166 return fetch
1162
1167
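# Discovery in findincoming() proceeds in rounds: unknown remote heads
# are expanded with remote.branches() into (head, root, p1, p2)
# segments; segments whose parents are already known locally contribute
# their root as an "earliest unknown" fetch point; segments whose root
# is known but whose head is not are narrowed with remote.between()
# until the first unknown changeset is found.  The base/heads parameters
# added here let push() hand in the remote heads it already fetched and
# get the set of common nodes back in `base` for findoutgoing() to reuse.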
1163 def findoutgoing(self, remote):
1168 def findoutgoing(self, remote, base=None, heads=None):
1164 base = {}
1169 if base == None:
1165 self.findincoming(remote, base)
1170 base = {}
1171 self.findincoming(remote, base, heads)
1172
1166 remain = dict.fromkeys(self.changelog.nodemap)
1173 remain = dict.fromkeys(self.changelog.nodemap)
1167
1174
1168 # prune everything remote has from the tree
1175 # prune everything remote has from the tree
1169 del remain[nullid]
1176 del remain[nullid]
1170 remove = base.keys()
1177 remove = base.keys()
1171 while remove:
1178 while remove:
1172 n = remove.pop(0)
1179 n = remove.pop(0)
1173 if n in remain:
1180 if n in remain:
1174 del remain[n]
1181 del remain[n]
1175 for p in self.changelog.parents(n):
1182 for p in self.changelog.parents(n):
1176 remove.append(p)
1183 remove.append(p)
1177
1184
1178 # find every node whose parents have been pruned
1185 # find every node whose parents have been pruned
1179 subset = []
1186 subset = []
1180 for n in remain:
1187 for n in remain:
1181 p1, p2 = self.changelog.parents(n)
1188 p1, p2 = self.changelog.parents(n)
1182 if p1 not in remain and p2 not in remain:
1189 if p1 not in remain and p2 not in remain:
1183 subset.append(n)
1190 subset.append(n)
1184
1191
1185 # this is the set of all roots we have to push
1192 # this is the set of all roots we have to push
1186 return subset
1193 return subset
1187
1194
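# findoutgoing() answers the opposite question: starting from the common
# `base` nodes it prunes everything the remote already has out of the
# local node map, and the roots of what remains (nodes whose parents
# were both pruned) are exactly the changesets to bundle for a push.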
1188 def pull(self, remote):
1195 def pull(self, remote):
1189 lock = self.lock()
1196 lock = self.lock()
1190
1197
1191 # if we have an empty repo, fetch everything
1198 # if we have an empty repo, fetch everything
1192 if self.changelog.tip() == nullid:
1199 if self.changelog.tip() == nullid:
1193 self.ui.status("requesting all changes\n")
1200 self.ui.status("requesting all changes\n")
1194 fetch = [nullid]
1201 fetch = [nullid]
1195 else:
1202 else:
1196 fetch = self.findincoming(remote)
1203 fetch = self.findincoming(remote)
1197
1204
1198 if not fetch:
1205 if not fetch:
1199 self.ui.status("no changes found\n")
1206 self.ui.status("no changes found\n")
1200 return 1
1207 return 1
1201
1208
1202 cg = remote.changegroup(fetch)
1209 cg = remote.changegroup(fetch)
1203 return self.addchangegroup(cg)
1210 return self.addchangegroup(cg)
1204
1211
1205 def push(self, remote):
1212 def push(self, remote, force=False):
1206 lock = remote.lock()
1213 lock = remote.lock()
1207 update = self.findoutgoing(remote)
1214
1215 base = {}
1216 heads = remote.heads()
1217 inc = self.findincoming(remote, base, heads)
1218 if not force and inc:
1219 self.ui.warn("abort: unsynced remote changes!\n")
1220 self.ui.status("(did you forget to sync? use push -f to force)\n")
1221 return 1
1222
1223 update = self.findoutgoing(remote, base)
1208 if not update:
1224 if not update:
1209 self.ui.status("no changes found\n")
1225 self.ui.status("no changes found\n")
1210 return 1
1226 return 1
1227 elif not force:
1228 if len(heads) < len(self.changelog.heads()):
1229 self.ui.warn("abort: push creates new remote branches!\n")
1230 self.ui.status("(did you forget to merge?" +
1231 " use push -f to force)\n")
1232 return 1
1211
1233
1212 cg = self.changegroup(update)
1234 cg = self.changegroup(update)
1213 return remote.addchangegroup(cg)
1235 return remote.addchangegroup(cg)
1214
1236
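# push() now refuses two situations unless force=True: the remote holds
# changesets we have not pulled (findincoming() found something), or the
# push would leave the remote with more heads than it currently has,
# i.e. an unmerged new branch.  Compressed view of that decision
# (hypothetical helper over plain values, not the repository API):
def push_allowed(unsynced_remote, local_heads, remote_heads, force=False):
    if force:
        return True
    if unsynced_remote:
        return False            # "did you forget to sync? use push -f to force"
    if len(remote_heads) < len(local_heads):
        return False            # "did you forget to merge? use push -f to force"
    return True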
1215 def changegroup(self, basenodes):
1237 def changegroup(self, basenodes):
1216 class genread:
1238 class genread:
1217 def __init__(self, generator):
1239 def __init__(self, generator):
1218 self.g = generator
1240 self.g = generator
1219 self.buf = ""
1241 self.buf = ""
1220 def read(self, l):
1242 def read(self, l):
1221 while l > len(self.buf):
1243 while l > len(self.buf):
1222 try:
1244 try:
1223 self.buf += self.g.next()
1245 self.buf += self.g.next()
1224 except StopIteration:
1246 except StopIteration:
1225 break
1247 break
1226 d, self.buf = self.buf[:l], self.buf[l:]
1248 d, self.buf = self.buf[:l], self.buf[l:]
1227 return d
1249 return d
1228
1250
1229 def gengroup():
1251 def gengroup():
1230 nodes = self.newer(basenodes)
1252 nodes = self.newer(basenodes)
1231
1253
1232 # construct the link map
1254 # construct the link map
1233 linkmap = {}
1255 linkmap = {}
1234 for n in nodes:
1256 for n in nodes:
1235 linkmap[self.changelog.rev(n)] = n
1257 linkmap[self.changelog.rev(n)] = n
1236
1258
1237 # construct a list of all changed files
1259 # construct a list of all changed files
1238 changed = {}
1260 changed = {}
1239 for n in nodes:
1261 for n in nodes:
1240 c = self.changelog.read(n)
1262 c = self.changelog.read(n)
1241 for f in c[3]:
1263 for f in c[3]:
1242 changed[f] = 1
1264 changed[f] = 1
1243 changed = changed.keys()
1265 changed = changed.keys()
1244 changed.sort()
1266 changed.sort()
1245
1267
1246 # the changegroup is changesets + manifests + all file revs
1268 # the changegroup is changesets + manifests + all file revs
1247 revs = [ self.changelog.rev(n) for n in nodes ]
1269 revs = [ self.changelog.rev(n) for n in nodes ]
1248
1270
1249 for y in self.changelog.group(linkmap): yield y
1271 for y in self.changelog.group(linkmap): yield y
1250 for y in self.manifest.group(linkmap): yield y
1272 for y in self.manifest.group(linkmap): yield y
1251 for f in changed:
1273 for f in changed:
1252 yield struct.pack(">l", len(f) + 4) + f
1274 yield struct.pack(">l", len(f) + 4) + f
1253 g = self.file(f).group(linkmap)
1275 g = self.file(f).group(linkmap)
1254 for y in g:
1276 for y in g:
1255 yield y
1277 yield y
1256
1278
1257 yield struct.pack(">l", 0)
1279 yield struct.pack(">l", 0)
1258
1280
1259 return genread(gengroup())
1281 return genread(gengroup())
1260
1282
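# The changegroup stream is length-prefixed: each chunk starts with a
# 4-byte big-endian length that counts the length field itself, and a
# value <= 4 ends a group.  gengroup() emits the changelog group, the
# manifest group, then a filename chunk followed by that file's group
# for every changed file, and finally a bare zero terminator, which is
# what addchangegroup() below unpacks.  The framing in isolation
# (hypothetical helpers, not the hg.py functions):
import struct

def pack_chunk(data):
    return struct.pack(">l", len(data) + 4) + data

def read_chunk(stream):
    hdr = stream.read(4)
    if not hdr:
        return ""
    l = struct.unpack(">l", hdr)[0]
    if l <= 4:
        return ""               # group terminator
    return stream.read(l - 4)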
1261 def addchangegroup(self, source):
1283 def addchangegroup(self, source):
1262
1284
1263 def getchunk():
1285 def getchunk():
1264 d = source.read(4)
1286 d = source.read(4)
1265 if not d: return ""
1287 if not d: return ""
1266 l = struct.unpack(">l", d)[0]
1288 l = struct.unpack(">l", d)[0]
1267 if l <= 4: return ""
1289 if l <= 4: return ""
1268 return source.read(l - 4)
1290 return source.read(l - 4)
1269
1291
1270 def getgroup():
1292 def getgroup():
1271 while 1:
1293 while 1:
1272 c = getchunk()
1294 c = getchunk()
1273 if not c: break
1295 if not c: break
1274 yield c
1296 yield c
1275
1297
1276 def csmap(x):
1298 def csmap(x):
1277 self.ui.debug("add changeset %s\n" % short(x))
1299 self.ui.debug("add changeset %s\n" % short(x))
1278 return self.changelog.count()
1300 return self.changelog.count()
1279
1301
1280 def revmap(x):
1302 def revmap(x):
1281 return self.changelog.rev(x)
1303 return self.changelog.rev(x)
1282
1304
1283 if not source: return
1305 if not source: return
1284 changesets = files = revisions = 0
1306 changesets = files = revisions = 0
1285
1307
1286 tr = self.transaction()
1308 tr = self.transaction()
1287
1309
1288 # pull off the changeset group
1310 # pull off the changeset group
1289 self.ui.status("adding changesets\n")
1311 self.ui.status("adding changesets\n")
1290 co = self.changelog.tip()
1312 co = self.changelog.tip()
1291 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1313 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1292 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1314 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1293
1315
1294 # pull off the manifest group
1316 # pull off the manifest group
1295 self.ui.status("adding manifests\n")
1317 self.ui.status("adding manifests\n")
1296 mm = self.manifest.tip()
1318 mm = self.manifest.tip()
1297 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1319 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1298
1320
1299 # process the files
1321 # process the files
1300 self.ui.status("adding file changes\n")
1322 self.ui.status("adding file changes\n")
1301 while 1:
1323 while 1:
1302 f = getchunk()
1324 f = getchunk()
1303 if not f: break
1325 if not f: break
1304 self.ui.debug("adding %s revisions\n" % f)
1326 self.ui.debug("adding %s revisions\n" % f)
1305 fl = self.file(f)
1327 fl = self.file(f)
1306 o = fl.count()
1328 o = fl.count()
1307 n = fl.addgroup(getgroup(), revmap, tr)
1329 n = fl.addgroup(getgroup(), revmap, tr)
1308 revisions += fl.count() - o
1330 revisions += fl.count() - o
1309 files += 1
1331 files += 1
1310
1332
1311 self.ui.status(("added %d changesets" +
1333 self.ui.status(("added %d changesets" +
1312 " with %d changes to %d files\n")
1334 " with %d changes to %d files\n")
1313 % (changesets, revisions, files))
1335 % (changesets, revisions, files))
1314
1336
1315 tr.close()
1337 tr.close()
1316
1338
1317 if not self.hook("changegroup"):
1339 if not self.hook("changegroup"):
1318 return 1
1340 return 1
1319
1341
1320 return
1342 return
1321
1343
1322 def update(self, node, allow=False, force=False, choose=None,
1344 def update(self, node, allow=False, force=False, choose=None,
1323 moddirstate=True):
1345 moddirstate=True):
1324 pl = self.dirstate.parents()
1346 pl = self.dirstate.parents()
1325 if not force and pl[1] != nullid:
1347 if not force and pl[1] != nullid:
1326 self.ui.warn("aborting: outstanding uncommitted merges\n")
1348 self.ui.warn("aborting: outstanding uncommitted merges\n")
1327 return 1
1349 return 1
1328
1350
1329 p1, p2 = pl[0], node
1351 p1, p2 = pl[0], node
1330 pa = self.changelog.ancestor(p1, p2)
1352 pa = self.changelog.ancestor(p1, p2)
1331 m1n = self.changelog.read(p1)[0]
1353 m1n = self.changelog.read(p1)[0]
1332 m2n = self.changelog.read(p2)[0]
1354 m2n = self.changelog.read(p2)[0]
1333 man = self.manifest.ancestor(m1n, m2n)
1355 man = self.manifest.ancestor(m1n, m2n)
1334 m1 = self.manifest.read(m1n)
1356 m1 = self.manifest.read(m1n)
1335 mf1 = self.manifest.readflags(m1n)
1357 mf1 = self.manifest.readflags(m1n)
1336 m2 = self.manifest.read(m2n)
1358 m2 = self.manifest.read(m2n)
1337 mf2 = self.manifest.readflags(m2n)
1359 mf2 = self.manifest.readflags(m2n)
1338 ma = self.manifest.read(man)
1360 ma = self.manifest.read(man)
1339 mfa = self.manifest.readflags(man)
1361 mfa = self.manifest.readflags(man)
1340
1362
1341 (c, a, d, u) = self.changes()
1363 (c, a, d, u) = self.changes()
1342
1364
1343 # is this a jump, or a merge? i.e. is there a linear path
1365 # is this a jump, or a merge? i.e. is there a linear path
1344 # from p1 to p2?
1366 # from p1 to p2?
1345 linear_path = (pa == p1 or pa == p2)
1367 linear_path = (pa == p1 or pa == p2)
1346
1368
1347 # resolve the manifest to determine which files
1369 # resolve the manifest to determine which files
1348 # we care about merging
1370 # we care about merging
1349 self.ui.note("resolving manifests\n")
1371 self.ui.note("resolving manifests\n")
1350 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1372 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1351 (force, allow, moddirstate, linear_path))
1373 (force, allow, moddirstate, linear_path))
1352 self.ui.debug(" ancestor %s local %s remote %s\n" %
1374 self.ui.debug(" ancestor %s local %s remote %s\n" %
1353 (short(man), short(m1n), short(m2n)))
1375 (short(man), short(m1n), short(m2n)))
1354
1376
1355 merge = {}
1377 merge = {}
1356 get = {}
1378 get = {}
1357 remove = []
1379 remove = []
1358 mark = {}
1380 mark = {}
1359
1381
1360 # construct a working dir manifest
1382 # construct a working dir manifest
1361 mw = m1.copy()
1383 mw = m1.copy()
1362 mfw = mf1.copy()
1384 mfw = mf1.copy()
1363 umap = dict.fromkeys(u)
1385 umap = dict.fromkeys(u)
1364
1386
1365 for f in a + c + u:
1387 for f in a + c + u:
1366 mw[f] = ""
1388 mw[f] = ""
1367 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1389 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1368
1390
1369 for f in d:
1391 for f in d:
1370 if f in mw: del mw[f]
1392 if f in mw: del mw[f]
1371
1393
1372 # If we're jumping between revisions (as opposed to merging),
1394 # If we're jumping between revisions (as opposed to merging),
1373 # and if neither the working directory nor the target rev has
1395 # and if neither the working directory nor the target rev has
1374 # the file, then we need to remove it from the dirstate, to
1396 # the file, then we need to remove it from the dirstate, to
1375 # prevent the dirstate from listing the file when it is no
1397 # prevent the dirstate from listing the file when it is no
1376 # longer in the manifest.
1398 # longer in the manifest.
1377 if moddirstate and linear_path and f not in m2:
1399 if moddirstate and linear_path and f not in m2:
1378 self.dirstate.forget((f,))
1400 self.dirstate.forget((f,))
1379
1401
1380 # Compare manifests
1402 # Compare manifests
1381 for f, n in mw.iteritems():
1403 for f, n in mw.iteritems():
1382 if choose and not choose(f): continue
1404 if choose and not choose(f): continue
1383 if f in m2:
1405 if f in m2:
1384 s = 0
1406 s = 0
1385
1407
1386 # is the wfile new since m1, and match m2?
1408 # is the wfile new since m1, and match m2?
1387 if f not in m1:
1409 if f not in m1:
1388 t1 = self.wfile(f).read()
1410 t1 = self.wfile(f).read()
1389 t2 = self.file(f).revision(m2[f])
1411 t2 = self.file(f).revision(m2[f])
1390 if cmp(t1, t2) == 0:
1412 if cmp(t1, t2) == 0:
1391 mark[f] = 1
1413 mark[f] = 1
1392 n = m2[f]
1414 n = m2[f]
1393 del t1, t2
1415 del t1, t2
1394
1416
1395 # are files different?
1417 # are files different?
1396 if n != m2[f]:
1418 if n != m2[f]:
1397 a = ma.get(f, nullid)
1419 a = ma.get(f, nullid)
1398 # are both different from the ancestor?
1420 # are both different from the ancestor?
1399 if n != a and m2[f] != a:
1421 if n != a and m2[f] != a:
1400 self.ui.debug(" %s versions differ, resolve\n" % f)
1422 self.ui.debug(" %s versions differ, resolve\n" % f)
1401 # merge executable bits
1423 # merge executable bits
1402 # "if we changed or they changed, change in merge"
1424 # "if we changed or they changed, change in merge"
1403 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1425 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1404 mode = ((a^b) | (a^c)) ^ a
1426 mode = ((a^b) | (a^c)) ^ a
1405 merge[f] = (m1.get(f, nullid), m2[f], mode)
1427 merge[f] = (m1.get(f, nullid), m2[f], mode)
1406 s = 1
1428 s = 1
1407 # are we clobbering?
1429 # are we clobbering?
1408 # is remote's version newer?
1430 # is remote's version newer?
1409 # or are we going back in time?
1431 # or are we going back in time?
1410 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1432 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1411 self.ui.debug(" remote %s is newer, get\n" % f)
1433 self.ui.debug(" remote %s is newer, get\n" % f)
1412 get[f] = m2[f]
1434 get[f] = m2[f]
1413 s = 1
1435 s = 1
1414 else:
1436 else:
1415 mark[f] = 1
1437 mark[f] = 1
1416 elif f in umap:
1438 elif f in umap:
1417 # this unknown file is the same as the checkout
1439 # this unknown file is the same as the checkout
1418 get[f] = m2[f]
1440 get[f] = m2[f]
1419
1441
1420 if not s and mfw[f] != mf2[f]:
1442 if not s and mfw[f] != mf2[f]:
1421 if force:
1443 if force:
1422 self.ui.debug(" updating permissions for %s\n" % f)
1444 self.ui.debug(" updating permissions for %s\n" % f)
1423 util.set_exec(self.wjoin(f), mf2[f])
1445 util.set_exec(self.wjoin(f), mf2[f])
1424 else:
1446 else:
1425 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1447 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1426 mode = ((a^b) | (a^c)) ^ a
1448 mode = ((a^b) | (a^c)) ^ a
1427 if mode != b:
1449 if mode != b:
1428 self.ui.debug(" updating permissions for %s\n" % f)
1450 self.ui.debug(" updating permissions for %s\n" % f)
1429 util.set_exec(self.wjoin(f), mode)
1451 util.set_exec(self.wjoin(f), mode)
1430 mark[f] = 1
1452 mark[f] = 1
1431 del m2[f]
1453 del m2[f]
1432 elif f in ma:
1454 elif f in ma:
1433 if n != ma[f]:
1455 if n != ma[f]:
1434 r = "d"
1456 r = "d"
1435 if not force and (linear_path or allow):
1457 if not force and (linear_path or allow):
1436 r = self.ui.prompt(
1458 r = self.ui.prompt(
1437 (" local changed %s which remote deleted\n" % f) +
1459 (" local changed %s which remote deleted\n" % f) +
1438 "(k)eep or (d)elete?", "[kd]", "k")
1460 "(k)eep or (d)elete?", "[kd]", "k")
1439 if r == "d":
1461 if r == "d":
1440 remove.append(f)
1462 remove.append(f)
1441 else:
1463 else:
1442 self.ui.debug("other deleted %s\n" % f)
1464 self.ui.debug("other deleted %s\n" % f)
1443 remove.append(f) # other deleted it
1465 remove.append(f) # other deleted it
1444 else:
1466 else:
1445 if n == m1.get(f, nullid): # same as parent
1467 if n == m1.get(f, nullid): # same as parent
1446 if p2 == pa: # going backwards?
1468 if p2 == pa: # going backwards?
1447 self.ui.debug("remote deleted %s\n" % f)
1469 self.ui.debug("remote deleted %s\n" % f)
1448 remove.append(f)
1470 remove.append(f)
1449 else:
1471 else:
1450 self.ui.debug("local created %s, keeping\n" % f)
1472 self.ui.debug("local created %s, keeping\n" % f)
1451 else:
1473 else:
1452 self.ui.debug("working dir created %s, keeping\n" % f)
1474 self.ui.debug("working dir created %s, keeping\n" % f)
1453
1475
1454 for f, n in m2.iteritems():
1476 for f, n in m2.iteritems():
1455 if choose and not choose(f): continue
1477 if choose and not choose(f): continue
1456 if f[0] == "/": continue
1478 if f[0] == "/": continue
1457 if f in ma and n != ma[f]:
1479 if f in ma and n != ma[f]:
1458 r = "k"
1480 r = "k"
1459 if not force and (linear_path or allow):
1481 if not force and (linear_path or allow):
1460 r = self.ui.prompt(
1482 r = self.ui.prompt(
1461 ("remote changed %s which local deleted\n" % f) +
1483 ("remote changed %s which local deleted\n" % f) +
1462 "(k)eep or (d)elete?", "[kd]", "k")
1484 "(k)eep or (d)elete?", "[kd]", "k")
1463 if r == "k": get[f] = n
1485 if r == "k": get[f] = n
1464 elif f not in ma:
1486 elif f not in ma:
1465 self.ui.debug("remote created %s\n" % f)
1487 self.ui.debug("remote created %s\n" % f)
1466 get[f] = n
1488 get[f] = n
1467 else:
1489 else:
1468 if force or p2 == pa: # going backwards?
1490 if force or p2 == pa: # going backwards?
1469 self.ui.debug("local deleted %s, recreating\n" % f)
1491 self.ui.debug("local deleted %s, recreating\n" % f)
1470 get[f] = n
1492 get[f] = n
1471 else:
1493 else:
1472 self.ui.debug("local deleted %s\n" % f)
1494 self.ui.debug("local deleted %s\n" % f)
1473
1495
1474 del mw, m1, m2, ma
1496 del mw, m1, m2, ma
1475
1497
1476 if force:
1498 if force:
1477 for f in merge:
1499 for f in merge:
1478 get[f] = merge[f][1]
1500 get[f] = merge[f][1]
1479 merge = {}
1501 merge = {}
1480
1502
1481 if linear_path or force:
1503 if linear_path or force:
1482 # we don't need to do any magic, just jump to the new rev
1504 # we don't need to do any magic, just jump to the new rev
1483 mode = 'n'
1505 mode = 'n'
1484 p1, p2 = p2, nullid
1506 p1, p2 = p2, nullid
1485 else:
1507 else:
1486 if not allow:
1508 if not allow:
1487 self.ui.status("this update spans a branch" +
1509 self.ui.status("this update spans a branch" +
1488 " affecting the following files:\n")
1510 " affecting the following files:\n")
1489 fl = merge.keys() + get.keys()
1511 fl = merge.keys() + get.keys()
1490 fl.sort()
1512 fl.sort()
1491 for f in fl:
1513 for f in fl:
1492 cf = ""
1514 cf = ""
1493 if f in merge: cf = " (resolve)"
1515 if f in merge: cf = " (resolve)"
1494 self.ui.status(" %s%s\n" % (f, cf))
1516 self.ui.status(" %s%s\n" % (f, cf))
1495 self.ui.warn("aborting update spanning branches!\n")
1517 self.ui.warn("aborting update spanning branches!\n")
1496 self.ui.status("(use update -m to merge across branches" +
1518 self.ui.status("(use update -m to merge across branches" +
1497 " or -C to lose changes)\n")
1519 " or -C to lose changes)\n")
1498 return 1
1520 return 1
1499 # we have to remember what files we needed to get/change
1521 # we have to remember what files we needed to get/change
1500 # because any file that's different from either one of its
1522 # because any file that's different from either one of its
1501 # parents must be in the changeset
1523 # parents must be in the changeset
1502 mode = 'm'
1524 mode = 'm'
1503 if moddirstate:
1525 if moddirstate:
1504 self.dirstate.update(mark.keys(), "m")
1526 self.dirstate.update(mark.keys(), "m")
1505
1527
1506 if moddirstate:
1528 if moddirstate:
1507 self.dirstate.setparents(p1, p2)
1529 self.dirstate.setparents(p1, p2)
1508
1530
1509 # get the files we don't need to change
1531 # get the files we don't need to change
1510 files = get.keys()
1532 files = get.keys()
1511 files.sort()
1533 files.sort()
1512 for f in files:
1534 for f in files:
1513 if f[0] == "/": continue
1535 if f[0] == "/": continue
1514 self.ui.note("getting %s\n" % f)
1536 self.ui.note("getting %s\n" % f)
1515 t = self.file(f).read(get[f])
1537 t = self.file(f).read(get[f])
1516 try:
1538 try:
1517 self.wfile(f, "w").write(t)
1539 self.wfile(f, "w").write(t)
1518 except IOError:
1540 except IOError:
1519 os.makedirs(os.path.dirname(self.wjoin(f)))
1541 os.makedirs(os.path.dirname(self.wjoin(f)))
1520 self.wfile(f, "w").write(t)
1542 self.wfile(f, "w").write(t)
1521 util.set_exec(self.wjoin(f), mf2[f])
1543 util.set_exec(self.wjoin(f), mf2[f])
1522 if moddirstate:
1544 if moddirstate:
1523 self.dirstate.update([f], mode)
1545 self.dirstate.update([f], mode)
1524
1546
1525 # merge the tricky bits
1547 # merge the tricky bits
1526 files = merge.keys()
1548 files = merge.keys()
1527 files.sort()
1549 files.sort()
1528 for f in files:
1550 for f in files:
1529 self.ui.status("merging %s\n" % f)
1551 self.ui.status("merging %s\n" % f)
1530 m, o, flag = merge[f]
1552 m, o, flag = merge[f]
1531 self.merge3(f, m, o)
1553 self.merge3(f, m, o)
1532 util.set_exec(self.wjoin(f), flag)
1554 util.set_exec(self.wjoin(f), flag)
1533 if moddirstate and mode == 'm':
1555 if moddirstate and mode == 'm':
1534 # only update dirstate on branch merge, otherwise we
1556 # only update dirstate on branch merge, otherwise we
1535 # could mark files with changes as unchanged
1557 # could mark files with changes as unchanged
1536 self.dirstate.update([f], mode)
1558 self.dirstate.update([f], mode)
1537
1559
1538 remove.sort()
1560 remove.sort()
1539 for f in remove:
1561 for f in remove:
1540 self.ui.note("removing %s\n" % f)
1562 self.ui.note("removing %s\n" % f)
1541 try:
1563 try:
1542 os.unlink(f)
1564 os.unlink(f)
1543 except OSError, inst:
1565 except OSError, inst:
1544 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1566 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1545 # try removing directories that might now be empty
1567 # try removing directories that might now be empty
1546 try: os.removedirs(os.path.dirname(f))
1568 try: os.removedirs(os.path.dirname(f))
1547 except: pass
1569 except: pass
1548 if moddirstate:
1570 if moddirstate:
1549 if mode == 'n':
1571 if mode == 'n':
1550 self.dirstate.forget(remove)
1572 self.dirstate.forget(remove)
1551 else:
1573 else:
1552 self.dirstate.update(remove, 'r')
1574 self.dirstate.update(remove, 'r')
1553
1575
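# The exec-bit merge in update() uses mode = ((a^b) | (a^c)) ^ a, with a
# the ancestor's flag, b the working copy's and c the remote's: keep the
# ancestor's bit unless one side changed it, and take the changed value
# ("if we changed or they changed, change in merge").  All eight cases:
for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            merged = ((a ^ b) | (a ^ c)) ^ a
            expected = b
            if b == a:
                expected = c
            assert merged == expected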
1554 def merge3(self, fn, my, other):
1576 def merge3(self, fn, my, other):
1555 """perform a 3-way merge in the working directory"""
1577 """perform a 3-way merge in the working directory"""
1556
1578
1557 def temp(prefix, node):
1579 def temp(prefix, node):
1558 pre = "%s~%s." % (os.path.basename(fn), prefix)
1580 pre = "%s~%s." % (os.path.basename(fn), prefix)
1559 (fd, name) = tempfile.mkstemp("", pre)
1581 (fd, name) = tempfile.mkstemp("", pre)
1560 f = os.fdopen(fd, "wb")
1582 f = os.fdopen(fd, "wb")
1561 f.write(fl.revision(node))
1583 f.write(fl.revision(node))
1562 f.close()
1584 f.close()
1563 return name
1585 return name
1564
1586
1565 fl = self.file(fn)
1587 fl = self.file(fn)
1566 base = fl.ancestor(my, other)
1588 base = fl.ancestor(my, other)
1567 a = self.wjoin(fn)
1589 a = self.wjoin(fn)
1568 b = temp("base", base)
1590 b = temp("base", base)
1569 c = temp("other", other)
1591 c = temp("other", other)
1570
1592
1571 self.ui.note("resolving %s\n" % fn)
1593 self.ui.note("resolving %s\n" % fn)
1572 self.ui.debug("file %s: other %s ancestor %s\n" %
1594 self.ui.debug("file %s: other %s ancestor %s\n" %
1573 (fn, short(other), short(base)))
1595 (fn, short(other), short(base)))
1574
1596
1575 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1597 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1576 or "hgmerge")
1598 or "hgmerge")
1577 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1599 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1578 if r:
1600 if r:
1579 self.ui.warn("merging %s failed!\n" % fn)
1601 self.ui.warn("merging %s failed!\n" % fn)
1580
1602
1581 os.unlink(b)
1603 os.unlink(b)
1582 os.unlink(c)
1604 os.unlink(c)
1583
1605
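# merge3() writes the ancestor's and the other parent's revision to
# temporary files, then runs the first of $HGMERGE, the ui.merge config
# value, or "hgmerge" as
#   <cmd> <local> <base> <other>
# A nonzero exit status only produces a warning; any conflict markers
# are left in the working file for the user to resolve before committing.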
1584 def verify(self):
1606 def verify(self):
1585 filelinkrevs = {}
1607 filelinkrevs = {}
1586 filenodes = {}
1608 filenodes = {}
1587 changesets = revisions = files = 0
1609 changesets = revisions = files = 0
1588 errors = 0
1610 errors = 0
1589
1611
1590 seen = {}
1612 seen = {}
1591 self.ui.status("checking changesets\n")
1613 self.ui.status("checking changesets\n")
1592 for i in range(self.changelog.count()):
1614 for i in range(self.changelog.count()):
1593 changesets += 1
1615 changesets += 1
1594 n = self.changelog.node(i)
1616 n = self.changelog.node(i)
1595 if n in seen:
1617 if n in seen:
1596 self.ui.warn("duplicate changeset at revision %d\n" % i)
1618 self.ui.warn("duplicate changeset at revision %d\n" % i)
1597 errors += 1
1619 errors += 1
1598 seen[n] = 1
1620 seen[n] = 1
1599
1621
1600 for p in self.changelog.parents(n):
1622 for p in self.changelog.parents(n):
1601 if p not in self.changelog.nodemap:
1623 if p not in self.changelog.nodemap:
1602 self.ui.warn("changeset %s has unknown parent %s\n" %
1624 self.ui.warn("changeset %s has unknown parent %s\n" %
1603 (short(n), short(p)))
1625 (short(n), short(p)))
1604 errors += 1
1626 errors += 1
1605 try:
1627 try:
1606 changes = self.changelog.read(n)
1628 changes = self.changelog.read(n)
1607 except Exception, inst:
1629 except Exception, inst:
1608 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1630 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1609 errors += 1
1631 errors += 1
1610
1632
1611 for f in changes[3]:
1633 for f in changes[3]:
1612 filelinkrevs.setdefault(f, []).append(i)
1634 filelinkrevs.setdefault(f, []).append(i)
1613
1635
1614 seen = {}
1636 seen = {}
1615 self.ui.status("checking manifests\n")
1637 self.ui.status("checking manifests\n")
1616 for i in range(self.manifest.count()):
1638 for i in range(self.manifest.count()):
1617 n = self.manifest.node(i)
1639 n = self.manifest.node(i)
1618 if n in seen:
1640 if n in seen:
1619 self.ui.warn("duplicate manifest at revision %d\n" % i)
1641 self.ui.warn("duplicate manifest at revision %d\n" % i)
1620 errors += 1
1642 errors += 1
1621 seen[n] = 1
1643 seen[n] = 1
1622
1644
1623 for p in self.manifest.parents(n):
1645 for p in self.manifest.parents(n):
1624 if p not in self.manifest.nodemap:
1646 if p not in self.manifest.nodemap:
1625 self.ui.warn("manifest %s has unknown parent %s\n" %
1647 self.ui.warn("manifest %s has unknown parent %s\n" %
1626 (short(n), short(p)))
1648 (short(n), short(p)))
1627 errors += 1
1649 errors += 1
1628
1650
1629 try:
1651 try:
1630 delta = mdiff.patchtext(self.manifest.delta(n))
1652 delta = mdiff.patchtext(self.manifest.delta(n))
1631 except KeyboardInterrupt:
1653 except KeyboardInterrupt:
1632 self.ui.warn("aborted")
1654 self.ui.warn("aborted")
1633 sys.exit(0)
1655 sys.exit(0)
1634 except Exception, inst:
1656 except Exception, inst:
1635 self.ui.warn("unpacking manifest %s: %s\n"
1657 self.ui.warn("unpacking manifest %s: %s\n"
1636 % (short(n), inst))
1658 % (short(n), inst))
1637 errors += 1
1659 errors += 1
1638
1660
1639 ff = [ l.split('\0') for l in delta.splitlines() ]
1661 ff = [ l.split('\0') for l in delta.splitlines() ]
1640 for f, fn in ff:
1662 for f, fn in ff:
1641 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1663 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1642
1664
1643 self.ui.status("crosschecking files in changesets and manifests\n")
1665 self.ui.status("crosschecking files in changesets and manifests\n")
1644 for f in filenodes:
1666 for f in filenodes:
1645 if f not in filelinkrevs:
1667 if f not in filelinkrevs:
1646 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1668 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1647 errors += 1
1669 errors += 1
1648
1670
1649 for f in filelinkrevs:
1671 for f in filelinkrevs:
1650 if f not in filenodes:
1672 if f not in filenodes:
1651 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1673 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1652 errors += 1
1674 errors += 1
1653
1675
1654 self.ui.status("checking files\n")
1676 self.ui.status("checking files\n")
1655 ff = filenodes.keys()
1677 ff = filenodes.keys()
1656 ff.sort()
1678 ff.sort()
1657 for f in ff:
1679 for f in ff:
1658 if f == "/dev/null": continue
1680 if f == "/dev/null": continue
1659 files += 1
1681 files += 1
1660 fl = self.file(f)
1682 fl = self.file(f)
1661 nodes = { nullid: 1 }
1683 nodes = { nullid: 1 }
1662 seen = {}
1684 seen = {}
1663 for i in range(fl.count()):
1685 for i in range(fl.count()):
1664 revisions += 1
1686 revisions += 1
1665 n = fl.node(i)
1687 n = fl.node(i)
1666
1688
1667 if n in seen:
1689 if n in seen:
1668 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1690 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1669 errors += 1
1691 errors += 1
1670
1692
1671 if n not in filenodes[f]:
1693 if n not in filenodes[f]:
1672 self.ui.warn("%s: %d:%s not in manifests\n"
1694 self.ui.warn("%s: %d:%s not in manifests\n"
1673 % (f, i, short(n)))
1695 % (f, i, short(n)))
1674 errors += 1
1696 errors += 1
1675 else:
1697 else:
1676 del filenodes[f][n]
1698 del filenodes[f][n]
1677
1699
1678 flr = fl.linkrev(n)
1700 flr = fl.linkrev(n)
1679 if flr not in filelinkrevs[f]:
1701 if flr not in filelinkrevs[f]:
1680 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1702 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1681 % (f, short(n), fl.linkrev(n)))
1703 % (f, short(n), fl.linkrev(n)))
1682 errors += 1
1704 errors += 1
1683 else:
1705 else:
1684 filelinkrevs[f].remove(flr)
1706 filelinkrevs[f].remove(flr)
1685
1707
1686 # verify contents
1708 # verify contents
1687 try:
1709 try:
1688 t = fl.read(n)
1710 t = fl.read(n)
1689 except Exception, inst:
1711 except Exception, inst:
1690 self.ui.warn("unpacking file %s %s: %s\n"
1712 self.ui.warn("unpacking file %s %s: %s\n"
1691 % (f, short(n), inst))
1713 % (f, short(n), inst))
1692 errors += 1
1714 errors += 1
1693
1715
1694 # verify parents
1716 # verify parents
1695 (p1, p2) = fl.parents(n)
1717 (p1, p2) = fl.parents(n)
1696 if p1 not in nodes:
1718 if p1 not in nodes:
1697 self.ui.warn("file %s:%s unknown parent 1 %s" %
1719 self.ui.warn("file %s:%s unknown parent 1 %s" %
1698 (f, short(n), short(p1)))
1720 (f, short(n), short(p1)))
1699 errors += 1
1721 errors += 1
1700 if p2 not in nodes:
1722 if p2 not in nodes:
1701 self.ui.warn("file %s:%s unknown parent 2 %s" %
1723 self.ui.warn("file %s:%s unknown parent 2 %s" %
1702 (f, short(n), short(p2)))
1724 (f, short(n), short(p2)))
1703 errors += 1
1725 errors += 1
1704 nodes[n] = 1
1726 nodes[n] = 1
1705
1727
1706 # cross-check
1728 # cross-check
1707 for node in filenodes[f]:
1729 for node in filenodes[f]:
1708 self.ui.warn("node %s in manifests not in %s\n"
1730 self.ui.warn("node %s in manifests not in %s\n"
1709 % (hex(node), f))
1731 % (hex(node), f))
1710 errors += 1
1732 errors += 1
1711
1733
1712 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1734 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1713 (files, changesets, revisions))
1735 (files, changesets, revisions))
1714
1736
1715 if errors:
1737 if errors:
1716 self.ui.warn("%d integrity errors encountered!\n" % errors)
1738 self.ui.warn("%d integrity errors encountered!\n" % errors)
1717 return 1
1739 return 1
1718
1740
1719 class httprepository:
1741 class httprepository:
1720 def __init__(self, ui, path):
1742 def __init__(self, ui, path):
1721 # fix missing / after hostname
1743 # fix missing / after hostname
1722 s = urlparse.urlsplit(path)
1744 s = urlparse.urlsplit(path)
1723 partial = s[2]
1745 partial = s[2]
1724 if not partial: partial = "/"
1746 if not partial: partial = "/"
1725 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1747 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1726 self.ui = ui
1748 self.ui = ui
1727 no_list = [ "localhost", "127.0.0.1" ]
1749 no_list = [ "localhost", "127.0.0.1" ]
1728 host = ui.config("http_proxy", "host")
1750 host = ui.config("http_proxy", "host")
1729 if host is None:
1751 if host is None:
1730 host = os.environ.get("http_proxy")
1752 host = os.environ.get("http_proxy")
1731 if host and host.startswith('http://'):
1753 if host and host.startswith('http://'):
1732 host = host[7:]
1754 host = host[7:]
1733 user = ui.config("http_proxy", "user")
1755 user = ui.config("http_proxy", "user")
1734 passwd = ui.config("http_proxy", "passwd")
1756 passwd = ui.config("http_proxy", "passwd")
1735 no = ui.config("http_proxy", "no")
1757 no = ui.config("http_proxy", "no")
1736 if no is None:
1758 if no is None:
1737 no = os.environ.get("no_proxy")
1759 no = os.environ.get("no_proxy")
1738 if no:
1760 if no:
1739 no_list = no_list + no.split(",")
1761 no_list = no_list + no.split(",")
1740
1762
1741 no_proxy = 0
1763 no_proxy = 0
1742 for h in no_list:
1764 for h in no_list:
1743 if (path.startswith("http://" + h + "/") or
1765 if (path.startswith("http://" + h + "/") or
1744 path.startswith("http://" + h + ":") or
1766 path.startswith("http://" + h + ":") or
1745 path == "http://" + h):
1767 path == "http://" + h):
1746 no_proxy = 1
1768 no_proxy = 1
1747
1769
1748 # Note: urllib2 takes proxy values from the environment and those will
1770 # Note: urllib2 takes proxy values from the environment and those will
1749 # take precedence
1771 # take precedence
1750 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1772 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1751 if os.environ.has_key(env):
1773 if os.environ.has_key(env):
1752 del os.environ[env]
1774 del os.environ[env]
1753
1775
1754 proxy_handler = urllib2.BaseHandler()
1776 proxy_handler = urllib2.BaseHandler()
1755 if host and not no_proxy:
1777 if host and not no_proxy:
1756 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1778 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1757
1779
1758 authinfo = None
1780 authinfo = None
1759 if user and passwd:
1781 if user and passwd:
1760 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1782 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1761 passmgr.add_password(None, host, user, passwd)
1783 passmgr.add_password(None, host, user, passwd)
1762 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1784 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1763
1785
1764 opener = urllib2.build_opener(proxy_handler, authinfo)
1786 opener = urllib2.build_opener(proxy_handler, authinfo)
1765 urllib2.install_opener(opener)
1787 urllib2.install_opener(opener)
1766
1788
1767 def dev(self):
1789 def dev(self):
1768 return -1
1790 return -1
1769
1791
1770 def do_cmd(self, cmd, **args):
1792 def do_cmd(self, cmd, **args):
1771 self.ui.debug("sending %s command\n" % cmd)
1793 self.ui.debug("sending %s command\n" % cmd)
1772 q = {"cmd": cmd}
1794 q = {"cmd": cmd}
1773 q.update(args)
1795 q.update(args)
1774 qs = urllib.urlencode(q)
1796 qs = urllib.urlencode(q)
1775 cu = "%s?%s" % (self.url, qs)
1797 cu = "%s?%s" % (self.url, qs)
1776 resp = urllib2.urlopen(cu)
1798 resp = urllib2.urlopen(cu)
1777 proto = resp.headers['content-type']
1799 proto = resp.headers['content-type']
1778
1800
1779 # accept old "text/plain" and "application/hg-changegroup" for now
1801 # accept old "text/plain" and "application/hg-changegroup" for now
1780 if not proto.startswith('application/mercurial') and \
1802 if not proto.startswith('application/mercurial') and \
1781 not proto.startswith('text/plain') and \
1803 not proto.startswith('text/plain') and \
1782 not proto.startswith('application/hg-changegroup'):
1804 not proto.startswith('application/hg-changegroup'):
1783 raise RepoError("'%s' does not appear to be an hg repository"
1805 raise RepoError("'%s' does not appear to be an hg repository"
1784 % self.url)
1806 % self.url)
1785
1807
1786 if proto.startswith('application/mercurial'):
1808 if proto.startswith('application/mercurial'):
1787 version = proto[22:]
1809 version = proto[22:]
1788 if float(version) > 0.1:
1810 if float(version) > 0.1:
1789 raise RepoError("'%s' uses newer protocol %s" %
1811 raise RepoError("'%s' uses newer protocol %s" %
1790 (self.url, version))
1812 (self.url, version))
1791
1813
1792 return resp
1814 return resp
1793
1815
1794 def heads(self):
1816 def heads(self):
1795 d = self.do_cmd("heads").read()
1817 d = self.do_cmd("heads").read()
1796 try:
1818 try:
1797 return map(bin, d[:-1].split(" "))
1819 return map(bin, d[:-1].split(" "))
1798 except:
1820 except:
1799 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1821 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1800 raise
1822 raise
1801
1823
1802 def branches(self, nodes):
1824 def branches(self, nodes):
1803 n = " ".join(map(hex, nodes))
1825 n = " ".join(map(hex, nodes))
1804 d = self.do_cmd("branches", nodes=n).read()
1826 d = self.do_cmd("branches", nodes=n).read()
1805 try:
1827 try:
1806 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1828 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1807 return br
1829 return br
1808 except:
1830 except:
1809 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1831 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1810 raise
1832 raise
1811
1833
1812 def between(self, pairs):
1834 def between(self, pairs):
1813 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1835 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1814 d = self.do_cmd("between", pairs=n).read()
1836 d = self.do_cmd("between", pairs=n).read()
1815 try:
1837 try:
1816 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1838 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1817 return p
1839 return p
1818 except:
1840 except:
1819 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1841 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1820 raise
1842 raise
1821
1843
1822 def changegroup(self, nodes):
1844 def changegroup(self, nodes):
1823 n = " ".join(map(hex, nodes))
1845 n = " ".join(map(hex, nodes))
1824 f = self.do_cmd("changegroup", roots=n)
1846 f = self.do_cmd("changegroup", roots=n)
1825 bytes = 0
1847 bytes = 0
1826
1848
1827 class zread:
1849 class zread:
1828 def __init__(self, f):
1850 def __init__(self, f):
1829 self.zd = zlib.decompressobj()
1851 self.zd = zlib.decompressobj()
1830 self.f = f
1852 self.f = f
1831 self.buf = ""
1853 self.buf = ""
1832 def read(self, l):
1854 def read(self, l):
1833 while l > len(self.buf):
1855 while l > len(self.buf):
1834 r = self.f.read(4096)
1856 r = self.f.read(4096)
1835 if r:
1857 if r:
1836 self.buf += self.zd.decompress(r)
1858 self.buf += self.zd.decompress(r)
1837 else:
1859 else:
1838 self.buf += self.zd.flush()
1860 self.buf += self.zd.flush()
1839 break
1861 break
1840 d, self.buf = self.buf[:l], self.buf[l:]
1862 d, self.buf = self.buf[:l], self.buf[l:]
1841 return d
1863 return d
1842
1864
1843 return zread(f)
1865 return zread(f)
1844
1866
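# Over HTTP the changegroup body arrives zlib-compressed, so zread wraps
# the response and decompresses incrementally while callers keep using a
# plain read(l) interface.  One-shot equivalent for a small, fully
# buffered body (illustrative only; it trades streaming for simplicity,
# which matters because changegroups can be large):
import zlib

def decompress_body(data):
    return zlib.decompress(data)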
1845 class remotelock:
1867 class remotelock:
1846 def __init__(self, repo):
1868 def __init__(self, repo):
1847 self.repo = repo
1869 self.repo = repo
1848 def release(self):
1870 def release(self):
1849 self.repo.unlock()
1871 self.repo.unlock()
1850 self.repo = None
1872 self.repo = None
1851 def __del__(self):
1873 def __del__(self):
1852 if self.repo:
1874 if self.repo:
1853 self.release()
1875 self.release()
1854
1876
1855 class sshrepository:
1877 class sshrepository:
1856 def __init__(self, ui, path):
1878 def __init__(self, ui, path):
1857 self.url = path
1879 self.url = path
1858 self.ui = ui
1880 self.ui = ui
1859
1881
1860 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1882 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1861 if not m:
1883 if not m:
1862 raise RepoError("couldn't parse destination %s\n" % path)
1884 raise RepoError("couldn't parse destination %s\n" % path)
1863
1885
1864 self.user = m.group(2)
1886 self.user = m.group(2)
1865 self.host = m.group(3)
1887 self.host = m.group(3)
1866 self.port = m.group(5)
1888 self.port = m.group(5)
1867 self.path = m.group(7)
1889 self.path = m.group(7)
1868
1890
1869 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1891 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1870 args = self.port and ("%s -p %s") % (args, self.port) or args
1892 args = self.port and ("%s -p %s") % (args, self.port) or args
1871 path = self.path or ""
1893 path = self.path or ""
1872
1894
1873 cmd = "ssh %s 'hg -R %s serve --stdio'"
1895 cmd = "ssh %s 'hg -R %s serve --stdio'"
1874 cmd = cmd % (args, path)
1896 cmd = cmd % (args, path)
1875
1897
1876 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1898 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1877
1899
1878 def readerr(self):
1900 def readerr(self):
1879 while 1:
1901 while 1:
1880 r,w,x = select.select([self.pipee], [], [], 0)
1902 r,w,x = select.select([self.pipee], [], [], 0)
1881 if not r: break
1903 if not r: break
1882 l = self.pipee.readline()
1904 l = self.pipee.readline()
1883 if not l: break
1905 if not l: break
1884 self.ui.status("remote: ", l)
1906 self.ui.status("remote: ", l)
1885
1907
1886 def __del__(self):
1908 def __del__(self):
1887 self.pipeo.close()
1909 self.pipeo.close()
1888 self.pipei.close()
1910 self.pipei.close()
1889 for l in self.pipee:
1911 for l in self.pipee:
1890 self.ui.status("remote: ", l)
1912 self.ui.status("remote: ", l)
1891 self.pipee.close()
1913 self.pipee.close()
1892
1914
1893 def dev(self):
1915 def dev(self):
1894 return -1
1916 return -1
1895
1917
1896 def do_cmd(self, cmd, **args):
1918 def do_cmd(self, cmd, **args):
1897 self.ui.debug("sending %s command\n" % cmd)
1919 self.ui.debug("sending %s command\n" % cmd)
1898 self.pipeo.write("%s\n" % cmd)
1920 self.pipeo.write("%s\n" % cmd)
1899 for k, v in args.items():
1921 for k, v in args.items():
1900 self.pipeo.write("%s %d\n" % (k, len(v)))
1922 self.pipeo.write("%s %d\n" % (k, len(v)))
1901 self.pipeo.write(v)
1923 self.pipeo.write(v)
1902 self.pipeo.flush()
1924 self.pipeo.flush()
1903
1925
1904 return self.pipei
1926 return self.pipei
1905
1927
1906 def call(self, cmd, **args):
1928 def call(self, cmd, **args):
1907 r = self.do_cmd(cmd, **args)
1929 r = self.do_cmd(cmd, **args)
1908 l = r.readline()
1930 l = r.readline()
1909 self.readerr()
1931 self.readerr()
1910 try:
1932 try:
1911 l = int(l)
1933 l = int(l)
1912 except:
1934 except:
1913 raise RepoError("unexpected response '%s'" % l)
1935 raise RepoError("unexpected response '%s'" % l)
1914 return r.read(l)
1936 return r.read(l)
1915
1937
1916 def lock(self):
1938 def lock(self):
1917 self.call("lock")
1939 self.call("lock")
1918 return remotelock(self)
1940 return remotelock(self)
1919
1941
1920 def unlock(self):
1942 def unlock(self):
1921 self.call("unlock")
1943 self.call("unlock")
1922
1944
1923 def heads(self):
1945 def heads(self):
1924 d = self.call("heads")
1946 d = self.call("heads")
1925 try:
1947 try:
1926 return map(bin, d[:-1].split(" "))
1948 return map(bin, d[:-1].split(" "))
1927 except:
1949 except:
1928 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1950 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1929
1951
1930 def branches(self, nodes):
1952 def branches(self, nodes):
1931 n = " ".join(map(hex, nodes))
1953 n = " ".join(map(hex, nodes))
1932 d = self.call("branches", nodes=n)
1954 d = self.call("branches", nodes=n)
1933 try:
1955 try:
1934 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1956 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1935 return br
1957 return br
1936 except:
1958 except:
1937 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1959 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1938
1960
1939 def between(self, pairs):
1961 def between(self, pairs):
1940 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1962 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1941 d = self.call("between", pairs=n)
1963 d = self.call("between", pairs=n)
1942 try:
1964 try:
1943 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1965 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1944 return p
1966 return p
1945 except:
1967 except:
1946 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1968 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1947
1969
1948 def changegroup(self, nodes):
1970 def changegroup(self, nodes):
1949 n = " ".join(map(hex, nodes))
1971 n = " ".join(map(hex, nodes))
1950 f = self.do_cmd("changegroup", roots=n)
1972 f = self.do_cmd("changegroup", roots=n)
1951 return self.pipei
1973 return self.pipei
1952
1974
1953 def addchangegroup(self, cg):
1975 def addchangegroup(self, cg):
1954 d = self.call("addchangegroup")
1976 d = self.call("addchangegroup")
1955 if d:
1977 if d:
1956 raise RepoError("push refused: %s" % d)
1978 raise RepoError("push refused: %s" % d)
1957
1979
1958 while 1:
1980 while 1:
1959 d = cg.read(4096)
1981 d = cg.read(4096)
1960 if not d: break
1982 if not d: break
1961 self.pipeo.write(d)
1983 self.pipeo.write(d)
1962 self.readerr()
1984 self.readerr()
1963
1985
1964 self.pipeo.flush()
1986 self.pipeo.flush()
1965
1987
1966 self.readerr()
1988 self.readerr()
1967 l = int(self.pipei.readline())
1989 l = int(self.pipei.readline())
1968 return self.pipei.read(l) != ""
1990 return self.pipei.read(l) != ""
1969
1991
1970 def repository(ui, path=None, create=0):
1992 def repository(ui, path=None, create=0):
1971 if path:
1993 if path:
1972 if path.startswith("http://"):
1994 if path.startswith("http://"):
1973 return httprepository(ui, path)
1995 return httprepository(ui, path)
1974 if path.startswith("hg://"):
1996 if path.startswith("hg://"):
1975 return httprepository(ui, path.replace("hg://", "http://"))
1997 return httprepository(ui, path.replace("hg://", "http://"))
1976 if path.startswith("old-http://"):
1998 if path.startswith("old-http://"):
1977 return localrepository(ui, path.replace("old-http://", "http://"))
1999 return localrepository(ui, path.replace("old-http://", "http://"))
1978 if path.startswith("ssh://"):
2000 if path.startswith("ssh://"):
1979 return sshrepository(ui, path)
2001 return sshrepository(ui, path)
1980
2002
1981 return localrepository(ui, path, create)
2003 return localrepository(ui, path, create)
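# repository() dispatches on the URL scheme: http:// and hg:// speak the
# HTTP protocol, old-http:// reads a statically served repository through
# localrepository, ssh:// spawns "hg -R <path> serve --stdio" over ssh,
# and anything else is treated as a local path (created if create is set).
# For example (paths are placeholders):
#   repository(ui, "ssh://user@host/relative/repo")  -> sshrepository
#   repository(ui, "/home/user/repo")                -> localrepository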