Ensure that dirstate.walk only yields names once....
Bryan O'Sullivan
r821:72d9bd48 default
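The change below threads a shared `known`/`seen` check through `dirstate.walk` so that the same relative path is never yielded twice, whether it comes from the filesystem traversal or from the dirstate map. A minimal standalone sketch of the same idea follows; the names (`walk_once`, `extra_names`) are invented for illustration and are not Mercurial's API:

```python
import os

def walk_once(root, extra_names):
    """Yield each relative path at most once: first from the filesystem,
    then from an extra list of names (e.g. files known only to a state map)."""
    seen = set()

    def mark(name):
        # True if this name was already yielded (or must be skipped).
        if name in seen:
            return True
        seen.add(name)
        return False

    for dirpath, subdirs, files in os.walk(root):
        rel = os.path.normpath(os.path.relpath(dirpath, root))
        if mark(rel):
            # Already covered this directory: prune the whole subtree.
            subdirs[:] = []
            continue
        for f in files:
            fn = os.path.normpath(os.path.join(rel, f))
            if not mark(fn):
                yield fn

    for fn in extra_names:
        # Names tracked elsewhere but missing from the working tree.
        if not mark(fn):
            yield fn
```

As in the real patch, directories and files share one `seen` set, so a path reached through two different routes (explicit argument and recursive walk, or walk and state map) is reported only once.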
@@ -1,1983 +1,1988 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 path.replace(".hg/", ".hg.hg/")
25 path.replace(".hg/", ".hg.hg/")
26 path.replace(".i/", ".i.hg/")
26 path.replace(".i/", ".i.hg/")
27 path.replace(".d/", ".i.hg/")
27 path.replace(".d/", ".i.hg/")
28 return path
28 return path
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 path.replace(".d.hg/", ".d/")
31 path.replace(".d.hg/", ".d/")
32 path.replace(".i.hg/", ".i/")
32 path.replace(".i.hg/", ".i/")
33 path.replace(".hg.hg/", ".hg/")
33 path.replace(".hg.hg/", ".hg/")
34 return path
34 return path
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
284 list.sort()
284 list.sort()
285 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
286 text = "\n".join(l)
286 text = "\n".join(l)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
288
288
289 class dirstate:
289 class dirstate:
290 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
291 self.opener = opener
291 self.opener = opener
292 self.root = root
292 self.root = root
293 self.dirty = 0
293 self.dirty = 0
294 self.ui = ui
294 self.ui = ui
295 self.map = None
295 self.map = None
296 self.pl = None
296 self.pl = None
297 self.copies = {}
297 self.copies = {}
298 self.ignorefunc = None
298 self.ignorefunc = None
299
299
300 def wjoin(self, f):
300 def wjoin(self, f):
301 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
302
302
303 def ignore(self, f):
303 def ignore(self, f):
304 if not self.ignorefunc:
304 if not self.ignorefunc:
305 bigpat = []
305 bigpat = []
306 try:
306 try:
307 l = file(self.wjoin(".hgignore"))
307 l = file(self.wjoin(".hgignore"))
308 for pat in l:
308 for pat in l:
309 if pat != "\n":
309 if pat != "\n":
310 p = util.pconvert(pat[:-1])
310 p = util.pconvert(pat[:-1])
311 try:
311 try:
312 r = re.compile(p)
312 r = re.compile(p)
313 except:
313 except:
314 self.ui.warn("ignoring invalid ignore"
314 self.ui.warn("ignoring invalid ignore"
315 + " regular expression '%s'\n" % p)
315 + " regular expression '%s'\n" % p)
316 else:
316 else:
317 bigpat.append(util.pconvert(pat[:-1]))
317 bigpat.append(util.pconvert(pat[:-1]))
318 except IOError: pass
318 except IOError: pass
319
319
320 if bigpat:
320 if bigpat:
321 s = "(?:%s)" % (")|(?:".join(bigpat))
321 s = "(?:%s)" % (")|(?:".join(bigpat))
322 r = re.compile(s)
322 r = re.compile(s)
323 self.ignorefunc = r.search
323 self.ignorefunc = r.search
324 else:
324 else:
325 self.ignorefunc = util.never
325 self.ignorefunc = util.never
326
326
327 return self.ignorefunc(f)
327 return self.ignorefunc(f)
328
328
329 def __del__(self):
329 def __del__(self):
330 if self.dirty:
330 if self.dirty:
331 self.write()
331 self.write()
332
332
333 def __getitem__(self, key):
333 def __getitem__(self, key):
334 try:
334 try:
335 return self.map[key]
335 return self.map[key]
336 except TypeError:
336 except TypeError:
337 self.read()
337 self.read()
338 return self[key]
338 return self[key]
339
339
340 def __contains__(self, key):
340 def __contains__(self, key):
341 if not self.map: self.read()
341 if not self.map: self.read()
342 return key in self.map
342 return key in self.map
343
343
344 def parents(self):
344 def parents(self):
345 if not self.pl:
345 if not self.pl:
346 self.read()
346 self.read()
347 return self.pl
347 return self.pl
348
348
349 def markdirty(self):
349 def markdirty(self):
350 if not self.dirty:
350 if not self.dirty:
351 self.dirty = 1
351 self.dirty = 1
352
352
353 def setparents(self, p1, p2 = nullid):
353 def setparents(self, p1, p2 = nullid):
354 self.markdirty()
354 self.markdirty()
355 self.pl = p1, p2
355 self.pl = p1, p2
356
356
357 def state(self, key):
357 def state(self, key):
358 try:
358 try:
359 return self[key][0]
359 return self[key][0]
360 except KeyError:
360 except KeyError:
361 return "?"
361 return "?"
362
362
363 def read(self):
363 def read(self):
364 if self.map is not None: return self.map
364 if self.map is not None: return self.map
365
365
366 self.map = {}
366 self.map = {}
367 self.pl = [nullid, nullid]
367 self.pl = [nullid, nullid]
368 try:
368 try:
369 st = self.opener("dirstate").read()
369 st = self.opener("dirstate").read()
370 if not st: return
370 if not st: return
371 except: return
371 except: return
372
372
373 self.pl = [st[:20], st[20: 40]]
373 self.pl = [st[:20], st[20: 40]]
374
374
375 pos = 40
375 pos = 40
376 while pos < len(st):
376 while pos < len(st):
377 e = struct.unpack(">cllll", st[pos:pos+17])
377 e = struct.unpack(">cllll", st[pos:pos+17])
378 l = e[4]
378 l = e[4]
379 pos += 17
379 pos += 17
380 f = st[pos:pos + l]
380 f = st[pos:pos + l]
381 if '\0' in f:
381 if '\0' in f:
382 f, c = f.split('\0')
382 f, c = f.split('\0')
383 self.copies[f] = c
383 self.copies[f] = c
384 self.map[f] = e[:4]
384 self.map[f] = e[:4]
385 pos += l
385 pos += l
386
386
387 def copy(self, source, dest):
387 def copy(self, source, dest):
388 self.read()
388 self.read()
389 self.markdirty()
389 self.markdirty()
390 self.copies[dest] = source
390 self.copies[dest] = source
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self.copies.get(file, None)
393 return self.copies.get(file, None)
394
394
395 def update(self, files, state):
395 def update(self, files, state):
396 ''' current states:
396 ''' current states:
397 n normal
397 n normal
398 m needs merging
398 m needs merging
399 r marked for removal
399 r marked for removal
400 a marked for addition'''
400 a marked for addition'''
401
401
402 if not files: return
402 if not files: return
403 self.read()
403 self.read()
404 self.markdirty()
404 self.markdirty()
405 for f in files:
405 for f in files:
406 if state == "r":
406 if state == "r":
407 self.map[f] = ('r', 0, 0, 0)
407 self.map[f] = ('r', 0, 0, 0)
408 else:
408 else:
409 s = os.stat(os.path.join(self.root, f))
409 s = os.stat(os.path.join(self.root, f))
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
411
411
412 def forget(self, files):
412 def forget(self, files):
413 if not files: return
413 if not files: return
414 self.read()
414 self.read()
415 self.markdirty()
415 self.markdirty()
416 for f in files:
416 for f in files:
417 try:
417 try:
418 del self.map[f]
418 del self.map[f]
419 except KeyError:
419 except KeyError:
420 self.ui.warn("not in dirstate: %s!\n" % f)
420 self.ui.warn("not in dirstate: %s!\n" % f)
421 pass
421 pass
422
422
423 def clear(self):
423 def clear(self):
424 self.map = {}
424 self.map = {}
425 self.markdirty()
425 self.markdirty()
426
426
427 def write(self):
427 def write(self):
428 st = self.opener("dirstate", "w")
428 st = self.opener("dirstate", "w")
429 st.write("".join(self.pl))
429 st.write("".join(self.pl))
430 for f, e in self.map.items():
430 for f, e in self.map.items():
431 c = self.copied(f)
431 c = self.copied(f)
432 if c:
432 if c:
433 f = f + "\0" + c
433 f = f + "\0" + c
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
435 st.write(e + f)
435 st.write(e + f)
436 self.dirty = 0
436 self.dirty = 0
437
437
438 def walk(self, files = None, match = util.always):
438 def walk(self, files = None, match = util.always):
439 self.read()
439 self.read()
440 dc = self.map.copy()
440 dc = self.map.copy()
441 # walk all files by default
441 # walk all files by default
442 if not files: files = [self.root]
442 if not files: files = [self.root]
443 known = {'.hg': 1}
444 def seen(fn):
445 if fn in known: return True
446 known[fn] = 1
443 def traverse():
447 def traverse():
444 for f in util.unique(files):
448 for f in util.unique(files):
445 f = os.path.join(self.root, f)
449 f = os.path.join(self.root, f)
446 if os.path.isdir(f):
450 if os.path.isdir(f):
447 for dir, subdirs, fl in os.walk(f):
451 for dir, subdirs, fl in os.walk(f):
448 d = dir[len(self.root) + 1:]
452 d = dir[len(self.root) + 1:]
449 nd = os.path.normpath(d)
453 nd = os.path.normpath(d)
450 if nd == '.hg':
454 if seen(nd):
451 subdirs[:] = []
455 subdirs[:] = []
452 continue
456 continue
453 for sd in subdirs:
457 for sd in subdirs:
454 ds = os.path.join(nd, sd +'/')
458 ds = os.path.join(nd, sd +'/')
455 if self.ignore(ds) or not match(ds):
459 if self.ignore(ds) or not match(ds):
456 subdirs.remove(sd)
460 subdirs.remove(sd)
457 for fn in fl:
461 for fn in fl:
458 fn = util.pconvert(os.path.join(d, fn))
462 fn = util.pconvert(os.path.join(d, fn))
459 yield 'f', fn
463 yield 'f', fn
460 else:
464 else:
461 yield 'f', f[len(self.root) + 1:]
465 yield 'f', f[len(self.root) + 1:]
462
466
463 for k in dc.keys():
467 for k in dc.keys():
464 yield 'm', k
468 yield 'm', k
465
469
466 # yield only files that match: all in dirstate, others only if
470 # yield only files that match: all in dirstate, others only if
467 # not in .hgignore
471 # not in .hgignore
468
472
469 for src, fn in util.unique(traverse()):
473 for src, fn in util.unique(traverse()):
470 fn = os.path.normpath(fn)
474 fn = os.path.normpath(fn)
475 if seen(fn): continue
471 if fn in dc:
476 if fn in dc:
472 del dc[fn]
477 del dc[fn]
473 elif self.ignore(fn):
478 elif self.ignore(fn):
474 continue
479 continue
475 if match(fn):
480 if match(fn):
476 yield src, fn
481 yield src, fn
477
482
478 def changes(self, files = None, match = util.always):
483 def changes(self, files = None, match = util.always):
479 self.read()
484 self.read()
480 dc = self.map.copy()
485 dc = self.map.copy()
481 lookup, changed, added, unknown = [], [], [], []
486 lookup, changed, added, unknown = [], [], [], []
482
487
483 for src, fn in self.walk(files, match):
488 for src, fn in self.walk(files, match):
484 try: s = os.stat(os.path.join(self.root, fn))
489 try: s = os.stat(os.path.join(self.root, fn))
485 except: continue
490 except: continue
486
491
487 if fn in dc:
492 if fn in dc:
488 c = dc[fn]
493 c = dc[fn]
489 del dc[fn]
494 del dc[fn]
490
495
491 if c[0] == 'm':
496 if c[0] == 'm':
492 changed.append(fn)
497 changed.append(fn)
493 elif c[0] == 'a':
498 elif c[0] == 'a':
494 added.append(fn)
499 added.append(fn)
495 elif c[0] == 'r':
500 elif c[0] == 'r':
496 unknown.append(fn)
501 unknown.append(fn)
497 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
502 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
498 changed.append(fn)
503 changed.append(fn)
499 elif c[1] != s.st_mode or c[3] != s.st_mtime:
504 elif c[1] != s.st_mode or c[3] != s.st_mtime:
500 lookup.append(fn)
505 lookup.append(fn)
501 else:
506 else:
502 if match(fn): unknown.append(fn)
507 if match(fn): unknown.append(fn)
503
508
504 return (lookup, changed, added, filter(match, dc.keys()), unknown)
509 return (lookup, changed, added, filter(match, dc.keys()), unknown)
505
510
506 # used to avoid circular references so destructors work
511 # used to avoid circular references so destructors work
507 def opener(base):
512 def opener(base):
508 p = base
513 p = base
509 def o(path, mode="r"):
514 def o(path, mode="r"):
510 if p.startswith("http://"):
515 if p.startswith("http://"):
511 f = os.path.join(p, urllib.quote(path))
516 f = os.path.join(p, urllib.quote(path))
512 return httprangereader.httprangereader(f)
517 return httprangereader.httprangereader(f)
513
518
514 f = os.path.join(p, path)
519 f = os.path.join(p, path)
515
520
516 mode += "b" # for that other OS
521 mode += "b" # for that other OS
517
522
518 if mode[0] != "r":
523 if mode[0] != "r":
519 try:
524 try:
520 s = os.stat(f)
525 s = os.stat(f)
521 except OSError:
526 except OSError:
522 d = os.path.dirname(f)
527 d = os.path.dirname(f)
523 if not os.path.isdir(d):
528 if not os.path.isdir(d):
524 os.makedirs(d)
529 os.makedirs(d)
525 else:
530 else:
526 if s.st_nlink > 1:
531 if s.st_nlink > 1:
527 file(f + ".tmp", "wb").write(file(f, "rb").read())
532 file(f + ".tmp", "wb").write(file(f, "rb").read())
528 util.rename(f+".tmp", f)
533 util.rename(f+".tmp", f)
529
534
530 return file(f, mode)
535 return file(f, mode)
531
536
532 return o
537 return o
533
538
534 class RepoError(Exception): pass
539 class RepoError(Exception): pass
535
540
536 class localrepository:
541 class localrepository:
537 def __init__(self, ui, path=None, create=0):
542 def __init__(self, ui, path=None, create=0):
538 self.remote = 0
543 self.remote = 0
539 if path and path.startswith("http://"):
544 if path and path.startswith("http://"):
540 self.remote = 1
545 self.remote = 1
541 self.path = path
546 self.path = path
542 else:
547 else:
543 if not path:
548 if not path:
544 p = os.getcwd()
549 p = os.getcwd()
545 while not os.path.isdir(os.path.join(p, ".hg")):
550 while not os.path.isdir(os.path.join(p, ".hg")):
546 oldp = p
551 oldp = p
547 p = os.path.dirname(p)
552 p = os.path.dirname(p)
548 if p == oldp: raise RepoError("no repo found")
553 if p == oldp: raise RepoError("no repo found")
549 path = p
554 path = p
550 self.path = os.path.join(path, ".hg")
555 self.path = os.path.join(path, ".hg")
551
556
552 if not create and not os.path.isdir(self.path):
557 if not create and not os.path.isdir(self.path):
553 raise RepoError("repository %s not found" % self.path)
558 raise RepoError("repository %s not found" % self.path)
554
559
555 self.root = path
560 self.root = path
556 self.ui = ui
561 self.ui = ui
557
562
558 if create:
563 if create:
559 os.mkdir(self.path)
564 os.mkdir(self.path)
560 os.mkdir(self.join("data"))
565 os.mkdir(self.join("data"))
561
566
562 self.opener = opener(self.path)
567 self.opener = opener(self.path)
563 self.wopener = opener(self.root)
568 self.wopener = opener(self.root)
564 self.manifest = manifest(self.opener)
569 self.manifest = manifest(self.opener)
565 self.changelog = changelog(self.opener)
570 self.changelog = changelog(self.opener)
566 self.tagscache = None
571 self.tagscache = None
567 self.nodetagscache = None
572 self.nodetagscache = None
568
573
569 if not self.remote:
574 if not self.remote:
570 self.dirstate = dirstate(self.opener, ui, self.root)
575 self.dirstate = dirstate(self.opener, ui, self.root)
571 try:
576 try:
572 self.ui.readconfig(self.opener("hgrc"))
577 self.ui.readconfig(self.opener("hgrc"))
573 except IOError: pass
578 except IOError: pass
574
579
575 def hook(self, name, **args):
580 def hook(self, name, **args):
576 s = self.ui.config("hooks", name)
581 s = self.ui.config("hooks", name)
577 if s:
582 if s:
578 self.ui.note("running hook %s: %s\n" % (name, s))
583 self.ui.note("running hook %s: %s\n" % (name, s))
579 old = {}
584 old = {}
580 for k, v in args.items():
585 for k, v in args.items():
581 k = k.upper()
586 k = k.upper()
582 old[k] = os.environ.get(k, None)
587 old[k] = os.environ.get(k, None)
583 os.environ[k] = v
588 os.environ[k] = v
584
589
585 r = os.system(s)
590 r = os.system(s)
586
591
587 for k, v in old.items():
592 for k, v in old.items():
588 if v != None:
593 if v != None:
589 os.environ[k] = v
594 os.environ[k] = v
590 else:
595 else:
591 del os.environ[k]
596 del os.environ[k]
592
597
593 if r:
598 if r:
594 self.ui.warn("abort: %s hook failed with status %d!\n" %
599 self.ui.warn("abort: %s hook failed with status %d!\n" %
595 (name, r))
600 (name, r))
596 return False
601 return False
597 return True
602 return True
598
603
599 def tags(self):
604 def tags(self):
600 '''return a mapping of tag to node'''
605 '''return a mapping of tag to node'''
601 if not self.tagscache:
606 if not self.tagscache:
602 self.tagscache = {}
607 self.tagscache = {}
603 def addtag(self, k, n):
608 def addtag(self, k, n):
604 try:
609 try:
605 bin_n = bin(n)
610 bin_n = bin(n)
606 except TypeError:
611 except TypeError:
607 bin_n = ''
612 bin_n = ''
608 self.tagscache[k.strip()] = bin_n
613 self.tagscache[k.strip()] = bin_n
609
614
610 try:
615 try:
611 # read each head of the tags file, ending with the tip
616 # read each head of the tags file, ending with the tip
612 # and add each tag found to the map, with "newer" ones
617 # and add each tag found to the map, with "newer" ones
613 # taking precedence
618 # taking precedence
614 fl = self.file(".hgtags")
619 fl = self.file(".hgtags")
615 h = fl.heads()
620 h = fl.heads()
616 h.reverse()
621 h.reverse()
617 for r in h:
622 for r in h:
618 for l in fl.revision(r).splitlines():
623 for l in fl.revision(r).splitlines():
619 if l:
624 if l:
620 n, k = l.split(" ", 1)
625 n, k = l.split(" ", 1)
621 addtag(self, k, n)
626 addtag(self, k, n)
622 except KeyError:
627 except KeyError:
623 pass
628 pass
624
629
625 try:
630 try:
626 f = self.opener("localtags")
631 f = self.opener("localtags")
627 for l in f:
632 for l in f:
628 n, k = l.split(" ", 1)
633 n, k = l.split(" ", 1)
629 addtag(self, k, n)
634 addtag(self, k, n)
630 except IOError:
635 except IOError:
631 pass
636 pass
632
637
633 self.tagscache['tip'] = self.changelog.tip()
638 self.tagscache['tip'] = self.changelog.tip()
634
639
635 return self.tagscache
640 return self.tagscache
636
641
637 def tagslist(self):
642 def tagslist(self):
638 '''return a list of tags ordered by revision'''
643 '''return a list of tags ordered by revision'''
639 l = []
644 l = []
640 for t, n in self.tags().items():
645 for t, n in self.tags().items():
641 try:
646 try:
642 r = self.changelog.rev(n)
647 r = self.changelog.rev(n)
643 except:
648 except:
644 r = -2 # sort to the beginning of the list if unknown
649 r = -2 # sort to the beginning of the list if unknown
645 l.append((r,t,n))
650 l.append((r,t,n))
646 l.sort()
651 l.sort()
647 return [(t,n) for r,t,n in l]
652 return [(t,n) for r,t,n in l]
648
653
649 def nodetags(self, node):
654 def nodetags(self, node):
650 '''return the tags associated with a node'''
655 '''return the tags associated with a node'''
651 if not self.nodetagscache:
656 if not self.nodetagscache:
652 self.nodetagscache = {}
657 self.nodetagscache = {}
653 for t,n in self.tags().items():
658 for t,n in self.tags().items():
654 self.nodetagscache.setdefault(n,[]).append(t)
659 self.nodetagscache.setdefault(n,[]).append(t)
655 return self.nodetagscache.get(node, [])
660 return self.nodetagscache.get(node, [])
656
661
657 def lookup(self, key):
662 def lookup(self, key):
658 try:
663 try:
659 return self.tags()[key]
664 return self.tags()[key]
660 except KeyError:
665 except KeyError:
661 try:
666 try:
662 return self.changelog.lookup(key)
667 return self.changelog.lookup(key)
663 except:
668 except:
664 raise RepoError("unknown revision '%s'" % key)
669 raise RepoError("unknown revision '%s'" % key)
665
670
666 def dev(self):
671 def dev(self):
667 if self.remote: return -1
672 if self.remote: return -1
668 return os.stat(self.path).st_dev
673 return os.stat(self.path).st_dev
669
674
670 def join(self, f):
675 def join(self, f):
671 return os.path.join(self.path, f)
676 return os.path.join(self.path, f)
672
677
673 def wjoin(self, f):
678 def wjoin(self, f):
674 return os.path.join(self.root, f)
679 return os.path.join(self.root, f)
675
680
676 def file(self, f):
681 def file(self, f):
677 if f[0] == '/': f = f[1:]
682 if f[0] == '/': f = f[1:]
678 return filelog(self.opener, f)
683 return filelog(self.opener, f)
679
684
680 def getcwd(self):
685 def getcwd(self):
681 cwd = os.getcwd()
686 cwd = os.getcwd()
682 if cwd == self.root: return ''
687 if cwd == self.root: return ''
683 return cwd[len(self.root) + 1:]
688 return cwd[len(self.root) + 1:]
684
689
685 def wfile(self, f, mode='r'):
690 def wfile(self, f, mode='r'):
686 return self.wopener(f, mode)
691 return self.wopener(f, mode)
687
692
688 def transaction(self):
693 def transaction(self):
689 # save dirstate for undo
694 # save dirstate for undo
690 try:
695 try:
691 ds = self.opener("dirstate").read()
696 ds = self.opener("dirstate").read()
692 except IOError:
697 except IOError:
693 ds = ""
698 ds = ""
694 self.opener("journal.dirstate", "w").write(ds)
699 self.opener("journal.dirstate", "w").write(ds)
695
700
696 def after():
701 def after():
697 util.rename(self.join("journal"), self.join("undo"))
702 util.rename(self.join("journal"), self.join("undo"))
698 util.rename(self.join("journal.dirstate"),
703 util.rename(self.join("journal.dirstate"),
699 self.join("undo.dirstate"))
704 self.join("undo.dirstate"))
700
705
701 return transaction.transaction(self.ui.warn, self.opener,
706 return transaction.transaction(self.ui.warn, self.opener,
702 self.join("journal"), after)
707 self.join("journal"), after)
703
708
704 def recover(self):
709 def recover(self):
705 lock = self.lock()
710 lock = self.lock()
706 if os.path.exists(self.join("journal")):
711 if os.path.exists(self.join("journal")):
707 self.ui.status("rolling back interrupted transaction\n")
712 self.ui.status("rolling back interrupted transaction\n")
708 return transaction.rollback(self.opener, self.join("journal"))
713 return transaction.rollback(self.opener, self.join("journal"))
709 else:
714 else:
710 self.ui.warn("no interrupted transaction available\n")
715 self.ui.warn("no interrupted transaction available\n")
711
716
712 def undo(self):
717 def undo(self):
713 lock = self.lock()
718 lock = self.lock()
714 if os.path.exists(self.join("undo")):
719 if os.path.exists(self.join("undo")):
715 self.ui.status("rolling back last transaction\n")
720 self.ui.status("rolling back last transaction\n")
716 transaction.rollback(self.opener, self.join("undo"))
721 transaction.rollback(self.opener, self.join("undo"))
717 self.dirstate = None
722 self.dirstate = None
718 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
723 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
719 self.dirstate = dirstate(self.opener, self.ui, self.root)
724 self.dirstate = dirstate(self.opener, self.ui, self.root)
720 else:
725 else:
721 self.ui.warn("no undo information available\n")
726 self.ui.warn("no undo information available\n")
722
727
723 def lock(self, wait = 1):
728 def lock(self, wait = 1):
724 try:
729 try:
725 return lock.lock(self.join("lock"), 0)
730 return lock.lock(self.join("lock"), 0)
726 except lock.LockHeld, inst:
731 except lock.LockHeld, inst:
727 if wait:
732 if wait:
728 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
733 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
729 return lock.lock(self.join("lock"), wait)
734 return lock.lock(self.join("lock"), wait)
730 raise inst
735 raise inst
731
736
732 def rawcommit(self, files, text, user, date, p1=None, p2=None):
737 def rawcommit(self, files, text, user, date, p1=None, p2=None):
733 orig_parent = self.dirstate.parents()[0] or nullid
738 orig_parent = self.dirstate.parents()[0] or nullid
734 p1 = p1 or self.dirstate.parents()[0] or nullid
739 p1 = p1 or self.dirstate.parents()[0] or nullid
735 p2 = p2 or self.dirstate.parents()[1] or nullid
740 p2 = p2 or self.dirstate.parents()[1] or nullid
736 c1 = self.changelog.read(p1)
741 c1 = self.changelog.read(p1)
737 c2 = self.changelog.read(p2)
742 c2 = self.changelog.read(p2)
738 m1 = self.manifest.read(c1[0])
743 m1 = self.manifest.read(c1[0])
739 mf1 = self.manifest.readflags(c1[0])
744 mf1 = self.manifest.readflags(c1[0])
740 m2 = self.manifest.read(c2[0])
745 m2 = self.manifest.read(c2[0])
741
746
742 if orig_parent == p1:
747 if orig_parent == p1:
743 update_dirstate = 1
748 update_dirstate = 1
744 else:
749 else:
745 update_dirstate = 0
750 update_dirstate = 0
746
751
747 tr = self.transaction()
752 tr = self.transaction()
748 mm = m1.copy()
753 mm = m1.copy()
749 mfm = mf1.copy()
754 mfm = mf1.copy()
750 linkrev = self.changelog.count()
755 linkrev = self.changelog.count()
751 for f in files:
756 for f in files:
752 try:
757 try:
753 t = self.wfile(f).read()
758 t = self.wfile(f).read()
754 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
759 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
755 r = self.file(f)
760 r = self.file(f)
756 mfm[f] = tm
761 mfm[f] = tm
757 mm[f] = r.add(t, {}, tr, linkrev,
762 mm[f] = r.add(t, {}, tr, linkrev,
758 m1.get(f, nullid), m2.get(f, nullid))
763 m1.get(f, nullid), m2.get(f, nullid))
759 if update_dirstate:
764 if update_dirstate:
760 self.dirstate.update([f], "n")
765 self.dirstate.update([f], "n")
761 except IOError:
766 except IOError:
762 try:
767 try:
763 del mm[f]
768 del mm[f]
764 del mfm[f]
769 del mfm[f]
765 if update_dirstate:
770 if update_dirstate:
766 self.dirstate.forget([f])
771 self.dirstate.forget([f])
767 except:
772 except:
768 # deleted from p2?
773 # deleted from p2?
769 pass
774 pass
770
775
771 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
776 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
772 user = user or self.ui.username()
777 user = user or self.ui.username()
773 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
778 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
774 tr.close()
779 tr.close()
775 if update_dirstate:
780 if update_dirstate:
776 self.dirstate.setparents(n, nullid)
781 self.dirstate.setparents(n, nullid)
777
782
778 def commit(self, files = None, text = "", user = None, date = None,
783 def commit(self, files = None, text = "", user = None, date = None,
779 match = util.always):
784 match = util.always):
780 commit = []
785 commit = []
781 remove = []
786 remove = []
782 if files:
787 if files:
783 for f in files:
788 for f in files:
784 s = self.dirstate.state(f)
789 s = self.dirstate.state(f)
785 if s in 'nmai':
790 if s in 'nmai':
786 commit.append(f)
791 commit.append(f)
787 elif s == 'r':
792 elif s == 'r':
788 remove.append(f)
793 remove.append(f)
789 else:
794 else:
790 self.ui.warn("%s not tracked!\n" % f)
795 self.ui.warn("%s not tracked!\n" % f)
791 else:
796 else:
792 (c, a, d, u) = self.changes(match = match)
797 (c, a, d, u) = self.changes(match = match)
793 commit = c + a
798 commit = c + a
794 remove = d
799 remove = d
795
800
796 if not commit and not remove:
801 if not commit and not remove:
797 self.ui.status("nothing changed\n")
802 self.ui.status("nothing changed\n")
798 return
803 return
799
804
800 if not self.hook("precommit"):
805 if not self.hook("precommit"):
801 return 1
806 return 1
802
807
803 p1, p2 = self.dirstate.parents()
808 p1, p2 = self.dirstate.parents()
804 c1 = self.changelog.read(p1)
809 c1 = self.changelog.read(p1)
805 c2 = self.changelog.read(p2)
810 c2 = self.changelog.read(p2)
806 m1 = self.manifest.read(c1[0])
811 m1 = self.manifest.read(c1[0])
807 mf1 = self.manifest.readflags(c1[0])
812 mf1 = self.manifest.readflags(c1[0])
808 m2 = self.manifest.read(c2[0])
813 m2 = self.manifest.read(c2[0])
809 lock = self.lock()
814 lock = self.lock()
810 tr = self.transaction()
815 tr = self.transaction()
811
816
812 # check in files
817 # check in files
813 new = {}
818 new = {}
814 linkrev = self.changelog.count()
819 linkrev = self.changelog.count()
815 commit.sort()
820 commit.sort()
816 for f in commit:
821 for f in commit:
817 self.ui.note(f + "\n")
822 self.ui.note(f + "\n")
818 try:
823 try:
819 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
824 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
820 t = self.wfile(f).read()
825 t = self.wfile(f).read()
821 except IOError:
826 except IOError:
822 self.ui.warn("trouble committing %s!\n" % f)
827 self.ui.warn("trouble committing %s!\n" % f)
823 raise
828 raise
824
829
825 meta = {}
830 meta = {}
826 cp = self.dirstate.copied(f)
831 cp = self.dirstate.copied(f)
827 if cp:
832 if cp:
828 meta["copy"] = cp
833 meta["copy"] = cp
829 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
834 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
830 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
835 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
831
836
832 r = self.file(f)
837 r = self.file(f)
833 fp1 = m1.get(f, nullid)
838 fp1 = m1.get(f, nullid)
834 fp2 = m2.get(f, nullid)
839 fp2 = m2.get(f, nullid)
835 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
840 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
836
841
837 # update manifest
842 # update manifest
838 m1.update(new)
843 m1.update(new)
839 for f in remove:
844 for f in remove:
840 if f in m1:
845 if f in m1:
841 del m1[f]
846 del m1[f]
842 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
847 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
843 (new, remove))
848 (new, remove))
844
849
845 # add changeset
850 # add changeset
846 new = new.keys()
851 new = new.keys()
847 new.sort()
852 new.sort()
848
853
849 if not text:
854 if not text:
850 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
855 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
851 edittext += "".join(["HG: changed %s\n" % f for f in new])
856 edittext += "".join(["HG: changed %s\n" % f for f in new])
852 edittext += "".join(["HG: removed %s\n" % f for f in remove])
857 edittext += "".join(["HG: removed %s\n" % f for f in remove])
853 edittext = self.ui.edit(edittext)
858 edittext = self.ui.edit(edittext)
854 if not edittext.rstrip():
859 if not edittext.rstrip():
855 return 1
860 return 1
856 text = edittext
861 text = edittext
857
862
858 user = user or self.ui.username()
863 user = user or self.ui.username()
859 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
864 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
860
865
861 tr.close()
866 tr.close()
862
867
863 self.dirstate.setparents(n)
868 self.dirstate.setparents(n)
864 self.dirstate.update(new, "n")
869 self.dirstate.update(new, "n")
865 self.dirstate.forget(remove)
870 self.dirstate.forget(remove)
866
871
867 if not self.hook("commit", node=hex(n)):
872 if not self.hook("commit", node=hex(n)):
868 return 1
873 return 1
869
874
870 def walk(self, node = None, files = [], match = util.always):
875 def walk(self, node = None, files = [], match = util.always):
871 if node:
876 if node:
872 for fn in self.manifest.read(self.changelog.read(node)[0]):
877 for fn in self.manifest.read(self.changelog.read(node)[0]):
873 if match(fn): yield 'm', fn
878 if match(fn): yield 'm', fn
874 else:
879 else:
875 for src, fn in self.dirstate.walk(files, match):
880 for src, fn in self.dirstate.walk(files, match):
876 yield src, fn
881 yield src, fn
877
882
878 def changes(self, node1 = None, node2 = None, files = [],
883 def changes(self, node1 = None, node2 = None, files = [],
879 match = util.always):
884 match = util.always):
880 mf2, u = None, []
885 mf2, u = None, []
881
886
882 def fcmp(fn, mf):
887 def fcmp(fn, mf):
883 t1 = self.wfile(fn).read()
888 t1 = self.wfile(fn).read()
884 t2 = self.file(fn).revision(mf[fn])
889 t2 = self.file(fn).revision(mf[fn])
885 return cmp(t1, t2)
890 return cmp(t1, t2)
886
891
887 def mfmatches(node):
892 def mfmatches(node):
888 mf = dict(self.manifest.read(node))
893 mf = dict(self.manifest.read(node))
889 for fn in mf.keys():
894 for fn in mf.keys():
890 if not match(fn):
895 if not match(fn):
891 del mf[fn]
896 del mf[fn]
892 return mf
897 return mf
893
898
894 # are we comparing the working directory?
899 # are we comparing the working directory?
895 if not node2:
900 if not node2:
896 l, c, a, d, u = self.dirstate.changes(files, match)
901 l, c, a, d, u = self.dirstate.changes(files, match)
897
902
898 # are we comparing working dir against its parent?
903 # are we comparing working dir against its parent?
899 if not node1:
904 if not node1:
900 if l:
905 if l:
901 # do a full compare of any files that might have changed
906 # do a full compare of any files that might have changed
902 change = self.changelog.read(self.dirstate.parents()[0])
907 change = self.changelog.read(self.dirstate.parents()[0])
903 mf2 = mfmatches(change[0])
908 mf2 = mfmatches(change[0])
904 for f in l:
909 for f in l:
905 if fcmp(f, mf2):
910 if fcmp(f, mf2):
906 c.append(f)
911 c.append(f)
907
912
908 for l in c, a, d, u:
913 for l in c, a, d, u:
909 l.sort()
914 l.sort()
910
915
911 return (c, a, d, u)
916 return (c, a, d, u)
912
917
913 # are we comparing working dir against non-tip?
918 # are we comparing working dir against non-tip?
914 # generate a pseudo-manifest for the working dir
919 # generate a pseudo-manifest for the working dir
915 if not node2:
920 if not node2:
916 if not mf2:
921 if not mf2:
917 change = self.changelog.read(self.dirstate.parents()[0])
922 change = self.changelog.read(self.dirstate.parents()[0])
918 mf2 = mfmatches(change[0])
923 mf2 = mfmatches(change[0])
919 for f in a + c + l:
924 for f in a + c + l:
920 mf2[f] = ""
925 mf2[f] = ""
921 for f in d:
926 for f in d:
922 if f in mf2: del mf2[f]
927 if f in mf2: del mf2[f]
923 else:
928 else:
924 change = self.changelog.read(node2)
929 change = self.changelog.read(node2)
925 mf2 = mfmatches(change[0])
930 mf2 = mfmatches(change[0])
926
931
927 # flush lists from dirstate before comparing manifests
932 # flush lists from dirstate before comparing manifests
928 c, a = [], []
933 c, a = [], []
929
934
930 change = self.changelog.read(node1)
935 change = self.changelog.read(node1)
931 mf1 = mfmatches(change[0])
936 mf1 = mfmatches(change[0])
932
937
933 for fn in mf2:
938 for fn in mf2:
934 if mf1.has_key(fn):
939 if mf1.has_key(fn):
935 if mf1[fn] != mf2[fn]:
940 if mf1[fn] != mf2[fn]:
936 if mf2[fn] != "" or fcmp(fn, mf1):
941 if mf2[fn] != "" or fcmp(fn, mf1):
937 c.append(fn)
942 c.append(fn)
938 del mf1[fn]
943 del mf1[fn]
939 else:
944 else:
940 a.append(fn)
945 a.append(fn)
941
946
942 d = mf1.keys()
947 d = mf1.keys()
943
948
944 for l in c, a, d, u:
949 for l in c, a, d, u:
945 l.sort()
950 l.sort()
946
951
947 return (c, a, d, u)
952 return (c, a, d, u)
948
953
949 def add(self, list):
954 def add(self, list):
950 for f in list:
955 for f in list:
951 p = self.wjoin(f)
956 p = self.wjoin(f)
952 if not os.path.exists(p):
957 if not os.path.exists(p):
953 self.ui.warn("%s does not exist!\n" % f)
958 self.ui.warn("%s does not exist!\n" % f)
954 elif not os.path.isfile(p):
959 elif not os.path.isfile(p):
955 self.ui.warn("%s not added: only files supported currently\n" % f)
960 self.ui.warn("%s not added: only files supported currently\n" % f)
956 elif self.dirstate.state(f) in 'an':
961 elif self.dirstate.state(f) in 'an':
957 self.ui.warn("%s already tracked!\n" % f)
962 self.ui.warn("%s already tracked!\n" % f)
958 else:
963 else:
959 self.dirstate.update([f], "a")
964 self.dirstate.update([f], "a")
960
965
961 def forget(self, list):
966 def forget(self, list):
962 for f in list:
967 for f in list:
963 if self.dirstate.state(f) not in 'ai':
968 if self.dirstate.state(f) not in 'ai':
964 self.ui.warn("%s not added!\n" % f)
969 self.ui.warn("%s not added!\n" % f)
965 else:
970 else:
966 self.dirstate.forget([f])
971 self.dirstate.forget([f])
967
972
968 def remove(self, list):
973 def remove(self, list):
969 for f in list:
974 for f in list:
970 p = self.wjoin(f)
975 p = self.wjoin(f)
971 if os.path.exists(p):
976 if os.path.exists(p):
972 self.ui.warn("%s still exists!\n" % f)
977 self.ui.warn("%s still exists!\n" % f)
973 elif self.dirstate.state(f) == 'a':
978 elif self.dirstate.state(f) == 'a':
974 self.ui.warn("%s never committed!\n" % f)
979 self.ui.warn("%s never committed!\n" % f)
975 self.dirstate.forget([f])
980 self.dirstate.forget([f])
976 elif f not in self.dirstate:
981 elif f not in self.dirstate:
977 self.ui.warn("%s not tracked!\n" % f)
982 self.ui.warn("%s not tracked!\n" % f)
978 else:
983 else:
979 self.dirstate.update([f], "r")
984 self.dirstate.update([f], "r")
980
985
981 def copy(self, source, dest):
986 def copy(self, source, dest):
982 p = self.wjoin(dest)
987 p = self.wjoin(dest)
983 if not os.path.exists(p):
988 if not os.path.exists(p):
984 self.ui.warn("%s does not exist!\n" % dest)
989 self.ui.warn("%s does not exist!\n" % dest)
985 elif not os.path.isfile(p):
990 elif not os.path.isfile(p):
986 self.ui.warn("copy failed: %s is not a file\n" % dest)
991 self.ui.warn("copy failed: %s is not a file\n" % dest)
987 else:
992 else:
988 if self.dirstate.state(dest) == '?':
993 if self.dirstate.state(dest) == '?':
989 self.dirstate.update([dest], "a")
994 self.dirstate.update([dest], "a")
990 self.dirstate.copy(source, dest)
995 self.dirstate.copy(source, dest)
991
996
992 def heads(self):
997 def heads(self):
993 return self.changelog.heads()
998 return self.changelog.heads()
994
999
995 def branches(self, nodes):
1000 def branches(self, nodes):
996 if not nodes: nodes = [self.changelog.tip()]
1001 if not nodes: nodes = [self.changelog.tip()]
997 b = []
1002 b = []
998 for n in nodes:
1003 for n in nodes:
999 t = n
1004 t = n
1000 while n:
1005 while n:
1001 p = self.changelog.parents(n)
1006 p = self.changelog.parents(n)
1002 if p[1] != nullid or p[0] == nullid:
1007 if p[1] != nullid or p[0] == nullid:
1003 b.append((t, n, p[0], p[1]))
1008 b.append((t, n, p[0], p[1]))
1004 break
1009 break
1005 n = p[0]
1010 n = p[0]
1006 return b
1011 return b
1007
1012
1008 def between(self, pairs):
1013 def between(self, pairs):
1009 r = []
1014 r = []
1010
1015
1011 for top, bottom in pairs:
1016 for top, bottom in pairs:
1012 n, l, i = top, [], 0
1017 n, l, i = top, [], 0
1013 f = 1
1018 f = 1
1014
1019
1015 while n != bottom:
1020 while n != bottom:
1016 p = self.changelog.parents(n)[0]
1021 p = self.changelog.parents(n)[0]
1017 if i == f:
1022 if i == f:
1018 l.append(n)
1023 l.append(n)
1019 f = f * 2
1024 f = f * 2
1020 n = p
1025 n = p
1021 i += 1
1026 i += 1
1022
1027
1023 r.append(l)
1028 r.append(l)
1024
1029
1025 return r
1030 return r
1026
1031
1027 def newer(self, nodes):
1032 def newer(self, nodes):
1028 m = {}
1033 m = {}
1029 nl = []
1034 nl = []
1030 pm = {}
1035 pm = {}
1031 cl = self.changelog
1036 cl = self.changelog
1032 t = l = cl.count()
1037 t = l = cl.count()
1033
1038
1034 # find the lowest numbered node
1039 # find the lowest numbered node
1035 for n in nodes:
1040 for n in nodes:
1036 l = min(l, cl.rev(n))
1041 l = min(l, cl.rev(n))
1037 m[n] = 1
1042 m[n] = 1
1038
1043
1039 for i in xrange(l, t):
1044 for i in xrange(l, t):
1040 n = cl.node(i)
1045 n = cl.node(i)
1041 if n in m: # explicitly listed
1046 if n in m: # explicitly listed
1042 pm[n] = 1
1047 pm[n] = 1
1043 nl.append(n)
1048 nl.append(n)
1044 continue
1049 continue
1045 for p in cl.parents(n):
1050 for p in cl.parents(n):
1046 if p in pm: # parent listed
1051 if p in pm: # parent listed
1047 pm[n] = 1
1052 pm[n] = 1
1048 nl.append(n)
1053 nl.append(n)
1049 break
1054 break
1050
1055
1051 return nl
1056 return nl
1052
1057
1053 def findincoming(self, remote, base={}):
1058 def findincoming(self, remote, base={}):
1054 m = self.changelog.nodemap
1059 m = self.changelog.nodemap
1055 search = []
1060 search = []
1056 fetch = []
1061 fetch = []
1057 seen = {}
1062 seen = {}
1058 seenbranch = {}
1063 seenbranch = {}
1059
1064
1060 # assume we're closer to the tip than the root
1065 # assume we're closer to the tip than the root
1061 # and start by examining the heads
1066 # and start by examining the heads
1062 self.ui.status("searching for changes\n")
1067 self.ui.status("searching for changes\n")
1063 heads = remote.heads()
1068 heads = remote.heads()
1064 unknown = []
1069 unknown = []
1065 for h in heads:
1070 for h in heads:
1066 if h not in m:
1071 if h not in m:
1067 unknown.append(h)
1072 unknown.append(h)
1068 else:
1073 else:
1069 base[h] = 1
1074 base[h] = 1
1070
1075
1071 if not unknown:
1076 if not unknown:
1072 return None
1077 return None
1073
1078
1074 rep = {}
1079 rep = {}
1075 reqcnt = 0
1080 reqcnt = 0
1076
1081
1077 # search through remote branches
1082 # search through remote branches
1078 # a 'branch' here is a linear segment of history, with four parts:
1083 # a 'branch' here is a linear segment of history, with four parts:
1079 # head, root, first parent, second parent
1084 # head, root, first parent, second parent
1080 # (a branch always has two parents (or none) by definition)
1085 # (a branch always has two parents (or none) by definition)
1081 unknown = remote.branches(unknown)
1086 unknown = remote.branches(unknown)
1082 while unknown:
1087 while unknown:
1083 r = []
1088 r = []
1084 while unknown:
1089 while unknown:
1085 n = unknown.pop(0)
1090 n = unknown.pop(0)
1086 if n[0] in seen:
1091 if n[0] in seen:
1087 continue
1092 continue
1088
1093
1089 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1094 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1090 if n[0] == nullid:
1095 if n[0] == nullid:
1091 break
1096 break
1092 if n in seenbranch:
1097 if n in seenbranch:
1093 self.ui.debug("branch already found\n")
1098 self.ui.debug("branch already found\n")
1094 continue
1099 continue
1095 if n[1] and n[1] in m: # do we know the base?
1100 if n[1] and n[1] in m: # do we know the base?
1096 self.ui.debug("found incomplete branch %s:%s\n"
1101 self.ui.debug("found incomplete branch %s:%s\n"
1097 % (short(n[0]), short(n[1])))
1102 % (short(n[0]), short(n[1])))
1098 search.append(n) # schedule branch range for scanning
1103 search.append(n) # schedule branch range for scanning
1099 seenbranch[n] = 1
1104 seenbranch[n] = 1
1100 else:
1105 else:
1101 if n[1] not in seen and n[1] not in fetch:
1106 if n[1] not in seen and n[1] not in fetch:
1102 if n[2] in m and n[3] in m:
1107 if n[2] in m and n[3] in m:
1103 self.ui.debug("found new changeset %s\n" %
1108 self.ui.debug("found new changeset %s\n" %
1104 short(n[1]))
1109 short(n[1]))
1105 fetch.append(n[1]) # earliest unknown
1110 fetch.append(n[1]) # earliest unknown
1106 base[n[2]] = 1 # latest known
1111 base[n[2]] = 1 # latest known
1107 continue
1112 continue
1108
1113
1109 for a in n[2:4]:
1114 for a in n[2:4]:
1110 if a not in rep:
1115 if a not in rep:
1111 r.append(a)
1116 r.append(a)
1112 rep[a] = 1
1117 rep[a] = 1
1113
1118
1114 seen[n[0]] = 1
1119 seen[n[0]] = 1
1115
1120
1116 if r:
1121 if r:
1117 reqcnt += 1
1122 reqcnt += 1
1118 self.ui.debug("request %d: %s\n" %
1123 self.ui.debug("request %d: %s\n" %
1119 (reqcnt, " ".join(map(short, r))))
1124 (reqcnt, " ".join(map(short, r))))
1120 for p in range(0, len(r), 10):
1125 for p in range(0, len(r), 10):
1121 for b in remote.branches(r[p:p+10]):
1126 for b in remote.branches(r[p:p+10]):
1122 self.ui.debug("received %s:%s\n" %
1127 self.ui.debug("received %s:%s\n" %
1123 (short(b[0]), short(b[1])))
1128 (short(b[0]), short(b[1])))
1124 if b[0] not in m and b[0] not in seen:
1129 if b[0] not in m and b[0] not in seen:
1125 unknown.append(b)
1130 unknown.append(b)
1126
1131
1127 # do binary search on the branches we found
1132 # do binary search on the branches we found
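# remote.between() samples nodes between each (head, root) pair at increasing
# distances from the head; f doubles in step with that spacing, so hitting a
# known node either identifies the earliest unknown changeset (f <= 2) or
# narrows the search to the smaller (p, i) interval pushed back onto 'search'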
1128 while search:
1133 while search:
1129 n = search.pop(0)
1134 n = search.pop(0)
1130 reqcnt += 1
1135 reqcnt += 1
1131 l = remote.between([(n[0], n[1])])[0]
1136 l = remote.between([(n[0], n[1])])[0]
1132 l.append(n[1])
1137 l.append(n[1])
1133 p = n[0]
1138 p = n[0]
1134 f = 1
1139 f = 1
1135 for i in l:
1140 for i in l:
1136 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1141 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1137 if i in m:
1142 if i in m:
1138 if f <= 2:
1143 if f <= 2:
1139 self.ui.debug("found new branch changeset %s\n" %
1144 self.ui.debug("found new branch changeset %s\n" %
1140 short(p))
1145 short(p))
1141 fetch.append(p)
1146 fetch.append(p)
1142 base[i] = 1
1147 base[i] = 1
1143 else:
1148 else:
1144 self.ui.debug("narrowed branch search to %s:%s\n"
1149 self.ui.debug("narrowed branch search to %s:%s\n"
1145 % (short(p), short(i)))
1150 % (short(p), short(i)))
1146 search.append((p, i))
1151 search.append((p, i))
1147 break
1152 break
1148 p, f = i, f * 2
1153 p, f = i, f * 2
1149
1154
1150 # sanity check our fetch list
1155 # sanity check our fetch list
1151 for f in fetch:
1156 for f in fetch:
1152 if f in m:
1157 if f in m:
1153 raise RepoError("already have changeset " + short(f))
1158 raise RepoError("already have changeset " + short(f))
1154
1159
1155 if base.keys() == [nullid]:
1160 if base.keys() == [nullid]:
1156 self.ui.warn("warning: pulling from an unrelated repository!\n")
1161 self.ui.warn("warning: pulling from an unrelated repository!\n")
1157
1162
1158 self.ui.note("adding new changesets starting at " +
1163 self.ui.note("adding new changesets starting at " +
1159 " ".join([short(f) for f in fetch]) + "\n")
1164 " ".join([short(f) for f in fetch]) + "\n")
1160
1165
1161 self.ui.debug("%d total queries\n" % reqcnt)
1166 self.ui.debug("%d total queries\n" % reqcnt)
1162
1167
1163 return fetch
1168 return fetch
1164
1169
1165 def findoutgoing(self, remote):
1170 def findoutgoing(self, remote):
1166 base = {}
1171 base = {}
1167 self.findincoming(remote, base)
1172 self.findincoming(remote, base)
1168 remain = dict.fromkeys(self.changelog.nodemap)
1173 remain = dict.fromkeys(self.changelog.nodemap)
1169
1174
1170 # prune everything remote has from the tree
1175 # prune everything remote has from the tree
1171 del remain[nullid]
1176 del remain[nullid]
1172 remove = base.keys()
1177 remove = base.keys()
1173 while remove:
1178 while remove:
1174 n = remove.pop(0)
1179 n = remove.pop(0)
1175 if n in remain:
1180 if n in remain:
1176 del remain[n]
1181 del remain[n]
1177 for p in self.changelog.parents(n):
1182 for p in self.changelog.parents(n):
1178 remove.append(p)
1183 remove.append(p)
1179
1184
1180 # find every node whose parents have been pruned
1185 # find every node whose parents have been pruned
1181 subset = []
1186 subset = []
1182 for n in remain:
1187 for n in remain:
1183 p1, p2 = self.changelog.parents(n)
1188 p1, p2 = self.changelog.parents(n)
1184 if p1 not in remain and p2 not in remain:
1189 if p1 not in remain and p2 not in remain:
1185 subset.append(n)
1190 subset.append(n)
1186
1191
1187 # this is the set of all roots we have to push
1192 # this is the set of all roots we have to push
1188 return subset
1193 return subset
1189
1194
1190 def pull(self, remote):
1195 def pull(self, remote):
1191 lock = self.lock()
1196 lock = self.lock()
1192
1197
1193 # if we have an empty repo, fetch everything
1198 # if we have an empty repo, fetch everything
1194 if self.changelog.tip() == nullid:
1199 if self.changelog.tip() == nullid:
1195 self.ui.status("requesting all changes\n")
1200 self.ui.status("requesting all changes\n")
1196 fetch = [nullid]
1201 fetch = [nullid]
1197 else:
1202 else:
1198 fetch = self.findincoming(remote)
1203 fetch = self.findincoming(remote)
1199
1204
1200 if not fetch:
1205 if not fetch:
1201 self.ui.status("no changes found\n")
1206 self.ui.status("no changes found\n")
1202 return 1
1207 return 1
1203
1208
1204 cg = remote.changegroup(fetch)
1209 cg = remote.changegroup(fetch)
1205 return self.addchangegroup(cg)
1210 return self.addchangegroup(cg)
1206
1211
1207 def push(self, remote):
1212 def push(self, remote):
1208 lock = remote.lock()
1213 lock = remote.lock()
1209 update = self.findoutgoing(remote)
1214 update = self.findoutgoing(remote)
1210 if not update:
1215 if not update:
1211 self.ui.status("no changes found\n")
1216 self.ui.status("no changes found\n")
1212 return 1
1217 return 1
1213
1218
1214 cg = self.changegroup(update)
1219 cg = self.changegroup(update)
1215 return remote.addchangegroup(cg)
1220 return remote.addchangegroup(cg)
1216
1221
1217 def changegroup(self, basenodes):
1222 def changegroup(self, basenodes):
1218 class genread:
1223 class genread:
1219 def __init__(self, generator):
1224 def __init__(self, generator):
1220 self.g = generator
1225 self.g = generator
1221 self.buf = ""
1226 self.buf = ""
1222 def read(self, l):
1227 def read(self, l):
1223 while l > len(self.buf):
1228 while l > len(self.buf):
1224 try:
1229 try:
1225 self.buf += self.g.next()
1230 self.buf += self.g.next()
1226 except StopIteration:
1231 except StopIteration:
1227 break
1232 break
1228 d, self.buf = self.buf[:l], self.buf[l:]
1233 d, self.buf = self.buf[:l], self.buf[l:]
1229 return d
1234 return d
1230
1235
1231 def gengroup():
1236 def gengroup():
1232 nodes = self.newer(basenodes)
1237 nodes = self.newer(basenodes)
1233
1238
1234 # construct the link map
1239 # construct the link map
1235 linkmap = {}
1240 linkmap = {}
1236 for n in nodes:
1241 for n in nodes:
1237 linkmap[self.changelog.rev(n)] = n
1242 linkmap[self.changelog.rev(n)] = n
1238
1243
1239 # construct a list of all changed files
1244 # construct a list of all changed files
1240 changed = {}
1245 changed = {}
1241 for n in nodes:
1246 for n in nodes:
1242 c = self.changelog.read(n)
1247 c = self.changelog.read(n)
1243 for f in c[3]:
1248 for f in c[3]:
1244 changed[f] = 1
1249 changed[f] = 1
1245 changed = changed.keys()
1250 changed = changed.keys()
1246 changed.sort()
1251 changed.sort()
1247
1252
1248 # the changegroup is changesets + manifests + all file revs
1253 # the changegroup is changesets + manifests + all file revs
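# on the wire that means: the changelog group, then the manifest group, then
# for every changed file a chunk holding the file name (with a 4-byte length
# prefix counted in the length) followed by that file's revision group, and
# finally a zero-length chunk marking the end of the stream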
1249 revs = [ self.changelog.rev(n) for n in nodes ]
1254 revs = [ self.changelog.rev(n) for n in nodes ]
1250
1255
1251 for y in self.changelog.group(linkmap): yield y
1256 for y in self.changelog.group(linkmap): yield y
1252 for y in self.manifest.group(linkmap): yield y
1257 for y in self.manifest.group(linkmap): yield y
1253 for f in changed:
1258 for f in changed:
1254 yield struct.pack(">l", len(f) + 4) + f
1259 yield struct.pack(">l", len(f) + 4) + f
1255 g = self.file(f).group(linkmap)
1260 g = self.file(f).group(linkmap)
1256 for y in g:
1261 for y in g:
1257 yield y
1262 yield y
1258
1263
1259 yield struct.pack(">l", 0)
1264 yield struct.pack(">l", 0)
1260
1265
1261 return genread(gengroup())
1266 return genread(gengroup())
1262
1267
1263 def addchangegroup(self, source):
1268 def addchangegroup(self, source):
1264
1269
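# the incoming stream mirrors what changegroup() emits: each chunk starts
# with a 4-byte big-endian length that includes the length field itself,
# and a length of four or less marks the end of the current group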
1265 def getchunk():
1270 def getchunk():
1266 d = source.read(4)
1271 d = source.read(4)
1267 if not d: return ""
1272 if not d: return ""
1268 l = struct.unpack(">l", d)[0]
1273 l = struct.unpack(">l", d)[0]
1269 if l <= 4: return ""
1274 if l <= 4: return ""
1270 return source.read(l - 4)
1275 return source.read(l - 4)
1271
1276
1272 def getgroup():
1277 def getgroup():
1273 while 1:
1278 while 1:
1274 c = getchunk()
1279 c = getchunk()
1275 if not c: break
1280 if not c: break
1276 yield c
1281 yield c
1277
1282
1278 def csmap(x):
1283 def csmap(x):
1279 self.ui.debug("add changeset %s\n" % short(x))
1284 self.ui.debug("add changeset %s\n" % short(x))
1280 return self.changelog.count()
1285 return self.changelog.count()
1281
1286
1282 def revmap(x):
1287 def revmap(x):
1283 return self.changelog.rev(x)
1288 return self.changelog.rev(x)
1284
1289
1285 if not source: return
1290 if not source: return
1286 changesets = files = revisions = 0
1291 changesets = files = revisions = 0
1287
1292
1288 tr = self.transaction()
1293 tr = self.transaction()
1289
1294
1290 # pull off the changeset group
1295 # pull off the changeset group
1291 self.ui.status("adding changesets\n")
1296 self.ui.status("adding changesets\n")
1292 co = self.changelog.tip()
1297 co = self.changelog.tip()
1293 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1298 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1294 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1299 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1295
1300
1296 # pull off the manifest group
1301 # pull off the manifest group
1297 self.ui.status("adding manifests\n")
1302 self.ui.status("adding manifests\n")
1298 mm = self.manifest.tip()
1303 mm = self.manifest.tip()
1299 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1304 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1300
1305
1301 # process the files
1306 # process the files
1302 self.ui.status("adding file changes\n")
1307 self.ui.status("adding file changes\n")
1303 while 1:
1308 while 1:
1304 f = getchunk()
1309 f = getchunk()
1305 if not f: break
1310 if not f: break
1306 self.ui.debug("adding %s revisions\n" % f)
1311 self.ui.debug("adding %s revisions\n" % f)
1307 fl = self.file(f)
1312 fl = self.file(f)
1308 o = fl.count()
1313 o = fl.count()
1309 n = fl.addgroup(getgroup(), revmap, tr)
1314 n = fl.addgroup(getgroup(), revmap, tr)
1310 revisions += fl.count() - o
1315 revisions += fl.count() - o
1311 files += 1
1316 files += 1
1312
1317
1313 self.ui.status(("added %d changesets" +
1318 self.ui.status(("added %d changesets" +
1314 " with %d changes to %d files\n")
1319 " with %d changes to %d files\n")
1315 % (changesets, revisions, files))
1320 % (changesets, revisions, files))
1316
1321
1317 tr.close()
1322 tr.close()
1318
1323
1319 if not self.hook("changegroup"):
1324 if not self.hook("changegroup"):
1320 return 1
1325 return 1
1321
1326
1322 return
1327 return
1323
1328
1324 def update(self, node, allow=False, force=False, choose=None,
1329 def update(self, node, allow=False, force=False, choose=None,
1325 moddirstate=True):
1330 moddirstate=True):
1326 pl = self.dirstate.parents()
1331 pl = self.dirstate.parents()
1327 if not force and pl[1] != nullid:
1332 if not force and pl[1] != nullid:
1328 self.ui.warn("aborting: outstanding uncommitted merges\n")
1333 self.ui.warn("aborting: outstanding uncommitted merges\n")
1329 return 1
1334 return 1
1330
1335
1331 p1, p2 = pl[0], node
1336 p1, p2 = pl[0], node
1332 pa = self.changelog.ancestor(p1, p2)
1337 pa = self.changelog.ancestor(p1, p2)
1333 m1n = self.changelog.read(p1)[0]
1338 m1n = self.changelog.read(p1)[0]
1334 m2n = self.changelog.read(p2)[0]
1339 m2n = self.changelog.read(p2)[0]
1335 man = self.manifest.ancestor(m1n, m2n)
1340 man = self.manifest.ancestor(m1n, m2n)
1336 m1 = self.manifest.read(m1n)
1341 m1 = self.manifest.read(m1n)
1337 mf1 = self.manifest.readflags(m1n)
1342 mf1 = self.manifest.readflags(m1n)
1338 m2 = self.manifest.read(m2n)
1343 m2 = self.manifest.read(m2n)
1339 mf2 = self.manifest.readflags(m2n)
1344 mf2 = self.manifest.readflags(m2n)
1340 ma = self.manifest.read(man)
1345 ma = self.manifest.read(man)
1341 mfa = self.manifest.readflags(man)
1346 mfa = self.manifest.readflags(man)
1342
1347
1343 (c, a, d, u) = self.changes()
1348 (c, a, d, u) = self.changes()
1344
1349
1345 # is this a jump, or a merge? i.e. is there a linear path
1350 # is this a jump, or a merge? i.e. is there a linear path
1346 # from p1 to p2?
1351 # from p1 to p2?
1347 linear_path = (pa == p1 or pa == p2)
1352 linear_path = (pa == p1 or pa == p2)
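# if the common ancestor is one of the two revisions, one is a descendant of
# the other, so this is a plain jump along existing history rather than a merge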
1348
1353
1349 # resolve the manifest to determine which files
1354 # resolve the manifest to determine which files
1350 # we care about merging
1355 # we care about merging
1351 self.ui.note("resolving manifests\n")
1356 self.ui.note("resolving manifests\n")
1352 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1357 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1353 (force, allow, moddirstate, linear_path))
1358 (force, allow, moddirstate, linear_path))
1354 self.ui.debug(" ancestor %s local %s remote %s\n" %
1359 self.ui.debug(" ancestor %s local %s remote %s\n" %
1355 (short(man), short(m1n), short(m2n)))
1360 (short(man), short(m1n), short(m2n)))
1356
1361
1357 merge = {}
1362 merge = {}
1358 get = {}
1363 get = {}
1359 remove = []
1364 remove = []
1360 mark = {}
1365 mark = {}
1361
1366
1362 # construct a working dir manifest
1367 # construct a working dir manifest
1363 mw = m1.copy()
1368 mw = m1.copy()
1364 mfw = mf1.copy()
1369 mfw = mf1.copy()
1365 umap = dict.fromkeys(u)
1370 umap = dict.fromkeys(u)
1366
1371
1367 for f in a + c + u:
1372 for f in a + c + u:
1368 mw[f] = ""
1373 mw[f] = ""
1369 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1374 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1370
1375
1371 for f in d:
1376 for f in d:
1372 if f in mw: del mw[f]
1377 if f in mw: del mw[f]
1373
1378
1374 # If we're jumping between revisions (as opposed to merging),
1379 # If we're jumping between revisions (as opposed to merging),
1375 # and if neither the working directory nor the target rev has
1380 # and if neither the working directory nor the target rev has
1376 # the file, then we need to remove it from the dirstate, to
1381 # the file, then we need to remove it from the dirstate, to
1377 # prevent the dirstate from listing the file when it is no
1382 # prevent the dirstate from listing the file when it is no
1378 # longer in the manifest.
1383 # longer in the manifest.
1379 if moddirstate and linear_path and f not in m2:
1384 if moddirstate and linear_path and f not in m2:
1380 self.dirstate.forget((f,))
1385 self.dirstate.forget((f,))
1381
1386
1382 # Compare manifests
1387 # Compare manifests
1383 for f, n in mw.iteritems():
1388 for f, n in mw.iteritems():
1384 if choose and not choose(f): continue
1389 if choose and not choose(f): continue
1385 if f in m2:
1390 if f in m2:
1386 s = 0
1391 s = 0
1387
1392
1388 # is the wfile new since m1, and match m2?
1393 # is the wfile new since m1, and match m2?
1389 if f not in m1:
1394 if f not in m1:
1390 t1 = self.wfile(f).read()
1395 t1 = self.wfile(f).read()
1391 t2 = self.file(f).revision(m2[f])
1396 t2 = self.file(f).revision(m2[f])
1392 if cmp(t1, t2) == 0:
1397 if cmp(t1, t2) == 0:
1393 mark[f] = 1
1398 mark[f] = 1
1394 n = m2[f]
1399 n = m2[f]
1395 del t1, t2
1400 del t1, t2
1396
1401
1397 # are files different?
1402 # are files different?
1398 if n != m2[f]:
1403 if n != m2[f]:
1399 a = ma.get(f, nullid)
1404 a = ma.get(f, nullid)
1400 # are both different from the ancestor?
1405 # are both different from the ancestor?
1401 if n != a and m2[f] != a:
1406 if n != a and m2[f] != a:
1402 self.ui.debug(" %s versions differ, resolve\n" % f)
1407 self.ui.debug(" %s versions differ, resolve\n" % f)
1403 # merge executable bits
1408 # merge executable bits
1404 # "if we changed or they changed, change in merge"
1409 # "if we changed or they changed, change in merge"
1405 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1410 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1406 mode = ((a^b) | (a^c)) ^ a
1411 mode = ((a^b) | (a^c)) ^ a
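# a is the ancestor's exec bit, b the working copy's, c the remote's;
# (a^b) is true iff we flipped it and (a^c) iff they did, so xor-ing the
# OR of those back into a flips the ancestor bit exactly when either side
# changed it, e.g. a=0, b=0, c=1 gives mode = (0|1)^0 = 1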
1407 merge[f] = (m1.get(f, nullid), m2[f], mode)
1412 merge[f] = (m1.get(f, nullid), m2[f], mode)
1408 s = 1
1413 s = 1
1409 # are we clobbering?
1414 # are we clobbering?
1410 # is remote's version newer?
1415 # is remote's version newer?
1411 # or are we going back in time?
1416 # or are we going back in time?
1412 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1417 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1413 self.ui.debug(" remote %s is newer, get\n" % f)
1418 self.ui.debug(" remote %s is newer, get\n" % f)
1414 get[f] = m2[f]
1419 get[f] = m2[f]
1415 s = 1
1420 s = 1
1416 else:
1421 else:
1417 mark[f] = 1
1422 mark[f] = 1
1418 elif f in umap:
1423 elif f in umap:
1419 # this unknown file is the same as the checkout
1424 # this unknown file is the same as the checkout
1420 get[f] = m2[f]
1425 get[f] = m2[f]
1421
1426
1422 if not s and mfw[f] != mf2[f]:
1427 if not s and mfw[f] != mf2[f]:
1423 if force:
1428 if force:
1424 self.ui.debug(" updating permissions for %s\n" % f)
1429 self.ui.debug(" updating permissions for %s\n" % f)
1425 util.set_exec(self.wjoin(f), mf2[f])
1430 util.set_exec(self.wjoin(f), mf2[f])
1426 else:
1431 else:
1427 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1432 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1428 mode = ((a^b) | (a^c)) ^ a
1433 mode = ((a^b) | (a^c)) ^ a
1429 if mode != b:
1434 if mode != b:
1430 self.ui.debug(" updating permissions for %s\n" % f)
1435 self.ui.debug(" updating permissions for %s\n" % f)
1431 util.set_exec(self.wjoin(f), mode)
1436 util.set_exec(self.wjoin(f), mode)
1432 mark[f] = 1
1437 mark[f] = 1
1433 del m2[f]
1438 del m2[f]
1434 elif f in ma:
1439 elif f in ma:
1435 if n != ma[f]:
1440 if n != ma[f]:
1436 r = "d"
1441 r = "d"
1437 if not force and (linear_path or allow):
1442 if not force and (linear_path or allow):
1438 r = self.ui.prompt(
1443 r = self.ui.prompt(
1439 (" local changed %s which remote deleted\n" % f) +
1444 (" local changed %s which remote deleted\n" % f) +
1440 "(k)eep or (d)elete?", "[kd]", "k")
1445 "(k)eep or (d)elete?", "[kd]", "k")
1441 if r == "d":
1446 if r == "d":
1442 remove.append(f)
1447 remove.append(f)
1443 else:
1448 else:
1444 self.ui.debug("other deleted %s\n" % f)
1449 self.ui.debug("other deleted %s\n" % f)
1445 remove.append(f) # other deleted it
1450 remove.append(f) # other deleted it
1446 else:
1451 else:
1447 if n == m1.get(f, nullid): # same as parent
1452 if n == m1.get(f, nullid): # same as parent
1448 if p2 == pa: # going backwards?
1453 if p2 == pa: # going backwards?
1449 self.ui.debug("remote deleted %s\n" % f)
1454 self.ui.debug("remote deleted %s\n" % f)
1450 remove.append(f)
1455 remove.append(f)
1451 else:
1456 else:
1452 self.ui.debug("local created %s, keeping\n" % f)
1457 self.ui.debug("local created %s, keeping\n" % f)
1453 else:
1458 else:
1454 self.ui.debug("working dir created %s, keeping\n" % f)
1459 self.ui.debug("working dir created %s, keeping\n" % f)
1455
1460
1456 for f, n in m2.iteritems():
1461 for f, n in m2.iteritems():
1457 if choose and not choose(f): continue
1462 if choose and not choose(f): continue
1458 if f[0] == "/": continue
1463 if f[0] == "/": continue
1459 if f in ma and n != ma[f]:
1464 if f in ma and n != ma[f]:
1460 r = "k"
1465 r = "k"
1461 if not force and (linear_path or allow):
1466 if not force and (linear_path or allow):
1462 r = self.ui.prompt(
1467 r = self.ui.prompt(
1463 ("remote changed %s which local deleted\n" % f) +
1468 ("remote changed %s which local deleted\n" % f) +
1464 "(k)eep or (d)elete?", "[kd]", "k")
1469 "(k)eep or (d)elete?", "[kd]", "k")
1465 if r == "k": get[f] = n
1470 if r == "k": get[f] = n
1466 elif f not in ma:
1471 elif f not in ma:
1467 self.ui.debug("remote created %s\n" % f)
1472 self.ui.debug("remote created %s\n" % f)
1468 get[f] = n
1473 get[f] = n
1469 else:
1474 else:
1470 if force or p2 == pa: # going backwards?
1475 if force or p2 == pa: # going backwards?
1471 self.ui.debug("local deleted %s, recreating\n" % f)
1476 self.ui.debug("local deleted %s, recreating\n" % f)
1472 get[f] = n
1477 get[f] = n
1473 else:
1478 else:
1474 self.ui.debug("local deleted %s\n" % f)
1479 self.ui.debug("local deleted %s\n" % f)
1475
1480
1476 del mw, m1, m2, ma
1481 del mw, m1, m2, ma
1477
1482
1478 if force:
1483 if force:
1479 for f in merge:
1484 for f in merge:
1480 get[f] = merge[f][1]
1485 get[f] = merge[f][1]
1481 merge = {}
1486 merge = {}
1482
1487
1483 if linear_path or force:
1488 if linear_path or force:
1484 # we don't need to do any magic, just jump to the new rev
1489 # we don't need to do any magic, just jump to the new rev
1485 mode = 'n'
1490 mode = 'n'
1486 p1, p2 = p2, nullid
1491 p1, p2 = p2, nullid
1487 else:
1492 else:
1488 if not allow:
1493 if not allow:
1489 self.ui.status("this update spans a branch" +
1494 self.ui.status("this update spans a branch" +
1490 " affecting the following files:\n")
1495 " affecting the following files:\n")
1491 fl = merge.keys() + get.keys()
1496 fl = merge.keys() + get.keys()
1492 fl.sort()
1497 fl.sort()
1493 for f in fl:
1498 for f in fl:
1494 cf = ""
1499 cf = ""
1495 if f in merge: cf = " (resolve)"
1500 if f in merge: cf = " (resolve)"
1496 self.ui.status(" %s%s\n" % (f, cf))
1501 self.ui.status(" %s%s\n" % (f, cf))
1497 self.ui.warn("aborting update spanning branches!\n")
1502 self.ui.warn("aborting update spanning branches!\n")
1498 self.ui.status("(use update -m to merge across branches" +
1503 self.ui.status("(use update -m to merge across branches" +
1499 " or -C to lose changes)\n")
1504 " or -C to lose changes)\n")
1500 return 1
1505 return 1
1501 # we have to remember what files we needed to get/change
1506 # we have to remember what files we needed to get/change
1502 # because any file that's different from either one of its
1507 # because any file that's different from either one of its
1503 # parents must be in the changeset
1508 # parents must be in the changeset
1504 mode = 'm'
1509 mode = 'm'
1505 if moddirstate:
1510 if moddirstate:
1506 self.dirstate.update(mark.keys(), "m")
1511 self.dirstate.update(mark.keys(), "m")
1507
1512
1508 if moddirstate:
1513 if moddirstate:
1509 self.dirstate.setparents(p1, p2)
1514 self.dirstate.setparents(p1, p2)
1510
1515
1511 # get the files we don't need to change
1516 # get the files we don't need to change
1512 files = get.keys()
1517 files = get.keys()
1513 files.sort()
1518 files.sort()
1514 for f in files:
1519 for f in files:
1515 if f[0] == "/": continue
1520 if f[0] == "/": continue
1516 self.ui.note("getting %s\n" % f)
1521 self.ui.note("getting %s\n" % f)
1517 t = self.file(f).read(get[f])
1522 t = self.file(f).read(get[f])
1518 try:
1523 try:
1519 self.wfile(f, "w").write(t)
1524 self.wfile(f, "w").write(t)
1520 except IOError:
1525 except IOError:
1521 os.makedirs(os.path.dirname(self.wjoin(f)))
1526 os.makedirs(os.path.dirname(self.wjoin(f)))
1522 self.wfile(f, "w").write(t)
1527 self.wfile(f, "w").write(t)
1523 util.set_exec(self.wjoin(f), mf2[f])
1528 util.set_exec(self.wjoin(f), mf2[f])
1524 if moddirstate:
1529 if moddirstate:
1525 self.dirstate.update([f], mode)
1530 self.dirstate.update([f], mode)
1526
1531
1527 # merge the tricky bits
1532 # merge the tricky bits
1528 files = merge.keys()
1533 files = merge.keys()
1529 files.sort()
1534 files.sort()
1530 for f in files:
1535 for f in files:
1531 self.ui.status("merging %s\n" % f)
1536 self.ui.status("merging %s\n" % f)
1532 m, o, flag = merge[f]
1537 m, o, flag = merge[f]
1533 self.merge3(f, m, o)
1538 self.merge3(f, m, o)
1534 util.set_exec(self.wjoin(f), flag)
1539 util.set_exec(self.wjoin(f), flag)
1535 if moddirstate and mode == 'm':
1540 if moddirstate and mode == 'm':
1536 # only update dirstate on branch merge, otherwise we
1541 # only update dirstate on branch merge, otherwise we
1537 # could mark files with changes as unchanged
1542 # could mark files with changes as unchanged
1538 self.dirstate.update([f], mode)
1543 self.dirstate.update([f], mode)
1539
1544
1540 remove.sort()
1545 remove.sort()
1541 for f in remove:
1546 for f in remove:
1542 self.ui.note("removing %s\n" % f)
1547 self.ui.note("removing %s\n" % f)
1543 try:
1548 try:
1544 os.unlink(f)
1549 os.unlink(f)
1545 except OSError, inst:
1550 except OSError, inst:
1546 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1551 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1547 # try removing directories that might now be empty
1552 # try removing directories that might now be empty
1548 try: os.removedirs(os.path.dirname(f))
1553 try: os.removedirs(os.path.dirname(f))
1549 except: pass
1554 except: pass
1550 if moddirstate:
1555 if moddirstate:
1551 if mode == 'n':
1556 if mode == 'n':
1552 self.dirstate.forget(remove)
1557 self.dirstate.forget(remove)
1553 else:
1558 else:
1554 self.dirstate.update(remove, 'r')
1559 self.dirstate.update(remove, 'r')
1555
1560
1556 def merge3(self, fn, my, other):
1561 def merge3(self, fn, my, other):
1557 """perform a 3-way merge in the working directory"""
1562 """perform a 3-way merge in the working directory"""
1558
1563
1559 def temp(prefix, node):
1564 def temp(prefix, node):
1560 pre = "%s~%s." % (os.path.basename(fn), prefix)
1565 pre = "%s~%s." % (os.path.basename(fn), prefix)
1561 (fd, name) = tempfile.mkstemp("", pre)
1566 (fd, name) = tempfile.mkstemp("", pre)
1562 f = os.fdopen(fd, "wb")
1567 f = os.fdopen(fd, "wb")
1563 f.write(fl.revision(node))
1568 f.write(fl.revision(node))
1564 f.close()
1569 f.close()
1565 return name
1570 return name
1566
1571
1567 fl = self.file(fn)
1572 fl = self.file(fn)
1568 base = fl.ancestor(my, other)
1573 base = fl.ancestor(my, other)
1569 a = self.wjoin(fn)
1574 a = self.wjoin(fn)
1570 b = temp("base", base)
1575 b = temp("base", base)
1571 c = temp("other", other)
1576 c = temp("other", other)
1572
1577
1573 self.ui.note("resolving %s\n" % fn)
1578 self.ui.note("resolving %s\n" % fn)
1574 self.ui.debug("file %s: other %s ancestor %s\n" %
1579 self.ui.debug("file %s: other %s ancestor %s\n" %
1575 (fn, short(other), short(base)))
1580 (fn, short(other), short(base)))
1576
1581
1577 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1582 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1578 or "hgmerge")
1583 or "hgmerge")
1579 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1584 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1580 if r:
1585 if r:
1581 self.ui.warn("merging %s failed!\n" % fn)
1586 self.ui.warn("merging %s failed!\n" % fn)
1582
1587
1583 os.unlink(b)
1588 os.unlink(b)
1584 os.unlink(c)
1589 os.unlink(c)
1585
1590
1586 def verify(self):
1591 def verify(self):
1587 filelinkrevs = {}
1592 filelinkrevs = {}
1588 filenodes = {}
1593 filenodes = {}
1589 changesets = revisions = files = 0
1594 changesets = revisions = files = 0
1590 errors = 0
1595 errors = 0
1591
1596
1592 seen = {}
1597 seen = {}
1593 self.ui.status("checking changesets\n")
1598 self.ui.status("checking changesets\n")
1594 for i in range(self.changelog.count()):
1599 for i in range(self.changelog.count()):
1595 changesets += 1
1600 changesets += 1
1596 n = self.changelog.node(i)
1601 n = self.changelog.node(i)
1597 if n in seen:
1602 if n in seen:
1598 self.ui.warn("duplicate changeset at revision %d\n" % i)
1603 self.ui.warn("duplicate changeset at revision %d\n" % i)
1599 errors += 1
1604 errors += 1
1600 seen[n] = 1
1605 seen[n] = 1
1601
1606
1602 for p in self.changelog.parents(n):
1607 for p in self.changelog.parents(n):
1603 if p not in self.changelog.nodemap:
1608 if p not in self.changelog.nodemap:
1604 self.ui.warn("changeset %s has unknown parent %s\n" %
1609 self.ui.warn("changeset %s has unknown parent %s\n" %
1605 (short(n), short(p)))
1610 (short(n), short(p)))
1606 errors += 1
1611 errors += 1
1607 try:
1612 try:
1608 changes = self.changelog.read(n)
1613 changes = self.changelog.read(n)
1609 except Exception, inst:
1614 except Exception, inst:
1610 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1615 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1611 errors += 1
1616 errors += 1
1612
1617
1613 for f in changes[3]:
1618 for f in changes[3]:
1614 filelinkrevs.setdefault(f, []).append(i)
1619 filelinkrevs.setdefault(f, []).append(i)
1615
1620
1616 seen = {}
1621 seen = {}
1617 self.ui.status("checking manifests\n")
1622 self.ui.status("checking manifests\n")
1618 for i in range(self.manifest.count()):
1623 for i in range(self.manifest.count()):
1619 n = self.manifest.node(i)
1624 n = self.manifest.node(i)
1620 if n in seen:
1625 if n in seen:
1621 self.ui.warn("duplicate manifest at revision %d\n" % i)
1626 self.ui.warn("duplicate manifest at revision %d\n" % i)
1622 errors += 1
1627 errors += 1
1623 seen[n] = 1
1628 seen[n] = 1
1624
1629
1625 for p in self.manifest.parents(n):
1630 for p in self.manifest.parents(n):
1626 if p not in self.manifest.nodemap:
1631 if p not in self.manifest.nodemap:
1627 self.ui.warn("manifest %s has unknown parent %s\n" %
1632 self.ui.warn("manifest %s has unknown parent %s\n" %
1628 (short(n), short(p)))
1633 (short(n), short(p)))
1629 errors += 1
1634 errors += 1
1630
1635
1631 try:
1636 try:
1632 delta = mdiff.patchtext(self.manifest.delta(n))
1637 delta = mdiff.patchtext(self.manifest.delta(n))
1633 except KeyboardInterrupt:
1638 except KeyboardInterrupt:
1634 self.ui.warn("aborted\n")
1639 self.ui.warn("aborted\n")
1635 sys.exit(0)
1640 sys.exit(0)
1636 except Exception, inst:
1641 except Exception, inst:
1637 self.ui.warn("unpacking manifest %s: %s\n"
1642 self.ui.warn("unpacking manifest %s: %s\n"
1638 % (short(n), inst))
1643 % (short(n), inst))
1639 errors += 1
1644 errors += 1
1640
1645
1641 ff = [ l.split('\0') for l in delta.splitlines() ]
1646 ff = [ l.split('\0') for l in delta.splitlines() ]
1642 for f, fn in ff:
1647 for f, fn in ff:
1643 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1648 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1644
1649
1645 self.ui.status("crosschecking files in changesets and manifests\n")
1650 self.ui.status("crosschecking files in changesets and manifests\n")
1646 for f in filenodes:
1651 for f in filenodes:
1647 if f not in filelinkrevs:
1652 if f not in filelinkrevs:
1648 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1653 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1649 errors += 1
1654 errors += 1
1650
1655
1651 for f in filelinkrevs:
1656 for f in filelinkrevs:
1652 if f not in filenodes:
1657 if f not in filenodes:
1653 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1658 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1654 errors += 1
1659 errors += 1
1655
1660
1656 self.ui.status("checking files\n")
1661 self.ui.status("checking files\n")
1657 ff = filenodes.keys()
1662 ff = filenodes.keys()
1658 ff.sort()
1663 ff.sort()
1659 for f in ff:
1664 for f in ff:
1660 if f == "/dev/null": continue
1665 if f == "/dev/null": continue
1661 files += 1
1666 files += 1
1662 fl = self.file(f)
1667 fl = self.file(f)
1663 nodes = { nullid: 1 }
1668 nodes = { nullid: 1 }
1664 seen = {}
1669 seen = {}
1665 for i in range(fl.count()):
1670 for i in range(fl.count()):
1666 revisions += 1
1671 revisions += 1
1667 n = fl.node(i)
1672 n = fl.node(i)
1668
1673
1669 if n in seen:
1674 if n in seen:
1670 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1675 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1671 errors += 1
1676 errors += 1
1672
1677
1673 if n not in filenodes[f]:
1678 if n not in filenodes[f]:
1674 self.ui.warn("%s: %d:%s not in manifests\n"
1679 self.ui.warn("%s: %d:%s not in manifests\n"
1675 % (f, i, short(n)))
1680 % (f, i, short(n)))
1676 errors += 1
1681 errors += 1
1677 else:
1682 else:
1678 del filenodes[f][n]
1683 del filenodes[f][n]
1679
1684
1680 flr = fl.linkrev(n)
1685 flr = fl.linkrev(n)
1681 if flr not in filelinkrevs[f]:
1686 if flr not in filelinkrevs[f]:
1682 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1687 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1683 % (f, short(n), fl.linkrev(n)))
1688 % (f, short(n), fl.linkrev(n)))
1684 errors += 1
1689 errors += 1
1685 else:
1690 else:
1686 filelinkrevs[f].remove(flr)
1691 filelinkrevs[f].remove(flr)
1687
1692
1688 # verify contents
1693 # verify contents
1689 try:
1694 try:
1690 t = fl.read(n)
1695 t = fl.read(n)
1691 except Exception, inst:
1696 except Exception, inst:
1692 self.ui.warn("unpacking file %s %s: %s\n"
1697 self.ui.warn("unpacking file %s %s: %s\n"
1693 % (f, short(n), inst))
1698 % (f, short(n), inst))
1694 errors += 1
1699 errors += 1
1695
1700
1696 # verify parents
1701 # verify parents
1697 (p1, p2) = fl.parents(n)
1702 (p1, p2) = fl.parents(n)
1698 if p1 not in nodes:
1703 if p1 not in nodes:
1699 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1704 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1700 (f, short(n), short(p1)))
1705 (f, short(n), short(p1)))
1701 errors += 1
1706 errors += 1
1702 if p2 not in nodes:
1707 if p2 not in nodes:
1703 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1708 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1704 (f, short(n), short(p2)))
1709 (f, short(n), short(p2)))
1705 errors += 1
1710 errors += 1
1706 nodes[n] = 1
1711 nodes[n] = 1
1707
1712
1708 # cross-check
1713 # cross-check
1709 for node in filenodes[f]:
1714 for node in filenodes[f]:
1710 self.ui.warn("node %s in manifests not in %s\n"
1715 self.ui.warn("node %s in manifests not in %s\n"
1711 % (hex(node), f))
1716 % (hex(node), f))
1712 errors += 1
1717 errors += 1
1713
1718
1714 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1719 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1715 (files, changesets, revisions))
1720 (files, changesets, revisions))
1716
1721
1717 if errors:
1722 if errors:
1718 self.ui.warn("%d integrity errors encountered!\n" % errors)
1723 self.ui.warn("%d integrity errors encountered!\n" % errors)
1719 return 1
1724 return 1
1720
1725
1721 class httprepository:
1726 class httprepository:
1722 def __init__(self, ui, path):
1727 def __init__(self, ui, path):
1723 # fix missing / after hostname
1728 # fix missing / after hostname
1724 s = urlparse.urlsplit(path)
1729 s = urlparse.urlsplit(path)
1725 partial = s[2]
1730 partial = s[2]
1726 if not partial: partial = "/"
1731 if not partial: partial = "/"
1727 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1732 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1728 self.ui = ui
1733 self.ui = ui
1729 no_list = [ "localhost", "127.0.0.1" ]
1734 no_list = [ "localhost", "127.0.0.1" ]
1730 host = ui.config("http_proxy", "host")
1735 host = ui.config("http_proxy", "host")
1731 if host is None:
1736 if host is None:
1732 host = os.environ.get("http_proxy")
1737 host = os.environ.get("http_proxy")
1733 if host and host.startswith('http://'):
1738 if host and host.startswith('http://'):
1734 host = host[7:]
1739 host = host[7:]
1735 user = ui.config("http_proxy", "user")
1740 user = ui.config("http_proxy", "user")
1736 passwd = ui.config("http_proxy", "passwd")
1741 passwd = ui.config("http_proxy", "passwd")
1737 no = ui.config("http_proxy", "no")
1742 no = ui.config("http_proxy", "no")
1738 if no is None:
1743 if no is None:
1739 no = os.environ.get("no_proxy")
1744 no = os.environ.get("no_proxy")
1740 if no:
1745 if no:
1741 no_list = no_list + no.split(",")
1746 no_list = no_list + no.split(",")
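# the settings read above come from an [http_proxy] section in hgrc;
# the values below are illustrative only:
#   [http_proxy]
#   host = proxy.example.com:3128
#   user = alice
#   passwd = secret
#   no = localhost,127.0.0.1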
1742
1747
1743 no_proxy = 0
1748 no_proxy = 0
1744 for h in no_list:
1749 for h in no_list:
1745 if (path.startswith("http://" + h + "/") or
1750 if (path.startswith("http://" + h + "/") or
1746 path.startswith("http://" + h + ":") or
1751 path.startswith("http://" + h + ":") or
1747 path == "http://" + h):
1752 path == "http://" + h):
1748 no_proxy = 1
1753 no_proxy = 1
1749
1754
1750 # Note: urllib2 takes proxy values from the environment and those will
1755 # Note: urllib2 takes proxy values from the environment and those will
1751 # take precedence
1756 # take precedence
1752 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1757 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1753 if os.environ.has_key(env):
1758 if os.environ.has_key(env):
1754 del os.environ[env]
1759 del os.environ[env]
1755
1760
1756 proxy_handler = urllib2.BaseHandler()
1761 proxy_handler = urllib2.BaseHandler()
1757 if host and not no_proxy:
1762 if host and not no_proxy:
1758 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1763 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1759
1764
1760 authinfo = None
1765 authinfo = None
1761 if user and passwd:
1766 if user and passwd:
1762 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1767 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1763 passmgr.add_password(None, host, user, passwd)
1768 passmgr.add_password(None, host, user, passwd)
1764 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1769 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1765
1770
1766 opener = urllib2.build_opener(proxy_handler, authinfo)
1771 opener = urllib2.build_opener(proxy_handler, authinfo)
1767 urllib2.install_opener(opener)
1772 urllib2.install_opener(opener)
1768
1773
1769 def dev(self):
1774 def dev(self):
1770 return -1
1775 return -1
1771
1776
1772 def do_cmd(self, cmd, **args):
1777 def do_cmd(self, cmd, **args):
1773 self.ui.debug("sending %s command\n" % cmd)
1778 self.ui.debug("sending %s command\n" % cmd)
1774 q = {"cmd": cmd}
1779 q = {"cmd": cmd}
1775 q.update(args)
1780 q.update(args)
1776 qs = urllib.urlencode(q)
1781 qs = urllib.urlencode(q)
1777 cu = "%s?%s" % (self.url, qs)
1782 cu = "%s?%s" % (self.url, qs)
1778 resp = urllib2.urlopen(cu)
1783 resp = urllib2.urlopen(cu)
1779 proto = resp.headers['content-type']
1784 proto = resp.headers['content-type']
1780
1785
1781 # accept old "text/plain" and "application/hg-changegroup" for now
1786 # accept old "text/plain" and "application/hg-changegroup" for now
1782 if not proto.startswith('application/mercurial') and \
1787 if not proto.startswith('application/mercurial') and \
1783 not proto.startswith('text/plain') and \
1788 not proto.startswith('text/plain') and \
1784 not proto.startswith('application/hg-changegroup'):
1789 not proto.startswith('application/hg-changegroup'):
1785 raise RepoError("'%s' does not appear to be an hg repository"
1790 raise RepoError("'%s' does not appear to be an hg repository"
1786 % self.url)
1791 % self.url)
1787
1792
1788 if proto.startswith('application/mercurial'):
1793 if proto.startswith('application/mercurial'):
1789 version = proto[22:]
1794 version = proto[22:]
1790 if float(version) > 0.1:
1795 if float(version) > 0.1:
1791 raise RepoError("'%s' uses newer protocol %s" %
1796 raise RepoError("'%s' uses newer protocol %s" %
1792 (self.url, version))
1797 (self.url, version))
1793
1798
1794 return resp
1799 return resp
1795
1800
1796 def heads(self):
1801 def heads(self):
1797 d = self.do_cmd("heads").read()
1802 d = self.do_cmd("heads").read()
1798 try:
1803 try:
1799 return map(bin, d[:-1].split(" "))
1804 return map(bin, d[:-1].split(" "))
1800 except:
1805 except:
1801 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1806 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1802 raise
1807 raise
1803
1808
1804 def branches(self, nodes):
1809 def branches(self, nodes):
1805 n = " ".join(map(hex, nodes))
1810 n = " ".join(map(hex, nodes))
1806 d = self.do_cmd("branches", nodes=n).read()
1811 d = self.do_cmd("branches", nodes=n).read()
1807 try:
1812 try:
1808 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1813 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1809 return br
1814 return br
1810 except:
1815 except:
1811 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1816 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1812 raise
1817 raise
1813
1818
1814 def between(self, pairs):
1819 def between(self, pairs):
1815 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1820 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1816 d = self.do_cmd("between", pairs=n).read()
1821 d = self.do_cmd("between", pairs=n).read()
1817 try:
1822 try:
1818 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1823 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1819 return p
1824 return p
1820 except:
1825 except:
1821 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1826 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1822 raise
1827 raise
1823
1828
1824 def changegroup(self, nodes):
1829 def changegroup(self, nodes):
1825 n = " ".join(map(hex, nodes))
1830 n = " ".join(map(hex, nodes))
1826 f = self.do_cmd("changegroup", roots=n)
1831 f = self.do_cmd("changegroup", roots=n)
1827 bytes = 0
1832 bytes = 0
1828
1833
1829 class zread:
1834 class zread:
1830 def __init__(self, f):
1835 def __init__(self, f):
1831 self.zd = zlib.decompressobj()
1836 self.zd = zlib.decompressobj()
1832 self.f = f
1837 self.f = f
1833 self.buf = ""
1838 self.buf = ""
1834 def read(self, l):
1839 def read(self, l):
1835 while l > len(self.buf):
1840 while l > len(self.buf):
1836 r = self.f.read(4096)
1841 r = self.f.read(4096)
1837 if r:
1842 if r:
1838 self.buf += self.zd.decompress(r)
1843 self.buf += self.zd.decompress(r)
1839 else:
1844 else:
1840 self.buf += self.zd.flush()
1845 self.buf += self.zd.flush()
1841 break
1846 break
1842 d, self.buf = self.buf[:l], self.buf[l:]
1847 d, self.buf = self.buf[:l], self.buf[l:]
1843 return d
1848 return d
1844
1849
1845 return zread(f)
1850 return zread(f)
1846
1851
1847 class remotelock:
1852 class remotelock:
1848 def __init__(self, repo):
1853 def __init__(self, repo):
1849 self.repo = repo
1854 self.repo = repo
1850 def release(self):
1855 def release(self):
1851 self.repo.unlock()
1856 self.repo.unlock()
1852 self.repo = None
1857 self.repo = None
1853 def __del__(self):
1858 def __del__(self):
1854 if self.repo:
1859 if self.repo:
1855 self.release()
1860 self.release()
1856
1861
1857 class sshrepository:
1862 class sshrepository:
1858 def __init__(self, ui, path):
1863 def __init__(self, ui, path):
1859 self.url = path
1864 self.url = path
1860 self.ui = ui
1865 self.ui = ui
1861
1866
1862 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1867 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
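# matches URLs of the form ssh://[user@]host[:port][/path],
# e.g. ssh://hg@example.com:2222/repos/project (example values)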
1863 if not m:
1868 if not m:
1864 raise RepoError("couldn't parse destination %s\n" % path)
1869 raise RepoError("couldn't parse destination %s\n" % path)
1865
1870
1866 self.user = m.group(2)
1871 self.user = m.group(2)
1867 self.host = m.group(3)
1872 self.host = m.group(3)
1868 self.port = m.group(5)
1873 self.port = m.group(5)
1869 self.path = m.group(7)
1874 self.path = m.group(7)
1870
1875
1871 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1876 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1872 args = self.port and ("%s -p %s") % (args, self.port) or args
1877 args = self.port and ("%s -p %s") % (args, self.port) or args
1873 path = self.path or ""
1878 path = self.path or ""
1874
1879
1875 cmd = "ssh %s 'hg -R %s serve --stdio'"
1880 cmd = "ssh %s 'hg -R %s serve --stdio'"
1876 cmd = cmd % (args, path)
1881 cmd = cmd % (args, path)
1877
1882
1878 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1883 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1879
1884
1880 def readerr(self):
1885 def readerr(self):
1881 while 1:
1886 while 1:
1882 r,w,x = select.select([self.pipee], [], [], 0)
1887 r,w,x = select.select([self.pipee], [], [], 0)
1883 if not r: break
1888 if not r: break
1884 l = self.pipee.readline()
1889 l = self.pipee.readline()
1885 if not l: break
1890 if not l: break
1886 self.ui.status("remote: ", l)
1891 self.ui.status("remote: ", l)
1887
1892
1888 def __del__(self):
1893 def __del__(self):
1889 self.pipeo.close()
1894 self.pipeo.close()
1890 self.pipei.close()
1895 self.pipei.close()
1891 for l in self.pipee:
1896 for l in self.pipee:
1892 self.ui.status("remote: ", l)
1897 self.ui.status("remote: ", l)
1893 self.pipee.close()
1898 self.pipee.close()
1894
1899
1895 def dev(self):
1900 def dev(self):
1896 return -1
1901 return -1
1897
1902
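# the ssh protocol below is line oriented over the child's stdin/stdout:
# do_cmd() writes the command name, then "key length\n" followed by the raw
# value bytes for each argument; call() reads back a decimal length line and
# then that many bytes of response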
1898 def do_cmd(self, cmd, **args):
1903 def do_cmd(self, cmd, **args):
1899 self.ui.debug("sending %s command\n" % cmd)
1904 self.ui.debug("sending %s command\n" % cmd)
1900 self.pipeo.write("%s\n" % cmd)
1905 self.pipeo.write("%s\n" % cmd)
1901 for k, v in args.items():
1906 for k, v in args.items():
1902 self.pipeo.write("%s %d\n" % (k, len(v)))
1907 self.pipeo.write("%s %d\n" % (k, len(v)))
1903 self.pipeo.write(v)
1908 self.pipeo.write(v)
1904 self.pipeo.flush()
1909 self.pipeo.flush()
1905
1910
1906 return self.pipei
1911 return self.pipei
1907
1912
1908 def call(self, cmd, **args):
1913 def call(self, cmd, **args):
1909 r = self.do_cmd(cmd, **args)
1914 r = self.do_cmd(cmd, **args)
1910 l = r.readline()
1915 l = r.readline()
1911 self.readerr()
1916 self.readerr()
1912 try:
1917 try:
1913 l = int(l)
1918 l = int(l)
1914 except:
1919 except:
1915 raise RepoError("unexpected response '%s'" % l)
1920 raise RepoError("unexpected response '%s'" % l)
1916 return r.read(l)
1921 return r.read(l)
1917
1922
1918 def lock(self):
1923 def lock(self):
1919 self.call("lock")
1924 self.call("lock")
1920 return remotelock(self)
1925 return remotelock(self)
1921
1926
1922 def unlock(self):
1927 def unlock(self):
1923 self.call("unlock")
1928 self.call("unlock")
1924
1929
1925 def heads(self):
1930 def heads(self):
1926 d = self.call("heads")
1931 d = self.call("heads")
1927 try:
1932 try:
1928 return map(bin, d[:-1].split(" "))
1933 return map(bin, d[:-1].split(" "))
1929 except:
1934 except:
1930 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1935 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1931
1936
1932 def branches(self, nodes):
1937 def branches(self, nodes):
1933 n = " ".join(map(hex, nodes))
1938 n = " ".join(map(hex, nodes))
1934 d = self.call("branches", nodes=n)
1939 d = self.call("branches", nodes=n)
1935 try:
1940 try:
1936 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1941 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1937 return br
1942 return br
1938 except:
1943 except:
1939 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1944 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1940
1945
1941 def between(self, pairs):
1946 def between(self, pairs):
1942 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1947 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1943 d = self.call("between", pairs=n)
1948 d = self.call("between", pairs=n)
1944 try:
1949 try:
1945 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1950 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1946 return p
1951 return p
1947 except:
1952 except:
1948 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1953 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1949
1954
1950 def changegroup(self, nodes):
1955 def changegroup(self, nodes):
1951 n = " ".join(map(hex, nodes))
1956 n = " ".join(map(hex, nodes))
1952 f = self.do_cmd("changegroup", roots=n)
1957 f = self.do_cmd("changegroup", roots=n)
1953 return self.pipei
1958 return self.pipei
1954
1959
1955 def addchangegroup(self, cg):
1960 def addchangegroup(self, cg):
1956 d = self.call("addchangegroup")
1961 d = self.call("addchangegroup")
1957 if d:
1962 if d:
1958 raise RepoError("push refused: %s" % d)
1963 raise RepoError("push refused: %s" % d)
1959
1964
1960 while 1:
1965 while 1:
1961 d = cg.read(4096)
1966 d = cg.read(4096)
1962 if not d: break
1967 if not d: break
1963 self.pipeo.write(d)
1968 self.pipeo.write(d)
1964 self.readerr()
1969 self.readerr()
1965
1970
1966 self.pipeo.flush()
1971 self.pipeo.flush()
1967
1972
1968 self.readerr()
1973 self.readerr()
1969 l = int(self.pipei.readline())
1974 l = int(self.pipei.readline())
1970 return self.pipei.read(l) != ""
1975 return self.pipei.read(l) != ""
1971
1976
1972 def repository(ui, path=None, create=0):
1977 def repository(ui, path=None, create=0):
1973 if path:
1978 if path:
1974 if path.startswith("http://"):
1979 if path.startswith("http://"):
1975 return httprepository(ui, path)
1980 return httprepository(ui, path)
1976 if path.startswith("hg://"):
1981 if path.startswith("hg://"):
1977 return httprepository(ui, path.replace("hg://", "http://"))
1982 return httprepository(ui, path.replace("hg://", "http://"))
1978 if path.startswith("old-http://"):
1983 if path.startswith("old-http://"):
1979 return localrepository(ui, path.replace("old-http://", "http://"))
1984 return localrepository(ui, path.replace("old-http://", "http://"))
1980 if path.startswith("ssh://"):
1985 if path.startswith("ssh://"):
1981 return sshrepository(ui, path)
1986 return sshrepository(ui, path)
1982
1987
1983 return localrepository(ui, path, create)
1988 return localrepository(ui, path, create)
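# the URL scheme selects the repository class: http:// and hg:// use
# httprepository, old-http:// wraps a localrepository around a plain HTTP
# URL, ssh:// uses sshrepository, and anything else falls through to a
# local repository, e.g. repository(ui, "ssh://hg@example.com/repo")
# (example URL)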