Fix the directory and revlog collision problem...
mpm@selenic.com
r786:902b12d5 default
@@ -1,1965 +1,1979 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", path + ".i"),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", path + ".d"))
20 os.path.join("data", self.encodedir(path + ".d")))
21
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
24 def encodedir(self, path):
25 path = path.replace(".hg/", ".hg.hg/")
26 path = path.replace(".i/", ".i.hg/")
27 path = path.replace(".d/", ".d.hg/")
28 return path
29
30 def decodedir(self, path):
31 path = path.replace(".d.hg/", ".d/")
32 path = path.replace(".i.hg/", ".i/")
33 path = path.replace(".hg.hg/", ".hg/")
34 return path
21
35
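The helpers above rewrite only directory components, so the revlog data/foo.i for a file named foo no longer clashes with a tracked directory named foo.i, whose contents now land under data/foo.i.hg/. A minimal round-trip sketch of that mapping (the path is made up):

    def encodedir(path):
        path = path.replace(".hg/", ".hg.hg/")
        path = path.replace(".i/", ".i.hg/")
        path = path.replace(".d/", ".d.hg/")
        return path

    def decodedir(path):
        path = path.replace(".d.hg/", ".d/")
        path = path.replace(".i.hg/", ".i/")
        path = path.replace(".hg.hg/", ".hg/")
        return path

    p = "foo.i/bar.txt.i"                  # file "bar.txt" inside a dir named "foo.i"
    print encodedir(p)                     # foo.i.hg/bar.txt.i  (no clash with data/foo.i)
    print decodedir(encodedir(p)) == p     # True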
22 def read(self, node):
36 def read(self, node):
23 t = self.revision(node)
37 t = self.revision(node)
24 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
25 return t
39 return t
26 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
27 return t[s+2:]
41 return t[s+2:]
28
42
29 def readmeta(self, node):
43 def readmeta(self, node):
30 t = self.revision(node)
44 t = self.revision(node)
31 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
32 return t
46 return t
33 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
34 mt = t[2:s]
48 mt = t[2:s]
35 m = {}
49 m = {}
36 for l in mt.splitlines():
50 for l in mt.splitlines():
37 k, v = l.split(": ", 1); m[k] = v
51 k, v = l.split(": ", 1); m[k] = v
38 return m
52 return m
39
53
40 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
41 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
42 mt = ""
56 mt = ""
43 if meta:
57 if meta:
44 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
45 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
46 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
47
61
48 def annotate(self, node):
62 def annotate(self, node):
49
63
50 def decorate(text, rev):
64 def decorate(text, rev):
51 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
52
66
53 def pair(parent, child):
67 def pair(parent, child):
54 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
55 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
56 return child
70 return child
57
71
58 # find all ancestors
72 # find all ancestors
59 needed = {node:1}
73 needed = {node:1}
60 visit = [node]
74 visit = [node]
61 while visit:
75 while visit:
62 n = visit.pop(0)
76 n = visit.pop(0)
63 for p in self.parents(n):
77 for p in self.parents(n):
64 if p not in needed:
78 if p not in needed:
65 needed[p] = 1
79 needed[p] = 1
66 visit.append(p)
80 visit.append(p)
67 else:
81 else:
68 # count how many times we'll use this
82 # count how many times we'll use this
69 needed[p] += 1
83 needed[p] += 1
70
84
71 # sort by revision which is a topological order
85 # sort by revision which is a topological order
72 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
73 visit.sort()
87 visit.sort()
74 hist = {}
88 hist = {}
75
89
76 for r,n in visit:
90 for r,n in visit:
77 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
78 for p in self.parents(n):
92 for p in self.parents(n):
79 if p != nullid:
93 if p != nullid:
80 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
81 # trim the history of unneeded revs
95 # trim the history of unneeded revs
82 needed[p] -= 1
96 needed[p] -= 1
83 if not needed[p]:
97 if not needed[p]:
84 del hist[p]
98 del hist[p]
85 hist[n] = curr
99 hist[n] = curr
86
100
87 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
88
102
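The annotate loop above pushes each parent's per-line revision list through bdiff's matching blocks, so lines that survive unchanged keep their original revision. A rough standalone sketch of the same decorate/pair idea, substituting difflib for the bdiff extension and using made-up revision numbers:

    import difflib

    def decorate(text, rev):
        return ([rev] * len(text.splitlines()), text)

    def pair(parent, child):
        a, b = parent[1].splitlines(), child[1].splitlines()
        for m in difflib.SequenceMatcher(None, a, b).get_matching_blocks():
            # lines unchanged since the parent inherit the parent's annotation
            child[0][m[1]:m[1] + m[2]] = parent[0][m[0]:m[0] + m[2]]
        return child

    p = decorate("a\nb\nc\n", 0)     # rev 0 wrote a, b, c
    c = decorate("a\nB\nc\n", 1)     # rev 1 rewrote the middle line
    print pair(p, c)[0]              # [0, 1, 0]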
89 class manifest(revlog):
103 class manifest(revlog):
90 def __init__(self, opener):
104 def __init__(self, opener):
91 self.mapcache = None
105 self.mapcache = None
92 self.listcache = None
106 self.listcache = None
93 self.addlist = None
107 self.addlist = None
94 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
95
109
96 def read(self, node):
110 def read(self, node):
97 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
98 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
99 return self.mapcache[1]
113 return self.mapcache[1]
100 text = self.revision(node)
114 text = self.revision(node)
101 map = {}
115 map = {}
102 flag = {}
116 flag = {}
103 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
104 for l in self.listcache[1]:
118 for l in self.listcache[1]:
105 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
106 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
107 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
108 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
109 return map
123 return map
110
124
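Each manifest line parsed above is a filename, a NUL byte, forty hex digits of the file node, an optional "x" flag, and a newline. A tiny illustration with an invented entry:

    line = "src/hello.c\0" + "ab" * 20 + "x\n"   # hypothetical manifest entry
    f, n = line.split('\0')
    print f                  # src/hello.c
    print n[:40]             # hex node id of that file's revision
    print n[40:-1] == "x"    # True: the file is marked executable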
111 def readflags(self, node):
125 def readflags(self, node):
112 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
113 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
114 self.read(node)
128 self.read(node)
115 return self.mapcache[2]
129 return self.mapcache[2]
116
130
117 def diff(self, a, b):
131 def diff(self, a, b):
118 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
119 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
120 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
121 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
122 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
123 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
124 return d
138 return d
125 else:
139 else:
126 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
127
141
128 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
129 changed=None):
143 changed=None):
130 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
131 # the bisect loop below
145 # the bisect loop below
132 def gendelta(delta):
146 def gendelta(delta):
133 i = 0
147 i = 0
134 result = []
148 result = []
135 while i < len(delta):
149 while i < len(delta):
136 start = delta[i][2]
150 start = delta[i][2]
137 end = delta[i][3]
151 end = delta[i][3]
138 l = delta[i][4]
152 l = delta[i][4]
139 if l == None:
153 if l == None:
140 l = ""
154 l = ""
141 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
142 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
143 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
144 end = delta[i+1][3]
158 end = delta[i+1][3]
145 if delta[i+1][4]:
159 if delta[i+1][4]:
146 l += delta[i+1][4]
160 l += delta[i+1][4]
147 i += 1
161 i += 1
148 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
149 i += 1
163 i += 1
150 return result
164 return result
151
165
152 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
153 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
154 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
155 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
156 i = len(delta)
170 i = len(delta)
157 while i > 0:
171 while i > 0:
158 i -= 1
172 i -= 1
159 start = delta[i][0]
173 start = delta[i][0]
160 end = delta[i][1]
174 end = delta[i][1]
161 if delta[i][4]:
175 if delta[i][4]:
162 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
163 else:
177 else:
164 del addlist[start:end]
178 del addlist[start:end]
165 return addlist
179 return addlist
166
180
167 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
168 # manifest
182 # manifest
169 def calcoffsets(addlist):
183 def calcoffsets(addlist):
170 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
171 offset = 0
185 offset = 0
172 i = 0
186 i = 0
173 while i < len(addlist):
187 while i < len(addlist):
174 offsets[i] = offset
188 offsets[i] = offset
175 offset += len(addlist[i])
189 offset += len(addlist[i])
176 i += 1
190 i += 1
177 offsets[i] = offset
191 offsets[i] = offset
178 return offsets
192 return offsets
179
193
180 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
181 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
182 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
183 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
184 files = map.keys()
198 files = map.keys()
185 files.sort()
199 files.sort()
186
200
187 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
188 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
189 for f in files]
203 for f in files]
190 cachedelta = None
204 cachedelta = None
191 else:
205 else:
192 addlist = self.listcache[1]
206 addlist = self.listcache[1]
193
207
194 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
195 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
196
210
197 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
198 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
199 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
200 work.sort()
214 work.sort()
201
215
202 delta = []
216 delta = []
203 bs = 0
217 bs = 0
204
218
205 for w in work:
219 for w in work:
206 f = w[0]
220 f = w[0]
207 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
208 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
209 if bs < len(addlist):
223 if bs < len(addlist):
210 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
211 else:
225 else:
212 fn = None
226 fn = None
213 if w[1] == 0:
227 if w[1] == 0:
214 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
215 flags[f] and "x" or '')
229 flags[f] and "x" or '')
216 else:
230 else:
217 l = None
231 l = None
218 start = bs
232 start = bs
219 if fn != f:
233 if fn != f:
220 # item not found, insert a new one
234 # item not found, insert a new one
221 end = bs
235 end = bs
222 if w[1] == 1:
236 if w[1] == 1:
223 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
224 % f)
238 % f)
225 sys.exit(1)
239 sys.exit(1)
226 else:
240 else:
227 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
228 end = bs + 1
242 end = bs + 1
229 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
230
244
231 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
232 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
233 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
234 else:
248 else:
235 cachedelta = None
249 cachedelta = None
236
250
237 text = "".join(self.addlist)
251 text = "".join(self.addlist)
238 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
239 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
240 sys.exit(1)
254 sys.exit(1)
241 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
242 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
243 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
244 self.addlist = None
258 self.addlist = None
245
259
246 return n
260 return n
247
261
248 class changelog(revlog):
262 class changelog(revlog):
249 def __init__(self, opener):
263 def __init__(self, opener):
250 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
251
265
252 def extract(self, text):
266 def extract(self, text):
253 if not text:
267 if not text:
254 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
255 last = text.index("\n\n")
269 last = text.index("\n\n")
256 desc = text[last + 2:]
270 desc = text[last + 2:]
257 l = text[:last].splitlines()
271 l = text[:last].splitlines()
258 manifest = bin(l[0])
272 manifest = bin(l[0])
259 user = l[1]
273 user = l[1]
260 date = l[2]
274 date = l[2]
261 files = l[3:]
275 files = l[3:]
262 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
263
277
264 def read(self, node):
278 def read(self, node):
265 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
266
280
267 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
268 user=None, date=None):
282 user=None, date=None):
269 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
270 list.sort()
284 list.sort()
271 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
272 text = "\n".join(l)
286 text = "\n".join(l)
273 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
274
288
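add() and extract() above agree on a simple text layout for a changeset: manifest hex, user, date, one line per changed file, a blank line, then the description. A hedged sketch with invented values:

    files = ["Makefile", "hello.c"]
    files.sort()
    text = "\n".join(["f" * 40,              # hex of the manifest node
                      "mpm@selenic.com",     # user
                      "1120000000 25200",    # date: seconds, timezone offset
                      ] + files + ["", "fix a bug"])
    # extract() splits at the first blank line: everything after it is the
    # description, the lines before it are manifest, user, date and files.
    last = text.index("\n\n")
    print text[last + 2:]                    # fix a bug
    print text[:last].splitlines()[3:]       # ['Makefile', 'hello.c']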
275 class dirstate:
289 class dirstate:
276 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
277 self.opener = opener
291 self.opener = opener
278 self.root = root
292 self.root = root
279 self.dirty = 0
293 self.dirty = 0
280 self.ui = ui
294 self.ui = ui
281 self.map = None
295 self.map = None
282 self.pl = None
296 self.pl = None
283 self.copies = {}
297 self.copies = {}
284 self.ignorefunc = None
298 self.ignorefunc = None
285
299
286 def wjoin(self, f):
300 def wjoin(self, f):
287 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
288
302
289 def ignore(self, f):
303 def ignore(self, f):
290 if not self.ignorefunc:
304 if not self.ignorefunc:
291 bigpat = []
305 bigpat = []
292 try:
306 try:
293 l = file(self.wjoin(".hgignore"))
307 l = file(self.wjoin(".hgignore"))
294 for pat in l:
308 for pat in l:
295 if pat != "\n":
309 if pat != "\n":
296 p = util.pconvert(pat[:-1])
310 p = util.pconvert(pat[:-1])
297 try:
311 try:
298 r = re.compile(p)
312 r = re.compile(p)
299 except:
313 except:
300 self.ui.warn("ignoring invalid ignore"
314 self.ui.warn("ignoring invalid ignore"
301 + " regular expression '%s'\n" % p)
315 + " regular expression '%s'\n" % p)
302 else:
316 else:
303 bigpat.append(util.pconvert(pat[:-1]))
317 bigpat.append(util.pconvert(pat[:-1]))
304 except IOError: pass
318 except IOError: pass
305
319
306 if bigpat:
320 if bigpat:
307 s = "(?:%s)" % (")|(?:".join(bigpat))
321 s = "(?:%s)" % (")|(?:".join(bigpat))
308 r = re.compile(s)
322 r = re.compile(s)
309 self.ignorefunc = r.search
323 self.ignorefunc = r.search
310 else:
324 else:
311 self.ignorefunc = util.never
325 self.ignorefunc = util.never
312
326
313 return self.ignorefunc(f)
327 return self.ignorefunc(f)
314
328
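ignore() above folds every usable .hgignore line into one big alternation and keeps only its search method. A small sketch with example patterns:

    import re
    bigpat = [r".*\.pyc$", r"^build/"]       # example .hgignore patterns
    r = re.compile("(?:%s)" % (")|(?:".join(bigpat)))
    print bool(r.search("hg.pyc"))           # True
    print bool(r.search("build/hg"))         # True
    print bool(r.search("mercurial/hg.py"))  # False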
315 def __del__(self):
329 def __del__(self):
316 if self.dirty:
330 if self.dirty:
317 self.write()
331 self.write()
318
332
319 def __getitem__(self, key):
333 def __getitem__(self, key):
320 try:
334 try:
321 return self.map[key]
335 return self.map[key]
322 except TypeError:
336 except TypeError:
323 self.read()
337 self.read()
324 return self[key]
338 return self[key]
325
339
326 def __contains__(self, key):
340 def __contains__(self, key):
327 if not self.map: self.read()
341 if not self.map: self.read()
328 return key in self.map
342 return key in self.map
329
343
330 def parents(self):
344 def parents(self):
331 if not self.pl:
345 if not self.pl:
332 self.read()
346 self.read()
333 return self.pl
347 return self.pl
334
348
335 def markdirty(self):
349 def markdirty(self):
336 if not self.dirty:
350 if not self.dirty:
337 self.dirty = 1
351 self.dirty = 1
338
352
339 def setparents(self, p1, p2 = nullid):
353 def setparents(self, p1, p2 = nullid):
340 self.markdirty()
354 self.markdirty()
341 self.pl = p1, p2
355 self.pl = p1, p2
342
356
343 def state(self, key):
357 def state(self, key):
344 try:
358 try:
345 return self[key][0]
359 return self[key][0]
346 except KeyError:
360 except KeyError:
347 return "?"
361 return "?"
348
362
349 def read(self):
363 def read(self):
350 if self.map is not None: return self.map
364 if self.map is not None: return self.map
351
365
352 self.map = {}
366 self.map = {}
353 self.pl = [nullid, nullid]
367 self.pl = [nullid, nullid]
354 try:
368 try:
355 st = self.opener("dirstate").read()
369 st = self.opener("dirstate").read()
356 if not st: return
370 if not st: return
357 except: return
371 except: return
358
372
359 self.pl = [st[:20], st[20: 40]]
373 self.pl = [st[:20], st[20: 40]]
360
374
361 pos = 40
375 pos = 40
362 while pos < len(st):
376 while pos < len(st):
363 e = struct.unpack(">cllll", st[pos:pos+17])
377 e = struct.unpack(">cllll", st[pos:pos+17])
364 l = e[4]
378 l = e[4]
365 pos += 17
379 pos += 17
366 f = st[pos:pos + l]
380 f = st[pos:pos + l]
367 if '\0' in f:
381 if '\0' in f:
368 f, c = f.split('\0')
382 f, c = f.split('\0')
369 self.copies[f] = c
383 self.copies[f] = c
370 self.map[f] = e[:4]
384 self.map[f] = e[:4]
371 pos += l
385 pos += l
372
386
373 def copy(self, source, dest):
387 def copy(self, source, dest):
374 self.read()
388 self.read()
375 self.markdirty()
389 self.markdirty()
376 self.copies[dest] = source
390 self.copies[dest] = source
377
391
378 def copied(self, file):
392 def copied(self, file):
379 return self.copies.get(file, None)
393 return self.copies.get(file, None)
380
394
381 def update(self, files, state):
395 def update(self, files, state):
382 ''' current states:
396 ''' current states:
383 n normal
397 n normal
384 m needs merging
398 m needs merging
385 r marked for removal
399 r marked for removal
386 a marked for addition'''
400 a marked for addition'''
387
401
388 if not files: return
402 if not files: return
389 self.read()
403 self.read()
390 self.markdirty()
404 self.markdirty()
391 for f in files:
405 for f in files:
392 if state == "r":
406 if state == "r":
393 self.map[f] = ('r', 0, 0, 0)
407 self.map[f] = ('r', 0, 0, 0)
394 else:
408 else:
395 s = os.stat(os.path.join(self.root, f))
409 s = os.stat(os.path.join(self.root, f))
396 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
397
411
398 def forget(self, files):
412 def forget(self, files):
399 if not files: return
413 if not files: return
400 self.read()
414 self.read()
401 self.markdirty()
415 self.markdirty()
402 for f in files:
416 for f in files:
403 try:
417 try:
404 del self.map[f]
418 del self.map[f]
405 except KeyError:
419 except KeyError:
406 self.ui.warn("not in dirstate: %s!\n" % f)
420 self.ui.warn("not in dirstate: %s!\n" % f)
407 pass
421 pass
408
422
409 def clear(self):
423 def clear(self):
410 self.map = {}
424 self.map = {}
411 self.markdirty()
425 self.markdirty()
412
426
413 def write(self):
427 def write(self):
414 st = self.opener("dirstate", "w")
428 st = self.opener("dirstate", "w")
415 st.write("".join(self.pl))
429 st.write("".join(self.pl))
416 for f, e in self.map.items():
430 for f, e in self.map.items():
417 c = self.copied(f)
431 c = self.copied(f)
418 if c:
432 if c:
419 f = f + "\0" + c
433 f = f + "\0" + c
420 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
421 st.write(e + f)
435 st.write(e + f)
422 self.dirty = 0
436 self.dirty = 0
423
437
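Each record written above is a one-byte state plus four big-endian longs (mode, size, mtime, name length) followed by the name itself: 17 bytes of fixed header per entry, the same layout read() walks with pos += 17. An illustrative pack/unpack round trip with invented values:

    import struct
    f = "src/hello.c"                                        # example path
    e = struct.pack(">cllll", "n", 0100644, 1024, 1119999999, len(f))
    record = e + f
    state, mode, size, mtime, l = struct.unpack(">cllll", record[:17])
    print state, oct(mode), size, l                          # n 0100644 1024 11
    print record[17:17 + l]                                  # src/hello.c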
424 def walk(self, files = None, match = util.always):
438 def walk(self, files = None, match = util.always):
425 self.read()
439 self.read()
426 dc = self.map.copy()
440 dc = self.map.copy()
427 # walk all files by default
441 # walk all files by default
428 if not files: files = [self.root]
442 if not files: files = [self.root]
429 def traverse():
443 def traverse():
430 for f in util.unique(files):
444 for f in util.unique(files):
431 f = os.path.join(self.root, f)
445 f = os.path.join(self.root, f)
432 if os.path.isdir(f):
446 if os.path.isdir(f):
433 for dir, subdirs, fl in os.walk(f):
447 for dir, subdirs, fl in os.walk(f):
434 d = dir[len(self.root) + 1:]
448 d = dir[len(self.root) + 1:]
435 if d == '.hg':
449 if d == '.hg':
436 subdirs[:] = []
450 subdirs[:] = []
437 continue
451 continue
438 for sd in subdirs:
452 for sd in subdirs:
439 ds = os.path.join(d, sd +'/')
453 ds = os.path.join(d, sd +'/')
440 if self.ignore(ds) or not match(ds):
454 if self.ignore(ds) or not match(ds):
441 subdirs.remove(sd)
455 subdirs.remove(sd)
442 for fn in fl:
456 for fn in fl:
443 fn = util.pconvert(os.path.join(d, fn))
457 fn = util.pconvert(os.path.join(d, fn))
444 yield 'f', fn
458 yield 'f', fn
445 else:
459 else:
446 yield 'f', f[len(self.root) + 1:]
460 yield 'f', f[len(self.root) + 1:]
447
461
448 for k in dc.keys():
462 for k in dc.keys():
449 yield 'm', k
463 yield 'm', k
450
464
451 # yield only files that match: all in dirstate, others only if
465 # yield only files that match: all in dirstate, others only if
452 # not in .hgignore
466 # not in .hgignore
453
467
454 for src, fn in util.unique(traverse()):
468 for src, fn in util.unique(traverse()):
455 if fn in dc:
469 if fn in dc:
456 del dc[fn]
470 del dc[fn]
457 elif self.ignore(fn):
471 elif self.ignore(fn):
458 continue
472 continue
459 if match(fn):
473 if match(fn):
460 yield src, fn
474 yield src, fn
461
475
462 def changes(self, files = None, match = util.always):
476 def changes(self, files = None, match = util.always):
463 self.read()
477 self.read()
464 dc = self.map.copy()
478 dc = self.map.copy()
465 lookup, changed, added, unknown = [], [], [], []
479 lookup, changed, added, unknown = [], [], [], []
466
480
467 for src, fn in self.walk(files, match):
481 for src, fn in self.walk(files, match):
468 try: s = os.stat(os.path.join(self.root, fn))
482 try: s = os.stat(os.path.join(self.root, fn))
469 except: continue
483 except: continue
470
484
471 if fn in dc:
485 if fn in dc:
472 c = dc[fn]
486 c = dc[fn]
473 del dc[fn]
487 del dc[fn]
474
488
475 if c[0] == 'm':
489 if c[0] == 'm':
476 changed.append(fn)
490 changed.append(fn)
477 elif c[0] == 'a':
491 elif c[0] == 'a':
478 added.append(fn)
492 added.append(fn)
479 elif c[0] == 'r':
493 elif c[0] == 'r':
480 unknown.append(fn)
494 unknown.append(fn)
481 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
495 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
482 changed.append(fn)
496 changed.append(fn)
483 elif c[1] != s.st_mode or c[3] != s.st_mtime:
497 elif c[1] != s.st_mode or c[3] != s.st_mtime:
484 lookup.append(fn)
498 lookup.append(fn)
485 else:
499 else:
486 if match(fn): unknown.append(fn)
500 if match(fn): unknown.append(fn)
487
501
488 return (lookup, changed, added, filter(match, dc.keys()), unknown)
502 return (lookup, changed, added, filter(match, dc.keys()), unknown)
489
503
490 # used to avoid circular references so destructors work
504 # used to avoid circular references so destructors work
491 def opener(base):
505 def opener(base):
492 p = base
506 p = base
493 def o(path, mode="r"):
507 def o(path, mode="r"):
494 if p.startswith("http://"):
508 if p.startswith("http://"):
495 f = os.path.join(p, urllib.quote(path))
509 f = os.path.join(p, urllib.quote(path))
496 return httprangereader.httprangereader(f)
510 return httprangereader.httprangereader(f)
497
511
498 f = os.path.join(p, path)
512 f = os.path.join(p, path)
499
513
500 mode += "b" # for that other OS
514 mode += "b" # for that other OS
501
515
502 if mode[0] != "r":
516 if mode[0] != "r":
503 try:
517 try:
504 s = os.stat(f)
518 s = os.stat(f)
505 except OSError:
519 except OSError:
506 d = os.path.dirname(f)
520 d = os.path.dirname(f)
507 if not os.path.isdir(d):
521 if not os.path.isdir(d):
508 os.makedirs(d)
522 os.makedirs(d)
509 else:
523 else:
510 if s.st_nlink > 1:
524 if s.st_nlink > 1:
511 file(f + ".tmp", "wb").write(file(f, "rb").read())
525 file(f + ".tmp", "wb").write(file(f, "rb").read())
512 util.rename(f+".tmp", f)
526 util.rename(f+".tmp", f)
513
527
514 return file(f, mode)
528 return file(f, mode)
515
529
516 return o
530 return o
517
531
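The nlink check above breaks hard links before a write: when the target is shared (for example after a hardlinked copy of a repository), its contents go to a temporary file which is renamed back into place, so the other link keeps the old data. A hedged sketch of the same trick with an invented path (os.rename stands in for util.rename):

    import os
    f = "/tmp/example-00changelog.i"                 # illustrative path only
    if os.path.exists(f) and os.stat(f).st_nlink > 1:
        file(f + ".tmp", "wb").write(file(f, "rb").read())
        os.rename(f + ".tmp", f)
    out = file(f, "ab")                              # now safe to modify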
518 class RepoError(Exception): pass
532 class RepoError(Exception): pass
519
533
520 class localrepository:
534 class localrepository:
521 def __init__(self, ui, path=None, create=0):
535 def __init__(self, ui, path=None, create=0):
522 self.remote = 0
536 self.remote = 0
523 if path and path.startswith("http://"):
537 if path and path.startswith("http://"):
524 self.remote = 1
538 self.remote = 1
525 self.path = path
539 self.path = path
526 else:
540 else:
527 if not path:
541 if not path:
528 p = os.getcwd()
542 p = os.getcwd()
529 while not os.path.isdir(os.path.join(p, ".hg")):
543 while not os.path.isdir(os.path.join(p, ".hg")):
530 oldp = p
544 oldp = p
531 p = os.path.dirname(p)
545 p = os.path.dirname(p)
532 if p == oldp: raise RepoError("no repo found")
546 if p == oldp: raise RepoError("no repo found")
533 path = p
547 path = p
534 self.path = os.path.join(path, ".hg")
548 self.path = os.path.join(path, ".hg")
535
549
536 if not create and not os.path.isdir(self.path):
550 if not create and not os.path.isdir(self.path):
537 raise RepoError("repository %s not found" % self.path)
551 raise RepoError("repository %s not found" % self.path)
538
552
539 self.root = path
553 self.root = path
540 self.ui = ui
554 self.ui = ui
541
555
542 if create:
556 if create:
543 os.mkdir(self.path)
557 os.mkdir(self.path)
544 os.mkdir(self.join("data"))
558 os.mkdir(self.join("data"))
545
559
546 self.opener = opener(self.path)
560 self.opener = opener(self.path)
547 self.wopener = opener(self.root)
561 self.wopener = opener(self.root)
548 self.manifest = manifest(self.opener)
562 self.manifest = manifest(self.opener)
549 self.changelog = changelog(self.opener)
563 self.changelog = changelog(self.opener)
550 self.tagscache = None
564 self.tagscache = None
551 self.nodetagscache = None
565 self.nodetagscache = None
552
566
553 if not self.remote:
567 if not self.remote:
554 self.dirstate = dirstate(self.opener, ui, self.root)
568 self.dirstate = dirstate(self.opener, ui, self.root)
555 try:
569 try:
556 self.ui.readconfig(self.opener("hgrc"))
570 self.ui.readconfig(self.opener("hgrc"))
557 except IOError: pass
571 except IOError: pass
558
572
559 def hook(self, name, **args):
573 def hook(self, name, **args):
560 s = self.ui.config("hooks", name)
574 s = self.ui.config("hooks", name)
561 if s:
575 if s:
562 self.ui.note("running hook %s: %s\n" % (name, s))
576 self.ui.note("running hook %s: %s\n" % (name, s))
563 old = {}
577 old = {}
564 for k, v in args.items():
578 for k, v in args.items():
565 k = k.upper()
579 k = k.upper()
566 old[k] = os.environ.get(k, None)
580 old[k] = os.environ.get(k, None)
567 os.environ[k] = v
581 os.environ[k] = v
568
582
569 r = os.system(s)
583 r = os.system(s)
570
584
571 for k, v in old.items():
585 for k, v in old.items():
572 if v != None:
586 if v != None:
573 os.environ[k] = v
587 os.environ[k] = v
574 else:
588 else:
575 del os.environ[k]
589 del os.environ[k]
576
590
577 if r:
591 if r:
578 self.ui.warn("abort: %s hook failed with status %d!\n" %
592 self.ui.warn("abort: %s hook failed with status %d!\n" %
579 (name, r))
593 (name, r))
580 return False
594 return False
581 return True
595 return True
582
596
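hook() above looks the command up in the [hooks] section of hgrc, exports the keyword arguments upper-cased into the environment, and treats a non-zero exit status as failure. Illustrative entries for the two hook names used in this file (the commands themselves are examples only):

    [hooks]
    precommit = echo about to commit
    commit = echo committed $NODE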
583 def tags(self):
597 def tags(self):
584 '''return a mapping of tag to node'''
598 '''return a mapping of tag to node'''
585 if not self.tagscache:
599 if not self.tagscache:
586 self.tagscache = {}
600 self.tagscache = {}
587 def addtag(self, k, n):
601 def addtag(self, k, n):
588 try:
602 try:
589 bin_n = bin(n)
603 bin_n = bin(n)
590 except TypeError:
604 except TypeError:
591 bin_n = ''
605 bin_n = ''
592 self.tagscache[k.strip()] = bin_n
606 self.tagscache[k.strip()] = bin_n
593
607
594 try:
608 try:
595 # read each head of the tags file, ending with the tip
609 # read each head of the tags file, ending with the tip
596 # and add each tag found to the map, with "newer" ones
610 # and add each tag found to the map, with "newer" ones
597 # taking precedence
611 # taking precedence
598 fl = self.file(".hgtags")
612 fl = self.file(".hgtags")
599 h = fl.heads()
613 h = fl.heads()
600 h.reverse()
614 h.reverse()
601 for r in h:
615 for r in h:
602 for l in fl.revision(r).splitlines():
616 for l in fl.revision(r).splitlines():
603 if l:
617 if l:
604 n, k = l.split(" ", 1)
618 n, k = l.split(" ", 1)
605 addtag(self, k, n)
619 addtag(self, k, n)
606 except KeyError:
620 except KeyError:
607 pass
621 pass
608
622
609 try:
623 try:
610 f = self.opener("localtags")
624 f = self.opener("localtags")
611 for l in f:
625 for l in f:
612 n, k = l.split(" ", 1)
626 n, k = l.split(" ", 1)
613 addtag(self, k, n)
627 addtag(self, k, n)
614 except IOError:
628 except IOError:
615 pass
629 pass
616
630
617 self.tagscache['tip'] = self.changelog.tip()
631 self.tagscache['tip'] = self.changelog.tip()
618
632
619 return self.tagscache
633 return self.tagscache
620
634
621 def tagslist(self):
635 def tagslist(self):
622 '''return a list of tags ordered by revision'''
636 '''return a list of tags ordered by revision'''
623 l = []
637 l = []
624 for t, n in self.tags().items():
638 for t, n in self.tags().items():
625 try:
639 try:
626 r = self.changelog.rev(n)
640 r = self.changelog.rev(n)
627 except:
641 except:
628 r = -2 # sort to the beginning of the list if unknown
642 r = -2 # sort to the beginning of the list if unknown
629 l.append((r,t,n))
643 l.append((r,t,n))
630 l.sort()
644 l.sort()
631 return [(t,n) for r,t,n in l]
645 return [(t,n) for r,t,n in l]
632
646
633 def nodetags(self, node):
647 def nodetags(self, node):
634 '''return the tags associated with a node'''
648 '''return the tags associated with a node'''
635 if not self.nodetagscache:
649 if not self.nodetagscache:
636 self.nodetagscache = {}
650 self.nodetagscache = {}
637 for t,n in self.tags().items():
651 for t,n in self.tags().items():
638 self.nodetagscache.setdefault(n,[]).append(t)
652 self.nodetagscache.setdefault(n,[]).append(t)
639 return self.nodetagscache.get(node, [])
653 return self.nodetagscache.get(node, [])
640
654
641 def lookup(self, key):
655 def lookup(self, key):
642 try:
656 try:
643 return self.tags()[key]
657 return self.tags()[key]
644 except KeyError:
658 except KeyError:
645 try:
659 try:
646 return self.changelog.lookup(key)
660 return self.changelog.lookup(key)
647 except:
661 except:
648 raise RepoError("unknown revision '%s'" % key)
662 raise RepoError("unknown revision '%s'" % key)
649
663
650 def dev(self):
664 def dev(self):
651 if self.remote: return -1
665 if self.remote: return -1
652 return os.stat(self.path).st_dev
666 return os.stat(self.path).st_dev
653
667
654 def join(self, f):
668 def join(self, f):
655 return os.path.join(self.path, f)
669 return os.path.join(self.path, f)
656
670
657 def wjoin(self, f):
671 def wjoin(self, f):
658 return os.path.join(self.root, f)
672 return os.path.join(self.root, f)
659
673
660 def file(self, f):
674 def file(self, f):
661 if f[0] == '/': f = f[1:]
675 if f[0] == '/': f = f[1:]
662 return filelog(self.opener, f)
676 return filelog(self.opener, f)
663
677
664 def getcwd(self):
678 def getcwd(self):
665 cwd = os.getcwd()
679 cwd = os.getcwd()
666 if cwd == self.root: return ''
680 if cwd == self.root: return ''
667 return cwd[len(self.root) + 1:]
681 return cwd[len(self.root) + 1:]
668
682
669 def wfile(self, f, mode='r'):
683 def wfile(self, f, mode='r'):
670 return self.wopener(f, mode)
684 return self.wopener(f, mode)
671
685
672 def transaction(self):
686 def transaction(self):
673 # save dirstate for undo
687 # save dirstate for undo
674 try:
688 try:
675 ds = self.opener("dirstate").read()
689 ds = self.opener("dirstate").read()
676 except IOError:
690 except IOError:
677 ds = ""
691 ds = ""
678 self.opener("journal.dirstate", "w").write(ds)
692 self.opener("journal.dirstate", "w").write(ds)
679
693
680 def after():
694 def after():
681 util.rename(self.join("journal"), self.join("undo"))
695 util.rename(self.join("journal"), self.join("undo"))
682 util.rename(self.join("journal.dirstate"),
696 util.rename(self.join("journal.dirstate"),
683 self.join("undo.dirstate"))
697 self.join("undo.dirstate"))
684
698
685 return transaction.transaction(self.ui.warn, self.opener,
699 return transaction.transaction(self.ui.warn, self.opener,
686 self.join("journal"), after)
700 self.join("journal"), after)
687
701
688 def recover(self):
702 def recover(self):
689 lock = self.lock()
703 lock = self.lock()
690 if os.path.exists(self.join("journal")):
704 if os.path.exists(self.join("journal")):
691 self.ui.status("rolling back interrupted transaction\n")
705 self.ui.status("rolling back interrupted transaction\n")
692 return transaction.rollback(self.opener, self.join("journal"))
706 return transaction.rollback(self.opener, self.join("journal"))
693 else:
707 else:
694 self.ui.warn("no interrupted transaction available\n")
708 self.ui.warn("no interrupted transaction available\n")
695
709
696 def undo(self):
710 def undo(self):
697 lock = self.lock()
711 lock = self.lock()
698 if os.path.exists(self.join("undo")):
712 if os.path.exists(self.join("undo")):
699 self.ui.status("rolling back last transaction\n")
713 self.ui.status("rolling back last transaction\n")
700 transaction.rollback(self.opener, self.join("undo"))
714 transaction.rollback(self.opener, self.join("undo"))
701 self.dirstate = None
715 self.dirstate = None
702 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
716 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
703 self.dirstate = dirstate(self.opener, self.ui, self.root)
717 self.dirstate = dirstate(self.opener, self.ui, self.root)
704 else:
718 else:
705 self.ui.warn("no undo information available\n")
719 self.ui.warn("no undo information available\n")
706
720
707 def lock(self, wait = 1):
721 def lock(self, wait = 1):
708 try:
722 try:
709 return lock.lock(self.join("lock"), 0)
723 return lock.lock(self.join("lock"), 0)
710 except lock.LockHeld, inst:
724 except lock.LockHeld, inst:
711 if wait:
725 if wait:
712 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
726 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
713 return lock.lock(self.join("lock"), wait)
727 return lock.lock(self.join("lock"), wait)
714 raise inst
728 raise inst
715
729
716 def rawcommit(self, files, text, user, date, p1=None, p2=None):
730 def rawcommit(self, files, text, user, date, p1=None, p2=None):
717 orig_parent = self.dirstate.parents()[0] or nullid
731 orig_parent = self.dirstate.parents()[0] or nullid
718 p1 = p1 or self.dirstate.parents()[0] or nullid
732 p1 = p1 or self.dirstate.parents()[0] or nullid
719 p2 = p2 or self.dirstate.parents()[1] or nullid
733 p2 = p2 or self.dirstate.parents()[1] or nullid
720 c1 = self.changelog.read(p1)
734 c1 = self.changelog.read(p1)
721 c2 = self.changelog.read(p2)
735 c2 = self.changelog.read(p2)
722 m1 = self.manifest.read(c1[0])
736 m1 = self.manifest.read(c1[0])
723 mf1 = self.manifest.readflags(c1[0])
737 mf1 = self.manifest.readflags(c1[0])
724 m2 = self.manifest.read(c2[0])
738 m2 = self.manifest.read(c2[0])
725
739
726 if orig_parent == p1:
740 if orig_parent == p1:
727 update_dirstate = 1
741 update_dirstate = 1
728 else:
742 else:
729 update_dirstate = 0
743 update_dirstate = 0
730
744
731 tr = self.transaction()
745 tr = self.transaction()
732 mm = m1.copy()
746 mm = m1.copy()
733 mfm = mf1.copy()
747 mfm = mf1.copy()
734 linkrev = self.changelog.count()
748 linkrev = self.changelog.count()
735 for f in files:
749 for f in files:
736 try:
750 try:
737 t = self.wfile(f).read()
751 t = self.wfile(f).read()
738 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
752 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
739 r = self.file(f)
753 r = self.file(f)
740 mfm[f] = tm
754 mfm[f] = tm
741 mm[f] = r.add(t, {}, tr, linkrev,
755 mm[f] = r.add(t, {}, tr, linkrev,
742 m1.get(f, nullid), m2.get(f, nullid))
756 m1.get(f, nullid), m2.get(f, nullid))
743 if update_dirstate:
757 if update_dirstate:
744 self.dirstate.update([f], "n")
758 self.dirstate.update([f], "n")
745 except IOError:
759 except IOError:
746 try:
760 try:
747 del mm[f]
761 del mm[f]
748 del mfm[f]
762 del mfm[f]
749 if update_dirstate:
763 if update_dirstate:
750 self.dirstate.forget([f])
764 self.dirstate.forget([f])
751 except:
765 except:
752 # deleted from p2?
766 # deleted from p2?
753 pass
767 pass
754
768
755 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
769 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
756 user = user or self.ui.username()
770 user = user or self.ui.username()
757 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
771 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
758 tr.close()
772 tr.close()
759 if update_dirstate:
773 if update_dirstate:
760 self.dirstate.setparents(n, nullid)
774 self.dirstate.setparents(n, nullid)
761
775
762 def commit(self, files = None, text = "", user = None, date = None):
776 def commit(self, files = None, text = "", user = None, date = None):
763 commit = []
777 commit = []
764 remove = []
778 remove = []
765 if files:
779 if files:
766 for f in files:
780 for f in files:
767 s = self.dirstate.state(f)
781 s = self.dirstate.state(f)
768 if s in 'nmai':
782 if s in 'nmai':
769 commit.append(f)
783 commit.append(f)
770 elif s == 'r':
784 elif s == 'r':
771 remove.append(f)
785 remove.append(f)
772 else:
786 else:
773 self.ui.warn("%s not tracked!\n" % f)
787 self.ui.warn("%s not tracked!\n" % f)
774 else:
788 else:
775 (c, a, d, u) = self.changes()
789 (c, a, d, u) = self.changes()
776 commit = c + a
790 commit = c + a
777 remove = d
791 remove = d
778
792
779 if not commit and not remove:
793 if not commit and not remove:
780 self.ui.status("nothing changed\n")
794 self.ui.status("nothing changed\n")
781 return
795 return
782
796
783 if not self.hook("precommit"):
797 if not self.hook("precommit"):
784 return 1
798 return 1
785
799
786 p1, p2 = self.dirstate.parents()
800 p1, p2 = self.dirstate.parents()
787 c1 = self.changelog.read(p1)
801 c1 = self.changelog.read(p1)
788 c2 = self.changelog.read(p2)
802 c2 = self.changelog.read(p2)
789 m1 = self.manifest.read(c1[0])
803 m1 = self.manifest.read(c1[0])
790 mf1 = self.manifest.readflags(c1[0])
804 mf1 = self.manifest.readflags(c1[0])
791 m2 = self.manifest.read(c2[0])
805 m2 = self.manifest.read(c2[0])
792 lock = self.lock()
806 lock = self.lock()
793 tr = self.transaction()
807 tr = self.transaction()
794
808
795 # check in files
809 # check in files
796 new = {}
810 new = {}
797 linkrev = self.changelog.count()
811 linkrev = self.changelog.count()
798 commit.sort()
812 commit.sort()
799 for f in commit:
813 for f in commit:
800 self.ui.note(f + "\n")
814 self.ui.note(f + "\n")
801 try:
815 try:
802 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
816 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
803 t = self.wfile(f).read()
817 t = self.wfile(f).read()
804 except IOError:
818 except IOError:
805 self.ui.warn("trouble committing %s!\n" % f)
819 self.ui.warn("trouble committing %s!\n" % f)
806 raise
820 raise
807
821
808 meta = {}
822 meta = {}
809 cp = self.dirstate.copied(f)
823 cp = self.dirstate.copied(f)
810 if cp:
824 if cp:
811 meta["copy"] = cp
825 meta["copy"] = cp
812 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
826 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
813 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
827 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
814
828
815 r = self.file(f)
829 r = self.file(f)
816 fp1 = m1.get(f, nullid)
830 fp1 = m1.get(f, nullid)
817 fp2 = m2.get(f, nullid)
831 fp2 = m2.get(f, nullid)
818 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
832 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
819
833
820 # update manifest
834 # update manifest
821 m1.update(new)
835 m1.update(new)
822 for f in remove:
836 for f in remove:
823 if f in m1:
837 if f in m1:
824 del m1[f]
838 del m1[f]
825 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
839 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
826 (new, remove))
840 (new, remove))
827
841
828 # add changeset
842 # add changeset
829 new = new.keys()
843 new = new.keys()
830 new.sort()
844 new.sort()
831
845
832 if not text:
846 if not text:
833 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
847 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
834 edittext += "".join(["HG: changed %s\n" % f for f in new])
848 edittext += "".join(["HG: changed %s\n" % f for f in new])
835 edittext += "".join(["HG: removed %s\n" % f for f in remove])
849 edittext += "".join(["HG: removed %s\n" % f for f in remove])
836 edittext = self.ui.edit(edittext)
850 edittext = self.ui.edit(edittext)
837 if not edittext.rstrip():
851 if not edittext.rstrip():
838 return 1
852 return 1
839 text = edittext
853 text = edittext
840
854
841 user = user or self.ui.username()
855 user = user or self.ui.username()
842 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
856 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
843
857
844 tr.close()
858 tr.close()
845
859
846 self.dirstate.setparents(n)
860 self.dirstate.setparents(n)
847 self.dirstate.update(new, "n")
861 self.dirstate.update(new, "n")
848 self.dirstate.forget(remove)
862 self.dirstate.forget(remove)
849
863
850 if not self.hook("commit", node=hex(n)):
864 if not self.hook("commit", node=hex(n)):
851 return 1
865 return 1
852
866
853 def walk(self, node = None, files = [], match = util.always):
867 def walk(self, node = None, files = [], match = util.always):
854 if node:
868 if node:
855 for fn in self.manifest.read(self.changelog.read(node)[0]):
869 for fn in self.manifest.read(self.changelog.read(node)[0]):
856 yield 'm', fn
870 yield 'm', fn
857 else:
871 else:
858 for src, fn in self.dirstate.walk(files, match):
872 for src, fn in self.dirstate.walk(files, match):
859 yield src, fn
873 yield src, fn
860
874
861 def changes(self, node1 = None, node2 = None, files = [],
875 def changes(self, node1 = None, node2 = None, files = [],
862 match = util.always):
876 match = util.always):
863 mf2, u = None, []
877 mf2, u = None, []
864
878
865 def fcmp(fn, mf):
879 def fcmp(fn, mf):
866 t1 = self.wfile(fn).read()
880 t1 = self.wfile(fn).read()
867 t2 = self.file(fn).revision(mf[fn])
881 t2 = self.file(fn).revision(mf[fn])
868 return cmp(t1, t2)
882 return cmp(t1, t2)
869
883
870 def mfmatches(node):
884 def mfmatches(node):
871 mf = dict(self.manifest.read(node))
885 mf = dict(self.manifest.read(node))
872 for fn in mf.keys():
886 for fn in mf.keys():
873 if not match(fn):
887 if not match(fn):
874 del mf[fn]
888 del mf[fn]
875 return mf
889 return mf
876
890
877 # are we comparing the working directory?
891 # are we comparing the working directory?
878 if not node2:
892 if not node2:
879 l, c, a, d, u = self.dirstate.changes(files, match)
893 l, c, a, d, u = self.dirstate.changes(files, match)
880
894
881 # are we comparing working dir against its parent?
895 # are we comparing working dir against its parent?
882 if not node1:
896 if not node1:
883 if l:
897 if l:
884 # do a full compare of any files that might have changed
898 # do a full compare of any files that might have changed
885 change = self.changelog.read(self.dirstate.parents()[0])
899 change = self.changelog.read(self.dirstate.parents()[0])
886 mf2 = mfmatches(change[0])
900 mf2 = mfmatches(change[0])
887 for f in l:
901 for f in l:
888 if fcmp(f, mf2):
902 if fcmp(f, mf2):
889 c.append(f)
903 c.append(f)
890
904
891 for l in c, a, d, u:
905 for l in c, a, d, u:
892 l.sort()
906 l.sort()
893
907
894 return (c, a, d, u)
908 return (c, a, d, u)
895
909
896 # are we comparing working dir against non-tip?
910 # are we comparing working dir against non-tip?
897 # generate a pseudo-manifest for the working dir
911 # generate a pseudo-manifest for the working dir
898 if not node2:
912 if not node2:
899 if not mf2:
913 if not mf2:
900 change = self.changelog.read(self.dirstate.parents()[0])
914 change = self.changelog.read(self.dirstate.parents()[0])
901 mf2 = mfmatches(change[0])
915 mf2 = mfmatches(change[0])
902 for f in a + c + l:
916 for f in a + c + l:
903 mf2[f] = ""
917 mf2[f] = ""
904 for f in d:
918 for f in d:
905 if f in mf2: del mf2[f]
919 if f in mf2: del mf2[f]
906 else:
920 else:
907 change = self.changelog.read(node2)
921 change = self.changelog.read(node2)
908 mf2 = mfmatches(change[0])
922 mf2 = mfmatches(change[0])
909
923
910 # flush lists from dirstate before comparing manifests
924 # flush lists from dirstate before comparing manifests
911 c, a = [], []
925 c, a = [], []
912
926
913 change = self.changelog.read(node1)
927 change = self.changelog.read(node1)
914 mf1 = mfmatches(change[0])
928 mf1 = mfmatches(change[0])
915
929
916 for fn in mf2:
930 for fn in mf2:
917 if mf1.has_key(fn):
931 if mf1.has_key(fn):
918 if mf1[fn] != mf2[fn]:
932 if mf1[fn] != mf2[fn]:
919 if mf2[fn] != "" or fcmp(fn, mf1):
933 if mf2[fn] != "" or fcmp(fn, mf1):
920 c.append(fn)
934 c.append(fn)
921 del mf1[fn]
935 del mf1[fn]
922 else:
936 else:
923 a.append(fn)
937 a.append(fn)
924
938
925 d = mf1.keys()
939 d = mf1.keys()
926
940
927 for l in c, a, d, u:
941 for l in c, a, d, u:
928 l.sort()
942 l.sort()
929
943
930 return (c, a, d, u)
944 return (c, a, d, u)
931
945
932 def add(self, list):
946 def add(self, list):
933 for f in list:
947 for f in list:
934 p = self.wjoin(f)
948 p = self.wjoin(f)
935 if not os.path.exists(p):
949 if not os.path.exists(p):
936 self.ui.warn("%s does not exist!\n" % f)
950 self.ui.warn("%s does not exist!\n" % f)
937 elif not os.path.isfile(p):
951 elif not os.path.isfile(p):
938 self.ui.warn("%s not added: only files supported currently\n" % f)
952 self.ui.warn("%s not added: only files supported currently\n" % f)
939 elif self.dirstate.state(f) in 'an':
953 elif self.dirstate.state(f) in 'an':
940 self.ui.warn("%s already tracked!\n" % f)
954 self.ui.warn("%s already tracked!\n" % f)
941 else:
955 else:
942 self.dirstate.update([f], "a")
956 self.dirstate.update([f], "a")
943
957
944 def forget(self, list):
958 def forget(self, list):
945 for f in list:
959 for f in list:
946 if self.dirstate.state(f) not in 'ai':
960 if self.dirstate.state(f) not in 'ai':
947 self.ui.warn("%s not added!\n" % f)
961 self.ui.warn("%s not added!\n" % f)
948 else:
962 else:
949 self.dirstate.forget([f])
963 self.dirstate.forget([f])
950
964
951 def remove(self, list):
965 def remove(self, list):
952 for f in list:
966 for f in list:
953 p = self.wjoin(f)
967 p = self.wjoin(f)
954 if os.path.exists(p):
968 if os.path.exists(p):
955 self.ui.warn("%s still exists!\n" % f)
969 self.ui.warn("%s still exists!\n" % f)
956 elif self.dirstate.state(f) == 'a':
970 elif self.dirstate.state(f) == 'a':
957 self.ui.warn("%s never committed!\n" % f)
971 self.ui.warn("%s never committed!\n" % f)
958 self.dirstate.forget([f])
972 self.dirstate.forget([f])
959 elif f not in self.dirstate:
973 elif f not in self.dirstate:
960 self.ui.warn("%s not tracked!\n" % f)
974 self.ui.warn("%s not tracked!\n" % f)
961 else:
975 else:
962 self.dirstate.update([f], "r")
976 self.dirstate.update([f], "r")
963
977
964 def copy(self, source, dest):
978 def copy(self, source, dest):
965 p = self.wjoin(dest)
979 p = self.wjoin(dest)
966 if not os.path.exists(p):
980 if not os.path.exists(p):
967 self.ui.warn("%s does not exist!\n" % dest)
981 self.ui.warn("%s does not exist!\n" % dest)
968 elif not os.path.isfile(p):
982 elif not os.path.isfile(p):
969 self.ui.warn("copy failed: %s is not a file\n" % dest)
983 self.ui.warn("copy failed: %s is not a file\n" % dest)
970 else:
984 else:
971 if self.dirstate.state(dest) == '?':
985 if self.dirstate.state(dest) == '?':
972 self.dirstate.update([dest], "a")
986 self.dirstate.update([dest], "a")
973 self.dirstate.copy(source, dest)
987 self.dirstate.copy(source, dest)
974
988
975 def heads(self):
989 def heads(self):
976 return self.changelog.heads()
990 return self.changelog.heads()
977
991
978 def branches(self, nodes):
992 def branches(self, nodes):
979 if not nodes: nodes = [self.changelog.tip()]
993 if not nodes: nodes = [self.changelog.tip()]
980 b = []
994 b = []
981 for n in nodes:
995 for n in nodes:
982 t = n
996 t = n
983 while n:
997 while n:
984 p = self.changelog.parents(n)
998 p = self.changelog.parents(n)
985 if p[1] != nullid or p[0] == nullid:
999 if p[1] != nullid or p[0] == nullid:
986 b.append((t, n, p[0], p[1]))
1000 b.append((t, n, p[0], p[1]))
987 break
1001 break
988 n = p[0]
1002 n = p[0]
989 return b
1003 return b
990
1004
991 def between(self, pairs):
1005 def between(self, pairs):
992 r = []
1006 r = []
993
1007
994 for top, bottom in pairs:
1008 for top, bottom in pairs:
995 n, l, i = top, [], 0
1009 n, l, i = top, [], 0
996 f = 1
1010 f = 1
997
1011
998 while n != bottom:
1012 while n != bottom:
999 p = self.changelog.parents(n)[0]
1013 p = self.changelog.parents(n)[0]
1000 if i == f:
1014 if i == f:
1001 l.append(n)
1015 l.append(n)
1002 f = f * 2
1016 f = f * 2
1003 n = p
1017 n = p
1004 i += 1
1018 i += 1
1005
1019
1006 r.append(l)
1020 r.append(l)
1007
1021
1008 return r
1022 return r
1009
1023
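between() samples each requested ancestor chain at exponentially growing distances (1, 2, 4, 8, ... steps below the top), which is what lets findincoming below narrow long unknown branches with few round trips. A toy sketch where revisions are plain integers and the parent of n is simply n - 1:

    def sample(top, bottom):
        n, l, i, f = top, [], 0, 1
        while n != bottom:
            if i == f:
                l.append(n)
                f *= 2
            n -= 1
            i += 1
        return l

    print sample(20, 0)      # [19, 18, 16, 12, 4]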
1010 def newer(self, nodes):
1024 def newer(self, nodes):
1011 m = {}
1025 m = {}
1012 nl = []
1026 nl = []
1013 pm = {}
1027 pm = {}
1014 cl = self.changelog
1028 cl = self.changelog
1015 t = l = cl.count()
1029 t = l = cl.count()
1016
1030
1017 # find the lowest numbered node
1031 # find the lowest numbered node
1018 for n in nodes:
1032 for n in nodes:
1019 l = min(l, cl.rev(n))
1033 l = min(l, cl.rev(n))
1020 m[n] = 1
1034 m[n] = 1
1021
1035
1022 for i in xrange(l, t):
1036 for i in xrange(l, t):
1023 n = cl.node(i)
1037 n = cl.node(i)
1024 if n in m: # explicitly listed
1038 if n in m: # explicitly listed
1025 pm[n] = 1
1039 pm[n] = 1
1026 nl.append(n)
1040 nl.append(n)
1027 continue
1041 continue
1028 for p in cl.parents(n):
1042 for p in cl.parents(n):
1029 if p in pm: # parent listed
1043 if p in pm: # parent listed
1030 pm[n] = 1
1044 pm[n] = 1
1031 nl.append(n)
1045 nl.append(n)
1032 break
1046 break
1033
1047
1034 return nl
1048 return nl
1035
1049
1036 def findincoming(self, remote, base={}):
1050 def findincoming(self, remote, base={}):
1037 m = self.changelog.nodemap
1051 m = self.changelog.nodemap
1038 search = []
1052 search = []
1039 fetch = []
1053 fetch = []
1040 seen = {}
1054 seen = {}
1041 seenbranch = {}
1055 seenbranch = {}
1042
1056
1043 # assume we're closer to the tip than the root
1057 # assume we're closer to the tip than the root
1044 # and start by examining the heads
1058 # and start by examining the heads
1045 self.ui.status("searching for changes\n")
1059 self.ui.status("searching for changes\n")
1046 heads = remote.heads()
1060 heads = remote.heads()
1047 unknown = []
1061 unknown = []
1048 for h in heads:
1062 for h in heads:
1049 if h not in m:
1063 if h not in m:
1050 unknown.append(h)
1064 unknown.append(h)
1051 else:
1065 else:
1052 base[h] = 1
1066 base[h] = 1
1053
1067
1054 if not unknown:
1068 if not unknown:
1055 return None
1069 return None
1056
1070
1057 rep = {}
1071 rep = {}
1058 reqcnt = 0
1072 reqcnt = 0
1059
1073
1060 # search through remote branches
1074 # search through remote branches
1061 # a 'branch' here is a linear segment of history, with four parts:
1075 # a 'branch' here is a linear segment of history, with four parts:
1062 # head, root, first parent, second parent
1076 # head, root, first parent, second parent
1063 # (a branch always has two parents (or none) by definition)
1077 # (a branch always has two parents (or none) by definition)
1064 unknown = remote.branches(unknown)
1078 unknown = remote.branches(unknown)
1065 while unknown:
1079 while unknown:
1066 r = []
1080 r = []
1067 while unknown:
1081 while unknown:
1068 n = unknown.pop(0)
1082 n = unknown.pop(0)
1069 if n[0] in seen:
1083 if n[0] in seen:
1070 continue
1084 continue
1071
1085
1072 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1086 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1073 if n[0] == nullid:
1087 if n[0] == nullid:
1074 break
1088 break
1075 if n in seenbranch:
1089 if n in seenbranch:
1076 self.ui.debug("branch already found\n")
1090 self.ui.debug("branch already found\n")
1077 continue
1091 continue
1078 if n[1] and n[1] in m: # do we know the base?
1092 if n[1] and n[1] in m: # do we know the base?
1079 self.ui.debug("found incomplete branch %s:%s\n"
1093 self.ui.debug("found incomplete branch %s:%s\n"
1080 % (short(n[0]), short(n[1])))
1094 % (short(n[0]), short(n[1])))
1081 search.append(n) # schedule branch range for scanning
1095 search.append(n) # schedule branch range for scanning
1082 seenbranch[n] = 1
1096 seenbranch[n] = 1
1083 else:
1097 else:
1084 if n[1] not in seen and n[1] not in fetch:
1098 if n[1] not in seen and n[1] not in fetch:
1085 if n[2] in m and n[3] in m:
1099 if n[2] in m and n[3] in m:
1086 self.ui.debug("found new changeset %s\n" %
1100 self.ui.debug("found new changeset %s\n" %
1087 short(n[1]))
1101 short(n[1]))
1088 fetch.append(n[1]) # earliest unknown
1102 fetch.append(n[1]) # earliest unknown
1089 base[n[2]] = 1 # latest known
1103 base[n[2]] = 1 # latest known
1090 continue
1104 continue
1091
1105
1092 for a in n[2:4]:
1106 for a in n[2:4]:
1093 if a not in rep:
1107 if a not in rep:
1094 r.append(a)
1108 r.append(a)
1095 rep[a] = 1
1109 rep[a] = 1
1096
1110
1097 seen[n[0]] = 1
1111 seen[n[0]] = 1
1098
1112
1099 if r:
1113 if r:
1100 reqcnt += 1
1114 reqcnt += 1
1101 self.ui.debug("request %d: %s\n" %
1115 self.ui.debug("request %d: %s\n" %
1102 (reqcnt, " ".join(map(short, r))))
1116 (reqcnt, " ".join(map(short, r))))
1103 for p in range(0, len(r), 10):
1117 for p in range(0, len(r), 10):
1104 for b in remote.branches(r[p:p+10]):
1118 for b in remote.branches(r[p:p+10]):
1105 self.ui.debug("received %s:%s\n" %
1119 self.ui.debug("received %s:%s\n" %
1106 (short(b[0]), short(b[1])))
1120 (short(b[0]), short(b[1])))
1107 if b[0] not in m and b[0] not in seen:
1121 if b[0] not in m and b[0] not in seen:
1108 unknown.append(b)
1122 unknown.append(b)
1109
1123
1110 # do binary search on the branches we found
1124 # do binary search on the branches we found
1111 while search:
1125 while search:
1112 n = search.pop(0)
1126 n = search.pop(0)
1113 reqcnt += 1
1127 reqcnt += 1
1114 l = remote.between([(n[0], n[1])])[0]
1128 l = remote.between([(n[0], n[1])])[0]
1115 l.append(n[1])
1129 l.append(n[1])
1116 p = n[0]
1130 p = n[0]
1117 f = 1
1131 f = 1
1118 for i in l:
1132 for i in l:
1119 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1133 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1120 if i in m:
1134 if i in m:
1121 if f <= 2:
1135 if f <= 2:
1122 self.ui.debug("found new branch changeset %s\n" %
1136 self.ui.debug("found new branch changeset %s\n" %
1123 short(p))
1137 short(p))
1124 fetch.append(p)
1138 fetch.append(p)
1125 base[i] = 1
1139 base[i] = 1
1126 else:
1140 else:
1127 self.ui.debug("narrowed branch search to %s:%s\n"
1141 self.ui.debug("narrowed branch search to %s:%s\n"
1128 % (short(p), short(i)))
1142 % (short(p), short(i)))
1129 search.append((p, i))
1143 search.append((p, i))
1130 break
1144 break
1131 p, f = i, f * 2
1145 p, f = i, f * 2
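# note: between() samples the segment at distances 1, 2, 4, 8, ... from the
# head, so f is the gap back to the previous sample p; when a known node
# shows up while f <= 2, p is close enough to be used as the fetch boundary,
# otherwise the (p, i) interval is pushed back onto search for more narrowing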
1132
1146
1133 # sanity check our fetch list
1147 # sanity check our fetch list
1134 for f in fetch:
1148 for f in fetch:
1135 if f in m:
1149 if f in m:
1136 raise RepoError("already have changeset " + short(f[:4]))
1150 raise RepoError("already have changeset " + short(f[:4]))
1137
1151
1138 if base.keys() == [nullid]:
1152 if base.keys() == [nullid]:
1139 self.ui.warn("warning: pulling from an unrelated repository!\n")
1153 self.ui.warn("warning: pulling from an unrelated repository!\n")
1140
1154
1141 self.ui.note("adding new changesets starting at " +
1155 self.ui.note("adding new changesets starting at " +
1142 " ".join([short(f) for f in fetch]) + "\n")
1156 " ".join([short(f) for f in fetch]) + "\n")
1143
1157
1144 self.ui.debug("%d total queries\n" % reqcnt)
1158 self.ui.debug("%d total queries\n" % reqcnt)
1145
1159
1146 return fetch
1160 return fetch
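# in short: None means every remote head is already known locally; otherwise
# fetch lists the earliest unknown changesets to request, and base has been
# filled in with changesets known to both repositories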
1147
1161
1148 def findoutgoing(self, remote):
1162 def findoutgoing(self, remote):
1149 base = {}
1163 base = {}
1150 self.findincoming(remote, base)
1164 self.findincoming(remote, base)
1151 remain = dict.fromkeys(self.changelog.nodemap)
1165 remain = dict.fromkeys(self.changelog.nodemap)
1152
1166
1153 # prune everything remote has from the tree
1167 # prune everything remote has from the tree
1154 del remain[nullid]
1168 del remain[nullid]
1155 remove = base.keys()
1169 remove = base.keys()
1156 while remove:
1170 while remove:
1157 n = remove.pop(0)
1171 n = remove.pop(0)
1158 if n in remain:
1172 if n in remain:
1159 del remain[n]
1173 del remain[n]
1160 for p in self.changelog.parents(n):
1174 for p in self.changelog.parents(n):
1161 remove.append(p)
1175 remove.append(p)
1162
1176
1163 # find every node whose parents have been pruned
1177 # find every node whose parents have been pruned
1164 subset = []
1178 subset = []
1165 for n in remain:
1179 for n in remain:
1166 p1, p2 = self.changelog.parents(n)
1180 p1, p2 = self.changelog.parents(n)
1167 if p1 not in remain and p2 not in remain:
1181 if p1 not in remain and p2 not in remain:
1168 subset.append(n)
1182 subset.append(n)
1169
1183
1170 # this is the set of all roots we have to push
1184 # this is the set of all roots we have to push
1171 return subset
1185 return subset
1172
1186
1173 def pull(self, remote):
1187 def pull(self, remote):
1174 lock = self.lock()
1188 lock = self.lock()
1175
1189
1176 # if we have an empty repo, fetch everything
1190 # if we have an empty repo, fetch everything
1177 if self.changelog.tip() == nullid:
1191 if self.changelog.tip() == nullid:
1178 self.ui.status("requesting all changes\n")
1192 self.ui.status("requesting all changes\n")
1179 fetch = [nullid]
1193 fetch = [nullid]
1180 else:
1194 else:
1181 fetch = self.findincoming(remote)
1195 fetch = self.findincoming(remote)
1182
1196
1183 if not fetch:
1197 if not fetch:
1184 self.ui.status("no changes found\n")
1198 self.ui.status("no changes found\n")
1185 return 1
1199 return 1
1186
1200
1187 cg = remote.changegroup(fetch)
1201 cg = remote.changegroup(fetch)
1188 return self.addchangegroup(cg)
1202 return self.addchangegroup(cg)
1189
1203
1190 def push(self, remote):
1204 def push(self, remote):
1191 lock = remote.lock()
1205 lock = remote.lock()
1192 update = self.findoutgoing(remote)
1206 update = self.findoutgoing(remote)
1193 if not update:
1207 if not update:
1194 self.ui.status("no changes found\n")
1208 self.ui.status("no changes found\n")
1195 return 1
1209 return 1
1196
1210
1197 cg = self.changegroup(update)
1211 cg = self.changegroup(update)
1198 return remote.addchangegroup(cg)
1212 return remote.addchangegroup(cg)
1199
1213
1200 def changegroup(self, basenodes):
1214 def changegroup(self, basenodes):
1201 class genread:
1215 class genread:
1202 def __init__(self, generator):
1216 def __init__(self, generator):
1203 self.g = generator
1217 self.g = generator
1204 self.buf = ""
1218 self.buf = ""
1205 def read(self, l):
1219 def read(self, l):
1206 while l > len(self.buf):
1220 while l > len(self.buf):
1207 try:
1221 try:
1208 self.buf += self.g.next()
1222 self.buf += self.g.next()
1209 except StopIteration:
1223 except StopIteration:
1210 break
1224 break
1211 d, self.buf = self.buf[:l], self.buf[l:]
1225 d, self.buf = self.buf[:l], self.buf[l:]
1212 return d
1226 return d
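# genread adapts the chunk generator built below into a file-like object,
# so the resulting changegroup can be consumed with read(n) calls, which is
# what addchangegroup expects from its source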
1213
1227
1214 def gengroup():
1228 def gengroup():
1215 nodes = self.newer(basenodes)
1229 nodes = self.newer(basenodes)
1216
1230
1217 # construct the link map
1231 # construct the link map
1218 linkmap = {}
1232 linkmap = {}
1219 for n in nodes:
1233 for n in nodes:
1220 linkmap[self.changelog.rev(n)] = n
1234 linkmap[self.changelog.rev(n)] = n
1221
1235
1222 # construct a list of all changed files
1236 # construct a list of all changed files
1223 changed = {}
1237 changed = {}
1224 for n in nodes:
1238 for n in nodes:
1225 c = self.changelog.read(n)
1239 c = self.changelog.read(n)
1226 for f in c[3]:
1240 for f in c[3]:
1227 changed[f] = 1
1241 changed[f] = 1
1228 changed = changed.keys()
1242 changed = changed.keys()
1229 changed.sort()
1243 changed.sort()
1230
1244
1231 # the changegroup is changesets + manifests + all file revs
1245 # the changegroup is changesets + manifests + all file revs
1232 revs = [ self.changelog.rev(n) for n in nodes ]
1246 revs = [ self.changelog.rev(n) for n in nodes ]
1233
1247
1234 for y in self.changelog.group(linkmap): yield y
1248 for y in self.changelog.group(linkmap): yield y
1235 for y in self.manifest.group(linkmap): yield y
1249 for y in self.manifest.group(linkmap): yield y
1236 for f in changed:
1250 for f in changed:
1237 yield struct.pack(">l", len(f) + 4) + f
1251 yield struct.pack(">l", len(f) + 4) + f
1238 g = self.file(f).group(linkmap)
1252 g = self.file(f).group(linkmap)
1239 for y in g:
1253 for y in g:
1240 yield y
1254 yield y
1241
1255
1242 yield struct.pack(">l", 0)
1256 yield struct.pack(">l", 0)
1243
1257
1244 return genread(gengroup())
1258 return genread(gengroup())
1245
1259
1246 def addchangegroup(self, source):
1260 def addchangegroup(self, source):
1247
1261
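# each chunk in the stream is a 4-byte big-endian length followed by its
# payload; the length counts the 4 length bytes themselves, so a value of
# 0 (or anything <= 4) marks the end of the current group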
1248 def getchunk():
1262 def getchunk():
1249 d = source.read(4)
1263 d = source.read(4)
1250 if not d: return ""
1264 if not d: return ""
1251 l = struct.unpack(">l", d)[0]
1265 l = struct.unpack(">l", d)[0]
1252 if l <= 4: return ""
1266 if l <= 4: return ""
1253 return source.read(l - 4)
1267 return source.read(l - 4)
1254
1268
1255 def getgroup():
1269 def getgroup():
1256 while 1:
1270 while 1:
1257 c = getchunk()
1271 c = getchunk()
1258 if not c: break
1272 if not c: break
1259 yield c
1273 yield c
1260
1274
1261 def csmap(x):
1275 def csmap(x):
1262 self.ui.debug("add changeset %s\n" % short(x))
1276 self.ui.debug("add changeset %s\n" % short(x))
1263 return self.changelog.count()
1277 return self.changelog.count()
1264
1278
1265 def revmap(x):
1279 def revmap(x):
1266 return self.changelog.rev(x)
1280 return self.changelog.rev(x)
1267
1281
1268 if not source: return
1282 if not source: return
1269 changesets = files = revisions = 0
1283 changesets = files = revisions = 0
1270
1284
1271 tr = self.transaction()
1285 tr = self.transaction()
1272
1286
1273 # pull off the changeset group
1287 # pull off the changeset group
1274 self.ui.status("adding changesets\n")
1288 self.ui.status("adding changesets\n")
1275 co = self.changelog.tip()
1289 co = self.changelog.tip()
1276 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1290 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1277 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1291 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1278
1292
1279 # pull off the manifest group
1293 # pull off the manifest group
1280 self.ui.status("adding manifests\n")
1294 self.ui.status("adding manifests\n")
1281 mm = self.manifest.tip()
1295 mm = self.manifest.tip()
1282 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1296 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1283
1297
1284 # process the files
1298 # process the files
1285 self.ui.status("adding file changes\n")
1299 self.ui.status("adding file changes\n")
1286 while 1:
1300 while 1:
1287 f = getchunk()
1301 f = getchunk()
1288 if not f: break
1302 if not f: break
1289 self.ui.debug("adding %s revisions\n" % f)
1303 self.ui.debug("adding %s revisions\n" % f)
1290 fl = self.file(f)
1304 fl = self.file(f)
1291 o = fl.count()
1305 o = fl.count()
1292 n = fl.addgroup(getgroup(), revmap, tr)
1306 n = fl.addgroup(getgroup(), revmap, tr)
1293 revisions += fl.count() - o
1307 revisions += fl.count() - o
1294 files += 1
1308 files += 1
1295
1309
1296 self.ui.status(("added %d changesets" +
1310 self.ui.status(("added %d changesets" +
1297 " with %d changes to %d files\n")
1311 " with %d changes to %d files\n")
1298 % (changesets, revisions, files))
1312 % (changesets, revisions, files))
1299
1313
1300 tr.close()
1314 tr.close()
1301
1315
1302 if not self.hook("changegroup"):
1316 if not self.hook("changegroup"):
1303 return 1
1317 return 1
1304
1318
1305 return
1319 return
1306
1320
1307 def update(self, node, allow=False, force=False, choose=None,
1321 def update(self, node, allow=False, force=False, choose=None,
1308 moddirstate=True):
1322 moddirstate=True):
1309 pl = self.dirstate.parents()
1323 pl = self.dirstate.parents()
1310 if not force and pl[1] != nullid:
1324 if not force and pl[1] != nullid:
1311 self.ui.warn("aborting: outstanding uncommitted merges\n")
1325 self.ui.warn("aborting: outstanding uncommitted merges\n")
1312 return 1
1326 return 1
1313
1327
1314 p1, p2 = pl[0], node
1328 p1, p2 = pl[0], node
1315 pa = self.changelog.ancestor(p1, p2)
1329 pa = self.changelog.ancestor(p1, p2)
1316 m1n = self.changelog.read(p1)[0]
1330 m1n = self.changelog.read(p1)[0]
1317 m2n = self.changelog.read(p2)[0]
1331 m2n = self.changelog.read(p2)[0]
1318 man = self.manifest.ancestor(m1n, m2n)
1332 man = self.manifest.ancestor(m1n, m2n)
1319 m1 = self.manifest.read(m1n)
1333 m1 = self.manifest.read(m1n)
1320 mf1 = self.manifest.readflags(m1n)
1334 mf1 = self.manifest.readflags(m1n)
1321 m2 = self.manifest.read(m2n)
1335 m2 = self.manifest.read(m2n)
1322 mf2 = self.manifest.readflags(m2n)
1336 mf2 = self.manifest.readflags(m2n)
1323 ma = self.manifest.read(man)
1337 ma = self.manifest.read(man)
1324 mfa = self.manifest.readflags(man)
1338 mfa = self.manifest.readflags(man)
1325
1339
1326 (c, a, d, u) = self.changes()
1340 (c, a, d, u) = self.changes()
1327
1341
1328 # is this a jump, or a merge? i.e. is there a linear path
1342 # is this a jump, or a merge? i.e. is there a linear path
1329 # from p1 to p2?
1343 # from p1 to p2?
1330 linear_path = (pa == p1 or pa == p2)
1344 linear_path = (pa == p1 or pa == p2)
1331
1345
1332 # resolve the manifest to determine which files
1346 # resolve the manifest to determine which files
1333 # we care about merging
1347 # we care about merging
1334 self.ui.note("resolving manifests\n")
1348 self.ui.note("resolving manifests\n")
1335 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1349 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1336 (force, allow, moddirstate, linear_path))
1350 (force, allow, moddirstate, linear_path))
1337 self.ui.debug(" ancestor %s local %s remote %s\n" %
1351 self.ui.debug(" ancestor %s local %s remote %s\n" %
1338 (short(man), short(m1n), short(m2n)))
1352 (short(man), short(m1n), short(m2n)))
1339
1353
1340 merge = {}
1354 merge = {}
1341 get = {}
1355 get = {}
1342 remove = []
1356 remove = []
1343 mark = {}
1357 mark = {}
1344
1358
1345 # construct a working dir manifest
1359 # construct a working dir manifest
1346 mw = m1.copy()
1360 mw = m1.copy()
1347 mfw = mf1.copy()
1361 mfw = mf1.copy()
1348 umap = dict.fromkeys(u)
1362 umap = dict.fromkeys(u)
1349
1363
1350 for f in a + c + u:
1364 for f in a + c + u:
1351 mw[f] = ""
1365 mw[f] = ""
1352 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1366 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1353
1367
1354 for f in d:
1368 for f in d:
1355 if f in mw: del mw[f]
1369 if f in mw: del mw[f]
1356
1370
1357 # If we're jumping between revisions (as opposed to merging),
1371 # If we're jumping between revisions (as opposed to merging),
1358 # and if neither the working directory nor the target rev has
1372 # and if neither the working directory nor the target rev has
1359 # the file, then we need to remove it from the dirstate, to
1373 # the file, then we need to remove it from the dirstate, to
1360 # prevent the dirstate from listing the file when it is no
1374 # prevent the dirstate from listing the file when it is no
1361 # longer in the manifest.
1375 # longer in the manifest.
1362 if moddirstate and linear_path and f not in m2:
1376 if moddirstate and linear_path and f not in m2:
1363 self.dirstate.forget((f,))
1377 self.dirstate.forget((f,))
1364
1378
1365 # Compare manifests
1379 # Compare manifests
1366 for f, n in mw.iteritems():
1380 for f, n in mw.iteritems():
1367 if choose and not choose(f): continue
1381 if choose and not choose(f): continue
1368 if f in m2:
1382 if f in m2:
1369 s = 0
1383 s = 0
1370
1384
1371 # is the working file new since m1, and does it match m2?
1385 # is the working file new since m1, and does it match m2?
1372 if f not in m1:
1386 if f not in m1:
1373 t1 = self.wfile(f).read()
1387 t1 = self.wfile(f).read()
1374 t2 = self.file(f).revision(m2[f])
1388 t2 = self.file(f).revision(m2[f])
1375 if cmp(t1, t2) == 0:
1389 if cmp(t1, t2) == 0:
1376 mark[f] = 1
1390 mark[f] = 1
1377 n = m2[f]
1391 n = m2[f]
1378 del t1, t2
1392 del t1, t2
1379
1393
1380 # are files different?
1394 # are files different?
1381 if n != m2[f]:
1395 if n != m2[f]:
1382 a = ma.get(f, nullid)
1396 a = ma.get(f, nullid)
1383 # are both different from the ancestor?
1397 # are both different from the ancestor?
1384 if n != a and m2[f] != a:
1398 if n != a and m2[f] != a:
1385 self.ui.debug(" %s versions differ, resolve\n" % f)
1399 self.ui.debug(" %s versions differ, resolve\n" % f)
1386 # merge executable bits
1400 # merge executable bits
1387 # "if we changed or they changed, change in merge"
1401 # "if we changed or they changed, change in merge"
1388 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1389 mode = ((a^b) | (a^c)) ^ a
1403 mode = ((a^b) | (a^c)) ^ a
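# worked example of the exec-bit merge: ancestor a=0, local b=1, remote c=0
# gives ((0^1)|(0^0))^0 = 1, keeping the bit we added; ancestor a=1,
# local b=1, remote c=0 gives ((1^1)|(1^0))^1 = 0, taking their removal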
1390 merge[f] = (m1.get(f, nullid), m2[f], mode)
1404 merge[f] = (m1.get(f, nullid), m2[f], mode)
1391 s = 1
1405 s = 1
1392 # are we clobbering?
1406 # are we clobbering?
1393 # is remote's version newer?
1407 # is remote's version newer?
1394 # or are we going back in time?
1408 # or are we going back in time?
1395 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1409 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1396 self.ui.debug(" remote %s is newer, get\n" % f)
1410 self.ui.debug(" remote %s is newer, get\n" % f)
1397 get[f] = m2[f]
1411 get[f] = m2[f]
1398 s = 1
1412 s = 1
1399 else:
1413 else:
1400 mark[f] = 1
1414 mark[f] = 1
1401 elif f in umap:
1415 elif f in umap:
1402 # this unknown file is the same as the checkout
1416 # this unknown file is the same as the checkout
1403 get[f] = m2[f]
1417 get[f] = m2[f]
1404
1418
1405 if not s and mfw[f] != mf2[f]:
1419 if not s and mfw[f] != mf2[f]:
1406 if force:
1420 if force:
1407 self.ui.debug(" updating permissions for %s\n" % f)
1421 self.ui.debug(" updating permissions for %s\n" % f)
1408 util.set_exec(self.wjoin(f), mf2[f])
1422 util.set_exec(self.wjoin(f), mf2[f])
1409 else:
1423 else:
1410 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1424 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1411 mode = ((a^b) | (a^c)) ^ a
1425 mode = ((a^b) | (a^c)) ^ a
1412 if mode != b:
1426 if mode != b:
1413 self.ui.debug(" updating permissions for %s\n" % f)
1427 self.ui.debug(" updating permissions for %s\n" % f)
1414 util.set_exec(self.wjoin(f), mode)
1428 util.set_exec(self.wjoin(f), mode)
1415 mark[f] = 1
1429 mark[f] = 1
1416 del m2[f]
1430 del m2[f]
1417 elif f in ma:
1431 elif f in ma:
1418 if n != ma[f]:
1432 if n != ma[f]:
1419 r = "d"
1433 r = "d"
1420 if not force and (linear_path or allow):
1434 if not force and (linear_path or allow):
1421 r = self.ui.prompt(
1435 r = self.ui.prompt(
1422 (" local changed %s which remote deleted\n" % f) +
1436 (" local changed %s which remote deleted\n" % f) +
1423 "(k)eep or (d)elete?", "[kd]", "k")
1437 "(k)eep or (d)elete?", "[kd]", "k")
1424 if r == "d":
1438 if r == "d":
1425 remove.append(f)
1439 remove.append(f)
1426 else:
1440 else:
1427 self.ui.debug("other deleted %s\n" % f)
1441 self.ui.debug("other deleted %s\n" % f)
1428 remove.append(f) # other deleted it
1442 remove.append(f) # other deleted it
1429 else:
1443 else:
1430 if n == m1.get(f, nullid): # same as parent
1444 if n == m1.get(f, nullid): # same as parent
1431 if p2 == pa: # going backwards?
1445 if p2 == pa: # going backwards?
1432 self.ui.debug("remote deleted %s\n" % f)
1446 self.ui.debug("remote deleted %s\n" % f)
1433 remove.append(f)
1447 remove.append(f)
1434 else:
1448 else:
1435 self.ui.debug("local created %s, keeping\n" % f)
1449 self.ui.debug("local created %s, keeping\n" % f)
1436 else:
1450 else:
1437 self.ui.debug("working dir created %s, keeping\n" % f)
1451 self.ui.debug("working dir created %s, keeping\n" % f)
1438
1452
1439 for f, n in m2.iteritems():
1453 for f, n in m2.iteritems():
1440 if choose and not choose(f): continue
1454 if choose and not choose(f): continue
1441 if f[0] == "/": continue
1455 if f[0] == "/": continue
1442 if f in ma and n != ma[f]:
1456 if f in ma and n != ma[f]:
1443 r = "k"
1457 r = "k"
1444 if not force and (linear_path or allow):
1458 if not force and (linear_path or allow):
1445 r = self.ui.prompt(
1459 r = self.ui.prompt(
1446 ("remote changed %s which local deleted\n" % f) +
1460 ("remote changed %s which local deleted\n" % f) +
1447 "(k)eep or (d)elete?", "[kd]", "k")
1461 "(k)eep or (d)elete?", "[kd]", "k")
1448 if r == "k": get[f] = n
1462 if r == "k": get[f] = n
1449 elif f not in ma:
1463 elif f not in ma:
1450 self.ui.debug("remote created %s\n" % f)
1464 self.ui.debug("remote created %s\n" % f)
1451 get[f] = n
1465 get[f] = n
1452 else:
1466 else:
1453 if force or p2 == pa: # going backwards?
1467 if force or p2 == pa: # going backwards?
1454 self.ui.debug("local deleted %s, recreating\n" % f)
1468 self.ui.debug("local deleted %s, recreating\n" % f)
1455 get[f] = n
1469 get[f] = n
1456 else:
1470 else:
1457 self.ui.debug("local deleted %s\n" % f)
1471 self.ui.debug("local deleted %s\n" % f)
1458
1472
1459 del mw, m1, m2, ma
1473 del mw, m1, m2, ma
1460
1474
1461 if force:
1475 if force:
1462 for f in merge:
1476 for f in merge:
1463 get[f] = merge[f][1]
1477 get[f] = merge[f][1]
1464 merge = {}
1478 merge = {}
1465
1479
1466 if linear_path or force:
1480 if linear_path or force:
1467 # we don't need to do any magic, just jump to the new rev
1481 # we don't need to do any magic, just jump to the new rev
1468 mode = 'n'
1482 mode = 'n'
1469 p1, p2 = p2, nullid
1483 p1, p2 = p2, nullid
1470 else:
1484 else:
1471 if not allow:
1485 if not allow:
1472 self.ui.status("this update spans a branch" +
1486 self.ui.status("this update spans a branch" +
1473 " affecting the following files:\n")
1487 " affecting the following files:\n")
1474 fl = merge.keys() + get.keys()
1488 fl = merge.keys() + get.keys()
1475 fl.sort()
1489 fl.sort()
1476 for f in fl:
1490 for f in fl:
1477 cf = ""
1491 cf = ""
1478 if f in merge: cf = " (resolve)"
1492 if f in merge: cf = " (resolve)"
1479 self.ui.status(" %s%s\n" % (f, cf))
1493 self.ui.status(" %s%s\n" % (f, cf))
1480 self.ui.warn("aborting update spanning branches!\n")
1494 self.ui.warn("aborting update spanning branches!\n")
1481 self.ui.status("(use update -m to perform a branch merge)\n")
1495 self.ui.status("(use update -m to perform a branch merge)\n")
1482 return 1
1496 return 1
1483 # we have to remember what files we needed to get/change
1497 # we have to remember what files we needed to get/change
1484 # because any file that's different from either one of its
1498 # because any file that's different from either one of its
1485 # parents must be in the changeset
1499 # parents must be in the changeset
1486 mode = 'm'
1500 mode = 'm'
1487 if moddirstate:
1501 if moddirstate:
1488 self.dirstate.update(mark.keys(), "m")
1502 self.dirstate.update(mark.keys(), "m")
1489
1503
1490 if moddirstate:
1504 if moddirstate:
1491 self.dirstate.setparents(p1, p2)
1505 self.dirstate.setparents(p1, p2)
1492
1506
1493 # get the files we don't need to change
1507 # get the files we don't need to change
1494 files = get.keys()
1508 files = get.keys()
1495 files.sort()
1509 files.sort()
1496 for f in files:
1510 for f in files:
1497 if f[0] == "/": continue
1511 if f[0] == "/": continue
1498 self.ui.note("getting %s\n" % f)
1512 self.ui.note("getting %s\n" % f)
1499 t = self.file(f).read(get[f])
1513 t = self.file(f).read(get[f])
1500 try:
1514 try:
1501 self.wfile(f, "w").write(t)
1515 self.wfile(f, "w").write(t)
1502 except IOError:
1516 except IOError:
1503 os.makedirs(os.path.dirname(self.wjoin(f)))
1517 os.makedirs(os.path.dirname(self.wjoin(f)))
1504 self.wfile(f, "w").write(t)
1518 self.wfile(f, "w").write(t)
1505 util.set_exec(self.wjoin(f), mf2[f])
1519 util.set_exec(self.wjoin(f), mf2[f])
1506 if moddirstate:
1520 if moddirstate:
1507 self.dirstate.update([f], mode)
1521 self.dirstate.update([f], mode)
1508
1522
1509 # merge the tricky bits
1523 # merge the tricky bits
1510 files = merge.keys()
1524 files = merge.keys()
1511 files.sort()
1525 files.sort()
1512 for f in files:
1526 for f in files:
1513 self.ui.status("merging %s\n" % f)
1527 self.ui.status("merging %s\n" % f)
1514 m, o, flag = merge[f]
1528 m, o, flag = merge[f]
1515 self.merge3(f, m, o)
1529 self.merge3(f, m, o)
1516 util.set_exec(self.wjoin(f), flag)
1530 util.set_exec(self.wjoin(f), flag)
1517 if moddirstate and mode == 'm':
1531 if moddirstate and mode == 'm':
1518 # only update dirstate on branch merge, otherwise we
1532 # only update dirstate on branch merge, otherwise we
1519 # could mark files with changes as unchanged
1533 # could mark files with changes as unchanged
1520 self.dirstate.update([f], mode)
1534 self.dirstate.update([f], mode)
1521
1535
1522 remove.sort()
1536 remove.sort()
1523 for f in remove:
1537 for f in remove:
1524 self.ui.note("removing %s\n" % f)
1538 self.ui.note("removing %s\n" % f)
1525 try:
1539 try:
1526 os.unlink(self.wjoin(f))
1540 os.unlink(self.wjoin(f))
1527 except OSError, inst:
1541 except OSError, inst:
1528 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1542 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1529 # try removing directories that might now be empty
1543 # try removing directories that might now be empty
1530 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1544 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1531 except: pass
1545 except: pass
1532 if moddirstate:
1546 if moddirstate:
1533 if mode == 'n':
1547 if mode == 'n':
1534 self.dirstate.forget(remove)
1548 self.dirstate.forget(remove)
1535 else:
1549 else:
1536 self.dirstate.update(remove, 'r')
1550 self.dirstate.update(remove, 'r')
1537
1551
1538 def merge3(self, fn, my, other):
1552 def merge3(self, fn, my, other):
1539 """perform a 3-way merge in the working directory"""
1553 """perform a 3-way merge in the working directory"""
1540
1554
1541 def temp(prefix, node):
1555 def temp(prefix, node):
1542 pre = "%s~%s." % (os.path.basename(fn), prefix)
1556 pre = "%s~%s." % (os.path.basename(fn), prefix)
1543 (fd, name) = tempfile.mkstemp("", pre)
1557 (fd, name) = tempfile.mkstemp("", pre)
1544 f = os.fdopen(fd, "wb")
1558 f = os.fdopen(fd, "wb")
1545 f.write(fl.revision(node))
1559 f.write(fl.revision(node))
1546 f.close()
1560 f.close()
1547 return name
1561 return name
1548
1562
1549 fl = self.file(fn)
1563 fl = self.file(fn)
1550 base = fl.ancestor(my, other)
1564 base = fl.ancestor(my, other)
1551 a = self.wjoin(fn)
1565 a = self.wjoin(fn)
1552 b = temp("base", base)
1566 b = temp("base", base)
1553 c = temp("other", other)
1567 c = temp("other", other)
1554
1568
1555 self.ui.note("resolving %s\n" % fn)
1569 self.ui.note("resolving %s\n" % fn)
1556 self.ui.debug("file %s: other %s ancestor %s\n" %
1570 self.ui.debug("file %s: other %s ancestor %s\n" %
1557 (fn, short(other), short(base)))
1571 (fn, short(other), short(base)))
1558
1572
1559 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1573 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1560 or "hgmerge")
1574 or "hgmerge")
1561 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1575 r = os.system("%s %s %s %s" % (cmd, a, b, c))
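# the merge helper is resolved from HGMERGE, then ui.merge, then a default
# hgmerge script, and is invoked as: <cmd> <working copy> <base> <other>;
# it is expected to leave the result in the working copy, and a non-zero
# exit status is reported below as a failed merge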
1562 if r:
1576 if r:
1563 self.ui.warn("merging %s failed!\n" % fn)
1577 self.ui.warn("merging %s failed!\n" % fn)
1564
1578
1565 os.unlink(b)
1579 os.unlink(b)
1566 os.unlink(c)
1580 os.unlink(c)
1567
1581
1568 def verify(self):
1582 def verify(self):
1569 filelinkrevs = {}
1583 filelinkrevs = {}
1570 filenodes = {}
1584 filenodes = {}
1571 changesets = revisions = files = 0
1585 changesets = revisions = files = 0
1572 errors = 0
1586 errors = 0
1573
1587
1574 seen = {}
1588 seen = {}
1575 self.ui.status("checking changesets\n")
1589 self.ui.status("checking changesets\n")
1576 for i in range(self.changelog.count()):
1590 for i in range(self.changelog.count()):
1577 changesets += 1
1591 changesets += 1
1578 n = self.changelog.node(i)
1592 n = self.changelog.node(i)
1579 if n in seen:
1593 if n in seen:
1580 self.ui.warn("duplicate changeset at revision %d\n" % i)
1594 self.ui.warn("duplicate changeset at revision %d\n" % i)
1581 errors += 1
1595 errors += 1
1582 seen[n] = 1
1596 seen[n] = 1
1583
1597
1584 for p in self.changelog.parents(n):
1598 for p in self.changelog.parents(n):
1585 if p not in self.changelog.nodemap:
1599 if p not in self.changelog.nodemap:
1586 self.ui.warn("changeset %s has unknown parent %s\n" %
1600 self.ui.warn("changeset %s has unknown parent %s\n" %
1587 (short(n), short(p)))
1601 (short(n), short(p)))
1588 errors += 1
1602 errors += 1
1589 try:
1603 try:
1590 changes = self.changelog.read(n)
1604 changes = self.changelog.read(n)
1591 except Exception, inst:
1605 except Exception, inst:
1592 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1606 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1593 errors += 1
1607 errors += 1
1594
1608
1595 for f in changes[3]:
1609 for f in changes[3]:
1596 filelinkrevs.setdefault(f, []).append(i)
1610 filelinkrevs.setdefault(f, []).append(i)
1597
1611
1598 seen = {}
1612 seen = {}
1599 self.ui.status("checking manifests\n")
1613 self.ui.status("checking manifests\n")
1600 for i in range(self.manifest.count()):
1614 for i in range(self.manifest.count()):
1601 n = self.manifest.node(i)
1615 n = self.manifest.node(i)
1602 if n in seen:
1616 if n in seen:
1603 self.ui.warn("duplicate manifest at revision %d\n" % i)
1617 self.ui.warn("duplicate manifest at revision %d\n" % i)
1604 errors += 1
1618 errors += 1
1605 seen[n] = 1
1619 seen[n] = 1
1606
1620
1607 for p in self.manifest.parents(n):
1621 for p in self.manifest.parents(n):
1608 if p not in self.manifest.nodemap:
1622 if p not in self.manifest.nodemap:
1609 self.ui.warn("manifest %s has unknown parent %s\n" %
1623 self.ui.warn("manifest %s has unknown parent %s\n" %
1610 (short(n), short(p)))
1624 (short(n), short(p)))
1611 errors += 1
1625 errors += 1
1612
1626
1613 try:
1627 try:
1614 delta = mdiff.patchtext(self.manifest.delta(n))
1628 delta = mdiff.patchtext(self.manifest.delta(n))
1615 except KeyboardInterrupt:
1629 except KeyboardInterrupt:
1616 self.ui.warn("aborted")
1630 self.ui.warn("aborted")
1617 sys.exit(0)
1631 sys.exit(0)
1618 except Exception, inst:
1632 except Exception, inst:
1619 self.ui.warn("unpacking manifest %s: %s\n"
1633 self.ui.warn("unpacking manifest %s: %s\n"
1620 % (short(n), inst))
1634 % (short(n), inst))
1621 errors += 1
1635 errors += 1
1622
1636
1623 ff = [ l.split('\0') for l in delta.splitlines() ]
1637 ff = [ l.split('\0') for l in delta.splitlines() ]
1624 for f, fn in ff:
1638 for f, fn in ff:
1625 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1639 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1626
1640
1627 self.ui.status("crosschecking files in changesets and manifests\n")
1641 self.ui.status("crosschecking files in changesets and manifests\n")
1628 for f in filenodes:
1642 for f in filenodes:
1629 if f not in filelinkrevs:
1643 if f not in filelinkrevs:
1630 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1644 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1631 errors += 1
1645 errors += 1
1632
1646
1633 for f in filelinkrevs:
1647 for f in filelinkrevs:
1634 if f not in filenodes:
1648 if f not in filenodes:
1635 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1649 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1636 errors += 1
1650 errors += 1
1637
1651
1638 self.ui.status("checking files\n")
1652 self.ui.status("checking files\n")
1639 ff = filenodes.keys()
1653 ff = filenodes.keys()
1640 ff.sort()
1654 ff.sort()
1641 for f in ff:
1655 for f in ff:
1642 if f == "/dev/null": continue
1656 if f == "/dev/null": continue
1643 files += 1
1657 files += 1
1644 fl = self.file(f)
1658 fl = self.file(f)
1645 nodes = { nullid: 1 }
1659 nodes = { nullid: 1 }
1646 seen = {}
1660 seen = {}
1647 for i in range(fl.count()):
1661 for i in range(fl.count()):
1648 revisions += 1
1662 revisions += 1
1649 n = fl.node(i)
1663 n = fl.node(i)
1650
1664
1651 if n in seen:
1665 if n in seen:
1652 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1666 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1653 errors += 1
1667 errors += 1
1654
1668
1655 if n not in filenodes[f]:
1669 if n not in filenodes[f]:
1656 self.ui.warn("%s: %d:%s not in manifests\n"
1670 self.ui.warn("%s: %d:%s not in manifests\n"
1657 % (f, i, short(n)))
1671 % (f, i, short(n)))
1658 errors += 1
1672 errors += 1
1659 else:
1673 else:
1660 del filenodes[f][n]
1674 del filenodes[f][n]
1661
1675
1662 flr = fl.linkrev(n)
1676 flr = fl.linkrev(n)
1663 if flr not in filelinkrevs[f]:
1677 if flr not in filelinkrevs[f]:
1664 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1678 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1665 % (f, short(n), fl.linkrev(n)))
1679 % (f, short(n), fl.linkrev(n)))
1666 errors += 1
1680 errors += 1
1667 else:
1681 else:
1668 filelinkrevs[f].remove(flr)
1682 filelinkrevs[f].remove(flr)
1669
1683
1670 # verify contents
1684 # verify contents
1671 try:
1685 try:
1672 t = fl.read(n)
1686 t = fl.read(n)
1673 except Exception, inst:
1687 except Exception, inst:
1674 self.ui.warn("unpacking file %s %s: %s\n"
1688 self.ui.warn("unpacking file %s %s: %s\n"
1675 % (f, short(n), inst))
1689 % (f, short(n), inst))
1676 errors += 1
1690 errors += 1
1677
1691
1678 # verify parents
1692 # verify parents
1679 (p1, p2) = fl.parents(n)
1693 (p1, p2) = fl.parents(n)
1680 if p1 not in nodes:
1694 if p1 not in nodes:
1681 self.ui.warn("file %s:%s unknown parent 1 %s" %
1695 self.ui.warn("file %s:%s unknown parent 1 %s" %
1682 (f, short(n), short(p1)))
1696 (f, short(n), short(p1)))
1683 errors += 1
1697 errors += 1
1684 if p2 not in nodes:
1698 if p2 not in nodes:
1685 self.ui.warn("file %s:%s unknown parent 2 %s" %
1699 self.ui.warn("file %s:%s unknown parent 2 %s" %
1686 (f, short(n), short(p2)))
1700 (f, short(n), short(p2)))
1687 errors += 1
1701 errors += 1
1688 nodes[n] = 1
1702 nodes[n] = 1
1689
1703
1690 # cross-check
1704 # cross-check
1691 for node in filenodes[f]:
1705 for node in filenodes[f]:
1692 self.ui.warn("node %s in manifests not in %s\n"
1706 self.ui.warn("node %s in manifests not in %s\n"
1693 % (hex(node), f))
1707 % (hex(node), f))
1694 errors += 1
1708 errors += 1
1695
1709
1696 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1710 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1697 (files, changesets, revisions))
1711 (files, changesets, revisions))
1698
1712
1699 if errors:
1713 if errors:
1700 self.ui.warn("%d integrity errors encountered!\n" % errors)
1714 self.ui.warn("%d integrity errors encountered!\n" % errors)
1701 return 1
1715 return 1
1702
1716
1703 class httprepository:
1717 class httprepository:
1704 def __init__(self, ui, path):
1718 def __init__(self, ui, path):
1705 # fix missing / after hostname
1719 # fix missing / after hostname
1706 s = urlparse.urlsplit(path)
1720 s = urlparse.urlsplit(path)
1707 partial = s[2]
1721 partial = s[2]
1708 if not partial: partial = "/"
1722 if not partial: partial = "/"
1709 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1723 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1710 self.ui = ui
1724 self.ui = ui
1711 no_list = [ "localhost", "127.0.0.1" ]
1725 no_list = [ "localhost", "127.0.0.1" ]
1712 host = ui.config("http_proxy", "host")
1726 host = ui.config("http_proxy", "host")
1713 if host is None:
1727 if host is None:
1714 host = os.environ.get("http_proxy")
1728 host = os.environ.get("http_proxy")
1715 if host and host.startswith('http://'):
1729 if host and host.startswith('http://'):
1716 host = host[7:]
1730 host = host[7:]
1717 user = ui.config("http_proxy", "user")
1731 user = ui.config("http_proxy", "user")
1718 passwd = ui.config("http_proxy", "passwd")
1732 passwd = ui.config("http_proxy", "passwd")
1719 no = ui.config("http_proxy", "no")
1733 no = ui.config("http_proxy", "no")
1720 if no is None:
1734 if no is None:
1721 no = os.environ.get("no_proxy")
1735 no = os.environ.get("no_proxy")
1722 if no:
1736 if no:
1723 no_list = no_list + no.split(",")
1737 no_list = no_list + no.split(",")
1724
1738
1725 no_proxy = 0
1739 no_proxy = 0
1726 for h in no_list:
1740 for h in no_list:
1727 if (path.startswith("http://" + h + "/") or
1741 if (path.startswith("http://" + h + "/") or
1728 path.startswith("http://" + h + ":") or
1742 path.startswith("http://" + h + ":") or
1729 path == "http://" + h):
1743 path == "http://" + h):
1730 no_proxy = 1
1744 no_proxy = 1
1731
1745
1732 # Note: urllib2 takes proxy values from the environment and those will
1746 # Note: urllib2 takes proxy values from the environment and those will
1733 # take precedence
1747 # take precedence
1734 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1748 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1735 if os.environ.has_key(env):
1749 if os.environ.has_key(env):
1736 del os.environ[env]
1750 del os.environ[env]
1737
1751
1738 proxy_handler = urllib2.BaseHandler()
1752 proxy_handler = urllib2.BaseHandler()
1739 if host and not no_proxy:
1753 if host and not no_proxy:
1740 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1754 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1741
1755
1742 authinfo = None
1756 authinfo = None
1743 if user and passwd:
1757 if user and passwd:
1744 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1758 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1745 passmgr.add_password(None, host, user, passwd)
1759 passmgr.add_password(None, host, user, passwd)
1746 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1760 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1747
1761
1748 opener = urllib2.build_opener(proxy_handler, authinfo)
1762 opener = urllib2.build_opener(proxy_handler, authinfo)
1749 urllib2.install_opener(opener)
1763 urllib2.install_opener(opener)
1750
1764
1751 def dev(self):
1765 def dev(self):
1752 return -1
1766 return -1
1753
1767
1754 def do_cmd(self, cmd, **args):
1768 def do_cmd(self, cmd, **args):
1755 self.ui.debug("sending %s command\n" % cmd)
1769 self.ui.debug("sending %s command\n" % cmd)
1756 q = {"cmd": cmd}
1770 q = {"cmd": cmd}
1757 q.update(args)
1771 q.update(args)
1758 qs = urllib.urlencode(q)
1772 qs = urllib.urlencode(q)
1759 cu = "%s?%s" % (self.url, qs)
1773 cu = "%s?%s" % (self.url, qs)
1760 resp = urllib2.urlopen(cu)
1774 resp = urllib2.urlopen(cu)
1761 proto = resp.headers['content-type']
1775 proto = resp.headers['content-type']
1762
1776
1763 # accept old "text/plain" and "application/hg-changegroup" for now
1777 # accept old "text/plain" and "application/hg-changegroup" for now
1764 if not proto.startswith('application/mercurial') and \
1778 if not proto.startswith('application/mercurial') and \
1765 not proto.startswith('text/plain') and \
1779 not proto.startswith('text/plain') and \
1766 not proto.startswith('application/hg-changegroup'):
1780 not proto.startswith('application/hg-changegroup'):
1767 raise RepoError("'%s' does not appear to be an hg repository"
1781 raise RepoError("'%s' does not appear to be an hg repository"
1768 % self.url)
1782 % self.url)
1769
1783
1770 if proto.startswith('application/mercurial'):
1784 if proto.startswith('application/mercurial'):
1771 version = proto[22:]
1785 version = proto[22:]
1772 if float(version) > 0.1:
1786 if float(version) > 0.1:
1773 raise RepoError("'%s' uses newer protocol %s" %
1787 raise RepoError("'%s' uses newer protocol %s" %
1774 (self.url, version))
1788 (self.url, version))
1775
1789
1776 return resp
1790 return resp
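# for orientation: a heads query against a repository served at, say,
# http://example.com/repo goes out as a plain GET of
#   http://example.com/repo?cmd=heads
# and branches adds its arguments as extra query parameters, roughly
#   cmd=branches&nodes=<hex>+<hex>   (urlencode turns spaces into '+')
# the host and path above are illustrative, not taken from this file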
1777
1791
1778 def heads(self):
1792 def heads(self):
1779 d = self.do_cmd("heads").read()
1793 d = self.do_cmd("heads").read()
1780 try:
1794 try:
1781 return map(bin, d[:-1].split(" "))
1795 return map(bin, d[:-1].split(" "))
1782 except:
1796 except:
1783 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1797 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1784 raise
1798 raise
1785
1799
1786 def branches(self, nodes):
1800 def branches(self, nodes):
1787 n = " ".join(map(hex, nodes))
1801 n = " ".join(map(hex, nodes))
1788 d = self.do_cmd("branches", nodes=n).read()
1802 d = self.do_cmd("branches", nodes=n).read()
1789 try:
1803 try:
1790 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1804 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1791 return br
1805 return br
1792 except:
1806 except:
1793 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1807 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1794 raise
1808 raise
1795
1809
1796 def between(self, pairs):
1810 def between(self, pairs):
1797 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1811 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1798 d = self.do_cmd("between", pairs=n).read()
1812 d = self.do_cmd("between", pairs=n).read()
1799 try:
1813 try:
1800 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1814 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1801 return p
1815 return p
1802 except:
1816 except:
1803 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1817 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1804 raise
1818 raise
1805
1819
1806 def changegroup(self, nodes):
1820 def changegroup(self, nodes):
1807 n = " ".join(map(hex, nodes))
1821 n = " ".join(map(hex, nodes))
1808 f = self.do_cmd("changegroup", roots=n)
1822 f = self.do_cmd("changegroup", roots=n)
1809 bytes = 0
1823 bytes = 0
1810
1824
1811 class zread:
1825 class zread:
1812 def __init__(self, f):
1826 def __init__(self, f):
1813 self.zd = zlib.decompressobj()
1827 self.zd = zlib.decompressobj()
1814 self.f = f
1828 self.f = f
1815 self.buf = ""
1829 self.buf = ""
1816 def read(self, l):
1830 def read(self, l):
1817 while l > len(self.buf):
1831 while l > len(self.buf):
1818 r = self.f.read(4096)
1832 r = self.f.read(4096)
1819 if r:
1833 if r:
1820 self.buf += self.zd.decompress(r)
1834 self.buf += self.zd.decompress(r)
1821 else:
1835 else:
1822 self.buf += self.zd.flush()
1836 self.buf += self.zd.flush()
1823 break
1837 break
1824 d, self.buf = self.buf[:l], self.buf[l:]
1838 d, self.buf = self.buf[:l], self.buf[l:]
1825 return d
1839 return d
1826
1840
1827 return zread(f)
1841 return zread(f)
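# note: the changegroup body arrives zlib-compressed over HTTP; zread above
# inflates it incrementally as addchangegroup reads from it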
1828
1842
1829 class remotelock:
1843 class remotelock:
1830 def __init__(self, repo):
1844 def __init__(self, repo):
1831 self.repo = repo
1845 self.repo = repo
1832 def release(self):
1846 def release(self):
1833 self.repo.unlock()
1847 self.repo.unlock()
1834 self.repo = None
1848 self.repo = None
1835 def __del__(self):
1849 def __del__(self):
1836 if self.repo:
1850 if self.repo:
1837 self.release()
1851 self.release()
1838
1852
1839 class sshrepository:
1853 class sshrepository:
1840 def __init__(self, ui, path):
1854 def __init__(self, ui, path):
1841 self.url = path
1855 self.url = path
1842 self.ui = ui
1856 self.ui = ui
1843
1857
1844 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1858 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1845 if not m:
1859 if not m:
1846 raise RepoError("couldn't parse destination %s\n" % path)
1860 raise RepoError("couldn't parse destination %s\n" % path)
1847
1861
1848 self.user = m.group(2)
1862 self.user = m.group(2)
1849 self.host = m.group(3)
1863 self.host = m.group(3)
1850 self.port = m.group(5)
1864 self.port = m.group(5)
1851 self.path = m.group(7)
1865 self.path = m.group(7)
1852
1866
1853 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1867 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1854 args = self.port and ("%s -p %s") % (args, self.port) or args
1868 args = self.port and ("%s -p %s") % (args, self.port) or args
1855 path = self.path or ""
1869 path = self.path or ""
1856
1870
1857 cmd = "ssh %s 'hg -R %s serve --stdio'"
1871 cmd = "ssh %s 'hg -R %s serve --stdio'"
1858 cmd = cmd % (args, path)
1872 cmd = cmd % (args, path)
1859
1873
1860 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1874 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1861
1875
1862 def readerr(self):
1876 def readerr(self):
1863 while 1:
1877 while 1:
1864 r,w,x = select.select([self.pipee], [], [], 0)
1878 r,w,x = select.select([self.pipee], [], [], 0)
1865 if not r: break
1879 if not r: break
1866 l = self.pipee.readline()
1880 l = self.pipee.readline()
1867 if not l: break
1881 if not l: break
1868 self.ui.status("remote: ", l)
1882 self.ui.status("remote: ", l)
1869
1883
1870 def __del__(self):
1884 def __del__(self):
1871 self.pipeo.close()
1885 self.pipeo.close()
1872 self.pipei.close()
1886 self.pipei.close()
1873 for l in self.pipee:
1887 for l in self.pipee:
1874 self.ui.status("remote: ", l)
1888 self.ui.status("remote: ", l)
1875 self.pipee.close()
1889 self.pipee.close()
1876
1890
1877 def dev(self):
1891 def dev(self):
1878 return -1
1892 return -1
1879
1893
1880 def do_cmd(self, cmd, **args):
1894 def do_cmd(self, cmd, **args):
1881 self.ui.debug("sending %s command\n" % cmd)
1895 self.ui.debug("sending %s command\n" % cmd)
1882 self.pipeo.write("%s\n" % cmd)
1896 self.pipeo.write("%s\n" % cmd)
1883 for k, v in args.items():
1897 for k, v in args.items():
1884 self.pipeo.write("%s %d\n" % (k, len(v)))
1898 self.pipeo.write("%s %d\n" % (k, len(v)))
1885 self.pipeo.write(v)
1899 self.pipeo.write(v)
1886 self.pipeo.flush()
1900 self.pipeo.flush()
1887
1901
1888 return self.pipei
1902 return self.pipei
1889
1903
1890 def call(self, cmd, **args):
1904 def call(self, cmd, **args):
1891 r = self.do_cmd(cmd, **args)
1905 r = self.do_cmd(cmd, **args)
1892 l = r.readline()
1906 l = r.readline()
1893 self.readerr()
1907 self.readerr()
1894 try:
1908 try:
1895 l = int(l)
1909 l = int(l)
1896 except:
1910 except:
1897 raise RepoError("unexpected response '%s'" % l)
1911 raise RepoError("unexpected response '%s'" % l)
1898 return r.read(l)
1912 return r.read(l)
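# wire format, as implemented by do_cmd/call above: the client writes the
# command name on its own line, then "key <length>" and <length> bytes of
# value for each argument; the server answers with a decimal byte count on
# one line followed by exactly that many bytes of payload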
1899
1913
1900 def lock(self):
1914 def lock(self):
1901 self.call("lock")
1915 self.call("lock")
1902 return remotelock(self)
1916 return remotelock(self)
1903
1917
1904 def unlock(self):
1918 def unlock(self):
1905 self.call("unlock")
1919 self.call("unlock")
1906
1920
1907 def heads(self):
1921 def heads(self):
1908 d = self.call("heads")
1922 d = self.call("heads")
1909 try:
1923 try:
1910 return map(bin, d[:-1].split(" "))
1924 return map(bin, d[:-1].split(" "))
1911 except:
1925 except:
1912 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1926 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1913
1927
1914 def branches(self, nodes):
1928 def branches(self, nodes):
1915 n = " ".join(map(hex, nodes))
1929 n = " ".join(map(hex, nodes))
1916 d = self.call("branches", nodes=n)
1930 d = self.call("branches", nodes=n)
1917 try:
1931 try:
1918 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1932 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1919 return br
1933 return br
1920 except:
1934 except:
1921 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1935 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1922
1936
1923 def between(self, pairs):
1937 def between(self, pairs):
1924 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1938 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1925 d = self.call("between", pairs=n)
1939 d = self.call("between", pairs=n)
1926 try:
1940 try:
1927 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1941 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1928 return p
1942 return p
1929 except:
1943 except:
1930 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1944 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1931
1945
1932 def changegroup(self, nodes):
1946 def changegroup(self, nodes):
1933 n = " ".join(map(hex, nodes))
1947 n = " ".join(map(hex, nodes))
1934 f = self.do_cmd("changegroup", roots=n)
1948 f = self.do_cmd("changegroup", roots=n)
1935 return self.pipei
1949 return self.pipei
1936
1950
1937 def addchangegroup(self, cg):
1951 def addchangegroup(self, cg):
1938 d = self.call("addchangegroup")
1952 d = self.call("addchangegroup")
1939 if d:
1953 if d:
1940 raise RepoError("push refused: %s" % d)
1954 raise RepoError("push refused: %s" % d)
1941
1955
1942 while 1:
1956 while 1:
1943 d = cg.read(4096)
1957 d = cg.read(4096)
1944 if not d: break
1958 if not d: break
1945 self.pipeo.write(d)
1959 self.pipeo.write(d)
1946 self.readerr()
1960 self.readerr()
1947
1961
1948 self.pipeo.flush()
1962 self.pipeo.flush()
1949
1963
1950 self.readerr()
1964 self.readerr()
1951 l = int(self.pipei.readline())
1965 l = int(self.pipei.readline())
1952 return self.pipei.read(l) != ""
1966 return self.pipei.read(l) != ""
1953
1967
1954 def repository(ui, path=None, create=0):
1968 def repository(ui, path=None, create=0):
1955 if path:
1969 if path:
1956 if path.startswith("http://"):
1970 if path.startswith("http://"):
1957 return httprepository(ui, path)
1971 return httprepository(ui, path)
1958 if path.startswith("hg://"):
1972 if path.startswith("hg://"):
1959 return httprepository(ui, path.replace("hg://", "http://"))
1973 return httprepository(ui, path.replace("hg://", "http://"))
1960 if path.startswith("old-http://"):
1974 if path.startswith("old-http://"):
1961 return localrepository(ui, path.replace("old-http://", "http://"))
1975 return localrepository(ui, path.replace("old-http://", "http://"))
1962 if path.startswith("ssh://"):
1976 if path.startswith("ssh://"):
1963 return sshrepository(ui, path)
1977 return sshrepository(ui, path)
1964
1978
1965 return localrepository(ui, path, create)
1979 return localrepository(ui, path, create)
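A minimal sketch of driving this factory follows; the ui construction and the
paths are illustrative assumptions, not taken from this file:

# hypothetical usage sketch (paths and the ui object are assumptions)
u = ui.ui()                                     # assumes the ui module is available
repo = repository(u, "ssh://user@host/repo")    # dispatches to sshrepository
web = repository(u, "http://example.com/hg")    # dispatches to httprepository
wc = repository(u, ".", create=0)               # falls back to localrepository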