Make pull count jargon less confusing...
mpm@selenic.com
r772:f05deda5 default
@@ -1,1957 +1,1957 @@
# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff urlparse")
demandload(globals(), "bisect select")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", path + ".i"),
                        os.path.join("data", path + ".d"))

    def read(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        # directly generate the mdiff delta from the data collected during
        # the bisect loop below
        def gendelta(delta):
            i = 0
            result = []
            while i < len(delta):
                start = delta[i][2]
                end = delta[i][3]
                l = delta[i][4]
                if l == None:
                    l = ""
                while i < len(delta) - 1 and start <= delta[i+1][2] \
                          and end >= delta[i+1][2]:
                    if delta[i+1][3] > end:
                        end = delta[i+1][3]
                    if delta[i+1][4]:
                        l += delta[i+1][4]
                    i += 1
                result.append(struct.pack(">lll", start, end, len(l)) + l)
                i += 1
            return result

        # apply the changes collected during the bisect loop to our addlist
        def addlistdelta(addlist, delta):
            # apply the deltas to the addlist.  start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(delta)
            while i > 0:
                i -= 1
                start = delta[i][0]
                end = delta[i][1]
                if delta[i][4]:
                    addlist[start:end] = [delta[i][4]]
                else:
                    del addlist[start:end]
            return addlist

        # calculate the byte offset of the start of each line in the
        # manifest
        def calcoffsets(addlist):
            offsets = [0] * (len(addlist) + 1)
            offset = 0
            i = 0
            while i < len(addlist):
                offsets[i] = offset
                offset += len(addlist[i])
                i += 1
            offsets[i] = offset
            return offsets

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            self.addlist = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                            for f in files]
            cachedelta = None
        else:
            addlist = self.listcache[1]

            # find the starting offset for each line in the add list
            offsets = calcoffsets(addlist)

            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            bs = 0

            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                bs = bisect.bisect(addlist, f, bs)
                if bs < len(addlist):
                    fn = addlist[bs][:addlist[bs].index('\0')]
                else:
                    fn = None
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = None
                start = bs
                if fn != f:
                    # item not found, insert a new one
                    end = bs
                    if w[1] == 1:
                        sys.stderr.write("failed to remove %s from manifest\n"
                                         % f)
                        sys.exit(1)
                else:
                    # item is found, replace/delete the existing line
                    end = bs + 1
                delta.append([start, end, offsets[start], offsets[end], l])

            self.addlist = addlistdelta(addlist, delta)
            if self.mapcache[0] == self.tip():
                cachedelta = "".join(gendelta(delta))
            else:
                cachedelta = None

        text = "".join(self.addlist)
        if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
            sys.stderr.write("manifest delta failure\n")
            sys.exit(1)
        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        date = date or "%d %d" % (time.time(), time.timezone)
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        self.map = None
        self.pl = None
        self.copies = {}
        self.ignorefunc = None

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def ignore(self, f):
        if not self.ignorefunc:
            bigpat = []
            try:
                l = file(self.wjoin(".hgignore"))
                for pat in l:
                    if pat != "\n":
                        p = util.pconvert(pat[:-1])
                        try:
                            r = re.compile(p)
                        except:
                            self.ui.warn("ignoring invalid ignore"
                                         + " regular expression '%s'\n" % p)
                        else:
                            bigpat.append(util.pconvert(pat[:-1]))
            except IOError: pass

            if bigpat:
                s = "(?:%s)" % (")|(?:".join(bigpat))
                r = re.compile(s)
                self.ignorefunc = r.search
            else:
                self.ignorefunc = util.never

        return self.ignorefunc(f)

    def __del__(self):
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def parents(self):
        if not self.pl:
            self.read()
        return self.pl

    def markdirty(self):
        if not self.dirty:
            self.dirty = 1

    def setparents(self, p1, p2 = nullid):
        self.markdirty()
        self.pl = p1, p2

    def state(self, key):
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        if self.map is not None: return self.map

        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if not st: return
        except: return

        self.pl = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
            self.map[f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.read()
        self.markdirty()
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                s = os.stat(os.path.join(self.root, f))
                self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)

    def forget(self, files):
        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)
                pass

    def clear(self):
        self.map = {}
        self.markdirty()

    def write(self):
        st = self.opener("dirstate", "w")
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def walk(self, files = None, match = util.always):
        self.read()
        dc = self.map.copy()
        # walk all files by default
        if not files: files = [self.root]
        def traverse():
            for f in util.unique(files):
                f = os.path.join(self.root, f)
                if os.path.isdir(f):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        if d == '.hg':
                            subdirs[:] = []
                            continue
                        for sd in subdirs:
                            ds = os.path.join(d, sd +'/')
                            if self.ignore(ds) or not match(ds):
                                subdirs.remove(sd)
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield 'f', fn
                else:
                    yield 'f', f[len(self.root) + 1:]

            for k in dc.keys():
                yield 'm', k

        # yield only files that match: all in dirstate, others only if
        # not in .hgignore

        for src, fn in util.unique(traverse()):
            if fn in dc:
                del dc[fn]
            elif self.ignore(fn):
                continue
            if match(fn):
                yield src, fn

    def changes(self, files = None, match = util.always):
        self.read()
        dc = self.map.copy()
        lookup, changed, added, unknown = [], [], [], []

        for src, fn in self.walk(files, match):
            try: s = os.stat(os.path.join(self.root, fn))
            except: continue

            if fn in dc:
                c = dc[fn]
                del dc[fn]

                if c[0] == 'm':
                    changed.append(fn)
                elif c[0] == 'a':
                    added.append(fn)
                elif c[0] == 'r':
                    unknown.append(fn)
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                    changed.append(fn)
                elif c[1] != s.st_mode or c[3] != s.st_mtime:
                    lookup.append(fn)
            else:
                if match(fn): unknown.append(fn)

        return (lookup, changed, added, filter(match, dc.keys()), unknown)

# used to avoid circular references so destructors work
def opener(base):
    p = base
    def o(path, mode="r"):
        if p.startswith("http://"):
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

class RepoError(Exception): pass

class localrepository:
    def __init__(self, ui, path=None, create=0):
        self.remote = 0
        if path and path.startswith("http://"):
            self.remote = 1
            self.path = path
        else:
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = path
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            try:
                self.ui.readconfig(self.opener("hgrc"))
            except IOError: pass

    def hook(self, name, **args):
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            r = os.system(s)

            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True

    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.revision(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                pass

            try:
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r,t,n))
        l.sort()
        return [(t,n) for r,t,n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t,n in self.tags().items():
                self.nodetagscache.setdefault(n,[]).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                raise RepoError("unknown revision '%s'" % key)

    def dev(self):
        if self.remote: return -1
        return os.stat(self.path).st_dev

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/': f = f[1:]
        return filelog(self.opener, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self.root: return ''
        return cwd[len(self.root) + 1:]

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def transaction(self):
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("undo.dirstate", "w").write(ds)

        return transaction.transaction(self.ui.warn,
                                       self.opener, self.join("journal"),
                                       self.join("undo"))

    def recover(self):
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

    def lock(self, wait = 1):
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm
                mm[f] = r.add(t, {}, tr, linkrev,
                              m1.get(f, nullid), m2.get(f, nullid))
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

    def commit(self, files = None, text = "", user = None, date = None):
        commit = []
        remove = []
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes()
            commit = c + a
            remove = d

        if not commit and not remove:
            self.ui.status("nothing changed\n")
            return

        if not self.hook("precommit"):
            return 1

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)
            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in new])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return 1
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)

        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return 1

    def walk(self, node = None, files = [], match = util.always):
        if node:
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                yield 'm', fn
        else:
            for src, fn in self.dirstate.walk(files, match):
                yield src, fn

    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        mf2, u = None, []

        def fcmp(fn, mf):
            t1 = self.wfile(fn).read()
            t2 = self.file(fn).revision(mf[fn])
            return cmp(t1, t2)

        def mfmatches(node):
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)

    def add(self, list):
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn("%s does not exist!\n" % f)
            elif not os.path.isfile(p):
                self.ui.warn("%s not added: only files supported currently\n" % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not os.path.exists(dest):
            self.ui.warn("%s does not exist!\n" % dest)
        elif not os.path.isfile(dest):
            self.ui.warn("copy failed: %s is not a file\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        return self.changelog.heads()

    def branches(self, nodes):
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def newer(self, nodes):
        m = {}
        nl = []
        pm = {}
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl
1031
1031
1032 def findincoming(self, remote, base={}):
1032 def findincoming(self, remote, base={}):
1033 m = self.changelog.nodemap
1033 m = self.changelog.nodemap
1034 search = []
1034 search = []
1035 fetch = []
1035 fetch = []
1036 seen = {}
1036 seen = {}
1037 seenbranch = {}
1037 seenbranch = {}
1038
1038
1039 # assume we're closer to the tip than the root
1039 # assume we're closer to the tip than the root
1040 # and start by examining the heads
1040 # and start by examining the heads
1041 self.ui.status("searching for changes\n")
1041 self.ui.status("searching for changes\n")
1042 heads = remote.heads()
1042 heads = remote.heads()
1043 unknown = []
1043 unknown = []
1044 for h in heads:
1044 for h in heads:
1045 if h not in m:
1045 if h not in m:
1046 unknown.append(h)
1046 unknown.append(h)
1047 else:
1047 else:
1048 base[h] = 1
1048 base[h] = 1
1049
1049
1050 if not unknown:
1050 if not unknown:
1051 return None
1051 return None
1052
1052
1053 rep = {}
1053 rep = {}
1054 reqcnt = 0
1054 reqcnt = 0
1055
1055
1056 # search through remote branches
1056 # search through remote branches
1057 # a 'branch' here is a linear segment of history, with four parts:
1057 # a 'branch' here is a linear segment of history, with four parts:
1058 # head, root, first parent, second parent
1058 # head, root, first parent, second parent
1059 # (a branch always has two parents (or none) by definition)
1059 # (a branch always has two parents (or none) by definition)
1060 unknown = remote.branches(unknown)
1060 unknown = remote.branches(unknown)
1061 while unknown:
1061 while unknown:
1062 r = []
1062 r = []
1063 while unknown:
1063 while unknown:
1064 n = unknown.pop(0)
1064 n = unknown.pop(0)
1065 if n[0] in seen:
1065 if n[0] in seen:
1066 continue
1066 continue
1067
1067
1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1068 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1069 if n[0] == nullid:
1069 if n[0] == nullid:
1070 break
1070 break
1071 if n in seenbranch:
1071 if n in seenbranch:
1072 self.ui.debug("branch already found\n")
1072 self.ui.debug("branch already found\n")
1073 continue
1073 continue
1074 if n[1] and n[1] in m: # do we know the base?
1074 if n[1] and n[1] in m: # do we know the base?
1075 self.ui.debug("found incomplete branch %s:%s\n"
1075 self.ui.debug("found incomplete branch %s:%s\n"
1076 % (short(n[0]), short(n[1])))
1076 % (short(n[0]), short(n[1])))
1077 search.append(n) # schedule branch range for scanning
1077 search.append(n) # schedule branch range for scanning
1078 seenbranch[n] = 1
1078 seenbranch[n] = 1
1079 else:
1079 else:
1080 if n[1] not in seen and n[1] not in fetch:
1080 if n[1] not in seen and n[1] not in fetch:
1081 if n[2] in m and n[3] in m:
1081 if n[2] in m and n[3] in m:
1082 self.ui.debug("found new changeset %s\n" %
1082 self.ui.debug("found new changeset %s\n" %
1083 short(n[1]))
1083 short(n[1]))
1084 fetch.append(n[1]) # earliest unknown
1084 fetch.append(n[1]) # earliest unknown
1085 base[n[2]] = 1 # latest known
1085 base[n[2]] = 1 # latest known
1086 continue
1086 continue
1087
1087
1088 for a in n[2:4]:
1088 for a in n[2:4]:
1089 if a not in rep:
1089 if a not in rep:
1090 r.append(a)
1090 r.append(a)
1091 rep[a] = 1
1091 rep[a] = 1
1092
1092
1093 seen[n[0]] = 1
1093 seen[n[0]] = 1
1094
1094
1095 if r:
1095 if r:
1096 reqcnt += 1
1096 reqcnt += 1
1097 self.ui.debug("request %d: %s\n" %
1097 self.ui.debug("request %d: %s\n" %
1098 (reqcnt, " ".join(map(short, r))))
1098 (reqcnt, " ".join(map(short, r))))
1099 for p in range(0, len(r), 10):
1099 for p in range(0, len(r), 10):
1100 for b in remote.branches(r[p:p+10]):
1100 for b in remote.branches(r[p:p+10]):
1101 self.ui.debug("received %s:%s\n" %
1101 self.ui.debug("received %s:%s\n" %
1102 (short(b[0]), short(b[1])))
1102 (short(b[0]), short(b[1])))
1103 if b[0] not in m and b[0] not in seen:
1103 if b[0] not in m and b[0] not in seen:
1104 unknown.append(b)
1104 unknown.append(b)
1105
1105
1106 # do binary search on the branches we found
1106 # do binary search on the branches we found
1107 while search:
1107 while search:
1108 n = search.pop(0)
1108 n = search.pop(0)
1109 reqcnt += 1
1109 reqcnt += 1
1110 l = remote.between([(n[0], n[1])])[0]
1110 l = remote.between([(n[0], n[1])])[0]
1111 l.append(n[1])
1111 l.append(n[1])
1112 p = n[0]
1112 p = n[0]
1113 f = 1
1113 f = 1
1114 for i in l:
1114 for i in l:
1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1115 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1116 if i in m:
1116 if i in m:
1117 if f <= 2:
1117 if f <= 2:
1118 self.ui.debug("found new branch changeset %s\n" %
1118 self.ui.debug("found new branch changeset %s\n" %
1119 short(p))
1119 short(p))
1120 fetch.append(p)
1120 fetch.append(p)
1121 base[i] = 1
1121 base[i] = 1
1122 else:
1122 else:
1123 self.ui.debug("narrowed branch search to %s:%s\n"
1123 self.ui.debug("narrowed branch search to %s:%s\n"
1124 % (short(p), short(i)))
1124 % (short(p), short(i)))
1125 search.append((p, i))
1125 search.append((p, i))
1126 break
1126 break
1127 p, f = i, f * 2
1127 p, f = i, f * 2
1128
1128
1129 # sanity check our fetch list
1129 # sanity check our fetch list
1130 for f in fetch:
1130 for f in fetch:
1131 if f in m:
1131 if f in m:
1132 raise RepoError("already have changeset " + short(f[:4]))
1132 raise RepoError("already have changeset " + short(f[:4]))
1133
1133
1134 if base.keys() == [nullid]:
1134 if base.keys() == [nullid]:
1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1135 self.ui.warn("warning: pulling from an unrelated repository!\n")
1136
1136
1137 self.ui.note("adding new changesets starting at " +
1137 self.ui.note("adding new changesets starting at " +
1138 " ".join([short(f) for f in fetch]) + "\n")
1138 " ".join([short(f) for f in fetch]) + "\n")
1139
1139
1140 self.ui.debug("%d total queries\n" % reqcnt)
1140 self.ui.debug("%d total queries\n" % reqcnt)
1141
1141
1142 return fetch
1142 return fetch
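# A rough usage sketch of the discovery loop above (hypothetical names and
# URL; this mirrors what pull() does further down).  Each entry returned by
# remote.branches() is a 4-tuple (head, root, parent-of-root-1,
# parent-of-root-2), and the 'fetch' list returned here holds the roots of
# the changesets we are missing:
#
#   other = repository(ui, "http://example.com/hg/project")
#   fetch = self.findincoming(other)     # earliest unknown changesets
#   cg = other.changegroup(fetch)        # everything built on top of them
#   self.addchangegroup(cg)
#
# The binary search doubles the step 'f' each round, so a long linear run
# of unknown history is narrowed with roughly log(n) 'between' requests.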
1143
1143
1144 def findoutgoing(self, remote):
1144 def findoutgoing(self, remote):
1145 base = {}
1145 base = {}
1146 self.findincoming(remote, base)
1146 self.findincoming(remote, base)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1148
1148
1149 # prune everything remote has from the tree
1149 # prune everything remote has from the tree
1150 del remain[nullid]
1150 del remain[nullid]
1151 remove = base.keys()
1151 remove = base.keys()
1152 while remove:
1152 while remove:
1153 n = remove.pop(0)
1153 n = remove.pop(0)
1154 if n in remain:
1154 if n in remain:
1155 del remain[n]
1155 del remain[n]
1156 for p in self.changelog.parents(n):
1156 for p in self.changelog.parents(n):
1157 remove.append(p)
1157 remove.append(p)
1158
1158
1159 # find every node whose parents have been pruned
1159 # find every node whose parents have been pruned
1160 subset = []
1160 subset = []
1161 for n in remain:
1161 for n in remain:
1162 p1, p2 = self.changelog.parents(n)
1162 p1, p2 = self.changelog.parents(n)
1163 if p1 not in remain and p2 not in remain:
1163 if p1 not in remain and p2 not in remain:
1164 subset.append(n)
1164 subset.append(n)
1165
1165
1166 # this is the set of all roots we have to push
1166 # this is the set of all roots we have to push
1167 return subset
1167 return subset
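# A tiny worked example of the pruning above (hypothetical history): if the
# remote already has revisions {0, 1, 2} and we additionally have 3 and 4
# with 2 -> 3 -> 4, then 0..2 are deleted from 'remain' as ancestors of the
# common base, leaving {3, 4}.  Only 3 has both parents outside 'remain',
# so subset == [3]: the set of roots handed to changegroup() when pushing.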
1168
1168
1169 def pull(self, remote):
1169 def pull(self, remote):
1170 lock = self.lock()
1170 lock = self.lock()
1171
1171
1172 # if we have an empty repo, fetch everything
1172 # if we have an empty repo, fetch everything
1173 if self.changelog.tip() == nullid:
1173 if self.changelog.tip() == nullid:
1174 self.ui.status("requesting all changes\n")
1174 self.ui.status("requesting all changes\n")
1175 fetch = [nullid]
1175 fetch = [nullid]
1176 else:
1176 else:
1177 fetch = self.findincoming(remote)
1177 fetch = self.findincoming(remote)
1178
1178
1179 if not fetch:
1179 if not fetch:
1180 self.ui.status("no changes found\n")
1180 self.ui.status("no changes found\n")
1181 return 1
1181 return 1
1182
1182
1183 cg = remote.changegroup(fetch)
1183 cg = remote.changegroup(fetch)
1184 return self.addchangegroup(cg)
1184 return self.addchangegroup(cg)
1185
1185
1186 def push(self, remote):
1186 def push(self, remote):
1187 lock = remote.lock()
1187 lock = remote.lock()
1188 update = self.findoutgoing(remote)
1188 update = self.findoutgoing(remote)
1189 if not update:
1189 if not update:
1190 self.ui.status("no changes found\n")
1190 self.ui.status("no changes found\n")
1191 return 1
1191 return 1
1192
1192
1193 cg = self.changegroup(update)
1193 cg = self.changegroup(update)
1194 return remote.addchangegroup(cg)
1194 return remote.addchangegroup(cg)
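# End-to-end sketch tying pull() and push() together (hypothetical paths
# and URL; 'u' stands for whatever ui object the caller already has):
#
#   local = repository(u, ".")
#   other = repository(u, "ssh://hg@example.com/project")
#   local.pull(other)   # findincoming -> remote.changegroup -> addchangegroup
#   local.push(other)   # findoutgoing -> local changegroup -> remote.addchangegroup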
1195
1195
1196 def changegroup(self, basenodes):
1196 def changegroup(self, basenodes):
1197 class genread:
1197 class genread:
1198 def __init__(self, generator):
1198 def __init__(self, generator):
1199 self.g = generator
1199 self.g = generator
1200 self.buf = ""
1200 self.buf = ""
1201 def read(self, l):
1201 def read(self, l):
1202 while l > len(self.buf):
1202 while l > len(self.buf):
1203 try:
1203 try:
1204 self.buf += self.g.next()
1204 self.buf += self.g.next()
1205 except StopIteration:
1205 except StopIteration:
1206 break
1206 break
1207 d, self.buf = self.buf[:l], self.buf[l:]
1207 d, self.buf = self.buf[:l], self.buf[l:]
1208 return d
1208 return d
1209
1209
1210 def gengroup():
1210 def gengroup():
1211 nodes = self.newer(basenodes)
1211 nodes = self.newer(basenodes)
1212
1212
1213 # construct the link map
1213 # construct the link map
1214 linkmap = {}
1214 linkmap = {}
1215 for n in nodes:
1215 for n in nodes:
1216 linkmap[self.changelog.rev(n)] = n
1216 linkmap[self.changelog.rev(n)] = n
1217
1217
1218 # construct a list of all changed files
1218 # construct a list of all changed files
1219 changed = {}
1219 changed = {}
1220 for n in nodes:
1220 for n in nodes:
1221 c = self.changelog.read(n)
1221 c = self.changelog.read(n)
1222 for f in c[3]:
1222 for f in c[3]:
1223 changed[f] = 1
1223 changed[f] = 1
1224 changed = changed.keys()
1224 changed = changed.keys()
1225 changed.sort()
1225 changed.sort()
1226
1226
1227 # the changegroup is changesets + manifests + all file revs
1227 # the changegroup is changesets + manifests + all file revs
1228 revs = [ self.changelog.rev(n) for n in nodes ]
1228 revs = [ self.changelog.rev(n) for n in nodes ]
1229
1229
1230 for y in self.changelog.group(linkmap): yield y
1230 for y in self.changelog.group(linkmap): yield y
1231 for y in self.manifest.group(linkmap): yield y
1231 for y in self.manifest.group(linkmap): yield y
1232 for f in changed:
1232 for f in changed:
1233 yield struct.pack(">l", len(f) + 4) + f
1233 yield struct.pack(">l", len(f) + 4) + f
1234 g = self.file(f).group(linkmap)
1234 g = self.file(f).group(linkmap)
1235 for y in g:
1235 for y in g:
1236 yield y
1236 yield y
1237
1237
1238 yield struct.pack(">l", 0)
1238 yield struct.pack(">l", 0)
1239
1239
1240 return genread(gengroup())
1240 return genread(gengroup())
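# The stream produced above is a simple length-prefixed format (a summary,
# not a normative spec): each chunk is a 4-byte big-endian length that
# includes the 4 length bytes themselves, followed by the payload; anything
# of length <= 4 (see getchunk below) ends a group.  The order is: one group
# of changelog entries, one group of manifest entries, then for each changed
# file a chunk holding the file name followed by that file's delta group,
# and finally a zero chunk closing the whole changegroup.
#
#   >>> struct.pack(">l", len("foo") + 4) + "foo"   # a file-name chunk
#   '\x00\x00\x00\x07foo'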
1241
1241
1242 def addchangegroup(self, source):
1242 def addchangegroup(self, source):
1243
1243
1244 def getchunk():
1244 def getchunk():
1245 d = source.read(4)
1245 d = source.read(4)
1246 if not d: return ""
1246 if not d: return ""
1247 l = struct.unpack(">l", d)[0]
1247 l = struct.unpack(">l", d)[0]
1248 if l <= 4: return ""
1248 if l <= 4: return ""
1249 return source.read(l - 4)
1249 return source.read(l - 4)
1250
1250
1251 def getgroup():
1251 def getgroup():
1252 while 1:
1252 while 1:
1253 c = getchunk()
1253 c = getchunk()
1254 if not c: break
1254 if not c: break
1255 yield c
1255 yield c
1256
1256
1257 def csmap(x):
1257 def csmap(x):
1258 self.ui.debug("add changeset %s\n" % short(x))
1258 self.ui.debug("add changeset %s\n" % short(x))
1259 return self.changelog.count()
1259 return self.changelog.count()
1260
1260
1261 def revmap(x):
1261 def revmap(x):
1262 return self.changelog.rev(x)
1262 return self.changelog.rev(x)
1263
1263
1264 if not source: return
1264 if not source: return
1265 changesets = files = revisions = 0
1265 changesets = files = revisions = 0
1266
1266
1267 tr = self.transaction()
1267 tr = self.transaction()
1268
1268
1269 # pull off the changeset group
1269 # pull off the changeset group
1270 self.ui.status("adding changesets\n")
1270 self.ui.status("adding changesets\n")
1271 co = self.changelog.tip()
1271 co = self.changelog.tip()
1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1272 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1273 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1274
1274
1275 # pull off the manifest group
1275 # pull off the manifest group
1276 self.ui.status("adding manifests\n")
1276 self.ui.status("adding manifests\n")
1277 mm = self.manifest.tip()
1277 mm = self.manifest.tip()
1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1278 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1279
1279
1280 # process the files
1280 # process the files
1281 self.ui.status("adding file revisions\n")
1281 self.ui.status("adding file changes\n")
1282 while 1:
1282 while 1:
1283 f = getchunk()
1283 f = getchunk()
1284 if not f: break
1284 if not f: break
1285 self.ui.debug("adding %s revisions\n" % f)
1285 self.ui.debug("adding %s revisions\n" % f)
1286 fl = self.file(f)
1286 fl = self.file(f)
1287 o = fl.count()
1287 o = fl.count()
1288 n = fl.addgroup(getgroup(), revmap, tr)
1288 n = fl.addgroup(getgroup(), revmap, tr)
1289 revisions += fl.count() - o
1289 revisions += fl.count() - o
1290 files += 1
1290 files += 1
1291
1291
1292 self.ui.status(("modified %d files, added %d changesets" +
1292 self.ui.status(("added %d changesets" +
1293 " and %d new revisions\n")
1293 " with %d changes to %d files\n")
1294 % (files, changesets, revisions))
1294 % (changesets, revisions, files))
1295
1295
1296 tr.close()
1296 tr.close()
1297 return
1297 return
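# With the wording change in this revision, a pull now reports, e.g.
# (numbers are illustrative):
#
#   adding changesets
#   adding manifests
#   adding file changes
#   added 3 changesets with 7 changes to 4 files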
1298
1298
1299 def update(self, node, allow=False, force=False, choose=None,
1299 def update(self, node, allow=False, force=False, choose=None,
1300 moddirstate=True):
1300 moddirstate=True):
1301 pl = self.dirstate.parents()
1301 pl = self.dirstate.parents()
1302 if not force and pl[1] != nullid:
1302 if not force and pl[1] != nullid:
1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1303 self.ui.warn("aborting: outstanding uncommitted merges\n")
1304 return 1
1304 return 1
1305
1305
1306 p1, p2 = pl[0], node
1306 p1, p2 = pl[0], node
1307 pa = self.changelog.ancestor(p1, p2)
1307 pa = self.changelog.ancestor(p1, p2)
1308 m1n = self.changelog.read(p1)[0]
1308 m1n = self.changelog.read(p1)[0]
1309 m2n = self.changelog.read(p2)[0]
1309 m2n = self.changelog.read(p2)[0]
1310 man = self.manifest.ancestor(m1n, m2n)
1310 man = self.manifest.ancestor(m1n, m2n)
1311 m1 = self.manifest.read(m1n)
1311 m1 = self.manifest.read(m1n)
1312 mf1 = self.manifest.readflags(m1n)
1312 mf1 = self.manifest.readflags(m1n)
1313 m2 = self.manifest.read(m2n)
1313 m2 = self.manifest.read(m2n)
1314 mf2 = self.manifest.readflags(m2n)
1314 mf2 = self.manifest.readflags(m2n)
1315 ma = self.manifest.read(man)
1315 ma = self.manifest.read(man)
1316 mfa = self.manifest.readflags(man)
1316 mfa = self.manifest.readflags(man)
1317
1317
1318 (c, a, d, u) = self.changes()
1318 (c, a, d, u) = self.changes()
1319
1319
1320 # is this a jump, or a merge? i.e. is there a linear path
1320 # is this a jump, or a merge? i.e. is there a linear path
1321 # from p1 to p2?
1321 # from p1 to p2?
1322 linear_path = (pa == p1 or pa == p2)
1322 linear_path = (pa == p1 or pa == p2)
1323
1323
1324 # resolve the manifest to determine which files
1324 # resolve the manifest to determine which files
1325 # we care about merging
1325 # we care about merging
1326 self.ui.note("resolving manifests\n")
1326 self.ui.note("resolving manifests\n")
1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1327 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1328 (force, allow, moddirstate, linear_path))
1328 (force, allow, moddirstate, linear_path))
1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1329 self.ui.debug(" ancestor %s local %s remote %s\n" %
1330 (short(man), short(m1n), short(m2n)))
1330 (short(man), short(m1n), short(m2n)))
1331
1331
1332 merge = {}
1332 merge = {}
1333 get = {}
1333 get = {}
1334 remove = []
1334 remove = []
1335 mark = {}
1335 mark = {}
1336
1336
1337 # construct a working dir manifest
1337 # construct a working dir manifest
1338 mw = m1.copy()
1338 mw = m1.copy()
1339 mfw = mf1.copy()
1339 mfw = mf1.copy()
1340 umap = dict.fromkeys(u)
1340 umap = dict.fromkeys(u)
1341
1341
1342 for f in a + c + u:
1342 for f in a + c + u:
1343 mw[f] = ""
1343 mw[f] = ""
1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1344 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1345
1345
1346 for f in d:
1346 for f in d:
1347 if f in mw: del mw[f]
1347 if f in mw: del mw[f]
1348
1348
1349 # If we're jumping between revisions (as opposed to merging),
1349 # If we're jumping between revisions (as opposed to merging),
1350 # and if neither the working directory nor the target rev has
1350 # and if neither the working directory nor the target rev has
1351 # the file, then we need to remove it from the dirstate, to
1351 # the file, then we need to remove it from the dirstate, to
1352 # prevent the dirstate from listing the file when it is no
1352 # prevent the dirstate from listing the file when it is no
1353 # longer in the manifest.
1353 # longer in the manifest.
1354 if moddirstate and linear_path and f not in m2:
1354 if moddirstate and linear_path and f not in m2:
1355 self.dirstate.forget((f,))
1355 self.dirstate.forget((f,))
1356
1356
1357 # Compare manifests
1357 # Compare manifests
1358 for f, n in mw.iteritems():
1358 for f, n in mw.iteritems():
1359 if choose and not choose(f): continue
1359 if choose and not choose(f): continue
1360 if f in m2:
1360 if f in m2:
1361 s = 0
1361 s = 0
1362
1362
1363 # is the wfile new since m1, and match m2?
1363 # is the wfile new since m1, and match m2?
1364 if f not in m1:
1364 if f not in m1:
1365 t1 = self.wfile(f).read()
1365 t1 = self.wfile(f).read()
1366 t2 = self.file(f).revision(m2[f])
1366 t2 = self.file(f).revision(m2[f])
1367 if cmp(t1, t2) == 0:
1367 if cmp(t1, t2) == 0:
1368 mark[f] = 1
1368 mark[f] = 1
1369 n = m2[f]
1369 n = m2[f]
1370 del t1, t2
1370 del t1, t2
1371
1371
1372 # are files different?
1372 # are files different?
1373 if n != m2[f]:
1373 if n != m2[f]:
1374 a = ma.get(f, nullid)
1374 a = ma.get(f, nullid)
1375 # are both different from the ancestor?
1375 # are both different from the ancestor?
1376 if n != a and m2[f] != a:
1376 if n != a and m2[f] != a:
1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1377 self.ui.debug(" %s versions differ, resolve\n" % f)
1378 # merge executable bits
1378 # merge executable bits
1379 # "if we changed or they changed, change in merge"
1379 # "if we changed or they changed, change in merge"
1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1380 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1381 mode = ((a^b) | (a^c)) ^ a
1381 mode = ((a^b) | (a^c)) ^ a
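# The xor trick above implements "if we changed or they changed, change in
# merge" for the boolean exec bit: with ancestor a, working copy b and
# remote c, (a^b)|(a^c) is 1 exactly when at least one side differs from
# the ancestor, and xor-ing that back into a flips the bit only in that
# case -- which for a single bit means taking whichever side changed it.
# E.g. a=0, b=1, c=0 -> mode=1 (keep our change); a=1, b=1, c=0 -> mode=0
# (take their change); a=0, b=0, c=0 -> mode=0 (nothing changed).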
1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1382 merge[f] = (m1.get(f, nullid), m2[f], mode)
1383 s = 1
1383 s = 1
1384 # are we clobbering?
1384 # are we clobbering?
1385 # is remote's version newer?
1385 # is remote's version newer?
1386 # or are we going back in time?
1386 # or are we going back in time?
1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1387 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1388 self.ui.debug(" remote %s is newer, get\n" % f)
1388 self.ui.debug(" remote %s is newer, get\n" % f)
1389 get[f] = m2[f]
1389 get[f] = m2[f]
1390 s = 1
1390 s = 1
1391 else:
1391 else:
1392 mark[f] = 1
1392 mark[f] = 1
1393 elif f in umap:
1393 elif f in umap:
1394 # this unknown file is the same as the checkout
1394 # this unknown file is the same as the checkout
1395 get[f] = m2[f]
1395 get[f] = m2[f]
1396
1396
1397 if not s and mfw[f] != mf2[f]:
1397 if not s and mfw[f] != mf2[f]:
1398 if force:
1398 if force:
1399 self.ui.debug(" updating permissions for %s\n" % f)
1399 self.ui.debug(" updating permissions for %s\n" % f)
1400 util.set_exec(self.wjoin(f), mf2[f])
1400 util.set_exec(self.wjoin(f), mf2[f])
1401 else:
1401 else:
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1402 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1403 mode = ((a^b) | (a^c)) ^ a
1403 mode = ((a^b) | (a^c)) ^ a
1404 if mode != b:
1404 if mode != b:
1405 self.ui.debug(" updating permissions for %s\n" % f)
1405 self.ui.debug(" updating permissions for %s\n" % f)
1406 util.set_exec(self.wjoin(f), mode)
1406 util.set_exec(self.wjoin(f), mode)
1407 mark[f] = 1
1407 mark[f] = 1
1408 del m2[f]
1408 del m2[f]
1409 elif f in ma:
1409 elif f in ma:
1410 if n != ma[f]:
1410 if n != ma[f]:
1411 r = "d"
1411 r = "d"
1412 if not force and (linear_path or allow):
1412 if not force and (linear_path or allow):
1413 r = self.ui.prompt(
1413 r = self.ui.prompt(
1414 (" local changed %s which remote deleted\n" % f) +
1414 (" local changed %s which remote deleted\n" % f) +
1415 "(k)eep or (d)elete?", "[kd]", "k")
1415 "(k)eep or (d)elete?", "[kd]", "k")
1416 if r == "d":
1416 if r == "d":
1417 remove.append(f)
1417 remove.append(f)
1418 else:
1418 else:
1419 self.ui.debug("other deleted %s\n" % f)
1419 self.ui.debug("other deleted %s\n" % f)
1420 remove.append(f) # other deleted it
1420 remove.append(f) # other deleted it
1421 else:
1421 else:
1422 if n == m1.get(f, nullid): # same as parent
1422 if n == m1.get(f, nullid): # same as parent
1423 if p2 == pa: # going backwards?
1423 if p2 == pa: # going backwards?
1424 self.ui.debug("remote deleted %s\n" % f)
1424 self.ui.debug("remote deleted %s\n" % f)
1425 remove.append(f)
1425 remove.append(f)
1426 else:
1426 else:
1427 self.ui.debug("local created %s, keeping\n" % f)
1427 self.ui.debug("local created %s, keeping\n" % f)
1428 else:
1428 else:
1429 self.ui.debug("working dir created %s, keeping\n" % f)
1429 self.ui.debug("working dir created %s, keeping\n" % f)
1430
1430
1431 for f, n in m2.iteritems():
1431 for f, n in m2.iteritems():
1432 if choose and not choose(f): continue
1432 if choose and not choose(f): continue
1433 if f[0] == "/": continue
1433 if f[0] == "/": continue
1434 if f in ma and n != ma[f]:
1434 if f in ma and n != ma[f]:
1435 r = "k"
1435 r = "k"
1436 if not force and (linear_path or allow):
1436 if not force and (linear_path or allow):
1437 r = self.ui.prompt(
1437 r = self.ui.prompt(
1438 ("remote changed %s which local deleted\n" % f) +
1438 ("remote changed %s which local deleted\n" % f) +
1439 "(k)eep or (d)elete?", "[kd]", "k")
1439 "(k)eep or (d)elete?", "[kd]", "k")
1440 if r == "k": get[f] = n
1440 if r == "k": get[f] = n
1441 elif f not in ma:
1441 elif f not in ma:
1442 self.ui.debug("remote created %s\n" % f)
1442 self.ui.debug("remote created %s\n" % f)
1443 get[f] = n
1443 get[f] = n
1444 else:
1444 else:
1445 if force or p2 == pa: # going backwards?
1445 if force or p2 == pa: # going backwards?
1446 self.ui.debug("local deleted %s, recreating\n" % f)
1446 self.ui.debug("local deleted %s, recreating\n" % f)
1447 get[f] = n
1447 get[f] = n
1448 else:
1448 else:
1449 self.ui.debug("local deleted %s\n" % f)
1449 self.ui.debug("local deleted %s\n" % f)
1450
1450
1451 del mw, m1, m2, ma
1451 del mw, m1, m2, ma
1452
1452
1453 if force:
1453 if force:
1454 for f in merge:
1454 for f in merge:
1455 get[f] = merge[f][1]
1455 get[f] = merge[f][1]
1456 merge = {}
1456 merge = {}
1457
1457
1458 if linear_path or force:
1458 if linear_path or force:
1459 # we don't need to do any magic, just jump to the new rev
1459 # we don't need to do any magic, just jump to the new rev
1460 mode = 'n'
1460 mode = 'n'
1461 p1, p2 = p2, nullid
1461 p1, p2 = p2, nullid
1462 else:
1462 else:
1463 if not allow:
1463 if not allow:
1464 self.ui.status("this update spans a branch" +
1464 self.ui.status("this update spans a branch" +
1465 " affecting the following files:\n")
1465 " affecting the following files:\n")
1466 fl = merge.keys() + get.keys()
1466 fl = merge.keys() + get.keys()
1467 fl.sort()
1467 fl.sort()
1468 for f in fl:
1468 for f in fl:
1469 cf = ""
1469 cf = ""
1470 if f in merge: cf = " (resolve)"
1470 if f in merge: cf = " (resolve)"
1471 self.ui.status(" %s%s\n" % (f, cf))
1471 self.ui.status(" %s%s\n" % (f, cf))
1472 self.ui.warn("aborting update spanning branches!\n")
1472 self.ui.warn("aborting update spanning branches!\n")
1473 self.ui.status("(use update -m to perform a branch merge)\n")
1473 self.ui.status("(use update -m to perform a branch merge)\n")
1474 return 1
1474 return 1
1475 # we have to remember what files we needed to get/change
1475 # we have to remember what files we needed to get/change
1476 # because any file that's different from either one of its
1476 # because any file that's different from either one of its
1477 # parents must be in the changeset
1477 # parents must be in the changeset
1478 mode = 'm'
1478 mode = 'm'
1479 if moddirstate:
1479 if moddirstate:
1480 self.dirstate.update(mark.keys(), "m")
1480 self.dirstate.update(mark.keys(), "m")
1481
1481
1482 if moddirstate:
1482 if moddirstate:
1483 self.dirstate.setparents(p1, p2)
1483 self.dirstate.setparents(p1, p2)
1484
1484
1485 # get the files we don't need to change
1485 # get the files we don't need to change
1486 files = get.keys()
1486 files = get.keys()
1487 files.sort()
1487 files.sort()
1488 for f in files:
1488 for f in files:
1489 if f[0] == "/": continue
1489 if f[0] == "/": continue
1490 self.ui.note("getting %s\n" % f)
1490 self.ui.note("getting %s\n" % f)
1491 t = self.file(f).read(get[f])
1491 t = self.file(f).read(get[f])
1492 try:
1492 try:
1493 self.wfile(f, "w").write(t)
1493 self.wfile(f, "w").write(t)
1494 except IOError:
1494 except IOError:
1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1495 os.makedirs(os.path.dirname(self.wjoin(f)))
1496 self.wfile(f, "w").write(t)
1496 self.wfile(f, "w").write(t)
1497 util.set_exec(self.wjoin(f), mf2[f])
1497 util.set_exec(self.wjoin(f), mf2[f])
1498 if moddirstate:
1498 if moddirstate:
1499 self.dirstate.update([f], mode)
1499 self.dirstate.update([f], mode)
1500
1500
1501 # merge the tricky bits
1501 # merge the tricky bits
1502 files = merge.keys()
1502 files = merge.keys()
1503 files.sort()
1503 files.sort()
1504 for f in files:
1504 for f in files:
1505 self.ui.status("merging %s\n" % f)
1505 self.ui.status("merging %s\n" % f)
1506 m, o, flag = merge[f]
1506 m, o, flag = merge[f]
1507 self.merge3(f, m, o)
1507 self.merge3(f, m, o)
1508 util.set_exec(self.wjoin(f), flag)
1508 util.set_exec(self.wjoin(f), flag)
1509 if moddirstate and mode == 'm':
1509 if moddirstate and mode == 'm':
1510 # only update dirstate on branch merge, otherwise we
1510 # only update dirstate on branch merge, otherwise we
1511 # could mark files with changes as unchanged
1511 # could mark files with changes as unchanged
1512 self.dirstate.update([f], mode)
1512 self.dirstate.update([f], mode)
1513
1513
1514 remove.sort()
1514 remove.sort()
1515 for f in remove:
1515 for f in remove:
1516 self.ui.note("removing %s\n" % f)
1516 self.ui.note("removing %s\n" % f)
1517 try:
1517 try:
1518 os.unlink(f)
1518 os.unlink(f)
1519 except OSError, inst:
1519 except OSError, inst:
1520 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1520 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1521 # try removing directories that might now be empty
1521 # try removing directories that might now be empty
1522 try: os.removedirs(os.path.dirname(f))
1522 try: os.removedirs(os.path.dirname(f))
1523 except: pass
1523 except: pass
1524 if moddirstate:
1524 if moddirstate:
1525 if mode == 'n':
1525 if mode == 'n':
1526 self.dirstate.forget(remove)
1526 self.dirstate.forget(remove)
1527 else:
1527 else:
1528 self.dirstate.update(remove, 'r')
1528 self.dirstate.update(remove, 'r')
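# Usage sketch for update() (hypothetical node value), mirroring the
# force/allow/linear_path handling above:
#
#   repo.update(node)                # jump to 'node' when history is linear
#   repo.update(node, allow=True)    # permit a branch merge (mode 'm')
#   repo.update(node, force=True)    # clobber local changes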
1529
1529
1530 def merge3(self, fn, my, other):
1530 def merge3(self, fn, my, other):
1531 """perform a 3-way merge in the working directory"""
1531 """perform a 3-way merge in the working directory"""
1532
1532
1533 def temp(prefix, node):
1533 def temp(prefix, node):
1534 pre = "%s~%s." % (os.path.basename(fn), prefix)
1534 pre = "%s~%s." % (os.path.basename(fn), prefix)
1535 (fd, name) = tempfile.mkstemp("", pre)
1535 (fd, name) = tempfile.mkstemp("", pre)
1536 f = os.fdopen(fd, "wb")
1536 f = os.fdopen(fd, "wb")
1537 f.write(fl.revision(node))
1537 f.write(fl.revision(node))
1538 f.close()
1538 f.close()
1539 return name
1539 return name
1540
1540
1541 fl = self.file(fn)
1541 fl = self.file(fn)
1542 base = fl.ancestor(my, other)
1542 base = fl.ancestor(my, other)
1543 a = self.wjoin(fn)
1543 a = self.wjoin(fn)
1544 b = temp("base", base)
1544 b = temp("base", base)
1545 c = temp("other", other)
1545 c = temp("other", other)
1546
1546
1547 self.ui.note("resolving %s\n" % fn)
1547 self.ui.note("resolving %s\n" % fn)
1548 self.ui.debug("file %s: other %s ancestor %s\n" %
1548 self.ui.debug("file %s: other %s ancestor %s\n" %
1549 (fn, short(other), short(base)))
1549 (fn, short(other), short(base)))
1550
1550
1551 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1551 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1552 or "hgmerge")
1552 or "hgmerge")
1553 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1553 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1554 if r:
1554 if r:
1555 self.ui.warn("merging %s failed!\n" % fn)
1555 self.ui.warn("merging %s failed!\n" % fn)
1556
1556
1557 os.unlink(b)
1557 os.unlink(b)
1558 os.unlink(c)
1558 os.unlink(c)
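# merge3() shells out to an external merge program: $HGMERGE if set, else
# the "merge" entry of the [ui] config section, else a program literally
# named "hgmerge".  For a file "src/foo.c" (hypothetical) the os.system()
# call above expands to roughly:
#
#   hgmerge /repo/src/foo.c /tmp/foo.c~base.XXXX /tmp/foo.c~other.XXXX
#
# i.e. tool <local working file> <ancestor copy> <other copy>; a non-zero
# exit status only triggers the "merging ... failed" warning and the
# update continues.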
1559
1559
1560 def verify(self):
1560 def verify(self):
1561 filelinkrevs = {}
1561 filelinkrevs = {}
1562 filenodes = {}
1562 filenodes = {}
1563 changesets = revisions = files = 0
1563 changesets = revisions = files = 0
1564 errors = 0
1564 errors = 0
1565
1565
1566 seen = {}
1566 seen = {}
1567 self.ui.status("checking changesets\n")
1567 self.ui.status("checking changesets\n")
1568 for i in range(self.changelog.count()):
1568 for i in range(self.changelog.count()):
1569 changesets += 1
1569 changesets += 1
1570 n = self.changelog.node(i)
1570 n = self.changelog.node(i)
1571 if n in seen:
1571 if n in seen:
1572 self.ui.warn("duplicate changeset at revision %d\n" % i)
1572 self.ui.warn("duplicate changeset at revision %d\n" % i)
1573 errors += 1
1573 errors += 1
1574 seen[n] = 1
1574 seen[n] = 1
1575
1575
1576 for p in self.changelog.parents(n):
1576 for p in self.changelog.parents(n):
1577 if p not in self.changelog.nodemap:
1577 if p not in self.changelog.nodemap:
1578 self.ui.warn("changeset %s has unknown parent %s\n" %
1578 self.ui.warn("changeset %s has unknown parent %s\n" %
1579 (short(n), short(p)))
1579 (short(n), short(p)))
1580 errors += 1
1580 errors += 1
1581 try:
1581 try:
1582 changes = self.changelog.read(n)
1582 changes = self.changelog.read(n)
1583 except Exception, inst:
1583 except Exception, inst:
1584 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1584 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1585 errors += 1
1585 errors += 1
1586
1586
1587 for f in changes[3]:
1587 for f in changes[3]:
1588 filelinkrevs.setdefault(f, []).append(i)
1588 filelinkrevs.setdefault(f, []).append(i)
1589
1589
1590 seen = {}
1590 seen = {}
1591 self.ui.status("checking manifests\n")
1591 self.ui.status("checking manifests\n")
1592 for i in range(self.manifest.count()):
1592 for i in range(self.manifest.count()):
1593 n = self.manifest.node(i)
1593 n = self.manifest.node(i)
1594 if n in seen:
1594 if n in seen:
1595 self.ui.warn("duplicate manifest at revision %d\n" % i)
1595 self.ui.warn("duplicate manifest at revision %d\n" % i)
1596 errors += 1
1596 errors += 1
1597 seen[n] = 1
1597 seen[n] = 1
1598
1598
1599 for p in self.manifest.parents(n):
1599 for p in self.manifest.parents(n):
1600 if p not in self.manifest.nodemap:
1600 if p not in self.manifest.nodemap:
1601 self.ui.warn("manifest %s has unknown parent %s\n" %
1601 self.ui.warn("manifest %s has unknown parent %s\n" %
1602 (short(n), short(p)))
1602 (short(n), short(p)))
1603 errors += 1
1603 errors += 1
1604
1604
1605 try:
1605 try:
1606 delta = mdiff.patchtext(self.manifest.delta(n))
1606 delta = mdiff.patchtext(self.manifest.delta(n))
1607 except KeyboardInterrupt:
1607 except KeyboardInterrupt:
1608 self.ui.warn("aborted\n")
1608 self.ui.warn("aborted\n")
1609 sys.exit(0)
1609 sys.exit(0)
1610 except Exception, inst:
1610 except Exception, inst:
1611 self.ui.warn("unpacking manifest %s: %s\n"
1611 self.ui.warn("unpacking manifest %s: %s\n"
1612 % (short(n), inst))
1612 % (short(n), inst))
1613 errors += 1
1613 errors += 1
1614
1614
1615 ff = [ l.split('\0') for l in delta.splitlines() ]
1615 ff = [ l.split('\0') for l in delta.splitlines() ]
1616 for f, fn in ff:
1616 for f, fn in ff:
1617 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1617 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1618
1618
1619 self.ui.status("crosschecking files in changesets and manifests\n")
1619 self.ui.status("crosschecking files in changesets and manifests\n")
1620 for f in filenodes:
1620 for f in filenodes:
1621 if f not in filelinkrevs:
1621 if f not in filelinkrevs:
1622 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1622 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1623 errors += 1
1623 errors += 1
1624
1624
1625 for f in filelinkrevs:
1625 for f in filelinkrevs:
1626 if f not in filenodes:
1626 if f not in filenodes:
1627 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1627 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1628 errors += 1
1628 errors += 1
1629
1629
1630 self.ui.status("checking files\n")
1630 self.ui.status("checking files\n")
1631 ff = filenodes.keys()
1631 ff = filenodes.keys()
1632 ff.sort()
1632 ff.sort()
1633 for f in ff:
1633 for f in ff:
1634 if f == "/dev/null": continue
1634 if f == "/dev/null": continue
1635 files += 1
1635 files += 1
1636 fl = self.file(f)
1636 fl = self.file(f)
1637 nodes = { nullid: 1 }
1637 nodes = { nullid: 1 }
1638 seen = {}
1638 seen = {}
1639 for i in range(fl.count()):
1639 for i in range(fl.count()):
1640 revisions += 1
1640 revisions += 1
1641 n = fl.node(i)
1641 n = fl.node(i)
1642
1642
1643 if n in seen:
1643 if n in seen:
1644 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1644 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1645 errors += 1
1645 errors += 1
1646
1646
1647 if n not in filenodes[f]:
1647 if n not in filenodes[f]:
1648 self.ui.warn("%s: %d:%s not in manifests\n"
1648 self.ui.warn("%s: %d:%s not in manifests\n"
1649 % (f, i, short(n)))
1649 % (f, i, short(n)))
1650 errors += 1
1650 errors += 1
1651 else:
1651 else:
1652 del filenodes[f][n]
1652 del filenodes[f][n]
1653
1653
1654 flr = fl.linkrev(n)
1654 flr = fl.linkrev(n)
1655 if flr not in filelinkrevs[f]:
1655 if flr not in filelinkrevs[f]:
1656 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1656 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1657 % (f, short(n), fl.linkrev(n)))
1657 % (f, short(n), fl.linkrev(n)))
1658 errors += 1
1658 errors += 1
1659 else:
1659 else:
1660 filelinkrevs[f].remove(flr)
1660 filelinkrevs[f].remove(flr)
1661
1661
1662 # verify contents
1662 # verify contents
1663 try:
1663 try:
1664 t = fl.read(n)
1664 t = fl.read(n)
1665 except Exception, inst:
1665 except Exception, inst:
1666 self.ui.warn("unpacking file %s %s: %s\n"
1666 self.ui.warn("unpacking file %s %s: %s\n"
1667 % (f, short(n), inst))
1667 % (f, short(n), inst))
1668 errors += 1
1668 errors += 1
1669
1669
1670 # verify parents
1670 # verify parents
1671 (p1, p2) = fl.parents(n)
1671 (p1, p2) = fl.parents(n)
1672 if p1 not in nodes:
1672 if p1 not in nodes:
1673 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1673 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1674 (f, short(n), short(p1)))
1674 (f, short(n), short(p1)))
1675 errors += 1
1675 errors += 1
1676 if p2 not in nodes:
1676 if p2 not in nodes:
1677 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1677 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1678 (f, short(n), short(p2)))
1678 (f, short(n), short(p2)))
1679 errors += 1
1679 errors += 1
1680 nodes[n] = 1
1680 nodes[n] = 1
1681
1681
1682 # cross-check
1682 # cross-check
1683 for node in filenodes[f]:
1683 for node in filenodes[f]:
1684 self.ui.warn("node %s in manifests not in %s\n"
1684 self.ui.warn("node %s in manifests not in %s\n"
1685 % (hex(node), f))
1685 % (hex(node), f))
1686 errors += 1
1686 errors += 1
1687
1687
1688 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1688 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1689 (files, changesets, revisions))
1689 (files, changesets, revisions))
1690
1690
1691 if errors:
1691 if errors:
1692 self.ui.warn("%d integrity errors encountered!\n" % errors)
1692 self.ui.warn("%d integrity errors encountered!\n" % errors)
1693 return 1
1693 return 1
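# On a healthy repository verify() prints something like (figures made up):
#
#   checking changesets
#   checking manifests
#   crosschecking files in changesets and manifests
#   checking files
#   5 files, 12 changesets, 27 total revisions
#
# and returns None; any inconsistency bumps 'errors' and produces the
# "%d integrity errors encountered!" warning plus a return value of 1.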
1694
1694
1695 class httprepository:
1695 class httprepository:
1696 def __init__(self, ui, path):
1696 def __init__(self, ui, path):
1697 # fix missing / after hostname
1697 # fix missing / after hostname
1698 s = urlparse.urlsplit(path)
1698 s = urlparse.urlsplit(path)
1699 partial = s[2]
1699 partial = s[2]
1700 if not partial: partial = "/"
1700 if not partial: partial = "/"
1701 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1701 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1702 self.ui = ui
1702 self.ui = ui
1703 no_list = [ "localhost", "127.0.0.1" ]
1703 no_list = [ "localhost", "127.0.0.1" ]
1704 host = ui.config("http_proxy", "host")
1704 host = ui.config("http_proxy", "host")
1705 if host is None:
1705 if host is None:
1706 host = os.environ.get("http_proxy")
1706 host = os.environ.get("http_proxy")
1707 if host and host.startswith('http://'):
1707 if host and host.startswith('http://'):
1708 host = host[7:]
1708 host = host[7:]
1709 user = ui.config("http_proxy", "user")
1709 user = ui.config("http_proxy", "user")
1710 passwd = ui.config("http_proxy", "passwd")
1710 passwd = ui.config("http_proxy", "passwd")
1711 no = ui.config("http_proxy", "no")
1711 no = ui.config("http_proxy", "no")
1712 if no is None:
1712 if no is None:
1713 no = os.environ.get("no_proxy")
1713 no = os.environ.get("no_proxy")
1714 if no:
1714 if no:
1715 no_list = no_list + no.split(",")
1715 no_list = no_list + no.split(",")
1716
1716
1717 no_proxy = 0
1717 no_proxy = 0
1718 for h in no_list:
1718 for h in no_list:
1719 if (path.startswith("http://" + h + "/") or
1719 if (path.startswith("http://" + h + "/") or
1720 path.startswith("http://" + h + ":") or
1720 path.startswith("http://" + h + ":") or
1721 path == "http://" + h):
1721 path == "http://" + h):
1722 no_proxy = 1
1722 no_proxy = 1
1723
1723
1724 # Note: urllib2 takes proxy values from the environment and those will
1724 # Note: urllib2 takes proxy values from the environment and those will
1725 # take precedence
1725 # take precedence
1726 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1726 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1727 if os.environ.has_key(env):
1727 if os.environ.has_key(env):
1728 del os.environ[env]
1728 del os.environ[env]
1729
1729
1730 proxy_handler = urllib2.BaseHandler()
1730 proxy_handler = urllib2.BaseHandler()
1731 if host and not no_proxy:
1731 if host and not no_proxy:
1732 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1732 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1733
1733
1734 authinfo = None
1734 authinfo = None
1735 if user and passwd:
1735 if user and passwd:
1736 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1736 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1737 passmgr.add_password(None, host, user, passwd)
1737 passmgr.add_password(None, host, user, passwd)
1738 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1738 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1739
1739
1740 opener = urllib2.build_opener(proxy_handler, authinfo)
1740 opener = urllib2.build_opener(proxy_handler, authinfo)
1741 urllib2.install_opener(opener)
1741 urllib2.install_opener(opener)
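# The proxy settings read above come from the [http_proxy] section of the
# hgrc (or from the http_proxy/no_proxy environment variables); a
# hypothetical configuration might look like:
#
#   [http_proxy]
#   host = proxy.example.com:3128
#   user = alice
#   passwd = secret
#   no = localhost,intranet.example.com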
1742
1742
1743 def dev(self):
1743 def dev(self):
1744 return -1
1744 return -1
1745
1745
1746 def do_cmd(self, cmd, **args):
1746 def do_cmd(self, cmd, **args):
1747 self.ui.debug("sending %s command\n" % cmd)
1747 self.ui.debug("sending %s command\n" % cmd)
1748 q = {"cmd": cmd}
1748 q = {"cmd": cmd}
1749 q.update(args)
1749 q.update(args)
1750 qs = urllib.urlencode(q)
1750 qs = urllib.urlencode(q)
1751 cu = "%s?%s" % (self.url, qs)
1751 cu = "%s?%s" % (self.url, qs)
1752 resp = urllib2.urlopen(cu)
1752 resp = urllib2.urlopen(cu)
1753 proto = resp.headers['content-type']
1753 proto = resp.headers['content-type']
1754
1754
1755 # accept old "text/plain" and "application/hg-changegroup" for now
1755 # accept old "text/plain" and "application/hg-changegroup" for now
1756 if not proto.startswith('application/mercurial') and \
1756 if not proto.startswith('application/mercurial') and \
1757 not proto.startswith('text/plain') and \
1757 not proto.startswith('text/plain') and \
1758 not proto.startswith('application/hg-changegroup'):
1758 not proto.startswith('application/hg-changegroup'):
1759 raise RepoError("'%s' does not appear to be an hg repository"
1759 raise RepoError("'%s' does not appear to be an hg repository"
1760 % self.url)
1760 % self.url)
1761
1761
1762 if proto.startswith('application/mercurial'):
1762 if proto.startswith('application/mercurial'):
1763 version = proto[22:]
1763 version = proto[22:]
1764 if float(version) > 0.1:
1764 if float(version) > 0.1:
1765 raise RepoError("'%s' uses newer protocol %s" %
1765 raise RepoError("'%s' uses newer protocol %s" %
1766 (self.url, version))
1766 (self.url, version))
1767
1767
1768 return resp
1768 return resp
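# do_cmd() turns each protocol command into a plain GET against the
# repository URL; a branches query, for example, becomes something like
# (hypothetical host and nodes):
#
#   http://example.com/hg/project?cmd=branches&nodes=<hex>+<hex>
#
# and the response's content-type (an "application/mercurial..." value, or
# the older text/plain / application/hg-changegroup forms accepted above)
# is what the version check inspects.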
1769
1769
1770 def heads(self):
1770 def heads(self):
1771 d = self.do_cmd("heads").read()
1771 d = self.do_cmd("heads").read()
1772 try:
1772 try:
1773 return map(bin, d[:-1].split(" "))
1773 return map(bin, d[:-1].split(" "))
1774 except:
1774 except:
1775 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1775 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1776 raise
1776 raise
1777
1777
1778 def branches(self, nodes):
1778 def branches(self, nodes):
1779 n = " ".join(map(hex, nodes))
1779 n = " ".join(map(hex, nodes))
1780 d = self.do_cmd("branches", nodes=n).read()
1780 d = self.do_cmd("branches", nodes=n).read()
1781 try:
1781 try:
1782 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1782 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1783 return br
1783 return br
1784 except:
1784 except:
1785 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1785 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1786 raise
1786 raise
1787
1787
1788 def between(self, pairs):
1788 def between(self, pairs):
1789 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1789 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1790 d = self.do_cmd("between", pairs=n).read()
1790 d = self.do_cmd("between", pairs=n).read()
1791 try:
1791 try:
1792 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1792 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1793 return p
1793 return p
1794 except:
1794 except:
1795 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1795 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1796 raise
1796 raise
1797
1797
1798 def changegroup(self, nodes):
1798 def changegroup(self, nodes):
1799 n = " ".join(map(hex, nodes))
1799 n = " ".join(map(hex, nodes))
1800 f = self.do_cmd("changegroup", roots=n)
1800 f = self.do_cmd("changegroup", roots=n)
1801 bytes = 0
1801 bytes = 0
1802
1802
1803 class zread:
1803 class zread:
1804 def __init__(self, f):
1804 def __init__(self, f):
1805 self.zd = zlib.decompressobj()
1805 self.zd = zlib.decompressobj()
1806 self.f = f
1806 self.f = f
1807 self.buf = ""
1807 self.buf = ""
1808 def read(self, l):
1808 def read(self, l):
1809 while l > len(self.buf):
1809 while l > len(self.buf):
1810 r = self.f.read(4096)
1810 r = self.f.read(4096)
1811 if r:
1811 if r:
1812 self.buf += self.zd.decompress(r)
1812 self.buf += self.zd.decompress(r)
1813 else:
1813 else:
1814 self.buf += self.zd.flush()
1814 self.buf += self.zd.flush()
1815 break
1815 break
1816 d, self.buf = self.buf[:l], self.buf[l:]
1816 d, self.buf = self.buf[:l], self.buf[l:]
1817 return d
1817 return d
1818
1818
1819 return zread(f)
1819 return zread(f)
1820
1820
1821 class remotelock:
1821 class remotelock:
1822 def __init__(self, repo):
1822 def __init__(self, repo):
1823 self.repo = repo
1823 self.repo = repo
1824 def release(self):
1824 def release(self):
1825 self.repo.unlock()
1825 self.repo.unlock()
1826 self.repo = None
1826 self.repo = None
1827 def __del__(self):
1827 def __del__(self):
1828 if self.repo:
1828 if self.repo:
1829 self.release()
1829 self.release()
1830
1830
1831 class sshrepository:
1831 class sshrepository:
1832 def __init__(self, ui, path):
1832 def __init__(self, ui, path):
1833 self.url = path
1833 self.url = path
1834 self.ui = ui
1834 self.ui = ui
1835
1835
1836 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1836 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1837 if not m:
1837 if not m:
1838 raise RepoError("couldn't parse destination %s\n" % path)
1838 raise RepoError("couldn't parse destination %s\n" % path)
1839
1839
1840 self.user = m.group(2)
1840 self.user = m.group(2)
1841 self.host = m.group(3)
1841 self.host = m.group(3)
1842 self.port = m.group(5)
1842 self.port = m.group(5)
1843 self.path = m.group(7)
1843 self.path = m.group(7)
1844
1844
1845 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1845 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1846 args = self.port and ("%s -p %s") % (args, self.port) or args
1846 args = self.port and ("%s -p %s") % (args, self.port) or args
1847 path = self.path or ""
1847 path = self.path or ""
1848
1848
1849 cmd = "ssh %s 'hg -R %s serve --stdio'"
1849 cmd = "ssh %s 'hg -R %s serve --stdio'"
1850 cmd = cmd % (args, path)
1850 cmd = cmd % (args, path)
1851
1851
1852 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1852 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1853
1853
1854 def readerr(self):
1854 def readerr(self):
1855 while 1:
1855 while 1:
1856 r,w,x = select.select([self.pipee], [], [], 0)
1856 r,w,x = select.select([self.pipee], [], [], 0)
1857 if not r: break
1857 if not r: break
1858 l = self.pipee.readline()
1858 l = self.pipee.readline()
1859 if not l: break
1859 if not l: break
1860 self.ui.status("remote: ", l)
1860 self.ui.status("remote: ", l)
1861
1861
1862 def __del__(self):
1862 def __del__(self):
1863 self.pipeo.close()
1863 self.pipeo.close()
1864 self.pipei.close()
1864 self.pipei.close()
1865 for l in self.pipee:
1865 for l in self.pipee:
1866 self.ui.status("remote: ", l)
1866 self.ui.status("remote: ", l)
1867 self.pipee.close()
1867 self.pipee.close()
1868
1868
1869 def dev(self):
1869 def dev(self):
1870 return -1
1870 return -1
1871
1871
1872 def do_cmd(self, cmd, **args):
1872 def do_cmd(self, cmd, **args):
1873 self.ui.debug("sending %s command\n" % cmd)
1873 self.ui.debug("sending %s command\n" % cmd)
1874 self.pipeo.write("%s\n" % cmd)
1874 self.pipeo.write("%s\n" % cmd)
1875 for k, v in args.items():
1875 for k, v in args.items():
1876 self.pipeo.write("%s %d\n" % (k, len(v)))
1876 self.pipeo.write("%s %d\n" % (k, len(v)))
1877 self.pipeo.write(v)
1877 self.pipeo.write(v)
1878 self.pipeo.flush()
1878 self.pipeo.flush()
1879
1879
1880 return self.pipei
1880 return self.pipei
1881
1881
1882 def call(self, cmd, **args):
1882 def call(self, cmd, **args):
1883 r = self.do_cmd(cmd, **args)
1883 r = self.do_cmd(cmd, **args)
1884 l = r.readline()
1884 l = r.readline()
1885 self.readerr()
1885 self.readerr()
1886 try:
1886 try:
1887 l = int(l)
1887 l = int(l)
1888 except:
1888 except:
1889 raise RepoError("unexpected response '%s'" % l)
1889 raise RepoError("unexpected response '%s'" % l)
1890 return r.read(l)
1890 return r.read(l)
1891
1891
1892 def lock(self):
1892 def lock(self):
1893 self.call("lock")
1893 self.call("lock")
1894 return remotelock(self)
1894 return remotelock(self)
1895
1895
1896 def unlock(self):
1896 def unlock(self):
1897 self.call("unlock")
1897 self.call("unlock")
1898
1898
1899 def heads(self):
1899 def heads(self):
1900 d = self.call("heads")
1900 d = self.call("heads")
1901 try:
1901 try:
1902 return map(bin, d[:-1].split(" "))
1902 return map(bin, d[:-1].split(" "))
1903 except:
1903 except:
1904 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1904 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1905
1905
1906 def branches(self, nodes):
1906 def branches(self, nodes):
1907 n = " ".join(map(hex, nodes))
1907 n = " ".join(map(hex, nodes))
1908 d = self.call("branches", nodes=n)
1908 d = self.call("branches", nodes=n)
1909 try:
1909 try:
1910 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1910 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1911 return br
1911 return br
1912 except:
1912 except:
1913 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1913 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1914
1914
1915 def between(self, pairs):
1915 def between(self, pairs):
1916 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1916 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1917 d = self.call("between", pairs=n)
1917 d = self.call("between", pairs=n)
1918 try:
1918 try:
1919 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1919 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1920 return p
1920 return p
1921 except:
1921 except:
1922 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1922 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1923
1923
1924 def changegroup(self, nodes):
1924 def changegroup(self, nodes):
1925 n = " ".join(map(hex, nodes))
1925 n = " ".join(map(hex, nodes))
1926 f = self.do_cmd("changegroup", roots=n)
1926 f = self.do_cmd("changegroup", roots=n)
1927 return self.pipei
1927 return self.pipei
1928
1928
1929 def addchangegroup(self, cg):
1929 def addchangegroup(self, cg):
1930 d = self.call("addchangegroup")
1930 d = self.call("addchangegroup")
1931 if d:
1931 if d:
1932 raise RepoError("push refused: %s" % d)
1932 raise RepoError("push refused: %s" % d)
1933
1933
1934 while 1:
1934 while 1:
1935 d = cg.read(4096)
1935 d = cg.read(4096)
1936 if not d: break
1936 if not d: break
1937 self.pipeo.write(d)
1937 self.pipeo.write(d)
1938 self.readerr()
1938 self.readerr()
1939
1939
1940 self.pipeo.flush()
1940 self.pipeo.flush()
1941
1941
1942 self.readerr()
1942 self.readerr()
1943 l = int(self.pipei.readline())
1943 l = int(self.pipei.readline())
1944 return self.pipei.read(l) != ""
1944 return self.pipei.read(l) != ""
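# sshrepository drives a remote "hg serve --stdio" over a pipe; for a URL
# like ssh://hg@example.com:2222/projects/repo (hypothetical) the command
# built in __init__ is roughly:
#
#   ssh hg@example.com -p 2222 'hg -R projects/repo serve --stdio'
#
# Commands are written as a name line followed by "key length" headers and
# raw values, and call() reads back a decimal length line plus that many
# bytes as the reply.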
1945
1945
1946 def repository(ui, path=None, create=0):
1946 def repository(ui, path=None, create=0):
1947 if path:
1947 if path:
1948 if path.startswith("http://"):
1948 if path.startswith("http://"):
1949 return httprepository(ui, path)
1949 return httprepository(ui, path)
1950 if path.startswith("hg://"):
1950 if path.startswith("hg://"):
1951 return httprepository(ui, path.replace("hg://", "http://"))
1951 return httprepository(ui, path.replace("hg://", "http://"))
1952 if path.startswith("old-http://"):
1952 if path.startswith("old-http://"):
1953 return localrepository(ui, path.replace("old-http://", "http://"))
1953 return localrepository(ui, path.replace("old-http://", "http://"))
1954 if path.startswith("ssh://"):
1954 if path.startswith("ssh://"):
1955 return sshrepository(ui, path)
1955 return sshrepository(ui, path)
1956
1956
1957 return localrepository(ui, path, create)
1957 return localrepository(ui, path, create)
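# repository() is the scheme dispatcher (summarizing the branches above):
# "http://" and the legacy "hg://" map to httprepository, "old-http://" is
# rewritten to a plain http:// URL and handed to localrepository (i.e.
# treated as a local-style repository read over plain HTTP), "ssh://" maps
# to sshrepository, and anything else (or no path) is a plain local
# repository, optionally created when 'create' is true.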