Use length of file instead of length of change for the dirstate entry.
Thomas Arendsen Hein
r863:a7e95e36 default
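The dirstate record this change concerns is the fixed-width entry written by dirstate.write() in the file below: a one-byte state character plus four big-endian 32-bit fields (mode, size, mtime, name length), followed by the file name. After this change the size field holds the file's st_size (see dirstate.update() below) rather than the length of the change. A minimal round-trip sketch, using hypothetical helper names but the same ">cllll" layout as the code below (Python 2 idiom, matching the file):

import struct

def pack_entry(state, mode, size, mtime, name):
    # 17-byte header (">cllll" = state char + four big-endian longs), then the name
    return struct.pack(">cllll", state, mode, size, mtime, len(name)) + name

def unpack_entry(data, pos):
    # mirrors dirstate.read(): unpack the header, then slice out the file name
    state, mode, size, mtime, namelen = struct.unpack(">cllll", data[pos:pos + 17])
    name = data[pos + 17:pos + 17 + namelen]
    return (state, mode, size, mtime, name), pos + 17 + namelen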
@@ -1,2042 +1,2042 @@
# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff urlparse stat")
demandload(globals(), "bisect select")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", self.encodedir(path + ".i")),
                        os.path.join("data", self.encodedir(path + ".d")))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        # directly generate the mdiff delta from the data collected during
        # the bisect loop below
        def gendelta(delta):
            i = 0
            result = []
            while i < len(delta):
                start = delta[i][2]
                end = delta[i][3]
                l = delta[i][4]
                if l == None:
                    l = ""
                while i < len(delta) - 1 and start <= delta[i+1][2] \
                          and end >= delta[i+1][2]:
                    if delta[i+1][3] > end:
                        end = delta[i+1][3]
                    if delta[i+1][4]:
                        l += delta[i+1][4]
                    i += 1
                result.append(struct.pack(">lll", start, end, len(l)) + l)
                i += 1
            return result

        # apply the changes collected during the bisect loop to our addlist
        def addlistdelta(addlist, delta):
            # apply the deltas to the addlist. start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(delta)
            while i > 0:
                i -= 1
                start = delta[i][0]
                end = delta[i][1]
                if delta[i][4]:
                    addlist[start:end] = [delta[i][4]]
                else:
                    del addlist[start:end]
            return addlist

        # calculate the byte offset of the start of each line in the
        # manifest
        def calcoffsets(addlist):
            offsets = [0] * (len(addlist) + 1)
            offset = 0
            i = 0
            while i < len(addlist):
                offsets[i] = offset
                offset += len(addlist[i])
                i += 1
            offsets[i] = offset
            return offsets

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
           self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            self.addlist = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                            for f in files]
            cachedelta = None
        else:
            addlist = self.listcache[1]

            # find the starting offset for each line in the add list
            offsets = calcoffsets(addlist)

            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            bs = 0

            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                bs = bisect.bisect(addlist, f, bs)
                if bs < len(addlist):
                    fn = addlist[bs][:addlist[bs].index('\0')]
                else:
                    fn = None
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = None
                start = bs
                if fn != f:
                    # item not found, insert a new one
                    end = bs
                    if w[1] == 1:
                        sys.stderr.write("failed to remove %s from manifest\n"
                                         % f)
                        sys.exit(1)
                else:
                    # item is found, replace/delete the existing line
                    end = bs + 1
                delta.append([start, end, offsets[start], offsets[end], l])

            self.addlist = addlistdelta(addlist, delta)
            if self.mapcache[0] == self.tip():
                cachedelta = "".join(gendelta(delta))
            else:
                cachedelta = None

        text = "".join(self.addlist)
        if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
            sys.stderr.write("manifest delta failure\n")
            sys.exit(1)
        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        date = date or "%d %d" % (time.time(), time.timezone)
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        self.map = None
        self.pl = None
        self.copies = {}
        self.ignorefunc = None

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def ignore(self, f):
        if not self.ignorefunc:
            bigpat = []
            try:
                l = file(self.wjoin(".hgignore"))
                for pat in l:
                    if pat != "\n":
                        p = util.pconvert(pat[:-1])
                        try:
                            r = re.compile(p)
                        except:
                            self.ui.warn("ignoring invalid ignore"
                                         + " regular expression '%s'\n" % p)
                        else:
                            bigpat.append(util.pconvert(pat[:-1]))
            except IOError: pass

            if bigpat:
                s = "(?:%s)" % (")|(?:".join(bigpat))
                r = re.compile(s)
                self.ignorefunc = r.search
            else:
                self.ignorefunc = util.never

        return self.ignorefunc(f)

    def __del__(self):
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def parents(self):
        if not self.pl:
            self.read()
        return self.pl

    def markdirty(self):
        if not self.dirty:
            self.dirty = 1

    def setparents(self, p1, p2 = nullid):
        self.markdirty()
        self.pl = p1, p2

    def state(self, key):
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        if self.map is not None: return self.map

        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if not st: return
        except: return

        self.pl = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
            self.map[f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.read()
        self.markdirty()
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state, **kw):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                s = os.stat(os.path.join(self.root, f))
                st_mode = kw.get('st_mode', s.st_mode)
                st_size = kw.get('st_size', s.st_size)
                st_mtime = kw.get('st_mtime', s.st_mtime)
                self.map[f] = (state, st_mode, st_size, st_mtime)

    def forget(self, files):
        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)
                pass

    def clear(self):
        self.map = {}
        self.markdirty()

    def write(self):
        st = self.opener("dirstate", "w")
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def walk(self, files = None, match = util.always):
        self.read()
        dc = self.map.copy()
        # walk all files by default
        if not files: files = [self.root]
        known = {'.hg': 1}
        def seen(fn):
            if fn in known: return True
            known[fn] = 1
        def traverse():
            for f in util.unique(files):
                f = os.path.join(self.root, f)
                if os.path.isdir(f):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        nd = os.path.normpath(d)
                        if seen(nd):
                            subdirs[:] = []
                            continue
                        for sd in subdirs:
                            ds = os.path.join(nd, sd +'/')
                            if self.ignore(ds) or not match(ds):
                                subdirs.remove(sd)
                        subdirs.sort()
                        fl.sort()
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield 'f', fn
                else:
                    yield 'f', f[len(self.root) + 1:]

            ks = dc.keys()
            ks.sort()
            for k in ks:
                yield 'm', k

        # yield only files that match: all in dirstate, others only if
        # not in .hgignore

        for src, fn in util.unique(traverse()):
            fn = os.path.normpath(fn)
            if seen(fn): continue
            if fn in dc:
                del dc[fn]
            elif self.ignore(fn):
                continue
            if match(fn):
                yield src, fn

    def changes(self, files=None, match=util.always):
        self.read()
        dc = self.map.copy()
        lookup, modified, added, unknown = [], [], [], []
        removed, deleted = [], []

        for src, fn in self.walk(files, match):
            try:
                s = os.stat(os.path.join(self.root, fn))
            except OSError:
                continue
            if not stat.S_ISREG(s.st_mode):
                continue
            c = dc.get(fn)
            if c:
                del dc[fn]
                if c[0] == 'm':
                    modified.append(fn)
                elif c[0] == 'a':
                    added.append(fn)
                elif c[0] == 'r':
                    unknown.append(fn)
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                    modified.append(fn)
                elif c[3] != s.st_mtime:
                    lookup.append(fn)
            else:
                unknown.append(fn)

        for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
            if c[0] == 'r':
                removed.append(fn)
            else:
                deleted.append(fn)
        return (lookup, modified, added, removed + deleted, unknown)

# used to avoid circular references so destructors work
def opener(base):
    p = base
    def o(path, mode="r"):
        if p.startswith("http://"):
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

class RepoError(Exception): pass

class localrepository:
    def __init__(self, ui, path=None, create=0):
        self.remote = 0
        if path and path.startswith("http://"):
            self.remote = 1
            self.path = path
        else:
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = path
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            try:
                self.ui.readconfig(self.opener("hgrc"))
            except IOError: pass

    def hook(self, name, **args):
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            r = os.system(s)

            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True

    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.revision(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                pass

            try:
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r,t,n))
        l.sort()
        return [(t,n) for r,t,n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t,n in self.tags().items():
                self.nodetagscache.setdefault(n,[]).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                raise RepoError("unknown revision '%s'" % key)

    def dev(self):
        if self.remote: return -1
        return os.stat(self.path).st_dev

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/': f = f[1:]
        return filelog(self.opener, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self.root: return ''
        return cwd[len(self.root) + 1:]

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def transaction(self):
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        def after():
            util.rename(self.join("journal"), self.join("undo"))
            util.rename(self.join("journal.dirstate"),
                        self.join("undo.dirstate"))

        return transaction.transaction(self.ui.warn, self.opener,
                                       self.join("journal"), after)

    def recover(self):
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

    def lock(self, wait = 1):
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm
                mm[f] = r.add(t, {}, tr, linkrev,
                              m1.get(f, nullid), m2.get(f, nullid))
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

    def commit(self, files = None, text = "", user = None, date = None,
               match = util.always):
        commit = []
        remove = []
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes(match = match)
            commit = c + a
            remove = d

        if not commit and not remove:
            self.ui.status("nothing changed\n")
            return

        if not self.hook("precommit"):
            return 1

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)
            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in new])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return 1
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)

        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return 1

    def walk(self, node = None, files = [], match = util.always):
        if node:
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                if match(fn): yield 'm', fn
        else:
            for src, fn in self.dirstate.walk(files, match):
                yield src, fn

    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        mf2, u = None, []

        def fcmp(fn, mf):
            t1 = self.wfile(fn).read()
            t2 = self.file(fn).revision(mf[fn])
            return cmp(t1, t2)

        def mfmatches(node):
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)

    def add(self, list):
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn("%s does not exist!\n" % f)
            elif not os.path.isfile(p):
                self.ui.warn("%s not added: only files supported currently\n" % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not os.path.exists(p):
            self.ui.warn("%s does not exist!\n" % dest)
        elif not os.path.isfile(p):
            self.ui.warn("copy failed: %s is not a file\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        return self.changelog.heads()

    def branches(self, nodes):
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def newer(self, nodes):
        m = {}
        nl = []
        pm = {}
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl

    def findincoming(self, remote, base=None, heads=None):
        m = self.changelog.nodemap
        search = []
        fetch = []
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status("searching for changes\n")

        if not heads:
            heads = remote.heads()

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return None

        rep = {}
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.append(n[1]) # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                              (reqcnt, " ".join(map(short, r))))
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        if b[0] not in m and b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.append(p)
                        base[i] = 1
                    else:
                        self.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                raise RepoError("already have changeset " + short(f[:4]))

        if base.keys() == [nullid]:
1180 if base.keys() == [nullid]:
1181 self.ui.warn("warning: pulling from an unrelated repository!\n")
1181 self.ui.warn("warning: pulling from an unrelated repository!\n")
1182
1182
1183 self.ui.note("adding new changesets starting at " +
1183 self.ui.note("adding new changesets starting at " +
1184 " ".join([short(f) for f in fetch]) + "\n")
1184 " ".join([short(f) for f in fetch]) + "\n")
1185
1185
1186 self.ui.debug("%d total queries\n" % reqcnt)
1186 self.ui.debug("%d total queries\n" % reqcnt)
1187
1187
1188 return fetch
1188 return fetch
1189
1189
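findincoming() narrows each incomplete remote branch with remote.between(), which samples the branch at offsets 1, 2, 4, 8, ... below its head; the local loop then looks for the first sample it already knows and recurses on the bracketing pair. A rough sketch of that narrowing step, where a plain head-to-base list stands in for the remote branch segment:

# Sketch: bisect a remote head-to-base chain the way the "narrowing"
# loop above does.  `chain[0]` is the unknown head; samples are taken
# at power-of-two offsets; the first known sample brackets the boundary.
def narrow(chain, known):
    prev, f = chain[0], 1
    while f < len(chain):
        node = chain[f]
        if node in known:
            return prev, node   # unknown..known boundary to refine next
        prev, f = node, f * 2
    return prev, chain[-1]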
1190 def findoutgoing(self, remote, base=None, heads=None):
1190 def findoutgoing(self, remote, base=None, heads=None):
1191 if base == None:
1191 if base == None:
1192 base = {}
1192 base = {}
1193 self.findincoming(remote, base, heads)
1193 self.findincoming(remote, base, heads)
1194
1194
1195 remain = dict.fromkeys(self.changelog.nodemap)
1195 remain = dict.fromkeys(self.changelog.nodemap)
1196
1196
1197 # prune everything remote has from the tree
1197 # prune everything remote has from the tree
1198 del remain[nullid]
1198 del remain[nullid]
1199 remove = base.keys()
1199 remove = base.keys()
1200 while remove:
1200 while remove:
1201 n = remove.pop(0)
1201 n = remove.pop(0)
1202 if n in remain:
1202 if n in remain:
1203 del remain[n]
1203 del remain[n]
1204 for p in self.changelog.parents(n):
1204 for p in self.changelog.parents(n):
1205 remove.append(p)
1205 remove.append(p)
1206
1206
1207 # find every node whose parents have been pruned
1207 # find every node whose parents have been pruned
1208 subset = []
1208 subset = []
1209 for n in remain:
1209 for n in remain:
1210 p1, p2 = self.changelog.parents(n)
1210 p1, p2 = self.changelog.parents(n)
1211 if p1 not in remain and p2 not in remain:
1211 if p1 not in remain and p2 not in remain:
1212 subset.append(n)
1212 subset.append(n)
1213
1213
1214 # this is the set of all roots we have to push
1214 # this is the set of all roots we have to push
1215 return subset
1215 return subset
1216
1216
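findoutgoing() computes what the remote lacks by deleting everything reachable (towards the root) from the common base, then keeping the nodes whose parents were all pruned; those are the roots of the outgoing subgraph. A minimal sketch over a plain parent map rather than the changelog:

# Sketch: prune everything reachable from `base`, then report nodes
# whose parents were all pruned -- the roots that have to be pushed.
def outgoing_roots(all_nodes, parents, base):
    remain = set(all_nodes)
    queue = list(base)
    while queue:
        n = queue.pop()
        if n in remain:
            remain.discard(n)
            queue.extend(parents(n))
    return [n for n in remain
            if all(p not in remain for p in parents(n))]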
1217 def pull(self, remote):
1217 def pull(self, remote):
1218 lock = self.lock()
1218 lock = self.lock()
1219
1219
1220 # if we have an empty repo, fetch everything
1220 # if we have an empty repo, fetch everything
1221 if self.changelog.tip() == nullid:
1221 if self.changelog.tip() == nullid:
1222 self.ui.status("requesting all changes\n")
1222 self.ui.status("requesting all changes\n")
1223 fetch = [nullid]
1223 fetch = [nullid]
1224 else:
1224 else:
1225 fetch = self.findincoming(remote)
1225 fetch = self.findincoming(remote)
1226
1226
1227 if not fetch:
1227 if not fetch:
1228 self.ui.status("no changes found\n")
1228 self.ui.status("no changes found\n")
1229 return 1
1229 return 1
1230
1230
1231 cg = remote.changegroup(fetch)
1231 cg = remote.changegroup(fetch)
1232 return self.addchangegroup(cg)
1232 return self.addchangegroup(cg)
1233
1233
1234 def push(self, remote, force=False):
1234 def push(self, remote, force=False):
1235 lock = remote.lock()
1235 lock = remote.lock()
1236
1236
1237 base = {}
1237 base = {}
1238 heads = remote.heads()
1238 heads = remote.heads()
1239 inc = self.findincoming(remote, base, heads)
1239 inc = self.findincoming(remote, base, heads)
1240 if not force and inc:
1240 if not force and inc:
1241 self.ui.warn("abort: unsynced remote changes!\n")
1241 self.ui.warn("abort: unsynced remote changes!\n")
1242 self.ui.status("(did you forget to sync? use push -f to force)\n")
1242 self.ui.status("(did you forget to sync? use push -f to force)\n")
1243 return 1
1243 return 1
1244
1244
1245 update = self.findoutgoing(remote, base)
1245 update = self.findoutgoing(remote, base)
1246 if not update:
1246 if not update:
1247 self.ui.status("no changes found\n")
1247 self.ui.status("no changes found\n")
1248 return 1
1248 return 1
1249 elif not force:
1249 elif not force:
1250 if len(heads) < len(self.changelog.heads()):
1250 if len(heads) < len(self.changelog.heads()):
1251 self.ui.warn("abort: push creates new remote branches!\n")
1251 self.ui.warn("abort: push creates new remote branches!\n")
1252 self.ui.status("(did you forget to merge?" +
1252 self.ui.status("(did you forget to merge?" +
1253 " use push -f to force)\n")
1253 " use push -f to force)\n")
1254 return 1
1254 return 1
1255
1255
1256 cg = self.changegroup(update)
1256 cg = self.changegroup(update)
1257 return remote.addchangegroup(cg)
1257 return remote.addchangegroup(cg)
1258
1258
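The "push creates new remote branches" guard in push() is a simple head-count comparison: if the remote reports fewer heads than the local repository has, pushing everything would leave the remote with at least one extra head, so the push aborts unless forced. A tiny illustration of the check, assuming plain lists of head nodes:

# Sketch: refuse (unless forced) when pushing would add remote heads.
def would_create_remote_heads(remote_heads, local_heads):
    return len(remote_heads) < len(local_heads)

# e.g. remote knows one head, local has two topological heads -> True
# would_create_remote_heads(["a1"], ["a1", "b7"])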
1259 def changegroup(self, basenodes):
1259 def changegroup(self, basenodes):
1260 class genread:
1260 class genread:
1261 def __init__(self, generator):
1261 def __init__(self, generator):
1262 self.g = generator
1262 self.g = generator
1263 self.buf = ""
1263 self.buf = ""
1264 def read(self, l):
1264 def read(self, l):
1265 while l > len(self.buf):
1265 while l > len(self.buf):
1266 try:
1266 try:
1267 self.buf += self.g.next()
1267 self.buf += self.g.next()
1268 except StopIteration:
1268 except StopIteration:
1269 break
1269 break
1270 d, self.buf = self.buf[:l], self.buf[l:]
1270 d, self.buf = self.buf[:l], self.buf[l:]
1271 return d
1271 return d
1272
1272
1273 def gengroup():
1273 def gengroup():
1274 nodes = self.newer(basenodes)
1274 nodes = self.newer(basenodes)
1275
1275
1276 # construct the link map
1276 # construct the link map
1277 linkmap = {}
1277 linkmap = {}
1278 for n in nodes:
1278 for n in nodes:
1279 linkmap[self.changelog.rev(n)] = n
1279 linkmap[self.changelog.rev(n)] = n
1280
1280
1281 # construct a list of all changed files
1281 # construct a list of all changed files
1282 changed = {}
1282 changed = {}
1283 for n in nodes:
1283 for n in nodes:
1284 c = self.changelog.read(n)
1284 c = self.changelog.read(n)
1285 for f in c[3]:
1285 for f in c[3]:
1286 changed[f] = 1
1286 changed[f] = 1
1287 changed = changed.keys()
1287 changed = changed.keys()
1288 changed.sort()
1288 changed.sort()
1289
1289
1290 # the changegroup is changesets + manifests + all file revs
1290 # the changegroup is changesets + manifests + all file revs
1291 revs = [ self.changelog.rev(n) for n in nodes ]
1291 revs = [ self.changelog.rev(n) for n in nodes ]
1292
1292
1293 for y in self.changelog.group(linkmap): yield y
1293 for y in self.changelog.group(linkmap): yield y
1294 for y in self.manifest.group(linkmap): yield y
1294 for y in self.manifest.group(linkmap): yield y
1295 for f in changed:
1295 for f in changed:
1296 yield struct.pack(">l", len(f) + 4) + f
1296 yield struct.pack(">l", len(f) + 4) + f
1297 g = self.file(f).group(linkmap)
1297 g = self.file(f).group(linkmap)
1298 for y in g:
1298 for y in g:
1299 yield y
1299 yield y
1300
1300
1301 yield struct.pack(">l", 0)
1301 yield struct.pack(">l", 0)
1302
1302
1303 return genread(gengroup())
1303 return genread(gengroup())
1304
1304
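gengroup() frames the stream as length-prefixed chunks: a 4-byte big-endian length that counts itself, followed by the payload, with a length of 4 or less terminating a group. A minimal sketch of writing and reading that framing (the struct format matches the code above; the helper names are made up here):

import struct

# Sketch of the changegroup chunk framing used above: a ">l" length
# prefix includes its own 4 bytes, and a chunk of length <= 4 ends the
# current group.
def write_chunk(payload):
    return struct.pack(">l", len(payload) + 4) + payload

def read_chunk(stream):
    d = stream.read(4)
    if not d:
        return ""
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        return ""            # group terminator
    return stream.read(l - 4)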
1305 def addchangegroup(self, source):
1305 def addchangegroup(self, source):
1306
1306
1307 def getchunk():
1307 def getchunk():
1308 d = source.read(4)
1308 d = source.read(4)
1309 if not d: return ""
1309 if not d: return ""
1310 l = struct.unpack(">l", d)[0]
1310 l = struct.unpack(">l", d)[0]
1311 if l <= 4: return ""
1311 if l <= 4: return ""
1312 return source.read(l - 4)
1312 return source.read(l - 4)
1313
1313
1314 def getgroup():
1314 def getgroup():
1315 while 1:
1315 while 1:
1316 c = getchunk()
1316 c = getchunk()
1317 if not c: break
1317 if not c: break
1318 yield c
1318 yield c
1319
1319
1320 def csmap(x):
1320 def csmap(x):
1321 self.ui.debug("add changeset %s\n" % short(x))
1321 self.ui.debug("add changeset %s\n" % short(x))
1322 return self.changelog.count()
1322 return self.changelog.count()
1323
1323
1324 def revmap(x):
1324 def revmap(x):
1325 return self.changelog.rev(x)
1325 return self.changelog.rev(x)
1326
1326
1327 if not source: return
1327 if not source: return
1328 changesets = files = revisions = 0
1328 changesets = files = revisions = 0
1329
1329
1330 tr = self.transaction()
1330 tr = self.transaction()
1331
1331
1332 # pull off the changeset group
1332 # pull off the changeset group
1333 self.ui.status("adding changesets\n")
1333 self.ui.status("adding changesets\n")
1334 co = self.changelog.tip()
1334 co = self.changelog.tip()
1335 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1335 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1336 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1336 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1337
1337
1338 # pull off the manifest group
1338 # pull off the manifest group
1339 self.ui.status("adding manifests\n")
1339 self.ui.status("adding manifests\n")
1340 mm = self.manifest.tip()
1340 mm = self.manifest.tip()
1341 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1341 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1342
1342
1343 # process the files
1343 # process the files
1344 self.ui.status("adding file changes\n")
1344 self.ui.status("adding file changes\n")
1345 while 1:
1345 while 1:
1346 f = getchunk()
1346 f = getchunk()
1347 if not f: break
1347 if not f: break
1348 self.ui.debug("adding %s revisions\n" % f)
1348 self.ui.debug("adding %s revisions\n" % f)
1349 fl = self.file(f)
1349 fl = self.file(f)
1350 o = fl.count()
1350 o = fl.count()
1351 n = fl.addgroup(getgroup(), revmap, tr)
1351 n = fl.addgroup(getgroup(), revmap, tr)
1352 revisions += fl.count() - o
1352 revisions += fl.count() - o
1353 files += 1
1353 files += 1
1354
1354
1355 self.ui.status(("added %d changesets" +
1355 self.ui.status(("added %d changesets" +
1356 " with %d changes to %d files\n")
1356 " with %d changes to %d files\n")
1357 % (changesets, revisions, files))
1357 % (changesets, revisions, files))
1358
1358
1359 tr.close()
1359 tr.close()
1360
1360
1361 if not self.hook("changegroup"):
1361 if not self.hook("changegroup"):
1362 return 1
1362 return 1
1363
1363
1364 return
1364 return
1365
1365
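addchangegroup() consumes that same stream in a fixed order: one group of changelog chunks, one group of manifest chunks, then repeated (file name chunk, file revision group) pairs until an empty name chunk ends the bundle. A sketch of the outer loop, reusing the read_chunk framing sketched above and a hypothetical apply_group callback in place of revlog.addgroup():

# Sketch: overall changegroup layout as parsed above.
def read_group(stream):
    while 1:
        c = read_chunk(stream)
        if not c:
            break
        yield c

def consume_bundle(stream, apply_group):
    apply_group("changelog", read_group(stream))
    apply_group("manifest", read_group(stream))
    while 1:
        fname = read_chunk(stream)
        if not fname:
            break
        apply_group(fname, read_group(stream))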
1366 def update(self, node, allow=False, force=False, choose=None,
1366 def update(self, node, allow=False, force=False, choose=None,
1367 moddirstate=True):
1367 moddirstate=True):
1368 pl = self.dirstate.parents()
1368 pl = self.dirstate.parents()
1369 if not force and pl[1] != nullid:
1369 if not force and pl[1] != nullid:
1370 self.ui.warn("aborting: outstanding uncommitted merges\n")
1370 self.ui.warn("aborting: outstanding uncommitted merges\n")
1371 return 1
1371 return 1
1372
1372
1373 p1, p2 = pl[0], node
1373 p1, p2 = pl[0], node
1374 pa = self.changelog.ancestor(p1, p2)
1374 pa = self.changelog.ancestor(p1, p2)
1375 m1n = self.changelog.read(p1)[0]
1375 m1n = self.changelog.read(p1)[0]
1376 m2n = self.changelog.read(p2)[0]
1376 m2n = self.changelog.read(p2)[0]
1377 man = self.manifest.ancestor(m1n, m2n)
1377 man = self.manifest.ancestor(m1n, m2n)
1378 m1 = self.manifest.read(m1n)
1378 m1 = self.manifest.read(m1n)
1379 mf1 = self.manifest.readflags(m1n)
1379 mf1 = self.manifest.readflags(m1n)
1380 m2 = self.manifest.read(m2n)
1380 m2 = self.manifest.read(m2n)
1381 mf2 = self.manifest.readflags(m2n)
1381 mf2 = self.manifest.readflags(m2n)
1382 ma = self.manifest.read(man)
1382 ma = self.manifest.read(man)
1383 mfa = self.manifest.readflags(man)
1383 mfa = self.manifest.readflags(man)
1384
1384
1385 (c, a, d, u) = self.changes()
1385 (c, a, d, u) = self.changes()
1386
1386
1387 # is this a jump, or a merge? i.e. is there a linear path
1387 # is this a jump, or a merge? i.e. is there a linear path
1388 # from p1 to p2?
1388 # from p1 to p2?
1389 linear_path = (pa == p1 or pa == p2)
1389 linear_path = (pa == p1 or pa == p2)
1390
1390
1391 # resolve the manifest to determine which files
1391 # resolve the manifest to determine which files
1392 # we care about merging
1392 # we care about merging
1393 self.ui.note("resolving manifests\n")
1393 self.ui.note("resolving manifests\n")
1394 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1394 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1395 (force, allow, moddirstate, linear_path))
1395 (force, allow, moddirstate, linear_path))
1396 self.ui.debug(" ancestor %s local %s remote %s\n" %
1396 self.ui.debug(" ancestor %s local %s remote %s\n" %
1397 (short(man), short(m1n), short(m2n)))
1397 (short(man), short(m1n), short(m2n)))
1398
1398
1399 merge = {}
1399 merge = {}
1400 get = {}
1400 get = {}
1401 remove = []
1401 remove = []
1402 mark = {}
1402 mark = {}
1403
1403
1404 # construct a working dir manifest
1404 # construct a working dir manifest
1405 mw = m1.copy()
1405 mw = m1.copy()
1406 mfw = mf1.copy()
1406 mfw = mf1.copy()
1407 umap = dict.fromkeys(u)
1407 umap = dict.fromkeys(u)
1408
1408
1409 for f in a + c + u:
1409 for f in a + c + u:
1410 mw[f] = ""
1410 mw[f] = ""
1411 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1411 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1412
1412
1413 for f in d:
1413 for f in d:
1414 if f in mw: del mw[f]
1414 if f in mw: del mw[f]
1415
1415
1416 # If we're jumping between revisions (as opposed to merging),
1416 # If we're jumping between revisions (as opposed to merging),
1417 # and if neither the working directory nor the target rev has
1417 # and if neither the working directory nor the target rev has
1418 # the file, then we need to remove it from the dirstate, to
1418 # the file, then we need to remove it from the dirstate, to
1419 # prevent the dirstate from listing the file when it is no
1419 # prevent the dirstate from listing the file when it is no
1420 # longer in the manifest.
1420 # longer in the manifest.
1421 if moddirstate and linear_path and f not in m2:
1421 if moddirstate and linear_path and f not in m2:
1422 self.dirstate.forget((f,))
1422 self.dirstate.forget((f,))
1423
1423
1424 # Compare manifests
1424 # Compare manifests
1425 for f, n in mw.iteritems():
1425 for f, n in mw.iteritems():
1426 if choose and not choose(f): continue
1426 if choose and not choose(f): continue
1427 if f in m2:
1427 if f in m2:
1428 s = 0
1428 s = 0
1429
1429
1430 # is the wfile new since m1, and match m2?
1430 # is the wfile new since m1, and match m2?
1431 if f not in m1:
1431 if f not in m1:
1432 t1 = self.wfile(f).read()
1432 t1 = self.wfile(f).read()
1433 t2 = self.file(f).revision(m2[f])
1433 t2 = self.file(f).revision(m2[f])
1434 if cmp(t1, t2) == 0:
1434 if cmp(t1, t2) == 0:
1435 mark[f] = 1
1435 mark[f] = 1
1436 n = m2[f]
1436 n = m2[f]
1437 del t1, t2
1437 del t1, t2
1438
1438
1439 # are files different?
1439 # are files different?
1440 if n != m2[f]:
1440 if n != m2[f]:
1441 a = ma.get(f, nullid)
1441 a = ma.get(f, nullid)
1442 # are both different from the ancestor?
1442 # are both different from the ancestor?
1443 if n != a and m2[f] != a:
1443 if n != a and m2[f] != a:
1444 self.ui.debug(" %s versions differ, resolve\n" % f)
1444 self.ui.debug(" %s versions differ, resolve\n" % f)
1445 # merge executable bits
1445 # merge executable bits
1446 # "if we changed or they changed, change in merge"
1446 # "if we changed or they changed, change in merge"
1447 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1447 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1448 mode = ((a^b) | (a^c)) ^ a
1448 mode = ((a^b) | (a^c)) ^ a
1449 merge[f] = (m1.get(f, nullid), m2[f], mode)
1449 merge[f] = (m1.get(f, nullid), m2[f], mode)
1450 s = 1
1450 s = 1
1451 # are we clobbering?
1451 # are we clobbering?
1452 # is remote's version newer?
1452 # is remote's version newer?
1453 # or are we going back in time?
1453 # or are we going back in time?
1454 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1454 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1455 self.ui.debug(" remote %s is newer, get\n" % f)
1455 self.ui.debug(" remote %s is newer, get\n" % f)
1456 get[f] = m2[f]
1456 get[f] = m2[f]
1457 s = 1
1457 s = 1
1458 else:
1458 else:
1459 mark[f] = 1
1459 mark[f] = 1
1460 elif f in umap:
1460 elif f in umap:
1461 # this unknown file is the same as the checkout
1461 # this unknown file is the same as the checkout
1462 get[f] = m2[f]
1462 get[f] = m2[f]
1463
1463
1464 if not s and mfw[f] != mf2[f]:
1464 if not s and mfw[f] != mf2[f]:
1465 if force:
1465 if force:
1466 self.ui.debug(" updating permissions for %s\n" % f)
1466 self.ui.debug(" updating permissions for %s\n" % f)
1467 util.set_exec(self.wjoin(f), mf2[f])
1467 util.set_exec(self.wjoin(f), mf2[f])
1468 else:
1468 else:
1469 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1469 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1470 mode = ((a^b) | (a^c)) ^ a
1470 mode = ((a^b) | (a^c)) ^ a
1471 if mode != b:
1471 if mode != b:
1472 self.ui.debug(" updating permissions for %s\n" % f)
1472 self.ui.debug(" updating permissions for %s\n" % f)
1473 util.set_exec(self.wjoin(f), mode)
1473 util.set_exec(self.wjoin(f), mode)
1474 mark[f] = 1
1474 mark[f] = 1
1475 del m2[f]
1475 del m2[f]
1476 elif f in ma:
1476 elif f in ma:
1477 if n != ma[f]:
1477 if n != ma[f]:
1478 r = "d"
1478 r = "d"
1479 if not force and (linear_path or allow):
1479 if not force and (linear_path or allow):
1480 r = self.ui.prompt(
1480 r = self.ui.prompt(
1481 (" local changed %s which remote deleted\n" % f) +
1481 (" local changed %s which remote deleted\n" % f) +
1482 "(k)eep or (d)elete?", "[kd]", "k")
1482 "(k)eep or (d)elete?", "[kd]", "k")
1483 if r == "d":
1483 if r == "d":
1484 remove.append(f)
1484 remove.append(f)
1485 else:
1485 else:
1486 self.ui.debug("other deleted %s\n" % f)
1486 self.ui.debug("other deleted %s\n" % f)
1487 remove.append(f) # other deleted it
1487 remove.append(f) # other deleted it
1488 else:
1488 else:
1489 if n == m1.get(f, nullid): # same as parent
1489 if n == m1.get(f, nullid): # same as parent
1490 if p2 == pa: # going backwards?
1490 if p2 == pa: # going backwards?
1491 self.ui.debug("remote deleted %s\n" % f)
1491 self.ui.debug("remote deleted %s\n" % f)
1492 remove.append(f)
1492 remove.append(f)
1493 else:
1493 else:
1494 self.ui.debug("local created %s, keeping\n" % f)
1494 self.ui.debug("local created %s, keeping\n" % f)
1495 else:
1495 else:
1496 self.ui.debug("working dir created %s, keeping\n" % f)
1496 self.ui.debug("working dir created %s, keeping\n" % f)
1497
1497
1498 for f, n in m2.iteritems():
1498 for f, n in m2.iteritems():
1499 if choose and not choose(f): continue
1499 if choose and not choose(f): continue
1500 if f[0] == "/": continue
1500 if f[0] == "/": continue
1501 if f in ma and n != ma[f]:
1501 if f in ma and n != ma[f]:
1502 r = "k"
1502 r = "k"
1503 if not force and (linear_path or allow):
1503 if not force and (linear_path or allow):
1504 r = self.ui.prompt(
1504 r = self.ui.prompt(
1505 ("remote changed %s which local deleted\n" % f) +
1505 ("remote changed %s which local deleted\n" % f) +
1506 "(k)eep or (d)elete?", "[kd]", "k")
1506 "(k)eep or (d)elete?", "[kd]", "k")
1507 if r == "k": get[f] = n
1507 if r == "k": get[f] = n
1508 elif f not in ma:
1508 elif f not in ma:
1509 self.ui.debug("remote created %s\n" % f)
1509 self.ui.debug("remote created %s\n" % f)
1510 get[f] = n
1510 get[f] = n
1511 else:
1511 else:
1512 if force or p2 == pa: # going backwards?
1512 if force or p2 == pa: # going backwards?
1513 self.ui.debug("local deleted %s, recreating\n" % f)
1513 self.ui.debug("local deleted %s, recreating\n" % f)
1514 get[f] = n
1514 get[f] = n
1515 else:
1515 else:
1516 self.ui.debug("local deleted %s\n" % f)
1516 self.ui.debug("local deleted %s\n" % f)
1517
1517
1518 del mw, m1, m2, ma
1518 del mw, m1, m2, ma
1519
1519
1520 if force:
1520 if force:
1521 for f in merge:
1521 for f in merge:
1522 get[f] = merge[f][1]
1522 get[f] = merge[f][1]
1523 merge = {}
1523 merge = {}
1524
1524
1525 if linear_path or force:
1525 if linear_path or force:
1526 # we don't need to do any magic, just jump to the new rev
1526 # we don't need to do any magic, just jump to the new rev
1527 mode = 'n'
1527 mode = 'n'
1528 p1, p2 = p2, nullid
1528 p1, p2 = p2, nullid
1529 else:
1529 else:
1530 if not allow:
1530 if not allow:
1531 self.ui.status("this update spans a branch" +
1531 self.ui.status("this update spans a branch" +
1532 " affecting the following files:\n")
1532 " affecting the following files:\n")
1533 fl = merge.keys() + get.keys()
1533 fl = merge.keys() + get.keys()
1534 fl.sort()
1534 fl.sort()
1535 for f in fl:
1535 for f in fl:
1536 cf = ""
1536 cf = ""
1537 if f in merge: cf = " (resolve)"
1537 if f in merge: cf = " (resolve)"
1538 self.ui.status(" %s%s\n" % (f, cf))
1538 self.ui.status(" %s%s\n" % (f, cf))
1539 self.ui.warn("aborting update spanning branches!\n")
1539 self.ui.warn("aborting update spanning branches!\n")
1540 self.ui.status("(use update -m to merge across branches" +
1540 self.ui.status("(use update -m to merge across branches" +
1541 " or -C to lose changes)\n")
1541 " or -C to lose changes)\n")
1542 return 1
1542 return 1
1543 # we have to remember what files we needed to get/change
1543 # we have to remember what files we needed to get/change
1544 # because any file that's different from either one of its
1544 # because any file that's different from either one of its
1545 # parents must be in the changeset
1545 # parents must be in the changeset
1546 mode = 'm'
1546 mode = 'm'
1547 if moddirstate:
1547 if moddirstate:
1548 self.dirstate.update(mark.keys(), "m")
1548 self.dirstate.update(mark.keys(), "m")
1549
1549
1550 if moddirstate:
1550 if moddirstate:
1551 self.dirstate.setparents(p1, p2)
1551 self.dirstate.setparents(p1, p2)
1552
1552
1553 # get the files we don't need to change
1553 # get the files we don't need to change
1554 files = get.keys()
1554 files = get.keys()
1555 files.sort()
1555 files.sort()
1556 for f in files:
1556 for f in files:
1557 if f[0] == "/": continue
1557 if f[0] == "/": continue
1558 self.ui.note("getting %s\n" % f)
1558 self.ui.note("getting %s\n" % f)
1559 t = self.file(f).read(get[f])
1559 t = self.file(f).read(get[f])
1560 try:
1560 try:
1561 self.wfile(f, "w").write(t)
1561 self.wfile(f, "w").write(t)
1562 except IOError:
1562 except IOError:
1563 os.makedirs(os.path.dirname(self.wjoin(f)))
1563 os.makedirs(os.path.dirname(self.wjoin(f)))
1564 self.wfile(f, "w").write(t)
1564 self.wfile(f, "w").write(t)
1565 util.set_exec(self.wjoin(f), mf2[f])
1565 util.set_exec(self.wjoin(f), mf2[f])
1566 if moddirstate:
1566 if moddirstate:
1567 self.dirstate.update([f], mode)
1567 self.dirstate.update([f], mode)
1568
1568
1569 # merge the tricky bits
1569 # merge the tricky bits
1570 files = merge.keys()
1570 files = merge.keys()
1571 files.sort()
1571 files.sort()
1572 for f in files:
1572 for f in files:
1573 self.ui.status("merging %s\n" % f)
1573 self.ui.status("merging %s\n" % f)
1574 m, o, flag = merge[f]
1574 m, o, flag = merge[f]
1575 self.merge3(f, m, o)
1575 self.merge3(f, m, o)
1576 util.set_exec(self.wjoin(f), flag)
1576 util.set_exec(self.wjoin(f), flag)
1577 if moddirstate:
1577 if moddirstate:
1578 if mode == 'm':
1578 if mode == 'm':
1579 # only update dirstate on branch merge, otherwise we
1579 # only update dirstate on branch merge, otherwise we
1580 # could mark files with changes as unchanged
1580 # could mark files with changes as unchanged
1581 self.dirstate.update([f], mode)
1581 self.dirstate.update([f], mode)
1582 elif p2 == nullid:
1582 elif p2 == nullid:
1583 # update dirstate from parent1's manifest
1583 # update dirstate from parent1's manifest
1584 m1n = self.changelog.read(p1)[0]
1584 m1n = self.changelog.read(p1)[0]
1585 m1 = self.manifest.read(m1n)
1585 m1 = self.manifest.read(m1n)
1586 file_ = self.file(f)
1586 file_ = self.file(f)
1587 f_len = file_.length(file_.rev(m1[f]))
1587 f_len = len(file_.read(m1[f]))
1588 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1588 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1589 else:
1589 else:
1590 self.ui.warn("Second parent without branch merge!?\n"
1590 self.ui.warn("Second parent without branch merge!?\n"
1591 "Dirstate for file %s may be wrong.\n" % f)
1591 "Dirstate for file %s may be wrong.\n" % f)
1592
1592
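# The dirstate entry stores a size and mtime so that "hg status" can skip
# unchanged files without reading them.  file_.length() would give the size
# of the stored revlog entry (usually a delta), not of the file text, so the
# f_len line above now records the full text length from file_.read()
# instead, and st_mtime=0 makes the next status fall back to one real
# content comparison for this file.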
1593 remove.sort()
1593 remove.sort()
1594 for f in remove:
1594 for f in remove:
1595 self.ui.note("removing %s\n" % f)
1595 self.ui.note("removing %s\n" % f)
1596 try:
1596 try:
1597 os.unlink(f)
1597 os.unlink(f)
1598 except OSError, inst:
1598 except OSError, inst:
1599 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1599 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1600 # try removing directories that might now be empty
1600 # try removing directories that might now be empty
1601 try: os.removedirs(os.path.dirname(f))
1601 try: os.removedirs(os.path.dirname(f))
1602 except: pass
1602 except: pass
1603 if moddirstate:
1603 if moddirstate:
1604 if mode == 'n':
1604 if mode == 'n':
1605 self.dirstate.forget(remove)
1605 self.dirstate.forget(remove)
1606 else:
1606 else:
1607 self.dirstate.update(remove, 'r')
1607 self.dirstate.update(remove, 'r')
1608
1608
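The mode merge used twice in update() above, ((a^b) | (a^c)) ^ a, implements "if we changed or they changed, change in merge": any bit where either side differs from the ancestor ends up flipped relative to the ancestor. A small exhaustive check of that rule:

# Sketch: verify the exec-bit merge rule over all single-bit inputs.
# a = ancestor bit, b = working-dir bit, c = other side's bit.
for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            merged = ((a ^ b) | (a ^ c)) ^ a
            # whichever side differs from the ancestor wins; if both
            # differ they agree (both flipped), so the result matches.
            expected = b if b != a else c
            assert merged == expected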
1609 def merge3(self, fn, my, other):
1609 def merge3(self, fn, my, other):
1610 """perform a 3-way merge in the working directory"""
1610 """perform a 3-way merge in the working directory"""
1611
1611
1612 def temp(prefix, node):
1612 def temp(prefix, node):
1613 pre = "%s~%s." % (os.path.basename(fn), prefix)
1613 pre = "%s~%s." % (os.path.basename(fn), prefix)
1614 (fd, name) = tempfile.mkstemp("", pre)
1614 (fd, name) = tempfile.mkstemp("", pre)
1615 f = os.fdopen(fd, "wb")
1615 f = os.fdopen(fd, "wb")
1616 f.write(fl.revision(node))
1616 f.write(fl.revision(node))
1617 f.close()
1617 f.close()
1618 return name
1618 return name
1619
1619
1620 fl = self.file(fn)
1620 fl = self.file(fn)
1621 base = fl.ancestor(my, other)
1621 base = fl.ancestor(my, other)
1622 a = self.wjoin(fn)
1622 a = self.wjoin(fn)
1623 b = temp("base", base)
1623 b = temp("base", base)
1624 c = temp("other", other)
1624 c = temp("other", other)
1625
1625
1626 self.ui.note("resolving %s\n" % fn)
1626 self.ui.note("resolving %s\n" % fn)
1627 self.ui.debug("file %s: other %s ancestor %s\n" %
1627 self.ui.debug("file %s: other %s ancestor %s\n" %
1628 (fn, short(other), short(base)))
1628 (fn, short(other), short(base)))
1629
1629
1630 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1630 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1631 or "hgmerge")
1631 or "hgmerge")
1632 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1632 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1633 if r:
1633 if r:
1634 self.ui.warn("merging %s failed!\n" % fn)
1634 self.ui.warn("merging %s failed!\n" % fn)
1635
1635
1636 os.unlink(b)
1636 os.unlink(b)
1637 os.unlink(c)
1637 os.unlink(c)
1638
1638
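merge3() resolves the external merge tool in a fixed order: the HGMERGE environment variable, then the ui "merge" setting, then the default "hgmerge" script, and runs it as "cmd LOCAL BASE OTHER" with a nonzero exit meaning the merge failed. A sketch of that lookup and invocation, where config_get is a stand-in for ui.config:

import os

# Sketch: pick and run the merge tool the way merge3() does.
def run_merge(local, base, other, config_get=lambda *a: None):
    cmd = os.environ.get("HGMERGE") or config_get("ui", "merge") or "hgmerge"
    return os.system("%s %s %s %s" % (cmd, local, base, other)) == 0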
1639 def verify(self):
1639 def verify(self):
1640 filelinkrevs = {}
1640 filelinkrevs = {}
1641 filenodes = {}
1641 filenodes = {}
1642 changesets = revisions = files = 0
1642 changesets = revisions = files = 0
1643 errors = 0
1643 errors = 0
1644
1644
1645 seen = {}
1645 seen = {}
1646 self.ui.status("checking changesets\n")
1646 self.ui.status("checking changesets\n")
1647 for i in range(self.changelog.count()):
1647 for i in range(self.changelog.count()):
1648 changesets += 1
1648 changesets += 1
1649 n = self.changelog.node(i)
1649 n = self.changelog.node(i)
1650 if n in seen:
1650 if n in seen:
1651 self.ui.warn("duplicate changeset at revision %d\n" % i)
1651 self.ui.warn("duplicate changeset at revision %d\n" % i)
1652 errors += 1
1652 errors += 1
1653 seen[n] = 1
1653 seen[n] = 1
1654
1654
1655 for p in self.changelog.parents(n):
1655 for p in self.changelog.parents(n):
1656 if p not in self.changelog.nodemap:
1656 if p not in self.changelog.nodemap:
1657 self.ui.warn("changeset %s has unknown parent %s\n" %
1657 self.ui.warn("changeset %s has unknown parent %s\n" %
1658 (short(n), short(p)))
1658 (short(n), short(p)))
1659 errors += 1
1659 errors += 1
1660 try:
1660 try:
1661 changes = self.changelog.read(n)
1661 changes = self.changelog.read(n)
1662 except Exception, inst:
1662 except Exception, inst:
1663 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1663 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1664 errors += 1
1664 errors += 1
1665
1665
1666 for f in changes[3]:
1666 for f in changes[3]:
1667 filelinkrevs.setdefault(f, []).append(i)
1667 filelinkrevs.setdefault(f, []).append(i)
1668
1668
1669 seen = {}
1669 seen = {}
1670 self.ui.status("checking manifests\n")
1670 self.ui.status("checking manifests\n")
1671 for i in range(self.manifest.count()):
1671 for i in range(self.manifest.count()):
1672 n = self.manifest.node(i)
1672 n = self.manifest.node(i)
1673 if n in seen:
1673 if n in seen:
1674 self.ui.warn("duplicate manifest at revision %d\n" % i)
1674 self.ui.warn("duplicate manifest at revision %d\n" % i)
1675 errors += 1
1675 errors += 1
1676 seen[n] = 1
1676 seen[n] = 1
1677
1677
1678 for p in self.manifest.parents(n):
1678 for p in self.manifest.parents(n):
1679 if p not in self.manifest.nodemap:
1679 if p not in self.manifest.nodemap:
1680 self.ui.warn("manifest %s has unknown parent %s\n" %
1680 self.ui.warn("manifest %s has unknown parent %s\n" %
1681 (short(n), short(p)))
1681 (short(n), short(p)))
1682 errors += 1
1682 errors += 1
1683
1683
1684 try:
1684 try:
1685 delta = mdiff.patchtext(self.manifest.delta(n))
1685 delta = mdiff.patchtext(self.manifest.delta(n))
1686 except KeyboardInterrupt:
1686 except KeyboardInterrupt:
1687 self.ui.warn("aborted")
1687 self.ui.warn("aborted")
1688 sys.exit(0)
1688 sys.exit(0)
1689 except Exception, inst:
1689 except Exception, inst:
1690 self.ui.warn("unpacking manifest %s: %s\n"
1690 self.ui.warn("unpacking manifest %s: %s\n"
1691 % (short(n), inst))
1691 % (short(n), inst))
1692 errors += 1
1692 errors += 1
1693
1693
1694 ff = [ l.split('\0') for l in delta.splitlines() ]
1694 ff = [ l.split('\0') for l in delta.splitlines() ]
1695 for f, fn in ff:
1695 for f, fn in ff:
1696 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1696 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1697
1697
1698 self.ui.status("crosschecking files in changesets and manifests\n")
1698 self.ui.status("crosschecking files in changesets and manifests\n")
1699 for f in filenodes:
1699 for f in filenodes:
1700 if f not in filelinkrevs:
1700 if f not in filelinkrevs:
1701 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1701 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1702 errors += 1
1702 errors += 1
1703
1703
1704 for f in filelinkrevs:
1704 for f in filelinkrevs:
1705 if f not in filenodes:
1705 if f not in filenodes:
1706 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1706 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1707 errors += 1
1707 errors += 1
1708
1708
1709 self.ui.status("checking files\n")
1709 self.ui.status("checking files\n")
1710 ff = filenodes.keys()
1710 ff = filenodes.keys()
1711 ff.sort()
1711 ff.sort()
1712 for f in ff:
1712 for f in ff:
1713 if f == "/dev/null": continue
1713 if f == "/dev/null": continue
1714 files += 1
1714 files += 1
1715 fl = self.file(f)
1715 fl = self.file(f)
1716 nodes = { nullid: 1 }
1716 nodes = { nullid: 1 }
1717 seen = {}
1717 seen = {}
1718 for i in range(fl.count()):
1718 for i in range(fl.count()):
1719 revisions += 1
1719 revisions += 1
1720 n = fl.node(i)
1720 n = fl.node(i)
1721
1721
1722 if n in seen:
1722 if n in seen:
1723 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1723 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1724 errors += 1
1724 errors += 1
1725
1725
1726 if n not in filenodes[f]:
1726 if n not in filenodes[f]:
1727 self.ui.warn("%s: %d:%s not in manifests\n"
1727 self.ui.warn("%s: %d:%s not in manifests\n"
1728 % (f, i, short(n)))
1728 % (f, i, short(n)))
1729 errors += 1
1729 errors += 1
1730 else:
1730 else:
1731 del filenodes[f][n]
1731 del filenodes[f][n]
1732
1732
1733 flr = fl.linkrev(n)
1733 flr = fl.linkrev(n)
1734 if flr not in filelinkrevs[f]:
1734 if flr not in filelinkrevs[f]:
1735 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1735 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1736 % (f, short(n), fl.linkrev(n)))
1736 % (f, short(n), fl.linkrev(n)))
1737 errors += 1
1737 errors += 1
1738 else:
1738 else:
1739 filelinkrevs[f].remove(flr)
1739 filelinkrevs[f].remove(flr)
1740
1740
1741 # verify contents
1741 # verify contents
1742 try:
1742 try:
1743 t = fl.read(n)
1743 t = fl.read(n)
1744 except Exception, inst:
1744 except Exception, inst:
1745 self.ui.warn("unpacking file %s %s: %s\n"
1745 self.ui.warn("unpacking file %s %s: %s\n"
1746 % (f, short(n), inst))
1746 % (f, short(n), inst))
1747 errors += 1
1747 errors += 1
1748
1748
1749 # verify parents
1749 # verify parents
1750 (p1, p2) = fl.parents(n)
1750 (p1, p2) = fl.parents(n)
1751 if p1 not in nodes:
1751 if p1 not in nodes:
1752 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1752 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1753 (f, short(n), short(p1)))
1753 (f, short(n), short(p1)))
1754 errors += 1
1754 errors += 1
1755 if p2 not in nodes:
1755 if p2 not in nodes:
1756 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1756 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1757 (f, short(n), short(p2)))
1757 (f, short(n), short(p2)))
1758 errors += 1
1758 errors += 1
1759 nodes[n] = 1
1759 nodes[n] = 1
1760
1760
1761 # cross-check
1761 # cross-check
1762 for node in filenodes[f]:
1762 for node in filenodes[f]:
1763 self.ui.warn("node %s in manifests not in %s\n"
1763 self.ui.warn("node %s in manifests not in %s\n"
1764 % (hex(node), f))
1764 % (hex(node), f))
1765 errors += 1
1765 errors += 1
1766
1766
1767 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1767 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1768 (files, changesets, revisions))
1768 (files, changesets, revisions))
1769
1769
1770 if errors:
1770 if errors:
1771 self.ui.warn("%d integrity errors encountered!\n" % errors)
1771 self.ui.warn("%d integrity errors encountered!\n" % errors)
1772 return 1
1772 return 1
1773
1773
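verify() cross-checks the three storage levels against each other: every file node mentioned by a manifest must exist in the corresponding filelog, every filelog revision must appear in some manifest, and every file named by a changeset must have a filelog at all; whatever is left over on either side is counted as an integrity error. A compact sketch of that two-way reconciliation over plain dictionaries of sets:

# Sketch: two-way reconciliation as in verify().  `from_manifests` maps
# filename -> set of nodes seen in manifests, `from_filelogs` maps
# filename -> set of nodes actually stored in the filelog.
def crosscheck(from_manifests, from_filelogs):
    errors = []
    for f, nodes in from_filelogs.items():
        expected = from_manifests.get(f, set())
        for n in nodes - expected:
            errors.append("%s: revision %s not in any manifest" % (f, n))
        for n in expected - nodes:
            errors.append("%s: manifest references missing revision %s" % (f, n))
    for f in set(from_manifests) - set(from_filelogs):
        errors.append("%s in manifests but has no filelog" % f)
    return errors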
1774 class httprepository:
1774 class httprepository:
1775 def __init__(self, ui, path):
1775 def __init__(self, ui, path):
1776 # fix missing / after hostname
1776 # fix missing / after hostname
1777 s = urlparse.urlsplit(path)
1777 s = urlparse.urlsplit(path)
1778 partial = s[2]
1778 partial = s[2]
1779 if not partial: partial = "/"
1779 if not partial: partial = "/"
1780 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1780 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1781 self.ui = ui
1781 self.ui = ui
1782 no_list = [ "localhost", "127.0.0.1" ]
1782 no_list = [ "localhost", "127.0.0.1" ]
1783 host = ui.config("http_proxy", "host")
1783 host = ui.config("http_proxy", "host")
1784 if host is None:
1784 if host is None:
1785 host = os.environ.get("http_proxy")
1785 host = os.environ.get("http_proxy")
1786 if host and host.startswith('http://'):
1786 if host and host.startswith('http://'):
1787 host = host[7:]
1787 host = host[7:]
1788 user = ui.config("http_proxy", "user")
1788 user = ui.config("http_proxy", "user")
1789 passwd = ui.config("http_proxy", "passwd")
1789 passwd = ui.config("http_proxy", "passwd")
1790 no = ui.config("http_proxy", "no")
1790 no = ui.config("http_proxy", "no")
1791 if no is None:
1791 if no is None:
1792 no = os.environ.get("no_proxy")
1792 no = os.environ.get("no_proxy")
1793 if no:
1793 if no:
1794 no_list = no_list + no.split(",")
1794 no_list = no_list + no.split(",")
1795
1795
1796 no_proxy = 0
1796 no_proxy = 0
1797 for h in no_list:
1797 for h in no_list:
1798 if (path.startswith("http://" + h + "/") or
1798 if (path.startswith("http://" + h + "/") or
1799 path.startswith("http://" + h + ":") or
1799 path.startswith("http://" + h + ":") or
1800 path == "http://" + h):
1800 path == "http://" + h):
1801 no_proxy = 1
1801 no_proxy = 1
1802
1802
1803 # Note: urllib2 takes proxy values from the environment and those will
1803 # Note: urllib2 takes proxy values from the environment and those will
1804 # take precedence
1804 # take precedence
1805 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1805 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1806 if os.environ.has_key(env):
1806 if os.environ.has_key(env):
1807 del os.environ[env]
1807 del os.environ[env]
1808
1808
1809 proxy_handler = urllib2.BaseHandler()
1809 proxy_handler = urllib2.BaseHandler()
1810 if host and not no_proxy:
1810 if host and not no_proxy:
1811 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1811 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1812
1812
1813 authinfo = None
1813 authinfo = None
1814 if user and passwd:
1814 if user and passwd:
1815 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1815 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1816 passmgr.add_password(None, host, user, passwd)
1816 passmgr.add_password(None, host, user, passwd)
1817 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1817 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1818
1818
1819 opener = urllib2.build_opener(proxy_handler, authinfo)
1819 opener = urllib2.build_opener(proxy_handler, authinfo)
1820 urllib2.install_opener(opener)
1820 urllib2.install_opener(opener)
1821
1821
1822 def dev(self):
1822 def dev(self):
1823 return -1
1823 return -1
1824
1824
1825 def do_cmd(self, cmd, **args):
1825 def do_cmd(self, cmd, **args):
1826 self.ui.debug("sending %s command\n" % cmd)
1826 self.ui.debug("sending %s command\n" % cmd)
1827 q = {"cmd": cmd}
1827 q = {"cmd": cmd}
1828 q.update(args)
1828 q.update(args)
1829 qs = urllib.urlencode(q)
1829 qs = urllib.urlencode(q)
1830 cu = "%s?%s" % (self.url, qs)
1830 cu = "%s?%s" % (self.url, qs)
1831 resp = urllib2.urlopen(cu)
1831 resp = urllib2.urlopen(cu)
1832 proto = resp.headers['content-type']
1832 proto = resp.headers['content-type']
1833
1833
1834 # accept old "text/plain" and "application/hg-changegroup" for now
1834 # accept old "text/plain" and "application/hg-changegroup" for now
1835 if not proto.startswith('application/mercurial') and \
1835 if not proto.startswith('application/mercurial') and \
1836 not proto.startswith('text/plain') and \
1836 not proto.startswith('text/plain') and \
1837 not proto.startswith('application/hg-changegroup'):
1837 not proto.startswith('application/hg-changegroup'):
1838 raise RepoError("'%s' does not appear to be an hg repository"
1838 raise RepoError("'%s' does not appear to be an hg repository"
1839 % self.url)
1839 % self.url)
1840
1840
1841 if proto.startswith('application/mercurial'):
1841 if proto.startswith('application/mercurial'):
1842 version = proto[22:]
1842 version = proto[22:]
1843 if float(version) > 0.1:
1843 if float(version) > 0.1:
1844 raise RepoError("'%s' uses newer protocol %s" %
1844 raise RepoError("'%s' uses newer protocol %s" %
1845 (self.url, version))
1845 (self.url, version))
1846
1846
1847 return resp
1847 return resp
1848
1848
1849 def heads(self):
1849 def heads(self):
1850 d = self.do_cmd("heads").read()
1850 d = self.do_cmd("heads").read()
1851 try:
1851 try:
1852 return map(bin, d[:-1].split(" "))
1852 return map(bin, d[:-1].split(" "))
1853 except:
1853 except:
1854 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1854 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1855 raise
1855 raise
1856
1856
1857 def branches(self, nodes):
1857 def branches(self, nodes):
1858 n = " ".join(map(hex, nodes))
1858 n = " ".join(map(hex, nodes))
1859 d = self.do_cmd("branches", nodes=n).read()
1859 d = self.do_cmd("branches", nodes=n).read()
1860 try:
1860 try:
1861 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1861 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1862 return br
1862 return br
1863 except:
1863 except:
1864 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1864 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1865 raise
1865 raise
1866
1866
1867 def between(self, pairs):
1867 def between(self, pairs):
1868 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1868 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1869 d = self.do_cmd("between", pairs=n).read()
1869 d = self.do_cmd("between", pairs=n).read()
1870 try:
1870 try:
1871 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1871 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1872 return p
1872 return p
1873 except:
1873 except:
1874 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1874 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1875 raise
1875 raise
1876
1876
1877 def changegroup(self, nodes):
1877 def changegroup(self, nodes):
1878 n = " ".join(map(hex, nodes))
1878 n = " ".join(map(hex, nodes))
1879 f = self.do_cmd("changegroup", roots=n)
1879 f = self.do_cmd("changegroup", roots=n)
1880 bytes = 0
1880 bytes = 0
1881
1881
1882 class zread:
1882 class zread:
1883 def __init__(self, f):
1883 def __init__(self, f):
1884 self.zd = zlib.decompressobj()
1884 self.zd = zlib.decompressobj()
1885 self.f = f
1885 self.f = f
1886 self.buf = ""
1886 self.buf = ""
1887 def read(self, l):
1887 def read(self, l):
1888 while l > len(self.buf):
1888 while l > len(self.buf):
1889 r = self.f.read(4096)
1889 r = self.f.read(4096)
1890 if r:
1890 if r:
1891 self.buf += self.zd.decompress(r)
1891 self.buf += self.zd.decompress(r)
1892 else:
1892 else:
1893 self.buf += self.zd.flush()
1893 self.buf += self.zd.flush()
1894 break
1894 break
1895 d, self.buf = self.buf[:l], self.buf[l:]
1895 d, self.buf = self.buf[:l], self.buf[l:]
1896 return d
1896 return d
1897
1897
1898 return zread(f)
1898 return zread(f)
1899
1899
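httprepository drives the remote with plain HTTP GETs: each command becomes a query string such as ?cmd=branches&nodes=..., and the response's Content-Type both identifies the server as a Mercurial repository and carries a protocol version ("application/mercurial-0.1"). A sketch of building such a request URL and checking the advertised version (urllib here is Python 2's, matching the module above):

import urllib

# Sketch: build the command URL the way do_cmd() does and check the
# protocol version announced in the Content-Type header.
def command_url(base_url, cmd, **args):
    q = {"cmd": cmd}
    q.update(args)
    return "%s?%s" % (base_url, urllib.urlencode(q))

def check_protocol(content_type):
    if content_type.startswith("application/mercurial"):
        version = content_type[len("application/mercurial-"):]
        if float(version) > 0.1:
            raise ValueError("newer protocol %s" % version)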
1900 class remotelock:
1900 class remotelock:
1901 def __init__(self, repo):
1901 def __init__(self, repo):
1902 self.repo = repo
1902 self.repo = repo
1903 def release(self):
1903 def release(self):
1904 self.repo.unlock()
1904 self.repo.unlock()
1905 self.repo = None
1905 self.repo = None
1906 def __del__(self):
1906 def __del__(self):
1907 if self.repo:
1907 if self.repo:
1908 self.release()
1908 self.release()
1909
1909
1910 class sshrepository:
1910 class sshrepository:
1911 def __init__(self, ui, path):
1911 def __init__(self, ui, path):
1912 self.url = path
1912 self.url = path
1913 self.ui = ui
1913 self.ui = ui
1914
1914
1915 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
1915 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
1916 if not m:
1916 if not m:
1917 raise RepoError("couldn't parse destination %s" % path)
1917 raise RepoError("couldn't parse destination %s" % path)
1918
1918
1919 self.user = m.group(2)
1919 self.user = m.group(2)
1920 self.host = m.group(3)
1920 self.host = m.group(3)
1921 self.port = m.group(5)
1921 self.port = m.group(5)
1922 self.path = m.group(7)
1922 self.path = m.group(7)
1923
1923
1924 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1924 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1925 args = self.port and ("%s -p %s") % (args, self.port) or args
1925 args = self.port and ("%s -p %s") % (args, self.port) or args
1926 path = self.path or ""
1926 path = self.path or ""
1927
1927
1928 if not path:
1928 if not path:
1929 raise RepoError("no remote repository path specified")
1929 raise RepoError("no remote repository path specified")
1930
1930
1931 cmd = "ssh %s 'hg -R %s serve --stdio'"
1931 cmd = "ssh %s 'hg -R %s serve --stdio'"
1932 cmd = cmd % (args, path)
1932 cmd = cmd % (args, path)
1933
1933
1934 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1934 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1935
1935
1936 def readerr(self):
1936 def readerr(self):
1937 while 1:
1937 while 1:
1938 r,w,x = select.select([self.pipee], [], [], 0)
1938 r,w,x = select.select([self.pipee], [], [], 0)
1939 if not r: break
1939 if not r: break
1940 l = self.pipee.readline()
1940 l = self.pipee.readline()
1941 if not l: break
1941 if not l: break
1942 self.ui.status("remote: ", l)
1942 self.ui.status("remote: ", l)
1943
1943
1944 def __del__(self):
1944 def __del__(self):
1945 try:
1945 try:
1946 self.pipeo.close()
1946 self.pipeo.close()
1947 self.pipei.close()
1947 self.pipei.close()
1948 for l in self.pipee:
1948 for l in self.pipee:
1949 self.ui.status("remote: ", l)
1949 self.ui.status("remote: ", l)
1950 self.pipee.close()
1950 self.pipee.close()
1951 except:
1951 except:
1952 pass
1952 pass
1953
1953
1954 def dev(self):
1954 def dev(self):
1955 return -1
1955 return -1
1956
1956
1957 def do_cmd(self, cmd, **args):
1957 def do_cmd(self, cmd, **args):
1958 self.ui.debug("sending %s command\n" % cmd)
1958 self.ui.debug("sending %s command\n" % cmd)
1959 self.pipeo.write("%s\n" % cmd)
1959 self.pipeo.write("%s\n" % cmd)
1960 for k, v in args.items():
1960 for k, v in args.items():
1961 self.pipeo.write("%s %d\n" % (k, len(v)))
1961 self.pipeo.write("%s %d\n" % (k, len(v)))
1962 self.pipeo.write(v)
1962 self.pipeo.write(v)
1963 self.pipeo.flush()
1963 self.pipeo.flush()
1964
1964
1965 return self.pipei
1965 return self.pipei
1966
1966
1967 def call(self, cmd, **args):
1967 def call(self, cmd, **args):
1968 r = self.do_cmd(cmd, **args)
1968 r = self.do_cmd(cmd, **args)
1969 l = r.readline()
1969 l = r.readline()
1970 self.readerr()
1970 self.readerr()
1971 try:
1971 try:
1972 l = int(l)
1972 l = int(l)
1973 except:
1973 except:
1974 raise RepoError("unexpected response '%s'" % l)
1974 raise RepoError("unexpected response '%s'" % l)
1975 return r.read(l)
1975 return r.read(l)
1976
1976
1977 def lock(self):
1977 def lock(self):
1978 self.call("lock")
1978 self.call("lock")
1979 return remotelock(self)
1979 return remotelock(self)
1980
1980
1981 def unlock(self):
1981 def unlock(self):
1982 self.call("unlock")
1982 self.call("unlock")
1983
1983
1984 def heads(self):
1984 def heads(self):
1985 d = self.call("heads")
1985 d = self.call("heads")
1986 try:
1986 try:
1987 return map(bin, d[:-1].split(" "))
1987 return map(bin, d[:-1].split(" "))
1988 except:
1988 except:
1989 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1989 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1990
1990
1991 def branches(self, nodes):
1991 def branches(self, nodes):
1992 n = " ".join(map(hex, nodes))
1992 n = " ".join(map(hex, nodes))
1993 d = self.call("branches", nodes=n)
1993 d = self.call("branches", nodes=n)
1994 try:
1994 try:
1995 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1995 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1996 return br
1996 return br
1997 except:
1997 except:
1998 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1998 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1999
1999
2000 def between(self, pairs):
2000 def between(self, pairs):
2001 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2001 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2002 d = self.call("between", pairs=n)
2002 d = self.call("between", pairs=n)
2003 try:
2003 try:
2004 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2004 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2005 return p
2005 return p
2006 except:
2006 except:
2007 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2007 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2008
2008
2009 def changegroup(self, nodes):
2009 def changegroup(self, nodes):
2010 n = " ".join(map(hex, nodes))
2010 n = " ".join(map(hex, nodes))
2011 f = self.do_cmd("changegroup", roots=n)
2011 f = self.do_cmd("changegroup", roots=n)
2012 return self.pipei
2012 return self.pipei
2013
2013
2014 def addchangegroup(self, cg):
2014 def addchangegroup(self, cg):
2015 d = self.call("addchangegroup")
2015 d = self.call("addchangegroup")
2016 if d:
2016 if d:
2017 raise RepoError("push refused: %s" % d)
2017 raise RepoError("push refused: %s" % d)
2018
2018
2019 while 1:
2019 while 1:
2020 d = cg.read(4096)
2020 d = cg.read(4096)
2021 if not d: break
2021 if not d: break
2022 self.pipeo.write(d)
2022 self.pipeo.write(d)
2023 self.readerr()
2023 self.readerr()
2024
2024
2025 self.pipeo.flush()
2025 self.pipeo.flush()
2026
2026
2027 self.readerr()
2027 self.readerr()
2028 l = int(self.pipei.readline())
2028 l = int(self.pipei.readline())
2029 return self.pipei.read(l) != ""
2029 return self.pipei.read(l) != ""
2030
2030
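The ssh protocol above is line-oriented over the pipes of "hg -R path serve --stdio": the client writes the command name, then one "key length" line plus the raw value for each argument, flushes, and reads back a decimal length line followed by that many bytes. A minimal sketch of one round trip over arbitrary file objects (the ssh/pipe setup itself is elided):

# Sketch: one ssh-protocol round trip, as in do_cmd()/call() above.
# `out` and `inp` stand in for the pipes to/from "hg serve --stdio".
def ssh_call(out, inp, cmd, **args):
    out.write("%s\n" % cmd)
    for k, v in args.items():
        out.write("%s %d\n" % (k, len(v)))
        out.write(v)
    out.flush()
    l = int(inp.readline())        # server answers with a byte count
    return inp.read(l)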
2031 def repository(ui, path=None, create=0):
2031 def repository(ui, path=None, create=0):
2032 if path:
2032 if path:
2033 if path.startswith("http://"):
2033 if path.startswith("http://"):
2034 return httprepository(ui, path)
2034 return httprepository(ui, path)
2035 if path.startswith("hg://"):
2035 if path.startswith("hg://"):
2036 return httprepository(ui, path.replace("hg://", "http://"))
2036 return httprepository(ui, path.replace("hg://", "http://"))
2037 if path.startswith("old-http://"):
2037 if path.startswith("old-http://"):
2038 return localrepository(ui, path.replace("old-http://", "http://"))
2038 return localrepository(ui, path.replace("old-http://", "http://"))
2039 if path.startswith("ssh://"):
2039 if path.startswith("ssh://"):
2040 return sshrepository(ui, path)
2040 return sshrepository(ui, path)
2041
2041
2042 return localrepository(ui, path, create)
2042 return localrepository(ui, path, create)
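repository() picks the repository class from the URL scheme: http:// and hg:// map to httprepository, old-http:// to a localrepository read over static HTTP, ssh:// to sshrepository, and anything else (or no path at all) to a localrepository on disk. A brief usage sketch, assuming an existing ui object; the paths shown are only examples:

# Sketch: scheme-based dispatch.
# repository(ui, "ssh://user@host/path/to/repo")   -> sshrepository
# repository(ui, "http://hg.example.com/repo")     -> httprepository
# repository(ui, "/home/user/repo")                -> localrepository
# repository(ui)                                   -> local repo in cwd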