dirstate.changes() now distinguishes 'hg remove'd or just deleted files....
Thomas Arendsen Hein
r861:cbe5c4d0 default
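A minimal caller sketch of what this changeset gives dirstate.changes() (illustrative only; `repo` being an already-opened localrepository is an assumption, not part of the diff):

    # Hypothetical caller sketch; assumes 'repo' is an open localrepository.
    # dirstate.changes() still returns a five-element tuple, but its fourth
    # element is now built from two separate lists:
    #   lookup   - size/exec bit unchanged, mtime differs: needs content compare
    #   modified - state 'm', or size/exec bit changed on disk
    #   added    - state 'a'
    #   removed + deleted - files 'hg remove'd (state 'r') followeded by tracked
    #                       files that are simply missing from the working dir
    #   unknown  - untracked files found on disk (and 'r' files still present)
    lookup, modified, added, removed, unknown = repo.dirstate.changes()
    for fn in removed:
        print "R %s" % fn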
@@ -1,2020 +1,2028 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse stat")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
284 list.sort()
284 list.sort()
285 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
286 text = "\n".join(l)
286 text = "\n".join(l)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
288
288
289 class dirstate:
289 class dirstate:
290 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
291 self.opener = opener
291 self.opener = opener
292 self.root = root
292 self.root = root
293 self.dirty = 0
293 self.dirty = 0
294 self.ui = ui
294 self.ui = ui
295 self.map = None
295 self.map = None
296 self.pl = None
296 self.pl = None
297 self.copies = {}
297 self.copies = {}
298 self.ignorefunc = None
298 self.ignorefunc = None
299
299
300 def wjoin(self, f):
300 def wjoin(self, f):
301 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
302
302
303 def ignore(self, f):
303 def ignore(self, f):
304 if not self.ignorefunc:
304 if not self.ignorefunc:
305 bigpat = []
305 bigpat = []
306 try:
306 try:
307 l = file(self.wjoin(".hgignore"))
307 l = file(self.wjoin(".hgignore"))
308 for pat in l:
308 for pat in l:
309 if pat != "\n":
309 if pat != "\n":
310 p = util.pconvert(pat[:-1])
310 p = util.pconvert(pat[:-1])
311 try:
311 try:
312 r = re.compile(p)
312 r = re.compile(p)
313 except:
313 except:
314 self.ui.warn("ignoring invalid ignore"
314 self.ui.warn("ignoring invalid ignore"
315 + " regular expression '%s'\n" % p)
315 + " regular expression '%s'\n" % p)
316 else:
316 else:
317 bigpat.append(util.pconvert(pat[:-1]))
317 bigpat.append(util.pconvert(pat[:-1]))
318 except IOError: pass
318 except IOError: pass
319
319
320 if bigpat:
320 if bigpat:
321 s = "(?:%s)" % (")|(?:".join(bigpat))
321 s = "(?:%s)" % (")|(?:".join(bigpat))
322 r = re.compile(s)
322 r = re.compile(s)
323 self.ignorefunc = r.search
323 self.ignorefunc = r.search
324 else:
324 else:
325 self.ignorefunc = util.never
325 self.ignorefunc = util.never
326
326
327 return self.ignorefunc(f)
327 return self.ignorefunc(f)
328
328
329 def __del__(self):
329 def __del__(self):
330 if self.dirty:
330 if self.dirty:
331 self.write()
331 self.write()
332
332
333 def __getitem__(self, key):
333 def __getitem__(self, key):
334 try:
334 try:
335 return self.map[key]
335 return self.map[key]
336 except TypeError:
336 except TypeError:
337 self.read()
337 self.read()
338 return self[key]
338 return self[key]
339
339
340 def __contains__(self, key):
340 def __contains__(self, key):
341 if not self.map: self.read()
341 if not self.map: self.read()
342 return key in self.map
342 return key in self.map
343
343
344 def parents(self):
344 def parents(self):
345 if not self.pl:
345 if not self.pl:
346 self.read()
346 self.read()
347 return self.pl
347 return self.pl
348
348
349 def markdirty(self):
349 def markdirty(self):
350 if not self.dirty:
350 if not self.dirty:
351 self.dirty = 1
351 self.dirty = 1
352
352
353 def setparents(self, p1, p2 = nullid):
353 def setparents(self, p1, p2 = nullid):
354 self.markdirty()
354 self.markdirty()
355 self.pl = p1, p2
355 self.pl = p1, p2
356
356
357 def state(self, key):
357 def state(self, key):
358 try:
358 try:
359 return self[key][0]
359 return self[key][0]
360 except KeyError:
360 except KeyError:
361 return "?"
361 return "?"
362
362
363 def read(self):
363 def read(self):
364 if self.map is not None: return self.map
364 if self.map is not None: return self.map
365
365
366 self.map = {}
366 self.map = {}
367 self.pl = [nullid, nullid]
367 self.pl = [nullid, nullid]
368 try:
368 try:
369 st = self.opener("dirstate").read()
369 st = self.opener("dirstate").read()
370 if not st: return
370 if not st: return
371 except: return
371 except: return
372
372
373 self.pl = [st[:20], st[20: 40]]
373 self.pl = [st[:20], st[20: 40]]
374
374
375 pos = 40
375 pos = 40
376 while pos < len(st):
376 while pos < len(st):
377 e = struct.unpack(">cllll", st[pos:pos+17])
377 e = struct.unpack(">cllll", st[pos:pos+17])
378 l = e[4]
378 l = e[4]
379 pos += 17
379 pos += 17
380 f = st[pos:pos + l]
380 f = st[pos:pos + l]
381 if '\0' in f:
381 if '\0' in f:
382 f, c = f.split('\0')
382 f, c = f.split('\0')
383 self.copies[f] = c
383 self.copies[f] = c
384 self.map[f] = e[:4]
384 self.map[f] = e[:4]
385 pos += l
385 pos += l
386
386
387 def copy(self, source, dest):
387 def copy(self, source, dest):
388 self.read()
388 self.read()
389 self.markdirty()
389 self.markdirty()
390 self.copies[dest] = source
390 self.copies[dest] = source
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self.copies.get(file, None)
393 return self.copies.get(file, None)
394
394
395 def update(self, files, state):
395 def update(self, files, state):
396 ''' current states:
396 ''' current states:
397 n normal
397 n normal
398 m needs merging
398 m needs merging
399 r marked for removal
399 r marked for removal
400 a marked for addition'''
400 a marked for addition'''
401
401
402 if not files: return
402 if not files: return
403 self.read()
403 self.read()
404 self.markdirty()
404 self.markdirty()
405 for f in files:
405 for f in files:
406 if state == "r":
406 if state == "r":
407 self.map[f] = ('r', 0, 0, 0)
407 self.map[f] = ('r', 0, 0, 0)
408 else:
408 else:
409 s = os.stat(os.path.join(self.root, f))
409 s = os.stat(os.path.join(self.root, f))
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
411
411
412 def forget(self, files):
412 def forget(self, files):
413 if not files: return
413 if not files: return
414 self.read()
414 self.read()
415 self.markdirty()
415 self.markdirty()
416 for f in files:
416 for f in files:
417 try:
417 try:
418 del self.map[f]
418 del self.map[f]
419 except KeyError:
419 except KeyError:
420 self.ui.warn("not in dirstate: %s!\n" % f)
420 self.ui.warn("not in dirstate: %s!\n" % f)
421 pass
421 pass
422
422
423 def clear(self):
423 def clear(self):
424 self.map = {}
424 self.map = {}
425 self.markdirty()
425 self.markdirty()
426
426
427 def write(self):
427 def write(self):
428 st = self.opener("dirstate", "w")
428 st = self.opener("dirstate", "w")
429 st.write("".join(self.pl))
429 st.write("".join(self.pl))
430 for f, e in self.map.items():
430 for f, e in self.map.items():
431 c = self.copied(f)
431 c = self.copied(f)
432 if c:
432 if c:
433 f = f + "\0" + c
433 f = f + "\0" + c
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
435 st.write(e + f)
435 st.write(e + f)
436 self.dirty = 0
436 self.dirty = 0
437
437
438 def walk(self, files = None, match = util.always):
438 def walk(self, files = None, match = util.always):
439 self.read()
439 self.read()
440 dc = self.map.copy()
440 dc = self.map.copy()
441 # walk all files by default
441 # walk all files by default
442 if not files: files = [self.root]
442 if not files: files = [self.root]
443 known = {'.hg': 1}
443 known = {'.hg': 1}
444 def seen(fn):
444 def seen(fn):
445 if fn in known: return True
445 if fn in known: return True
446 known[fn] = 1
446 known[fn] = 1
447 def traverse():
447 def traverse():
448 for f in util.unique(files):
448 for f in util.unique(files):
449 f = os.path.join(self.root, f)
449 f = os.path.join(self.root, f)
450 if os.path.isdir(f):
450 if os.path.isdir(f):
451 for dir, subdirs, fl in os.walk(f):
451 for dir, subdirs, fl in os.walk(f):
452 d = dir[len(self.root) + 1:]
452 d = dir[len(self.root) + 1:]
453 nd = os.path.normpath(d)
453 nd = os.path.normpath(d)
454 if seen(nd):
454 if seen(nd):
455 subdirs[:] = []
455 subdirs[:] = []
456 continue
456 continue
457 for sd in subdirs:
457 for sd in subdirs:
458 ds = os.path.join(nd, sd +'/')
458 ds = os.path.join(nd, sd +'/')
459 if self.ignore(ds) or not match(ds):
459 if self.ignore(ds) or not match(ds):
460 subdirs.remove(sd)
460 subdirs.remove(sd)
461 subdirs.sort()
461 subdirs.sort()
462 fl.sort()
462 fl.sort()
463 for fn in fl:
463 for fn in fl:
464 fn = util.pconvert(os.path.join(d, fn))
464 fn = util.pconvert(os.path.join(d, fn))
465 yield 'f', fn
465 yield 'f', fn
466 else:
466 else:
467 yield 'f', f[len(self.root) + 1:]
467 yield 'f', f[len(self.root) + 1:]
468
468
469 ks = dc.keys()
469 ks = dc.keys()
470 ks.sort()
470 ks.sort()
471 for k in ks:
471 for k in ks:
472 yield 'm', k
472 yield 'm', k
473
473
474 # yield only files that match: all in dirstate, others only if
474 # yield only files that match: all in dirstate, others only if
475 # not in .hgignore
475 # not in .hgignore
476
476
477 for src, fn in util.unique(traverse()):
477 for src, fn in util.unique(traverse()):
478 fn = os.path.normpath(fn)
478 fn = os.path.normpath(fn)
479 if seen(fn): continue
479 if seen(fn): continue
480 if fn in dc:
480 if fn in dc:
481 del dc[fn]
481 del dc[fn]
482 elif self.ignore(fn):
482 elif self.ignore(fn):
483 continue
483 continue
484 if match(fn):
484 if match(fn):
485 yield src, fn
485 yield src, fn
486
486
487 def changes(self, files = None, match = util.always):
487 def changes(self, files=None, match=util.always):
488 self.read()
488 self.read()
489 dc = self.map.copy()
489 dc = self.map.copy()
490 lookup, changed, added, unknown = [], [], [], []
490 lookup, modified, added, unknown = [], [], [], []
491 removed, deleted = [], []
491
492
492 for src, fn in self.walk(files, match):
493 for src, fn in self.walk(files, match):
493 try: s = os.stat(os.path.join(self.root, fn))
494 try:
494 except: continue
495 s = os.stat(os.path.join(self.root, fn))
495
496 except OSError:
496 if fn in dc:
497 continue
497 c = dc[fn]
498 if not stat.S_ISREG(s.st_mode):
499 continue
500 c = dc.get(fn)
501 if c:
498 del dc[fn]
502 del dc[fn]
499
500 if c[0] == 'm':
503 if c[0] == 'm':
501 changed.append(fn)
504 modified.append(fn)
502 elif c[0] == 'a':
505 elif c[0] == 'a':
503 added.append(fn)
506 added.append(fn)
504 elif c[0] == 'r':
507 elif c[0] == 'r':
505 unknown.append(fn)
508 unknown.append(fn)
506 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
509 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
507 changed.append(fn)
510 modified.append(fn)
508 elif c[1] != s.st_mode or c[3] != s.st_mtime:
511 elif c[3] != s.st_mtime:
509 lookup.append(fn)
512 lookup.append(fn)
510 else:
513 else:
511 if match(fn): unknown.append(fn)
514 unknown.append(fn)
512
515
513 return (lookup, changed, added, filter(match, dc.keys()), unknown)
516 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
517 if c[0] == 'r':
518 removed.append(fn)
519 else:
520 deleted.append(fn)
521 return (lookup, modified, added, removed + deleted, unknown)
514
522
515 # used to avoid circular references so destructors work
523 # used to avoid circular references so destructors work
516 def opener(base):
524 def opener(base):
517 p = base
525 p = base
518 def o(path, mode="r"):
526 def o(path, mode="r"):
519 if p.startswith("http://"):
527 if p.startswith("http://"):
520 f = os.path.join(p, urllib.quote(path))
528 f = os.path.join(p, urllib.quote(path))
521 return httprangereader.httprangereader(f)
529 return httprangereader.httprangereader(f)
522
530
523 f = os.path.join(p, path)
531 f = os.path.join(p, path)
524
532
525 mode += "b" # for that other OS
533 mode += "b" # for that other OS
526
534
527 if mode[0] != "r":
535 if mode[0] != "r":
528 try:
536 try:
529 s = os.stat(f)
537 s = os.stat(f)
530 except OSError:
538 except OSError:
531 d = os.path.dirname(f)
539 d = os.path.dirname(f)
532 if not os.path.isdir(d):
540 if not os.path.isdir(d):
533 os.makedirs(d)
541 os.makedirs(d)
534 else:
542 else:
535 if s.st_nlink > 1:
543 if s.st_nlink > 1:
536 file(f + ".tmp", "wb").write(file(f, "rb").read())
544 file(f + ".tmp", "wb").write(file(f, "rb").read())
537 util.rename(f+".tmp", f)
545 util.rename(f+".tmp", f)
538
546
539 return file(f, mode)
547 return file(f, mode)
540
548
541 return o
549 return o
542
550
543 class RepoError(Exception): pass
551 class RepoError(Exception): pass
544
552
545 class localrepository:
553 class localrepository:
546 def __init__(self, ui, path=None, create=0):
554 def __init__(self, ui, path=None, create=0):
547 self.remote = 0
555 self.remote = 0
548 if path and path.startswith("http://"):
556 if path and path.startswith("http://"):
549 self.remote = 1
557 self.remote = 1
550 self.path = path
558 self.path = path
551 else:
559 else:
552 if not path:
560 if not path:
553 p = os.getcwd()
561 p = os.getcwd()
554 while not os.path.isdir(os.path.join(p, ".hg")):
562 while not os.path.isdir(os.path.join(p, ".hg")):
555 oldp = p
563 oldp = p
556 p = os.path.dirname(p)
564 p = os.path.dirname(p)
557 if p == oldp: raise RepoError("no repo found")
565 if p == oldp: raise RepoError("no repo found")
558 path = p
566 path = p
559 self.path = os.path.join(path, ".hg")
567 self.path = os.path.join(path, ".hg")
560
568
561 if not create and not os.path.isdir(self.path):
569 if not create and not os.path.isdir(self.path):
562 raise RepoError("repository %s not found" % self.path)
570 raise RepoError("repository %s not found" % self.path)
563
571
564 self.root = path
572 self.root = path
565 self.ui = ui
573 self.ui = ui
566
574
567 if create:
575 if create:
568 os.mkdir(self.path)
576 os.mkdir(self.path)
569 os.mkdir(self.join("data"))
577 os.mkdir(self.join("data"))
570
578
571 self.opener = opener(self.path)
579 self.opener = opener(self.path)
572 self.wopener = opener(self.root)
580 self.wopener = opener(self.root)
573 self.manifest = manifest(self.opener)
581 self.manifest = manifest(self.opener)
574 self.changelog = changelog(self.opener)
582 self.changelog = changelog(self.opener)
575 self.tagscache = None
583 self.tagscache = None
576 self.nodetagscache = None
584 self.nodetagscache = None
577
585
578 if not self.remote:
586 if not self.remote:
579 self.dirstate = dirstate(self.opener, ui, self.root)
587 self.dirstate = dirstate(self.opener, ui, self.root)
580 try:
588 try:
581 self.ui.readconfig(self.opener("hgrc"))
589 self.ui.readconfig(self.opener("hgrc"))
582 except IOError: pass
590 except IOError: pass
583
591
584 def hook(self, name, **args):
592 def hook(self, name, **args):
585 s = self.ui.config("hooks", name)
593 s = self.ui.config("hooks", name)
586 if s:
594 if s:
587 self.ui.note("running hook %s: %s\n" % (name, s))
595 self.ui.note("running hook %s: %s\n" % (name, s))
588 old = {}
596 old = {}
589 for k, v in args.items():
597 for k, v in args.items():
590 k = k.upper()
598 k = k.upper()
591 old[k] = os.environ.get(k, None)
599 old[k] = os.environ.get(k, None)
592 os.environ[k] = v
600 os.environ[k] = v
593
601
594 r = os.system(s)
602 r = os.system(s)
595
603
596 for k, v in old.items():
604 for k, v in old.items():
597 if v != None:
605 if v != None:
598 os.environ[k] = v
606 os.environ[k] = v
599 else:
607 else:
600 del os.environ[k]
608 del os.environ[k]
601
609
602 if r:
610 if r:
603 self.ui.warn("abort: %s hook failed with status %d!\n" %
611 self.ui.warn("abort: %s hook failed with status %d!\n" %
604 (name, r))
612 (name, r))
605 return False
613 return False
606 return True
614 return True
607
615
608 def tags(self):
616 def tags(self):
609 '''return a mapping of tag to node'''
617 '''return a mapping of tag to node'''
610 if not self.tagscache:
618 if not self.tagscache:
611 self.tagscache = {}
619 self.tagscache = {}
612 def addtag(self, k, n):
620 def addtag(self, k, n):
613 try:
621 try:
614 bin_n = bin(n)
622 bin_n = bin(n)
615 except TypeError:
623 except TypeError:
616 bin_n = ''
624 bin_n = ''
617 self.tagscache[k.strip()] = bin_n
625 self.tagscache[k.strip()] = bin_n
618
626
619 try:
627 try:
620 # read each head of the tags file, ending with the tip
628 # read each head of the tags file, ending with the tip
621 # and add each tag found to the map, with "newer" ones
629 # and add each tag found to the map, with "newer" ones
622 # taking precedence
630 # taking precedence
623 fl = self.file(".hgtags")
631 fl = self.file(".hgtags")
624 h = fl.heads()
632 h = fl.heads()
625 h.reverse()
633 h.reverse()
626 for r in h:
634 for r in h:
627 for l in fl.revision(r).splitlines():
635 for l in fl.revision(r).splitlines():
628 if l:
636 if l:
629 n, k = l.split(" ", 1)
637 n, k = l.split(" ", 1)
630 addtag(self, k, n)
638 addtag(self, k, n)
631 except KeyError:
639 except KeyError:
632 pass
640 pass
633
641
634 try:
642 try:
635 f = self.opener("localtags")
643 f = self.opener("localtags")
636 for l in f:
644 for l in f:
637 n, k = l.split(" ", 1)
645 n, k = l.split(" ", 1)
638 addtag(self, k, n)
646 addtag(self, k, n)
639 except IOError:
647 except IOError:
640 pass
648 pass
641
649
642 self.tagscache['tip'] = self.changelog.tip()
650 self.tagscache['tip'] = self.changelog.tip()
643
651
644 return self.tagscache
652 return self.tagscache
645
653
646 def tagslist(self):
654 def tagslist(self):
647 '''return a list of tags ordered by revision'''
655 '''return a list of tags ordered by revision'''
648 l = []
656 l = []
649 for t, n in self.tags().items():
657 for t, n in self.tags().items():
650 try:
658 try:
651 r = self.changelog.rev(n)
659 r = self.changelog.rev(n)
652 except:
660 except:
653 r = -2 # sort to the beginning of the list if unknown
661 r = -2 # sort to the beginning of the list if unknown
654 l.append((r,t,n))
662 l.append((r,t,n))
655 l.sort()
663 l.sort()
656 return [(t,n) for r,t,n in l]
664 return [(t,n) for r,t,n in l]
657
665
658 def nodetags(self, node):
666 def nodetags(self, node):
659 '''return the tags associated with a node'''
667 '''return the tags associated with a node'''
660 if not self.nodetagscache:
668 if not self.nodetagscache:
661 self.nodetagscache = {}
669 self.nodetagscache = {}
662 for t,n in self.tags().items():
670 for t,n in self.tags().items():
663 self.nodetagscache.setdefault(n,[]).append(t)
671 self.nodetagscache.setdefault(n,[]).append(t)
664 return self.nodetagscache.get(node, [])
672 return self.nodetagscache.get(node, [])
665
673
666 def lookup(self, key):
674 def lookup(self, key):
667 try:
675 try:
668 return self.tags()[key]
676 return self.tags()[key]
669 except KeyError:
677 except KeyError:
670 try:
678 try:
671 return self.changelog.lookup(key)
679 return self.changelog.lookup(key)
672 except:
680 except:
673 raise RepoError("unknown revision '%s'" % key)
681 raise RepoError("unknown revision '%s'" % key)
674
682
675 def dev(self):
683 def dev(self):
676 if self.remote: return -1
684 if self.remote: return -1
677 return os.stat(self.path).st_dev
685 return os.stat(self.path).st_dev
678
686
679 def join(self, f):
687 def join(self, f):
680 return os.path.join(self.path, f)
688 return os.path.join(self.path, f)
681
689
682 def wjoin(self, f):
690 def wjoin(self, f):
683 return os.path.join(self.root, f)
691 return os.path.join(self.root, f)
684
692
685 def file(self, f):
693 def file(self, f):
686 if f[0] == '/': f = f[1:]
694 if f[0] == '/': f = f[1:]
687 return filelog(self.opener, f)
695 return filelog(self.opener, f)
688
696
689 def getcwd(self):
697 def getcwd(self):
690 cwd = os.getcwd()
698 cwd = os.getcwd()
691 if cwd == self.root: return ''
699 if cwd == self.root: return ''
692 return cwd[len(self.root) + 1:]
700 return cwd[len(self.root) + 1:]
693
701
694 def wfile(self, f, mode='r'):
702 def wfile(self, f, mode='r'):
695 return self.wopener(f, mode)
703 return self.wopener(f, mode)
696
704
697 def transaction(self):
705 def transaction(self):
698 # save dirstate for undo
706 # save dirstate for undo
699 try:
707 try:
700 ds = self.opener("dirstate").read()
708 ds = self.opener("dirstate").read()
701 except IOError:
709 except IOError:
702 ds = ""
710 ds = ""
703 self.opener("journal.dirstate", "w").write(ds)
711 self.opener("journal.dirstate", "w").write(ds)
704
712
705 def after():
713 def after():
706 util.rename(self.join("journal"), self.join("undo"))
714 util.rename(self.join("journal"), self.join("undo"))
707 util.rename(self.join("journal.dirstate"),
715 util.rename(self.join("journal.dirstate"),
708 self.join("undo.dirstate"))
716 self.join("undo.dirstate"))
709
717
710 return transaction.transaction(self.ui.warn, self.opener,
718 return transaction.transaction(self.ui.warn, self.opener,
711 self.join("journal"), after)
719 self.join("journal"), after)
712
720
713 def recover(self):
721 def recover(self):
714 lock = self.lock()
722 lock = self.lock()
715 if os.path.exists(self.join("journal")):
723 if os.path.exists(self.join("journal")):
716 self.ui.status("rolling back interrupted transaction\n")
724 self.ui.status("rolling back interrupted transaction\n")
717 return transaction.rollback(self.opener, self.join("journal"))
725 return transaction.rollback(self.opener, self.join("journal"))
718 else:
726 else:
719 self.ui.warn("no interrupted transaction available\n")
727 self.ui.warn("no interrupted transaction available\n")
720
728
721 def undo(self):
729 def undo(self):
722 lock = self.lock()
730 lock = self.lock()
723 if os.path.exists(self.join("undo")):
731 if os.path.exists(self.join("undo")):
724 self.ui.status("rolling back last transaction\n")
732 self.ui.status("rolling back last transaction\n")
725 transaction.rollback(self.opener, self.join("undo"))
733 transaction.rollback(self.opener, self.join("undo"))
726 self.dirstate = None
734 self.dirstate = None
727 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
735 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
728 self.dirstate = dirstate(self.opener, self.ui, self.root)
736 self.dirstate = dirstate(self.opener, self.ui, self.root)
729 else:
737 else:
730 self.ui.warn("no undo information available\n")
738 self.ui.warn("no undo information available\n")
731
739
732 def lock(self, wait = 1):
740 def lock(self, wait = 1):
733 try:
741 try:
734 return lock.lock(self.join("lock"), 0)
742 return lock.lock(self.join("lock"), 0)
735 except lock.LockHeld, inst:
743 except lock.LockHeld, inst:
736 if wait:
744 if wait:
737 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
745 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
738 return lock.lock(self.join("lock"), wait)
746 return lock.lock(self.join("lock"), wait)
739 raise inst
747 raise inst
740
748
741 def rawcommit(self, files, text, user, date, p1=None, p2=None):
749 def rawcommit(self, files, text, user, date, p1=None, p2=None):
742 orig_parent = self.dirstate.parents()[0] or nullid
750 orig_parent = self.dirstate.parents()[0] or nullid
743 p1 = p1 or self.dirstate.parents()[0] or nullid
751 p1 = p1 or self.dirstate.parents()[0] or nullid
744 p2 = p2 or self.dirstate.parents()[1] or nullid
752 p2 = p2 or self.dirstate.parents()[1] or nullid
745 c1 = self.changelog.read(p1)
753 c1 = self.changelog.read(p1)
746 c2 = self.changelog.read(p2)
754 c2 = self.changelog.read(p2)
747 m1 = self.manifest.read(c1[0])
755 m1 = self.manifest.read(c1[0])
748 mf1 = self.manifest.readflags(c1[0])
756 mf1 = self.manifest.readflags(c1[0])
749 m2 = self.manifest.read(c2[0])
757 m2 = self.manifest.read(c2[0])
750
758
751 if orig_parent == p1:
759 if orig_parent == p1:
752 update_dirstate = 1
760 update_dirstate = 1
753 else:
761 else:
754 update_dirstate = 0
762 update_dirstate = 0
755
763
756 tr = self.transaction()
764 tr = self.transaction()
757 mm = m1.copy()
765 mm = m1.copy()
758 mfm = mf1.copy()
766 mfm = mf1.copy()
759 linkrev = self.changelog.count()
767 linkrev = self.changelog.count()
760 for f in files:
768 for f in files:
761 try:
769 try:
762 t = self.wfile(f).read()
770 t = self.wfile(f).read()
763 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
771 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
764 r = self.file(f)
772 r = self.file(f)
765 mfm[f] = tm
773 mfm[f] = tm
766 mm[f] = r.add(t, {}, tr, linkrev,
774 mm[f] = r.add(t, {}, tr, linkrev,
767 m1.get(f, nullid), m2.get(f, nullid))
775 m1.get(f, nullid), m2.get(f, nullid))
768 if update_dirstate:
776 if update_dirstate:
769 self.dirstate.update([f], "n")
777 self.dirstate.update([f], "n")
770 except IOError:
778 except IOError:
771 try:
779 try:
772 del mm[f]
780 del mm[f]
773 del mfm[f]
781 del mfm[f]
774 if update_dirstate:
782 if update_dirstate:
775 self.dirstate.forget([f])
783 self.dirstate.forget([f])
776 except:
784 except:
777 # deleted from p2?
785 # deleted from p2?
778 pass
786 pass
779
787
780 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
788 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
781 user = user or self.ui.username()
789 user = user or self.ui.username()
782 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
790 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
783 tr.close()
791 tr.close()
784 if update_dirstate:
792 if update_dirstate:
785 self.dirstate.setparents(n, nullid)
793 self.dirstate.setparents(n, nullid)
786
794
787 def commit(self, files = None, text = "", user = None, date = None,
795 def commit(self, files = None, text = "", user = None, date = None,
788 match = util.always):
796 match = util.always):
789 commit = []
797 commit = []
790 remove = []
798 remove = []
791 if files:
799 if files:
792 for f in files:
800 for f in files:
793 s = self.dirstate.state(f)
801 s = self.dirstate.state(f)
794 if s in 'nmai':
802 if s in 'nmai':
795 commit.append(f)
803 commit.append(f)
796 elif s == 'r':
804 elif s == 'r':
797 remove.append(f)
805 remove.append(f)
798 else:
806 else:
799 self.ui.warn("%s not tracked!\n" % f)
807 self.ui.warn("%s not tracked!\n" % f)
800 else:
808 else:
801 (c, a, d, u) = self.changes(match = match)
809 (c, a, d, u) = self.changes(match = match)
802 commit = c + a
810 commit = c + a
803 remove = d
811 remove = d
804
812
805 if not commit and not remove:
813 if not commit and not remove:
806 self.ui.status("nothing changed\n")
814 self.ui.status("nothing changed\n")
807 return
815 return
808
816
809 if not self.hook("precommit"):
817 if not self.hook("precommit"):
810 return 1
818 return 1
811
819
812 p1, p2 = self.dirstate.parents()
820 p1, p2 = self.dirstate.parents()
813 c1 = self.changelog.read(p1)
821 c1 = self.changelog.read(p1)
814 c2 = self.changelog.read(p2)
822 c2 = self.changelog.read(p2)
815 m1 = self.manifest.read(c1[0])
823 m1 = self.manifest.read(c1[0])
816 mf1 = self.manifest.readflags(c1[0])
824 mf1 = self.manifest.readflags(c1[0])
817 m2 = self.manifest.read(c2[0])
825 m2 = self.manifest.read(c2[0])
818 lock = self.lock()
826 lock = self.lock()
819 tr = self.transaction()
827 tr = self.transaction()
820
828
821 # check in files
829 # check in files
822 new = {}
830 new = {}
823 linkrev = self.changelog.count()
831 linkrev = self.changelog.count()
824 commit.sort()
832 commit.sort()
825 for f in commit:
833 for f in commit:
826 self.ui.note(f + "\n")
834 self.ui.note(f + "\n")
827 try:
835 try:
828 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
836 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
829 t = self.wfile(f).read()
837 t = self.wfile(f).read()
830 except IOError:
838 except IOError:
831 self.ui.warn("trouble committing %s!\n" % f)
839 self.ui.warn("trouble committing %s!\n" % f)
832 raise
840 raise
833
841
834 meta = {}
842 meta = {}
835 cp = self.dirstate.copied(f)
843 cp = self.dirstate.copied(f)
836 if cp:
844 if cp:
837 meta["copy"] = cp
845 meta["copy"] = cp
838 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
846 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
839 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
847 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
840
848
841 r = self.file(f)
849 r = self.file(f)
842 fp1 = m1.get(f, nullid)
850 fp1 = m1.get(f, nullid)
843 fp2 = m2.get(f, nullid)
851 fp2 = m2.get(f, nullid)
844 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
852 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
845
853
846 # update manifest
854 # update manifest
847 m1.update(new)
855 m1.update(new)
848 for f in remove:
856 for f in remove:
849 if f in m1:
857 if f in m1:
850 del m1[f]
858 del m1[f]
851 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
859 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
852 (new, remove))
860 (new, remove))
853
861
854 # add changeset
862 # add changeset
855 new = new.keys()
863 new = new.keys()
856 new.sort()
864 new.sort()
857
865
858 if not text:
866 if not text:
859 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
867 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
860 edittext += "".join(["HG: changed %s\n" % f for f in new])
868 edittext += "".join(["HG: changed %s\n" % f for f in new])
861 edittext += "".join(["HG: removed %s\n" % f for f in remove])
869 edittext += "".join(["HG: removed %s\n" % f for f in remove])
862 edittext = self.ui.edit(edittext)
870 edittext = self.ui.edit(edittext)
863 if not edittext.rstrip():
871 if not edittext.rstrip():
864 return 1
872 return 1
865 text = edittext
873 text = edittext
866
874
867 user = user or self.ui.username()
875 user = user or self.ui.username()
868 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
876 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
869
877
870 tr.close()
878 tr.close()
871
879
872 self.dirstate.setparents(n)
880 self.dirstate.setparents(n)
873 self.dirstate.update(new, "n")
881 self.dirstate.update(new, "n")
874 self.dirstate.forget(remove)
882 self.dirstate.forget(remove)
875
883
876 if not self.hook("commit", node=hex(n)):
884 if not self.hook("commit", node=hex(n)):
877 return 1
885 return 1
878
886
879 def walk(self, node = None, files = [], match = util.always):
887 def walk(self, node = None, files = [], match = util.always):
880 if node:
888 if node:
881 for fn in self.manifest.read(self.changelog.read(node)[0]):
889 for fn in self.manifest.read(self.changelog.read(node)[0]):
882 if match(fn): yield 'm', fn
890 if match(fn): yield 'm', fn
883 else:
891 else:
884 for src, fn in self.dirstate.walk(files, match):
892 for src, fn in self.dirstate.walk(files, match):
885 yield src, fn
893 yield src, fn
886
894
887 def changes(self, node1 = None, node2 = None, files = [],
895 def changes(self, node1 = None, node2 = None, files = [],
888 match = util.always):
896 match = util.always):
889 mf2, u = None, []
897 mf2, u = None, []
890
898
891 def fcmp(fn, mf):
899 def fcmp(fn, mf):
892 t1 = self.wfile(fn).read()
900 t1 = self.wfile(fn).read()
893 t2 = self.file(fn).revision(mf[fn])
901 t2 = self.file(fn).revision(mf[fn])
894 return cmp(t1, t2)
902 return cmp(t1, t2)
895
903
896 def mfmatches(node):
904 def mfmatches(node):
897 mf = dict(self.manifest.read(node))
905 mf = dict(self.manifest.read(node))
898 for fn in mf.keys():
906 for fn in mf.keys():
899 if not match(fn):
907 if not match(fn):
900 del mf[fn]
908 del mf[fn]
901 return mf
909 return mf
902
910
903 # are we comparing the working directory?
911 # are we comparing the working directory?
904 if not node2:
912 if not node2:
905 l, c, a, d, u = self.dirstate.changes(files, match)
913 l, c, a, d, u = self.dirstate.changes(files, match)
906
914
907 # are we comparing working dir against its parent?
915 # are we comparing working dir against its parent?
908 if not node1:
916 if not node1:
909 if l:
917 if l:
910 # do a full compare of any files that might have changed
918 # do a full compare of any files that might have changed
911 change = self.changelog.read(self.dirstate.parents()[0])
919 change = self.changelog.read(self.dirstate.parents()[0])
912 mf2 = mfmatches(change[0])
920 mf2 = mfmatches(change[0])
913 for f in l:
921 for f in l:
914 if fcmp(f, mf2):
922 if fcmp(f, mf2):
915 c.append(f)
923 c.append(f)
916
924
917 for l in c, a, d, u:
925 for l in c, a, d, u:
918 l.sort()
926 l.sort()
919
927
920 return (c, a, d, u)
928 return (c, a, d, u)
921
929
922 # are we comparing working dir against non-tip?
930 # are we comparing working dir against non-tip?
923 # generate a pseudo-manifest for the working dir
931 # generate a pseudo-manifest for the working dir
924 if not node2:
932 if not node2:
925 if not mf2:
933 if not mf2:
926 change = self.changelog.read(self.dirstate.parents()[0])
934 change = self.changelog.read(self.dirstate.parents()[0])
927 mf2 = mfmatches(change[0])
935 mf2 = mfmatches(change[0])
928 for f in a + c + l:
936 for f in a + c + l:
929 mf2[f] = ""
937 mf2[f] = ""
930 for f in d:
938 for f in d:
931 if f in mf2: del mf2[f]
939 if f in mf2: del mf2[f]
932 else:
940 else:
933 change = self.changelog.read(node2)
941 change = self.changelog.read(node2)
934 mf2 = mfmatches(change[0])
942 mf2 = mfmatches(change[0])
935
943
936 # flush lists from dirstate before comparing manifests
944 # flush lists from dirstate before comparing manifests
937 c, a = [], []
945 c, a = [], []
938
946
939 change = self.changelog.read(node1)
947 change = self.changelog.read(node1)
940 mf1 = mfmatches(change[0])
948 mf1 = mfmatches(change[0])
941
949
942 for fn in mf2:
950 for fn in mf2:
943 if mf1.has_key(fn):
951 if mf1.has_key(fn):
944 if mf1[fn] != mf2[fn]:
952 if mf1[fn] != mf2[fn]:
945 if mf2[fn] != "" or fcmp(fn, mf1):
953 if mf2[fn] != "" or fcmp(fn, mf1):
946 c.append(fn)
954 c.append(fn)
947 del mf1[fn]
955 del mf1[fn]
948 else:
956 else:
949 a.append(fn)
957 a.append(fn)
950
958
951 d = mf1.keys()
959 d = mf1.keys()
952
960
953 for l in c, a, d, u:
961 for l in c, a, d, u:
954 l.sort()
962 l.sort()
955
963
956 return (c, a, d, u)
964 return (c, a, d, u)
957
965
958 def add(self, list):
966 def add(self, list):
959 for f in list:
967 for f in list:
960 p = self.wjoin(f)
968 p = self.wjoin(f)
961 if not os.path.exists(p):
969 if not os.path.exists(p):
962 self.ui.warn("%s does not exist!\n" % f)
970 self.ui.warn("%s does not exist!\n" % f)
963 elif not os.path.isfile(p):
971 elif not os.path.isfile(p):
964 self.ui.warn("%s not added: only files supported currently\n" % f)
972 self.ui.warn("%s not added: only files supported currently\n" % f)
965 elif self.dirstate.state(f) in 'an':
973 elif self.dirstate.state(f) in 'an':
966 self.ui.warn("%s already tracked!\n" % f)
974 self.ui.warn("%s already tracked!\n" % f)
967 else:
975 else:
968 self.dirstate.update([f], "a")
976 self.dirstate.update([f], "a")
969
977
970 def forget(self, list):
978 def forget(self, list):
971 for f in list:
979 for f in list:
972 if self.dirstate.state(f) not in 'ai':
980 if self.dirstate.state(f) not in 'ai':
973 self.ui.warn("%s not added!\n" % f)
981 self.ui.warn("%s not added!\n" % f)
974 else:
982 else:
975 self.dirstate.forget([f])
983 self.dirstate.forget([f])
976
984
977 def remove(self, list):
985 def remove(self, list):
978 for f in list:
986 for f in list:
979 p = self.wjoin(f)
987 p = self.wjoin(f)
980 if os.path.exists(p):
988 if os.path.exists(p):
981 self.ui.warn("%s still exists!\n" % f)
989 self.ui.warn("%s still exists!\n" % f)
982 elif self.dirstate.state(f) == 'a':
990 elif self.dirstate.state(f) == 'a':
983 self.ui.warn("%s never committed!\n" % f)
991 self.ui.warn("%s never committed!\n" % f)
984 self.dirstate.forget([f])
992 self.dirstate.forget([f])
985 elif f not in self.dirstate:
993 elif f not in self.dirstate:
986 self.ui.warn("%s not tracked!\n" % f)
994 self.ui.warn("%s not tracked!\n" % f)
987 else:
995 else:
988 self.dirstate.update([f], "r")
996 self.dirstate.update([f], "r")
989
997
990 def copy(self, source, dest):
998 def copy(self, source, dest):
991 p = self.wjoin(dest)
999 p = self.wjoin(dest)
992 if not os.path.exists(p):
1000 if not os.path.exists(p):
993 self.ui.warn("%s does not exist!\n" % dest)
1001 self.ui.warn("%s does not exist!\n" % dest)
994 elif not os.path.isfile(p):
1002 elif not os.path.isfile(p):
995 self.ui.warn("copy failed: %s is not a file\n" % dest)
1003 self.ui.warn("copy failed: %s is not a file\n" % dest)
996 else:
1004 else:
997 if self.dirstate.state(dest) == '?':
1005 if self.dirstate.state(dest) == '?':
998 self.dirstate.update([dest], "a")
1006 self.dirstate.update([dest], "a")
999 self.dirstate.copy(source, dest)
1007 self.dirstate.copy(source, dest)
1000
1008
1001 def heads(self):
1009 def heads(self):
1002 return self.changelog.heads()
1010 return self.changelog.heads()
1003
1011
1004 def branches(self, nodes):
1012 def branches(self, nodes):
1005 if not nodes: nodes = [self.changelog.tip()]
1013 if not nodes: nodes = [self.changelog.tip()]
1006 b = []
1014 b = []
1007 for n in nodes:
1015 for n in nodes:
1008 t = n
1016 t = n
1009 while n:
1017 while n:
1010 p = self.changelog.parents(n)
1018 p = self.changelog.parents(n)
1011 if p[1] != nullid or p[0] == nullid:
1019 if p[1] != nullid or p[0] == nullid:
1012 b.append((t, n, p[0], p[1]))
1020 b.append((t, n, p[0], p[1]))
1013 break
1021 break
1014 n = p[0]
1022 n = p[0]
1015 return b
1023 return b
1016
1024
1017 def between(self, pairs):
1025 def between(self, pairs):
1018 r = []
1026 r = []
1019
1027
1020 for top, bottom in pairs:
1028 for top, bottom in pairs:
1021 n, l, i = top, [], 0
1029 n, l, i = top, [], 0
1022 f = 1
1030 f = 1
1023
1031
1024 while n != bottom:
1032 while n != bottom:
1025 p = self.changelog.parents(n)[0]
1033 p = self.changelog.parents(n)[0]
1026 if i == f:
1034 if i == f:
1027 l.append(n)
1035 l.append(n)
1028 f = f * 2
1036 f = f * 2
1029 n = p
1037 n = p
1030 i += 1
1038 i += 1
1031
1039
1032 r.append(l)
1040 r.append(l)
1033
1041
1034 return r
1042 return r
1035
1043
1036 def newer(self, nodes):
1044 def newer(self, nodes):
1037 m = {}
1045 m = {}
1038 nl = []
1046 nl = []
1039 pm = {}
1047 pm = {}
1040 cl = self.changelog
1048 cl = self.changelog
1041 t = l = cl.count()
1049 t = l = cl.count()
1042
1050
1043 # find the lowest numbered node
1051 # find the lowest numbered node
1044 for n in nodes:
1052 for n in nodes:
1045 l = min(l, cl.rev(n))
1053 l = min(l, cl.rev(n))
1046 m[n] = 1
1054 m[n] = 1
1047
1055
1048 for i in xrange(l, t):
1056 for i in xrange(l, t):
1049 n = cl.node(i)
1057 n = cl.node(i)
1050 if n in m: # explicitly listed
1058 if n in m: # explicitly listed
1051 pm[n] = 1
1059 pm[n] = 1
1052 nl.append(n)
1060 nl.append(n)
1053 continue
1061 continue
1054 for p in cl.parents(n):
1062 for p in cl.parents(n):
1055 if p in pm: # parent listed
1063 if p in pm: # parent listed
1056 pm[n] = 1
1064 pm[n] = 1
1057 nl.append(n)
1065 nl.append(n)
1058 break
1066 break
1059
1067
1060 return nl
1068 return nl
1061
1069
1062 def findincoming(self, remote, base=None, heads=None):
1070 def findincoming(self, remote, base=None, heads=None):
1063 m = self.changelog.nodemap
1071 m = self.changelog.nodemap
1064 search = []
1072 search = []
1065 fetch = []
1073 fetch = []
1066 seen = {}
1074 seen = {}
1067 seenbranch = {}
1075 seenbranch = {}
1068 if base == None:
1076 if base == None:
1069 base = {}
1077 base = {}
1070
1078
1071 # assume we're closer to the tip than the root
1079 # assume we're closer to the tip than the root
1072 # and start by examining the heads
1080 # and start by examining the heads
1073 self.ui.status("searching for changes\n")
1081 self.ui.status("searching for changes\n")
1074
1082
1075 if not heads:
1083 if not heads:
1076 heads = remote.heads()
1084 heads = remote.heads()
1077
1085
1078 unknown = []
1086 unknown = []
1079 for h in heads:
1087 for h in heads:
1080 if h not in m:
1088 if h not in m:
1081 unknown.append(h)
1089 unknown.append(h)
1082 else:
1090 else:
1083 base[h] = 1
1091 base[h] = 1
1084
1092
1085 if not unknown:
1093 if not unknown:
1086 return None
1094 return None
1087
1095
1088 rep = {}
1096 rep = {}
1089 reqcnt = 0
1097 reqcnt = 0
1090
1098
1091 # search through remote branches
1099 # search through remote branches
1092 # a 'branch' here is a linear segment of history, with four parts:
1100 # a 'branch' here is a linear segment of history, with four parts:
1093 # head, root, first parent, second parent
1101 # head, root, first parent, second parent
1094 # (a branch always has two parents (or none) by definition)
1102 # (a branch always has two parents (or none) by definition)
1095 unknown = remote.branches(unknown)
1103 unknown = remote.branches(unknown)
1096 while unknown:
1104 while unknown:
1097 r = []
1105 r = []
1098 while unknown:
1106 while unknown:
1099 n = unknown.pop(0)
1107 n = unknown.pop(0)
1100 if n[0] in seen:
1108 if n[0] in seen:
1101 continue
1109 continue
1102
1110
1103 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1111 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1104 if n[0] == nullid:
1112 if n[0] == nullid:
1105 break
1113 break
1106 if n in seenbranch:
1114 if n in seenbranch:
1107 self.ui.debug("branch already found\n")
1115 self.ui.debug("branch already found\n")
1108 continue
1116 continue
1109 if n[1] and n[1] in m: # do we know the base?
1117 if n[1] and n[1] in m: # do we know the base?
1110 self.ui.debug("found incomplete branch %s:%s\n"
1118 self.ui.debug("found incomplete branch %s:%s\n"
1111 % (short(n[0]), short(n[1])))
1119 % (short(n[0]), short(n[1])))
1112 search.append(n) # schedule branch range for scanning
1120 search.append(n) # schedule branch range for scanning
1113 seenbranch[n] = 1
1121 seenbranch[n] = 1
1114 else:
1122 else:
1115 if n[1] not in seen and n[1] not in fetch:
1123 if n[1] not in seen and n[1] not in fetch:
1116 if n[2] in m and n[3] in m:
1124 if n[2] in m and n[3] in m:
1117 self.ui.debug("found new changeset %s\n" %
1125 self.ui.debug("found new changeset %s\n" %
1118 short(n[1]))
1126 short(n[1]))
1119 fetch.append(n[1]) # earliest unknown
1127 fetch.append(n[1]) # earliest unknown
1120 base[n[2]] = 1 # latest known
1128 base[n[2]] = 1 # latest known
1121 continue
1129 continue
1122
1130
1123 for a in n[2:4]:
1131 for a in n[2:4]:
1124 if a not in rep:
1132 if a not in rep:
1125 r.append(a)
1133 r.append(a)
1126 rep[a] = 1
1134 rep[a] = 1
1127
1135
1128 seen[n[0]] = 1
1136 seen[n[0]] = 1
1129
1137
1130 if r:
1138 if r:
1131 reqcnt += 1
1139 reqcnt += 1
1132 self.ui.debug("request %d: %s\n" %
1140 self.ui.debug("request %d: %s\n" %
1133 (reqcnt, " ".join(map(short, r))))
1141 (reqcnt, " ".join(map(short, r))))
1134 for p in range(0, len(r), 10):
1142 for p in range(0, len(r), 10):
1135 for b in remote.branches(r[p:p+10]):
1143 for b in remote.branches(r[p:p+10]):
1136 self.ui.debug("received %s:%s\n" %
1144 self.ui.debug("received %s:%s\n" %
1137 (short(b[0]), short(b[1])))
1145 (short(b[0]), short(b[1])))
1138 if b[0] not in m and b[0] not in seen:
1146 if b[0] not in m and b[0] not in seen:
1139 unknown.append(b)
1147 unknown.append(b)
1140
1148
1141 # do binary search on the branches we found
1149 # do binary search on the branches we found
1142 while search:
1150 while search:
1143 n = search.pop(0)
1151 n = search.pop(0)
1144 reqcnt += 1
1152 reqcnt += 1
1145 l = remote.between([(n[0], n[1])])[0]
1153 l = remote.between([(n[0], n[1])])[0]
1146 l.append(n[1])
1154 l.append(n[1])
1147 p = n[0]
1155 p = n[0]
1148 f = 1
1156 f = 1
1149 for i in l:
1157 for i in l:
1150 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1158 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1151 if i in m:
1159 if i in m:
1152 if f <= 2:
1160 if f <= 2:
1153 self.ui.debug("found new branch changeset %s\n" %
1161 self.ui.debug("found new branch changeset %s\n" %
1154 short(p))
1162 short(p))
1155 fetch.append(p)
1163 fetch.append(p)
1156 base[i] = 1
1164 base[i] = 1
1157 else:
1165 else:
1158 self.ui.debug("narrowed branch search to %s:%s\n"
1166 self.ui.debug("narrowed branch search to %s:%s\n"
1159 % (short(p), short(i)))
1167 % (short(p), short(i)))
1160 search.append((p, i))
1168 search.append((p, i))
1161 break
1169 break
1162 p, f = i, f * 2
1170 p, f = i, f * 2
1163
1171
1164 # sanity check our fetch list
1172 # sanity check our fetch list
1165 for f in fetch:
1173 for f in fetch:
1166 if f in m:
1174 if f in m:
1167 raise RepoError("already have changeset " + short(f))
1175 raise RepoError("already have changeset " + short(f))
1168
1176
1169 if base.keys() == [nullid]:
1177 if base.keys() == [nullid]:
1170 self.ui.warn("warning: pulling from an unrelated repository!\n")
1178 self.ui.warn("warning: pulling from an unrelated repository!\n")
1171
1179
1172 self.ui.note("adding new changesets starting at " +
1180 self.ui.note("adding new changesets starting at " +
1173 " ".join([short(f) for f in fetch]) + "\n")
1181 " ".join([short(f) for f in fetch]) + "\n")
1174
1182
1175 self.ui.debug("%d total queries\n" % reqcnt)
1183 self.ui.debug("%d total queries\n" % reqcnt)
1176
1184
1177 return fetch
1185 return fetch
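# The narrowing loop above does a per-branch search: remote.between() is
# asked for sample nodes along the head..base segment, and the client walks
# them with f doubling 1, 2, 4, ... (the samples are assumed to sit at
# roughly power-of-two spacing).  The first sample already known locally
# either pins down the fetch boundary (f <= 2 means the previous sample p
# is adjacent, so p is appended to fetch) or produces a narrower (p, i)
# span that is pushed back onto search for another round.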
1178
1186
1179 def findoutgoing(self, remote, base=None, heads=None):
1187 def findoutgoing(self, remote, base=None, heads=None):
1180 if base == None:
1188 if base == None:
1181 base = {}
1189 base = {}
1182 self.findincoming(remote, base, heads)
1190 self.findincoming(remote, base, heads)
1183
1191
1184 remain = dict.fromkeys(self.changelog.nodemap)
1192 remain = dict.fromkeys(self.changelog.nodemap)
1185
1193
1186 # prune everything remote has from the tree
1194 # prune everything remote has from the tree
1187 del remain[nullid]
1195 del remain[nullid]
1188 remove = base.keys()
1196 remove = base.keys()
1189 while remove:
1197 while remove:
1190 n = remove.pop(0)
1198 n = remove.pop(0)
1191 if n in remain:
1199 if n in remain:
1192 del remain[n]
1200 del remain[n]
1193 for p in self.changelog.parents(n):
1201 for p in self.changelog.parents(n):
1194 remove.append(p)
1202 remove.append(p)
1195
1203
1196 # find every node whose parents have been pruned
1204 # find every node whose parents have been pruned
1197 subset = []
1205 subset = []
1198 for n in remain:
1206 for n in remain:
1199 p1, p2 = self.changelog.parents(n)
1207 p1, p2 = self.changelog.parents(n)
1200 if p1 not in remain and p2 not in remain:
1208 if p1 not in remain and p2 not in remain:
1201 subset.append(n)
1209 subset.append(n)
1202
1210
1203 # this is the set of all roots we have to push
1211 # this is the set of all roots we have to push
1204 return subset
1212 return subset
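# Worked example for findoutgoing(): suppose the local changelog holds revs
# 0-3 in a straight line and findincoming() marked the node of rev 1 as a
# base (the remote already has it).  The pruning loop deletes revs 0 and 1
# from remain, leaving {2, 3}; rev 2 is the only remaining node with neither
# parent in remain, so subset == [node(2)] -- the single root from which the
# changegroup has to start.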
1205
1213
1206 def pull(self, remote):
1214 def pull(self, remote):
1207 lock = self.lock()
1215 lock = self.lock()
1208
1216
1209 # if we have an empty repo, fetch everything
1217 # if we have an empty repo, fetch everything
1210 if self.changelog.tip() == nullid:
1218 if self.changelog.tip() == nullid:
1211 self.ui.status("requesting all changes\n")
1219 self.ui.status("requesting all changes\n")
1212 fetch = [nullid]
1220 fetch = [nullid]
1213 else:
1221 else:
1214 fetch = self.findincoming(remote)
1222 fetch = self.findincoming(remote)
1215
1223
1216 if not fetch:
1224 if not fetch:
1217 self.ui.status("no changes found\n")
1225 self.ui.status("no changes found\n")
1218 return 1
1226 return 1
1219
1227
1220 cg = remote.changegroup(fetch)
1228 cg = remote.changegroup(fetch)
1221 return self.addchangegroup(cg)
1229 return self.addchangegroup(cg)
1222
1230
1223 def push(self, remote, force=False):
1231 def push(self, remote, force=False):
1224 lock = remote.lock()
1232 lock = remote.lock()
1225
1233
1226 base = {}
1234 base = {}
1227 heads = remote.heads()
1235 heads = remote.heads()
1228 inc = self.findincoming(remote, base, heads)
1236 inc = self.findincoming(remote, base, heads)
1229 if not force and inc:
1237 if not force and inc:
1230 self.ui.warn("abort: unsynced remote changes!\n")
1238 self.ui.warn("abort: unsynced remote changes!\n")
1231 self.ui.status("(did you forget to sync? use push -f to force)\n")
1239 self.ui.status("(did you forget to sync? use push -f to force)\n")
1232 return 1
1240 return 1
1233
1241
1234 update = self.findoutgoing(remote, base)
1242 update = self.findoutgoing(remote, base)
1235 if not update:
1243 if not update:
1236 self.ui.status("no changes found\n")
1244 self.ui.status("no changes found\n")
1237 return 1
1245 return 1
1238 elif not force:
1246 elif not force:
1239 if len(heads) < len(self.changelog.heads()):
1247 if len(heads) < len(self.changelog.heads()):
1240 self.ui.warn("abort: push creates new remote branches!\n")
1248 self.ui.warn("abort: push creates new remote branches!\n")
1241 self.ui.status("(did you forget to merge?" +
1249 self.ui.status("(did you forget to merge?" +
1242 " use push -f to force)\n")
1250 " use push -f to force)\n")
1243 return 1
1251 return 1
1244
1252
1245 cg = self.changegroup(update)
1253 cg = self.changegroup(update)
1246 return remote.addchangegroup(cg)
1254 return remote.addchangegroup(cg)
1247
1255
1248 def changegroup(self, basenodes):
1256 def changegroup(self, basenodes):
1249 class genread:
1257 class genread:
1250 def __init__(self, generator):
1258 def __init__(self, generator):
1251 self.g = generator
1259 self.g = generator
1252 self.buf = ""
1260 self.buf = ""
1253 def read(self, l):
1261 def read(self, l):
1254 while l > len(self.buf):
1262 while l > len(self.buf):
1255 try:
1263 try:
1256 self.buf += self.g.next()
1264 self.buf += self.g.next()
1257 except StopIteration:
1265 except StopIteration:
1258 break
1266 break
1259 d, self.buf = self.buf[:l], self.buf[l:]
1267 d, self.buf = self.buf[:l], self.buf[l:]
1260 return d
1268 return d
1261
1269
1262 def gengroup():
1270 def gengroup():
1263 nodes = self.newer(basenodes)
1271 nodes = self.newer(basenodes)
1264
1272
1265 # construct the link map
1273 # construct the link map
1266 linkmap = {}
1274 linkmap = {}
1267 for n in nodes:
1275 for n in nodes:
1268 linkmap[self.changelog.rev(n)] = n
1276 linkmap[self.changelog.rev(n)] = n
1269
1277
1270 # construct a list of all changed files
1278 # construct a list of all changed files
1271 changed = {}
1279 changed = {}
1272 for n in nodes:
1280 for n in nodes:
1273 c = self.changelog.read(n)
1281 c = self.changelog.read(n)
1274 for f in c[3]:
1282 for f in c[3]:
1275 changed[f] = 1
1283 changed[f] = 1
1276 changed = changed.keys()
1284 changed = changed.keys()
1277 changed.sort()
1285 changed.sort()
1278
1286
1279 # the changegroup is changesets + manifests + all file revs
1287 # the changegroup is changesets + manifests + all file revs
1280 revs = [ self.changelog.rev(n) for n in nodes ]
1288 revs = [ self.changelog.rev(n) for n in nodes ]
1281
1289
1282 for y in self.changelog.group(linkmap): yield y
1290 for y in self.changelog.group(linkmap): yield y
1283 for y in self.manifest.group(linkmap): yield y
1291 for y in self.manifest.group(linkmap): yield y
1284 for f in changed:
1292 for f in changed:
1285 yield struct.pack(">l", len(f) + 4) + f
1293 yield struct.pack(">l", len(f) + 4) + f
1286 g = self.file(f).group(linkmap)
1294 g = self.file(f).group(linkmap)
1287 for y in g:
1295 for y in g:
1288 yield y
1296 yield y
1289
1297
1290 yield struct.pack(">l", 0)
1298 yield struct.pack(">l", 0)
1291
1299
1292 return genread(gengroup())
1300 return genread(gengroup())
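# The stream built by gengroup() is a sequence of length-prefixed chunks:
# each chunk starts with a 4-byte big-endian length that includes the
# length field itself, and a length of 4 or less ends a group (hence the
# final struct.pack(">l", 0) above).  A minimal sketch of the framing,
# using hypothetical helper names that are not part of this module:
#
#     import struct
#
#     def writechunk(payload):
#         # frame one chunk the way changegroup() frames a file name
#         return struct.pack(">l", len(payload) + 4) + payload
#
#     def readchunk(stream):
#         # mirror of getchunk() in addchangegroup() below
#         d = stream.read(4)
#         if not d:
#             return ""
#         l = struct.unpack(">l", d)[0]
#         if l <= 4:
#             return ""  # end-of-group marker
#         return stream.read(l - 4)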
1293
1301
1294 def addchangegroup(self, source):
1302 def addchangegroup(self, source):
1295
1303
1296 def getchunk():
1304 def getchunk():
1297 d = source.read(4)
1305 d = source.read(4)
1298 if not d: return ""
1306 if not d: return ""
1299 l = struct.unpack(">l", d)[0]
1307 l = struct.unpack(">l", d)[0]
1300 if l <= 4: return ""
1308 if l <= 4: return ""
1301 return source.read(l - 4)
1309 return source.read(l - 4)
1302
1310
1303 def getgroup():
1311 def getgroup():
1304 while 1:
1312 while 1:
1305 c = getchunk()
1313 c = getchunk()
1306 if not c: break
1314 if not c: break
1307 yield c
1315 yield c
1308
1316
1309 def csmap(x):
1317 def csmap(x):
1310 self.ui.debug("add changeset %s\n" % short(x))
1318 self.ui.debug("add changeset %s\n" % short(x))
1311 return self.changelog.count()
1319 return self.changelog.count()
1312
1320
1313 def revmap(x):
1321 def revmap(x):
1314 return self.changelog.rev(x)
1322 return self.changelog.rev(x)
1315
1323
1316 if not source: return
1324 if not source: return
1317 changesets = files = revisions = 0
1325 changesets = files = revisions = 0
1318
1326
1319 tr = self.transaction()
1327 tr = self.transaction()
1320
1328
1321 # pull off the changeset group
1329 # pull off the changeset group
1322 self.ui.status("adding changesets\n")
1330 self.ui.status("adding changesets\n")
1323 co = self.changelog.tip()
1331 co = self.changelog.tip()
1324 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1332 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1325 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1333 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1326
1334
1327 # pull off the manifest group
1335 # pull off the manifest group
1328 self.ui.status("adding manifests\n")
1336 self.ui.status("adding manifests\n")
1329 mm = self.manifest.tip()
1337 mm = self.manifest.tip()
1330 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1338 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1331
1339
1332 # process the files
1340 # process the files
1333 self.ui.status("adding file changes\n")
1341 self.ui.status("adding file changes\n")
1334 while 1:
1342 while 1:
1335 f = getchunk()
1343 f = getchunk()
1336 if not f: break
1344 if not f: break
1337 self.ui.debug("adding %s revisions\n" % f)
1345 self.ui.debug("adding %s revisions\n" % f)
1338 fl = self.file(f)
1346 fl = self.file(f)
1339 o = fl.count()
1347 o = fl.count()
1340 n = fl.addgroup(getgroup(), revmap, tr)
1348 n = fl.addgroup(getgroup(), revmap, tr)
1341 revisions += fl.count() - o
1349 revisions += fl.count() - o
1342 files += 1
1350 files += 1
1343
1351
1344 self.ui.status(("added %d changesets" +
1352 self.ui.status(("added %d changesets" +
1345 " with %d changes to %d files\n")
1353 " with %d changes to %d files\n")
1346 % (changesets, revisions, files))
1354 % (changesets, revisions, files))
1347
1355
1348 tr.close()
1356 tr.close()
1349
1357
1350 if not self.hook("changegroup"):
1358 if not self.hook("changegroup"):
1351 return 1
1359 return 1
1352
1360
1353 return
1361 return
1354
1362
1355 def update(self, node, allow=False, force=False, choose=None,
1363 def update(self, node, allow=False, force=False, choose=None,
1356 moddirstate=True):
1364 moddirstate=True):
1357 pl = self.dirstate.parents()
1365 pl = self.dirstate.parents()
1358 if not force and pl[1] != nullid:
1366 if not force and pl[1] != nullid:
1359 self.ui.warn("aborting: outstanding uncommitted merges\n")
1367 self.ui.warn("aborting: outstanding uncommitted merges\n")
1360 return 1
1368 return 1
1361
1369
1362 p1, p2 = pl[0], node
1370 p1, p2 = pl[0], node
1363 pa = self.changelog.ancestor(p1, p2)
1371 pa = self.changelog.ancestor(p1, p2)
1364 m1n = self.changelog.read(p1)[0]
1372 m1n = self.changelog.read(p1)[0]
1365 m2n = self.changelog.read(p2)[0]
1373 m2n = self.changelog.read(p2)[0]
1366 man = self.manifest.ancestor(m1n, m2n)
1374 man = self.manifest.ancestor(m1n, m2n)
1367 m1 = self.manifest.read(m1n)
1375 m1 = self.manifest.read(m1n)
1368 mf1 = self.manifest.readflags(m1n)
1376 mf1 = self.manifest.readflags(m1n)
1369 m2 = self.manifest.read(m2n)
1377 m2 = self.manifest.read(m2n)
1370 mf2 = self.manifest.readflags(m2n)
1378 mf2 = self.manifest.readflags(m2n)
1371 ma = self.manifest.read(man)
1379 ma = self.manifest.read(man)
1372 mfa = self.manifest.readflags(man)
1380 mfa = self.manifest.readflags(man)
1373
1381
1374 (c, a, d, u) = self.changes()
1382 (c, a, d, u) = self.changes()
1375
1383
1376 # is this a jump, or a merge? i.e. is there a linear path
1384 # is this a jump, or a merge? i.e. is there a linear path
1377 # from p1 to p2?
1385 # from p1 to p2?
1378 linear_path = (pa == p1 or pa == p2)
1386 linear_path = (pa == p1 or pa == p2)
1379
1387
1380 # resolve the manifest to determine which files
1388 # resolve the manifest to determine which files
1381 # we care about merging
1389 # we care about merging
1382 self.ui.note("resolving manifests\n")
1390 self.ui.note("resolving manifests\n")
1383 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1391 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1384 (force, allow, moddirstate, linear_path))
1392 (force, allow, moddirstate, linear_path))
1385 self.ui.debug(" ancestor %s local %s remote %s\n" %
1393 self.ui.debug(" ancestor %s local %s remote %s\n" %
1386 (short(man), short(m1n), short(m2n)))
1394 (short(man), short(m1n), short(m2n)))
1387
1395
1388 merge = {}
1396 merge = {}
1389 get = {}
1397 get = {}
1390 remove = []
1398 remove = []
1391 mark = {}
1399 mark = {}
1392
1400
1393 # construct a working dir manifest
1401 # construct a working dir manifest
1394 mw = m1.copy()
1402 mw = m1.copy()
1395 mfw = mf1.copy()
1403 mfw = mf1.copy()
1396 umap = dict.fromkeys(u)
1404 umap = dict.fromkeys(u)
1397
1405
1398 for f in a + c + u:
1406 for f in a + c + u:
1399 mw[f] = ""
1407 mw[f] = ""
1400 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1408 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1401
1409
1402 for f in d:
1410 for f in d:
1403 if f in mw: del mw[f]
1411 if f in mw: del mw[f]
1404
1412
1405 # If we're jumping between revisions (as opposed to merging),
1413 # If we're jumping between revisions (as opposed to merging),
1406 # and if neither the working directory nor the target rev has
1414 # and if neither the working directory nor the target rev has
1407 # the file, then we need to remove it from the dirstate, to
1415 # the file, then we need to remove it from the dirstate, to
1408 # prevent the dirstate from listing the file when it is no
1416 # prevent the dirstate from listing the file when it is no
1409 # longer in the manifest.
1417 # longer in the manifest.
1410 if moddirstate and linear_path and f not in m2:
1418 if moddirstate and linear_path and f not in m2:
1411 self.dirstate.forget((f,))
1419 self.dirstate.forget((f,))
1412
1420
1413 # Compare manifests
1421 # Compare manifests
1414 for f, n in mw.iteritems():
1422 for f, n in mw.iteritems():
1415 if choose and not choose(f): continue
1423 if choose and not choose(f): continue
1416 if f in m2:
1424 if f in m2:
1417 s = 0
1425 s = 0
1418
1426
1419 # is the wfile new since m1, and match m2?
1427 # is the wfile new since m1, and match m2?
1420 if f not in m1:
1428 if f not in m1:
1421 t1 = self.wfile(f).read()
1429 t1 = self.wfile(f).read()
1422 t2 = self.file(f).revision(m2[f])
1430 t2 = self.file(f).revision(m2[f])
1423 if cmp(t1, t2) == 0:
1431 if cmp(t1, t2) == 0:
1424 mark[f] = 1
1432 mark[f] = 1
1425 n = m2[f]
1433 n = m2[f]
1426 del t1, t2
1434 del t1, t2
1427
1435
1428 # are files different?
1436 # are files different?
1429 if n != m2[f]:
1437 if n != m2[f]:
1430 a = ma.get(f, nullid)
1438 a = ma.get(f, nullid)
1431 # are both different from the ancestor?
1439 # are both different from the ancestor?
1432 if n != a and m2[f] != a:
1440 if n != a and m2[f] != a:
1433 self.ui.debug(" %s versions differ, resolve\n" % f)
1441 self.ui.debug(" %s versions differ, resolve\n" % f)
1434 # merge executable bits
1442 # merge executable bits
1435 # "if we changed or they changed, change in merge"
1443 # "if we changed or they changed, change in merge"
1436 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1444 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1437 mode = ((a^b) | (a^c)) ^ a
1445 mode = ((a^b) | (a^c)) ^ a
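# (with a = ancestor bit, b = working-dir bit, c = remote bit, the
#  expression keeps a unless b or c differs from it, in which case the
#  changed value wins: e.g. a=0, b=0, c=1 gives ((0^0)|(0^1))^0 = 1,
#  while a=1, b=1, c=0 gives ((1^1)|(1^0))^1 = 0.)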
1438 merge[f] = (m1.get(f, nullid), m2[f], mode)
1446 merge[f] = (m1.get(f, nullid), m2[f], mode)
1439 s = 1
1447 s = 1
1440 # are we clobbering?
1448 # are we clobbering?
1441 # is remote's version newer?
1449 # is remote's version newer?
1442 # or are we going back in time?
1450 # or are we going back in time?
1443 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1451 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1444 self.ui.debug(" remote %s is newer, get\n" % f)
1452 self.ui.debug(" remote %s is newer, get\n" % f)
1445 get[f] = m2[f]
1453 get[f] = m2[f]
1446 s = 1
1454 s = 1
1447 else:
1455 else:
1448 mark[f] = 1
1456 mark[f] = 1
1449 elif f in umap:
1457 elif f in umap:
1450 # this unknown file is the same as the checkout
1458 # this unknown file is the same as the checkout
1451 get[f] = m2[f]
1459 get[f] = m2[f]
1452
1460
1453 if not s and mfw[f] != mf2[f]:
1461 if not s and mfw[f] != mf2[f]:
1454 if force:
1462 if force:
1455 self.ui.debug(" updating permissions for %s\n" % f)
1463 self.ui.debug(" updating permissions for %s\n" % f)
1456 util.set_exec(self.wjoin(f), mf2[f])
1464 util.set_exec(self.wjoin(f), mf2[f])
1457 else:
1465 else:
1458 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1466 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1459 mode = ((a^b) | (a^c)) ^ a
1467 mode = ((a^b) | (a^c)) ^ a
1460 if mode != b:
1468 if mode != b:
1461 self.ui.debug(" updating permissions for %s\n" % f)
1469 self.ui.debug(" updating permissions for %s\n" % f)
1462 util.set_exec(self.wjoin(f), mode)
1470 util.set_exec(self.wjoin(f), mode)
1463 mark[f] = 1
1471 mark[f] = 1
1464 del m2[f]
1472 del m2[f]
1465 elif f in ma:
1473 elif f in ma:
1466 if n != ma[f]:
1474 if n != ma[f]:
1467 r = "d"
1475 r = "d"
1468 if not force and (linear_path or allow):
1476 if not force and (linear_path or allow):
1469 r = self.ui.prompt(
1477 r = self.ui.prompt(
1470 (" local changed %s which remote deleted\n" % f) +
1478 (" local changed %s which remote deleted\n" % f) +
1471 "(k)eep or (d)elete?", "[kd]", "k")
1479 "(k)eep or (d)elete?", "[kd]", "k")
1472 if r == "d":
1480 if r == "d":
1473 remove.append(f)
1481 remove.append(f)
1474 else:
1482 else:
1475 self.ui.debug("other deleted %s\n" % f)
1483 self.ui.debug("other deleted %s\n" % f)
1476 remove.append(f) # other deleted it
1484 remove.append(f) # other deleted it
1477 else:
1485 else:
1478 if n == m1.get(f, nullid): # same as parent
1486 if n == m1.get(f, nullid): # same as parent
1479 if p2 == pa: # going backwards?
1487 if p2 == pa: # going backwards?
1480 self.ui.debug("remote deleted %s\n" % f)
1488 self.ui.debug("remote deleted %s\n" % f)
1481 remove.append(f)
1489 remove.append(f)
1482 else:
1490 else:
1483 self.ui.debug("local created %s, keeping\n" % f)
1491 self.ui.debug("local created %s, keeping\n" % f)
1484 else:
1492 else:
1485 self.ui.debug("working dir created %s, keeping\n" % f)
1493 self.ui.debug("working dir created %s, keeping\n" % f)
1486
1494
1487 for f, n in m2.iteritems():
1495 for f, n in m2.iteritems():
1488 if choose and not choose(f): continue
1496 if choose and not choose(f): continue
1489 if f[0] == "/": continue
1497 if f[0] == "/": continue
1490 if f in ma and n != ma[f]:
1498 if f in ma and n != ma[f]:
1491 r = "k"
1499 r = "k"
1492 if not force and (linear_path or allow):
1500 if not force and (linear_path or allow):
1493 r = self.ui.prompt(
1501 r = self.ui.prompt(
1494 ("remote changed %s which local deleted\n" % f) +
1502 ("remote changed %s which local deleted\n" % f) +
1495 "(k)eep or (d)elete?", "[kd]", "k")
1503 "(k)eep or (d)elete?", "[kd]", "k")
1496 if r == "k": get[f] = n
1504 if r == "k": get[f] = n
1497 elif f not in ma:
1505 elif f not in ma:
1498 self.ui.debug("remote created %s\n" % f)
1506 self.ui.debug("remote created %s\n" % f)
1499 get[f] = n
1507 get[f] = n
1500 else:
1508 else:
1501 if force or p2 == pa: # going backwards?
1509 if force or p2 == pa: # going backwards?
1502 self.ui.debug("local deleted %s, recreating\n" % f)
1510 self.ui.debug("local deleted %s, recreating\n" % f)
1503 get[f] = n
1511 get[f] = n
1504 else:
1512 else:
1505 self.ui.debug("local deleted %s\n" % f)
1513 self.ui.debug("local deleted %s\n" % f)
1506
1514
1507 del mw, m1, m2, ma
1515 del mw, m1, m2, ma
1508
1516
1509 if force:
1517 if force:
1510 for f in merge:
1518 for f in merge:
1511 get[f] = merge[f][1]
1519 get[f] = merge[f][1]
1512 merge = {}
1520 merge = {}
1513
1521
1514 if linear_path or force:
1522 if linear_path or force:
1515 # we don't need to do any magic, just jump to the new rev
1523 # we don't need to do any magic, just jump to the new rev
1516 mode = 'n'
1524 mode = 'n'
1517 p1, p2 = p2, nullid
1525 p1, p2 = p2, nullid
1518 else:
1526 else:
1519 if not allow:
1527 if not allow:
1520 self.ui.status("this update spans a branch" +
1528 self.ui.status("this update spans a branch" +
1521 " affecting the following files:\n")
1529 " affecting the following files:\n")
1522 fl = merge.keys() + get.keys()
1530 fl = merge.keys() + get.keys()
1523 fl.sort()
1531 fl.sort()
1524 for f in fl:
1532 for f in fl:
1525 cf = ""
1533 cf = ""
1526 if f in merge: cf = " (resolve)"
1534 if f in merge: cf = " (resolve)"
1527 self.ui.status(" %s%s\n" % (f, cf))
1535 self.ui.status(" %s%s\n" % (f, cf))
1528 self.ui.warn("aborting update spanning branches!\n")
1536 self.ui.warn("aborting update spanning branches!\n")
1529 self.ui.status("(use update -m to merge across branches" +
1537 self.ui.status("(use update -m to merge across branches" +
1530 " or -C to lose changes)\n")
1538 " or -C to lose changes)\n")
1531 return 1
1539 return 1
1532 # we have to remember what files we needed to get/change
1540 # we have to remember what files we needed to get/change
1533 # because any file that's different from either one of its
1541 # because any file that's different from either one of its
1534 # parents must be in the changeset
1542 # parents must be in the changeset
1535 mode = 'm'
1543 mode = 'm'
1536 if moddirstate:
1544 if moddirstate:
1537 self.dirstate.update(mark.keys(), "m")
1545 self.dirstate.update(mark.keys(), "m")
1538
1546
1539 if moddirstate:
1547 if moddirstate:
1540 self.dirstate.setparents(p1, p2)
1548 self.dirstate.setparents(p1, p2)
1541
1549
1542 # get the files we don't need to change
1550 # get the files we don't need to change
1543 files = get.keys()
1551 files = get.keys()
1544 files.sort()
1552 files.sort()
1545 for f in files:
1553 for f in files:
1546 if f[0] == "/": continue
1554 if f[0] == "/": continue
1547 self.ui.note("getting %s\n" % f)
1555 self.ui.note("getting %s\n" % f)
1548 t = self.file(f).read(get[f])
1556 t = self.file(f).read(get[f])
1549 try:
1557 try:
1550 self.wfile(f, "w").write(t)
1558 self.wfile(f, "w").write(t)
1551 except IOError:
1559 except IOError:
1552 os.makedirs(os.path.dirname(self.wjoin(f)))
1560 os.makedirs(os.path.dirname(self.wjoin(f)))
1553 self.wfile(f, "w").write(t)
1561 self.wfile(f, "w").write(t)
1554 util.set_exec(self.wjoin(f), mf2[f])
1562 util.set_exec(self.wjoin(f), mf2[f])
1555 if moddirstate:
1563 if moddirstate:
1556 self.dirstate.update([f], mode)
1564 self.dirstate.update([f], mode)
1557
1565
1558 # merge the tricky bits
1566 # merge the tricky bits
1559 files = merge.keys()
1567 files = merge.keys()
1560 files.sort()
1568 files.sort()
1561 for f in files:
1569 for f in files:
1562 self.ui.status("merging %s\n" % f)
1570 self.ui.status("merging %s\n" % f)
1563 m, o, flag = merge[f]
1571 m, o, flag = merge[f]
1564 self.merge3(f, m, o)
1572 self.merge3(f, m, o)
1565 util.set_exec(self.wjoin(f), flag)
1573 util.set_exec(self.wjoin(f), flag)
1566 if moddirstate and mode == 'm':
1574 if moddirstate and mode == 'm':
1567 # only update dirstate on branch merge, otherwise we
1575 # only update dirstate on branch merge, otherwise we
1568 # could mark files with changes as unchanged
1576 # could mark files with changes as unchanged
1569 self.dirstate.update([f], mode)
1577 self.dirstate.update([f], mode)
1570
1578
1571 remove.sort()
1579 remove.sort()
1572 for f in remove:
1580 for f in remove:
1573 self.ui.note("removing %s\n" % f)
1581 self.ui.note("removing %s\n" % f)
1574 try:
1582 try:
1575 os.unlink(f)
1583 os.unlink(f)
1576 except OSError, inst:
1584 except OSError, inst:
1577 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1585 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1578 # try removing directories that might now be empty
1586 # try removing directories that might now be empty
1579 try: os.removedirs(os.path.dirname(f))
1587 try: os.removedirs(os.path.dirname(f))
1580 except: pass
1588 except: pass
1581 if moddirstate:
1589 if moddirstate:
1582 if mode == 'n':
1590 if mode == 'n':
1583 self.dirstate.forget(remove)
1591 self.dirstate.forget(remove)
1584 else:
1592 else:
1585 self.dirstate.update(remove, 'r')
1593 self.dirstate.update(remove, 'r')
1586
1594
1587 def merge3(self, fn, my, other):
1595 def merge3(self, fn, my, other):
1588 """perform a 3-way merge in the working directory"""
1596 """perform a 3-way merge in the working directory"""
1589
1597
1590 def temp(prefix, node):
1598 def temp(prefix, node):
1591 pre = "%s~%s." % (os.path.basename(fn), prefix)
1599 pre = "%s~%s." % (os.path.basename(fn), prefix)
1592 (fd, name) = tempfile.mkstemp("", pre)
1600 (fd, name) = tempfile.mkstemp("", pre)
1593 f = os.fdopen(fd, "wb")
1601 f = os.fdopen(fd, "wb")
1594 f.write(fl.revision(node))
1602 f.write(fl.revision(node))
1595 f.close()
1603 f.close()
1596 return name
1604 return name
1597
1605
1598 fl = self.file(fn)
1606 fl = self.file(fn)
1599 base = fl.ancestor(my, other)
1607 base = fl.ancestor(my, other)
1600 a = self.wjoin(fn)
1608 a = self.wjoin(fn)
1601 b = temp("base", base)
1609 b = temp("base", base)
1602 c = temp("other", other)
1610 c = temp("other", other)
1603
1611
1604 self.ui.note("resolving %s\n" % fn)
1612 self.ui.note("resolving %s\n" % fn)
1605 self.ui.debug("file %s: other %s ancestor %s\n" %
1613 self.ui.debug("file %s: other %s ancestor %s\n" %
1606 (fn, short(other), short(base)))
1614 (fn, short(other), short(base)))
1607
1615
1608 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1616 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1609 or "hgmerge")
1617 or "hgmerge")
1610 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1618 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1611 if r:
1619 if r:
1612 self.ui.warn("merging %s failed!\n" % fn)
1620 self.ui.warn("merging %s failed!\n" % fn)
1613
1621
1614 os.unlink(b)
1622 os.unlink(b)
1615 os.unlink(c)
1623 os.unlink(c)
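# The external merge program is taken from $HGMERGE, then the ui.merge
# configuration entry, then a program named "hgmerge" on the path.  It is
# invoked as "<program> <local file> <base temp> <other temp>" and the
# merged result is presumably expected in the local file, which is passed
# first; a non-zero exit status is only reported as a warning here.  A
# hypothetical hgrc entry (the path is an assumption, not something this
# module defines):
#
#     [ui]
#     merge = /usr/local/bin/my-merge-wrapper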
1616
1624
1617 def verify(self):
1625 def verify(self):
1618 filelinkrevs = {}
1626 filelinkrevs = {}
1619 filenodes = {}
1627 filenodes = {}
1620 changesets = revisions = files = 0
1628 changesets = revisions = files = 0
1621 errors = 0
1629 errors = 0
1622
1630
1623 seen = {}
1631 seen = {}
1624 self.ui.status("checking changesets\n")
1632 self.ui.status("checking changesets\n")
1625 for i in range(self.changelog.count()):
1633 for i in range(self.changelog.count()):
1626 changesets += 1
1634 changesets += 1
1627 n = self.changelog.node(i)
1635 n = self.changelog.node(i)
1628 if n in seen:
1636 if n in seen:
1629 self.ui.warn("duplicate changeset at revision %d\n" % i)
1637 self.ui.warn("duplicate changeset at revision %d\n" % i)
1630 errors += 1
1638 errors += 1
1631 seen[n] = 1
1639 seen[n] = 1
1632
1640
1633 for p in self.changelog.parents(n):
1641 for p in self.changelog.parents(n):
1634 if p not in self.changelog.nodemap:
1642 if p not in self.changelog.nodemap:
1635 self.ui.warn("changeset %s has unknown parent %s\n" %
1643 self.ui.warn("changeset %s has unknown parent %s\n" %
1636 (short(n), short(p)))
1644 (short(n), short(p)))
1637 errors += 1
1645 errors += 1
1638 try:
1646 try:
1639 changes = self.changelog.read(n)
1647 changes = self.changelog.read(n)
1640 except Exception, inst:
1648 except Exception, inst:
1641 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1649 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1642 errors += 1
1650 errors += 1
1643
1651
1644 for f in changes[3]:
1652 for f in changes[3]:
1645 filelinkrevs.setdefault(f, []).append(i)
1653 filelinkrevs.setdefault(f, []).append(i)
1646
1654
1647 seen = {}
1655 seen = {}
1648 self.ui.status("checking manifests\n")
1656 self.ui.status("checking manifests\n")
1649 for i in range(self.manifest.count()):
1657 for i in range(self.manifest.count()):
1650 n = self.manifest.node(i)
1658 n = self.manifest.node(i)
1651 if n in seen:
1659 if n in seen:
1652 self.ui.warn("duplicate manifest at revision %d\n" % i)
1660 self.ui.warn("duplicate manifest at revision %d\n" % i)
1653 errors += 1
1661 errors += 1
1654 seen[n] = 1
1662 seen[n] = 1
1655
1663
1656 for p in self.manifest.parents(n):
1664 for p in self.manifest.parents(n):
1657 if p not in self.manifest.nodemap:
1665 if p not in self.manifest.nodemap:
1658 self.ui.warn("manifest %s has unknown parent %s\n" %
1666 self.ui.warn("manifest %s has unknown parent %s\n" %
1659 (short(n), short(p)))
1667 (short(n), short(p)))
1660 errors += 1
1668 errors += 1
1661
1669
1662 try:
1670 try:
1663 delta = mdiff.patchtext(self.manifest.delta(n))
1671 delta = mdiff.patchtext(self.manifest.delta(n))
1664 except KeyboardInterrupt:
1672 except KeyboardInterrupt:
1665 self.ui.warn("aborted\n")
1673 self.ui.warn("aborted\n")
1666 sys.exit(0)
1674 sys.exit(0)
1667 except Exception, inst:
1675 except Exception, inst:
1668 self.ui.warn("unpacking manifest %s: %s\n"
1676 self.ui.warn("unpacking manifest %s: %s\n"
1669 % (short(n), inst))
1677 % (short(n), inst))
1670 errors += 1
1678 errors += 1
1671
1679
1672 ff = [ l.split('\0') for l in delta.splitlines() ]
1680 ff = [ l.split('\0') for l in delta.splitlines() ]
1673 for f, fn in ff:
1681 for f, fn in ff:
1674 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1682 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1675
1683
1676 self.ui.status("crosschecking files in changesets and manifests\n")
1684 self.ui.status("crosschecking files in changesets and manifests\n")
1677 for f in filenodes:
1685 for f in filenodes:
1678 if f not in filelinkrevs:
1686 if f not in filelinkrevs:
1679 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1687 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1680 errors += 1
1688 errors += 1
1681
1689
1682 for f in filelinkrevs:
1690 for f in filelinkrevs:
1683 if f not in filenodes:
1691 if f not in filenodes:
1684 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1692 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1685 errors += 1
1693 errors += 1
1686
1694
1687 self.ui.status("checking files\n")
1695 self.ui.status("checking files\n")
1688 ff = filenodes.keys()
1696 ff = filenodes.keys()
1689 ff.sort()
1697 ff.sort()
1690 for f in ff:
1698 for f in ff:
1691 if f == "/dev/null": continue
1699 if f == "/dev/null": continue
1692 files += 1
1700 files += 1
1693 fl = self.file(f)
1701 fl = self.file(f)
1694 nodes = { nullid: 1 }
1702 nodes = { nullid: 1 }
1695 seen = {}
1703 seen = {}
1696 for i in range(fl.count()):
1704 for i in range(fl.count()):
1697 revisions += 1
1705 revisions += 1
1698 n = fl.node(i)
1706 n = fl.node(i)
1699
1707
1700 if n in seen:
1708 if n in seen:
1701 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1709 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1702 errors += 1
1710 errors += 1
1703
1711
1704 if n not in filenodes[f]:
1712 if n not in filenodes[f]:
1705 self.ui.warn("%s: %d:%s not in manifests\n"
1713 self.ui.warn("%s: %d:%s not in manifests\n"
1706 % (f, i, short(n)))
1714 % (f, i, short(n)))
1707 errors += 1
1715 errors += 1
1708 else:
1716 else:
1709 del filenodes[f][n]
1717 del filenodes[f][n]
1710
1718
1711 flr = fl.linkrev(n)
1719 flr = fl.linkrev(n)
1712 if flr not in filelinkrevs[f]:
1720 if flr not in filelinkrevs[f]:
1713 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1721 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1714 % (f, short(n), fl.linkrev(n)))
1722 % (f, short(n), fl.linkrev(n)))
1715 errors += 1
1723 errors += 1
1716 else:
1724 else:
1717 filelinkrevs[f].remove(flr)
1725 filelinkrevs[f].remove(flr)
1718
1726
1719 # verify contents
1727 # verify contents
1720 try:
1728 try:
1721 t = fl.read(n)
1729 t = fl.read(n)
1722 except Exception, inst:
1730 except Exception, inst:
1723 self.ui.warn("unpacking file %s %s: %s\n"
1731 self.ui.warn("unpacking file %s %s: %s\n"
1724 % (f, short(n), inst))
1732 % (f, short(n), inst))
1725 errors += 1
1733 errors += 1
1726
1734
1727 # verify parents
1735 # verify parents
1728 (p1, p2) = fl.parents(n)
1736 (p1, p2) = fl.parents(n)
1729 if p1 not in nodes:
1737 if p1 not in nodes:
1730 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1738 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1731 (f, short(n), short(p1)))
1739 (f, short(n), short(p1)))
1732 errors += 1
1740 errors += 1
1733 if p2 not in nodes:
1741 if p2 not in nodes:
1734 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1742 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1735 (f, short(n), short(p2)))
1743 (f, short(n), short(p2)))
1736 errors += 1
1744 errors += 1
1737 nodes[n] = 1
1745 nodes[n] = 1
1738
1746
1739 # cross-check
1747 # cross-check
1740 for node in filenodes[f]:
1748 for node in filenodes[f]:
1741 self.ui.warn("node %s in manifests not in %s\n"
1749 self.ui.warn("node %s in manifests not in %s\n"
1742 % (hex(node), f))
1750 % (hex(node), f))
1743 errors += 1
1751 errors += 1
1744
1752
1745 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1753 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1746 (files, changesets, revisions))
1754 (files, changesets, revisions))
1747
1755
1748 if errors:
1756 if errors:
1749 self.ui.warn("%d integrity errors encountered!\n" % errors)
1757 self.ui.warn("%d integrity errors encountered!\n" % errors)
1750 return 1
1758 return 1
1751
1759
1752 class httprepository:
1760 class httprepository:
1753 def __init__(self, ui, path):
1761 def __init__(self, ui, path):
1754 # fix missing / after hostname
1762 # fix missing / after hostname
1755 s = urlparse.urlsplit(path)
1763 s = urlparse.urlsplit(path)
1756 partial = s[2]
1764 partial = s[2]
1757 if not partial: partial = "/"
1765 if not partial: partial = "/"
1758 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1766 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1759 self.ui = ui
1767 self.ui = ui
1760 no_list = [ "localhost", "127.0.0.1" ]
1768 no_list = [ "localhost", "127.0.0.1" ]
1761 host = ui.config("http_proxy", "host")
1769 host = ui.config("http_proxy", "host")
1762 if host is None:
1770 if host is None:
1763 host = os.environ.get("http_proxy")
1771 host = os.environ.get("http_proxy")
1764 if host and host.startswith('http://'):
1772 if host and host.startswith('http://'):
1765 host = host[7:]
1773 host = host[7:]
1766 user = ui.config("http_proxy", "user")
1774 user = ui.config("http_proxy", "user")
1767 passwd = ui.config("http_proxy", "passwd")
1775 passwd = ui.config("http_proxy", "passwd")
1768 no = ui.config("http_proxy", "no")
1776 no = ui.config("http_proxy", "no")
1769 if no is None:
1777 if no is None:
1770 no = os.environ.get("no_proxy")
1778 no = os.environ.get("no_proxy")
1771 if no:
1779 if no:
1772 no_list = no_list + no.split(",")
1780 no_list = no_list + no.split(",")
1773
1781
1774 no_proxy = 0
1782 no_proxy = 0
1775 for h in no_list:
1783 for h in no_list:
1776 if (path.startswith("http://" + h + "/") or
1784 if (path.startswith("http://" + h + "/") or
1777 path.startswith("http://" + h + ":") or
1785 path.startswith("http://" + h + ":") or
1778 path == "http://" + h):
1786 path == "http://" + h):
1779 no_proxy = 1
1787 no_proxy = 1
1780
1788
1781 # Note: urllib2 takes proxy values from the environment and those will
1789 # Note: urllib2 takes proxy values from the environment and those will
1782 # take precedence
1790 # take precedence
1783 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1791 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1784 if os.environ.has_key(env):
1792 if os.environ.has_key(env):
1785 del os.environ[env]
1793 del os.environ[env]
1786
1794
1787 proxy_handler = urllib2.BaseHandler()
1795 proxy_handler = urllib2.BaseHandler()
1788 if host and not no_proxy:
1796 if host and not no_proxy:
1789 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1797 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1790
1798
1791 authinfo = None
1799 authinfo = None
1792 if user and passwd:
1800 if user and passwd:
1793 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1801 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1794 passmgr.add_password(None, host, user, passwd)
1802 passmgr.add_password(None, host, user, passwd)
1795 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1803 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1796
1804
1797 opener = urllib2.build_opener(proxy_handler, authinfo)
1805 opener = urllib2.build_opener(proxy_handler, authinfo)
1798 urllib2.install_opener(opener)
1806 urllib2.install_opener(opener)
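# The proxy lookup above reads the [http_proxy] section of the hgrc, with
# the http_proxy / no_proxy environment variables as fallbacks for the
# host and no entries.  A hypothetical configuration, with made-up host
# names and credentials:
#
#     [http_proxy]
#     host = proxy.example.com:3128
#     user = alice
#     passwd = secret
#     no = localhost,127.0.0.1,intranet.example.com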
1799
1807
1800 def dev(self):
1808 def dev(self):
1801 return -1
1809 return -1
1802
1810
1803 def do_cmd(self, cmd, **args):
1811 def do_cmd(self, cmd, **args):
1804 self.ui.debug("sending %s command\n" % cmd)
1812 self.ui.debug("sending %s command\n" % cmd)
1805 q = {"cmd": cmd}
1813 q = {"cmd": cmd}
1806 q.update(args)
1814 q.update(args)
1807 qs = urllib.urlencode(q)
1815 qs = urllib.urlencode(q)
1808 cu = "%s?%s" % (self.url, qs)
1816 cu = "%s?%s" % (self.url, qs)
1809 resp = urllib2.urlopen(cu)
1817 resp = urllib2.urlopen(cu)
1810 proto = resp.headers['content-type']
1818 proto = resp.headers['content-type']
1811
1819
1812 # accept old "text/plain" and "application/hg-changegroup" for now
1820 # accept old "text/plain" and "application/hg-changegroup" for now
1813 if not proto.startswith('application/mercurial') and \
1821 if not proto.startswith('application/mercurial') and \
1814 not proto.startswith('text/plain') and \
1822 not proto.startswith('text/plain') and \
1815 not proto.startswith('application/hg-changegroup'):
1823 not proto.startswith('application/hg-changegroup'):
1816 raise RepoError("'%s' does not appear to be an hg repository"
1824 raise RepoError("'%s' does not appear to be an hg repository"
1817 % self.url)
1825 % self.url)
1818
1826
1819 if proto.startswith('application/mercurial'):
1827 if proto.startswith('application/mercurial'):
1820 version = proto[22:]
1828 version = proto[22:]
1821 if float(version) > 0.1:
1829 if float(version) > 0.1:
1822 raise RepoError("'%s' uses newer protocol %s" %
1830 raise RepoError("'%s' uses newer protocol %s" %
1823 (self.url, version))
1831 (self.url, version))
1824
1832
1825 return resp
1833 return resp
1826
1834
1827 def heads(self):
1835 def heads(self):
1828 d = self.do_cmd("heads").read()
1836 d = self.do_cmd("heads").read()
1829 try:
1837 try:
1830 return map(bin, d[:-1].split(" "))
1838 return map(bin, d[:-1].split(" "))
1831 except:
1839 except:
1832 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1840 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1833 raise
1841 raise
1834
1842
1835 def branches(self, nodes):
1843 def branches(self, nodes):
1836 n = " ".join(map(hex, nodes))
1844 n = " ".join(map(hex, nodes))
1837 d = self.do_cmd("branches", nodes=n).read()
1845 d = self.do_cmd("branches", nodes=n).read()
1838 try:
1846 try:
1839 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1847 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1840 return br
1848 return br
1841 except:
1849 except:
1842 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1850 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1843 raise
1851 raise
1844
1852
1845 def between(self, pairs):
1853 def between(self, pairs):
1846 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1854 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1847 d = self.do_cmd("between", pairs=n).read()
1855 d = self.do_cmd("between", pairs=n).read()
1848 try:
1856 try:
1849 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1857 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1850 return p
1858 return p
1851 except:
1859 except:
1852 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1860 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1853 raise
1861 raise
1854
1862
1855 def changegroup(self, nodes):
1863 def changegroup(self, nodes):
1856 n = " ".join(map(hex, nodes))
1864 n = " ".join(map(hex, nodes))
1857 f = self.do_cmd("changegroup", roots=n)
1865 f = self.do_cmd("changegroup", roots=n)
1858 bytes = 0
1866 bytes = 0
1859
1867
1860 class zread:
1868 class zread:
1861 def __init__(self, f):
1869 def __init__(self, f):
1862 self.zd = zlib.decompressobj()
1870 self.zd = zlib.decompressobj()
1863 self.f = f
1871 self.f = f
1864 self.buf = ""
1872 self.buf = ""
1865 def read(self, l):
1873 def read(self, l):
1866 while l > len(self.buf):
1874 while l > len(self.buf):
1867 r = self.f.read(4096)
1875 r = self.f.read(4096)
1868 if r:
1876 if r:
1869 self.buf += self.zd.decompress(r)
1877 self.buf += self.zd.decompress(r)
1870 else:
1878 else:
1871 self.buf += self.zd.flush()
1879 self.buf += self.zd.flush()
1872 break
1880 break
1873 d, self.buf = self.buf[:l], self.buf[l:]
1881 d, self.buf = self.buf[:l], self.buf[l:]
1874 return d
1882 return d
1875
1883
1876 return zread(f)
1884 return zread(f)
1877
1885
1878 class remotelock:
1886 class remotelock:
1879 def __init__(self, repo):
1887 def __init__(self, repo):
1880 self.repo = repo
1888 self.repo = repo
1881 def release(self):
1889 def release(self):
1882 self.repo.unlock()
1890 self.repo.unlock()
1883 self.repo = None
1891 self.repo = None
1884 def __del__(self):
1892 def __del__(self):
1885 if self.repo:
1893 if self.repo:
1886 self.release()
1894 self.release()
1887
1895
1888 class sshrepository:
1896 class sshrepository:
1889 def __init__(self, ui, path):
1897 def __init__(self, ui, path):
1890 self.url = path
1898 self.url = path
1891 self.ui = ui
1899 self.ui = ui
1892
1900
1893 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
1901 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
1894 if not m:
1902 if not m:
1895 raise RepoError("couldn't parse destination %s" % path)
1903 raise RepoError("couldn't parse destination %s" % path)
1896
1904
1897 self.user = m.group(2)
1905 self.user = m.group(2)
1898 self.host = m.group(3)
1906 self.host = m.group(3)
1899 self.port = m.group(5)
1907 self.port = m.group(5)
1900 self.path = m.group(7)
1908 self.path = m.group(7)
1901
1909
1902 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1910 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1903 args = self.port and ("%s -p %s") % (args, self.port) or args
1911 args = self.port and ("%s -p %s") % (args, self.port) or args
1904 path = self.path or ""
1912 path = self.path or ""
1905
1913
1906 if not path:
1914 if not path:
1907 raise RepoError("no remote repository path specified")
1915 raise RepoError("no remote repository path specified")
1908
1916
1909 cmd = "ssh %s 'hg -R %s serve --stdio'"
1917 cmd = "ssh %s 'hg -R %s serve --stdio'"
1910 cmd = cmd % (args, path)
1918 cmd = cmd % (args, path)
1911
1919
1912 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1920 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1913
1921
1914 def readerr(self):
1922 def readerr(self):
1915 while 1:
1923 while 1:
1916 r,w,x = select.select([self.pipee], [], [], 0)
1924 r,w,x = select.select([self.pipee], [], [], 0)
1917 if not r: break
1925 if not r: break
1918 l = self.pipee.readline()
1926 l = self.pipee.readline()
1919 if not l: break
1927 if not l: break
1920 self.ui.status("remote: ", l)
1928 self.ui.status("remote: ", l)
1921
1929
1922 def __del__(self):
1930 def __del__(self):
1923 try:
1931 try:
1924 self.pipeo.close()
1932 self.pipeo.close()
1925 self.pipei.close()
1933 self.pipei.close()
1926 for l in self.pipee:
1934 for l in self.pipee:
1927 self.ui.status("remote: ", l)
1935 self.ui.status("remote: ", l)
1928 self.pipee.close()
1936 self.pipee.close()
1929 except:
1937 except:
1930 pass
1938 pass
1931
1939
1932 def dev(self):
1940 def dev(self):
1933 return -1
1941 return -1
1934
1942
1935 def do_cmd(self, cmd, **args):
1943 def do_cmd(self, cmd, **args):
1936 self.ui.debug("sending %s command\n" % cmd)
1944 self.ui.debug("sending %s command\n" % cmd)
1937 self.pipeo.write("%s\n" % cmd)
1945 self.pipeo.write("%s\n" % cmd)
1938 for k, v in args.items():
1946 for k, v in args.items():
1939 self.pipeo.write("%s %d\n" % (k, len(v)))
1947 self.pipeo.write("%s %d\n" % (k, len(v)))
1940 self.pipeo.write(v)
1948 self.pipeo.write(v)
1941 self.pipeo.flush()
1949 self.pipeo.flush()
1942
1950
1943 return self.pipei
1951 return self.pipei
1944
1952
1945 def call(self, cmd, **args):
1953 def call(self, cmd, **args):
1946 r = self.do_cmd(cmd, **args)
1954 r = self.do_cmd(cmd, **args)
1947 l = r.readline()
1955 l = r.readline()
1948 self.readerr()
1956 self.readerr()
1949 try:
1957 try:
1950 l = int(l)
1958 l = int(l)
1951 except:
1959 except:
1952 raise RepoError("unexpected response '%s'" % l)
1960 raise RepoError("unexpected response '%s'" % l)
1953 return r.read(l)
1961 return r.read(l)
1954
1962
1955 def lock(self):
1963 def lock(self):
1956 self.call("lock")
1964 self.call("lock")
1957 return remotelock(self)
1965 return remotelock(self)
1958
1966
1959 def unlock(self):
1967 def unlock(self):
1960 self.call("unlock")
1968 self.call("unlock")
1961
1969
1962 def heads(self):
1970 def heads(self):
1963 d = self.call("heads")
1971 d = self.call("heads")
1964 try:
1972 try:
1965 return map(bin, d[:-1].split(" "))
1973 return map(bin, d[:-1].split(" "))
1966 except:
1974 except:
1967 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1975 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1968
1976
1969 def branches(self, nodes):
1977 def branches(self, nodes):
1970 n = " ".join(map(hex, nodes))
1978 n = " ".join(map(hex, nodes))
1971 d = self.call("branches", nodes=n)
1979 d = self.call("branches", nodes=n)
1972 try:
1980 try:
1973 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1981 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1974 return br
1982 return br
1975 except:
1983 except:
1976 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1984 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1977
1985
1978 def between(self, pairs):
1986 def between(self, pairs):
1979 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1987 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1980 d = self.call("between", pairs=n)
1988 d = self.call("between", pairs=n)
1981 try:
1989 try:
1982 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1990 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1983 return p
1991 return p
1984 except:
1992 except:
1985 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1993 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1986
1994
1987 def changegroup(self, nodes):
1995 def changegroup(self, nodes):
1988 n = " ".join(map(hex, nodes))
1996 n = " ".join(map(hex, nodes))
1989 f = self.do_cmd("changegroup", roots=n)
1997 f = self.do_cmd("changegroup", roots=n)
1990 return self.pipei
1998 return self.pipei
1991
1999
1992 def addchangegroup(self, cg):
2000 def addchangegroup(self, cg):
1993 d = self.call("addchangegroup")
2001 d = self.call("addchangegroup")
1994 if d:
2002 if d:
1995 raise RepoError("push refused: %s" % d)
2003 raise RepoError("push refused: %s" % d)
1996
2004
1997 while 1:
2005 while 1:
1998 d = cg.read(4096)
2006 d = cg.read(4096)
1999 if not d: break
2007 if not d: break
2000 self.pipeo.write(d)
2008 self.pipeo.write(d)
2001 self.readerr()
2009 self.readerr()
2002
2010
2003 self.pipeo.flush()
2011 self.pipeo.flush()
2004
2012
2005 self.readerr()
2013 self.readerr()
2006 l = int(self.pipei.readline())
2014 l = int(self.pipei.readline())
2007 return self.pipei.read(l) != ""
2015 return self.pipei.read(l) != ""
2008
2016
2009 def repository(ui, path=None, create=0):
2017 def repository(ui, path=None, create=0):
2010 if path:
2018 if path:
2011 if path.startswith("http://"):
2019 if path.startswith("http://"):
2012 return httprepository(ui, path)
2020 return httprepository(ui, path)
2013 if path.startswith("hg://"):
2021 if path.startswith("hg://"):
2014 return httprepository(ui, path.replace("hg://", "http://"))
2022 return httprepository(ui, path.replace("hg://", "http://"))
2015 if path.startswith("old-http://"):
2023 if path.startswith("old-http://"):
2016 return localrepository(ui, path.replace("old-http://", "http://"))
2024 return localrepository(ui, path.replace("old-http://", "http://"))
2017 if path.startswith("ssh://"):
2025 if path.startswith("ssh://"):
2018 return sshrepository(ui, path)
2026 return sshrepository(ui, path)
2019
2027
2020 return localrepository(ui, path, create)
2028 return localrepository(ui, path, create)
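# Scheme dispatch performed by repository(), illustrated with made-up
# paths:
#
#     repository(ui, "http://hg.example.com/repo")      -> httprepository
#     repository(ui, "hg://hg.example.com/repo")        -> httprepository
#     repository(ui, "old-http://hg.example.com/repo")  -> localrepository
#                                                          (presumably read via
#                                                          httprangereader)
#     repository(ui, "ssh://user@hg.example.com/repo")  -> sshrepository
#     repository(ui, "/path/to/repo", create=1)         -> localrepository,
#                                                          creating a new repo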