Fix 3-way-merge of original parent, workdir and new parent....
Thomas Arendsen Hein
r862:d70c1c31 default
@@ -1,2028 +1,2042 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse stat")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse stat")
14 demandload(globals(), "bisect select")
14 demandload(globals(), "bisect select")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
284 list.sort()
284 list.sort()
285 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
286 text = "\n".join(l)
286 text = "\n".join(l)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
288
288
289 class dirstate:
289 class dirstate:
290 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
291 self.opener = opener
291 self.opener = opener
292 self.root = root
292 self.root = root
293 self.dirty = 0
293 self.dirty = 0
294 self.ui = ui
294 self.ui = ui
295 self.map = None
295 self.map = None
296 self.pl = None
296 self.pl = None
297 self.copies = {}
297 self.copies = {}
298 self.ignorefunc = None
298 self.ignorefunc = None
299
299
300 def wjoin(self, f):
300 def wjoin(self, f):
301 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
302
302
303 def ignore(self, f):
303 def ignore(self, f):
304 if not self.ignorefunc:
304 if not self.ignorefunc:
305 bigpat = []
305 bigpat = []
306 try:
306 try:
307 l = file(self.wjoin(".hgignore"))
307 l = file(self.wjoin(".hgignore"))
308 for pat in l:
308 for pat in l:
309 if pat != "\n":
309 if pat != "\n":
310 p = util.pconvert(pat[:-1])
310 p = util.pconvert(pat[:-1])
311 try:
311 try:
312 r = re.compile(p)
312 r = re.compile(p)
313 except:
313 except:
314 self.ui.warn("ignoring invalid ignore"
314 self.ui.warn("ignoring invalid ignore"
315 + " regular expression '%s'\n" % p)
315 + " regular expression '%s'\n" % p)
316 else:
316 else:
317 bigpat.append(util.pconvert(pat[:-1]))
317 bigpat.append(util.pconvert(pat[:-1]))
318 except IOError: pass
318 except IOError: pass
319
319
320 if bigpat:
320 if bigpat:
321 s = "(?:%s)" % (")|(?:".join(bigpat))
321 s = "(?:%s)" % (")|(?:".join(bigpat))
322 r = re.compile(s)
322 r = re.compile(s)
323 self.ignorefunc = r.search
323 self.ignorefunc = r.search
324 else:
324 else:
325 self.ignorefunc = util.never
325 self.ignorefunc = util.never
326
326
327 return self.ignorefunc(f)
327 return self.ignorefunc(f)
328
328
329 def __del__(self):
329 def __del__(self):
330 if self.dirty:
330 if self.dirty:
331 self.write()
331 self.write()
332
332
333 def __getitem__(self, key):
333 def __getitem__(self, key):
334 try:
334 try:
335 return self.map[key]
335 return self.map[key]
336 except TypeError:
336 except TypeError:
337 self.read()
337 self.read()
338 return self[key]
338 return self[key]
339
339
340 def __contains__(self, key):
340 def __contains__(self, key):
341 if not self.map: self.read()
341 if not self.map: self.read()
342 return key in self.map
342 return key in self.map
343
343
344 def parents(self):
344 def parents(self):
345 if not self.pl:
345 if not self.pl:
346 self.read()
346 self.read()
347 return self.pl
347 return self.pl
348
348
349 def markdirty(self):
349 def markdirty(self):
350 if not self.dirty:
350 if not self.dirty:
351 self.dirty = 1
351 self.dirty = 1
352
352
353 def setparents(self, p1, p2 = nullid):
353 def setparents(self, p1, p2 = nullid):
354 self.markdirty()
354 self.markdirty()
355 self.pl = p1, p2
355 self.pl = p1, p2
356
356
357 def state(self, key):
357 def state(self, key):
358 try:
358 try:
359 return self[key][0]
359 return self[key][0]
360 except KeyError:
360 except KeyError:
361 return "?"
361 return "?"
362
362
363 def read(self):
363 def read(self):
364 if self.map is not None: return self.map
364 if self.map is not None: return self.map
365
365
366 self.map = {}
366 self.map = {}
367 self.pl = [nullid, nullid]
367 self.pl = [nullid, nullid]
368 try:
368 try:
369 st = self.opener("dirstate").read()
369 st = self.opener("dirstate").read()
370 if not st: return
370 if not st: return
371 except: return
371 except: return
372
372
373 self.pl = [st[:20], st[20: 40]]
373 self.pl = [st[:20], st[20: 40]]
374
374
375 pos = 40
375 pos = 40
376 while pos < len(st):
376 while pos < len(st):
377 e = struct.unpack(">cllll", st[pos:pos+17])
377 e = struct.unpack(">cllll", st[pos:pos+17])
378 l = e[4]
378 l = e[4]
379 pos += 17
379 pos += 17
380 f = st[pos:pos + l]
380 f = st[pos:pos + l]
381 if '\0' in f:
381 if '\0' in f:
382 f, c = f.split('\0')
382 f, c = f.split('\0')
383 self.copies[f] = c
383 self.copies[f] = c
384 self.map[f] = e[:4]
384 self.map[f] = e[:4]
385 pos += l
385 pos += l
386
386
387 def copy(self, source, dest):
387 def copy(self, source, dest):
388 self.read()
388 self.read()
389 self.markdirty()
389 self.markdirty()
390 self.copies[dest] = source
390 self.copies[dest] = source
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self.copies.get(file, None)
393 return self.copies.get(file, None)
394
394
395 def update(self, files, state):
395 def update(self, files, state, **kw):
396 ''' current states:
396 ''' current states:
397 n normal
397 n normal
398 m needs merging
398 m needs merging
399 r marked for removal
399 r marked for removal
400 a marked for addition'''
400 a marked for addition'''
401
401
402 if not files: return
402 if not files: return
403 self.read()
403 self.read()
404 self.markdirty()
404 self.markdirty()
405 for f in files:
405 for f in files:
406 if state == "r":
406 if state == "r":
407 self.map[f] = ('r', 0, 0, 0)
407 self.map[f] = ('r', 0, 0, 0)
408 else:
408 else:
409 s = os.stat(os.path.join(self.root, f))
409 s = os.stat(os.path.join(self.root, f))
410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
410 st_mode = kw.get('st_mode', s.st_mode)
411 st_size = kw.get('st_size', s.st_size)
412 st_mtime = kw.get('st_mtime', s.st_mtime)
413 self.map[f] = (state, st_mode, st_size, st_mtime)
411
414
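The hunk above is the only change visible in this excerpt: dirstate.update() now accepts optional keyword arguments, so a caller can override individual stat fields (st_mode, st_size, st_mtime) instead of always recording whatever os.stat() reports for the working file. A minimal standalone sketch of the same pattern follows; record_state and the example call are hypothetical names for illustration, not Mercurial code:

    import os

    def record_state(path, state, **kw):
        # stat the file, but let the caller override selected fields
        s = os.stat(path)
        st_mode = kw.get('st_mode', s.st_mode)
        st_size = kw.get('st_size', s.st_size)
        st_mtime = kw.get('st_mtime', s.st_mtime)
        return (state, st_mode, st_size, st_mtime)

    # e.g. a merge can force a later content comparison by recording a
    # size/mtime that cannot match the file on disk:
    # record_state("somefile", "n", st_size=-1, st_mtime=-1)

Presumably the 3-way-merge code later in the file (beyond this excerpt) passes such overrides when marking merged files; that call site is an assumption here, not part of the hunk shown.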
412 def forget(self, files):
415 def forget(self, files):
413 if not files: return
416 if not files: return
414 self.read()
417 self.read()
415 self.markdirty()
418 self.markdirty()
416 for f in files:
419 for f in files:
417 try:
420 try:
418 del self.map[f]
421 del self.map[f]
419 except KeyError:
422 except KeyError:
420 self.ui.warn("not in dirstate: %s!\n" % f)
423 self.ui.warn("not in dirstate: %s!\n" % f)
421 pass
424 pass
422
425
423 def clear(self):
426 def clear(self):
424 self.map = {}
427 self.map = {}
425 self.markdirty()
428 self.markdirty()
426
429
427 def write(self):
430 def write(self):
428 st = self.opener("dirstate", "w")
431 st = self.opener("dirstate", "w")
429 st.write("".join(self.pl))
432 st.write("".join(self.pl))
430 for f, e in self.map.items():
433 for f, e in self.map.items():
431 c = self.copied(f)
434 c = self.copied(f)
432 if c:
435 if c:
433 f = f + "\0" + c
436 f = f + "\0" + c
434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
437 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
435 st.write(e + f)
438 st.write(e + f)
436 self.dirty = 0
439 self.dirty = 0
437
440
438 def walk(self, files = None, match = util.always):
441 def walk(self, files = None, match = util.always):
439 self.read()
442 self.read()
440 dc = self.map.copy()
443 dc = self.map.copy()
441 # walk all files by default
444 # walk all files by default
442 if not files: files = [self.root]
445 if not files: files = [self.root]
443 known = {'.hg': 1}
446 known = {'.hg': 1}
444 def seen(fn):
447 def seen(fn):
445 if fn in known: return True
448 if fn in known: return True
446 known[fn] = 1
449 known[fn] = 1
447 def traverse():
450 def traverse():
448 for f in util.unique(files):
451 for f in util.unique(files):
449 f = os.path.join(self.root, f)
452 f = os.path.join(self.root, f)
450 if os.path.isdir(f):
453 if os.path.isdir(f):
451 for dir, subdirs, fl in os.walk(f):
454 for dir, subdirs, fl in os.walk(f):
452 d = dir[len(self.root) + 1:]
455 d = dir[len(self.root) + 1:]
453 nd = os.path.normpath(d)
456 nd = os.path.normpath(d)
454 if seen(nd):
457 if seen(nd):
455 subdirs[:] = []
458 subdirs[:] = []
456 continue
459 continue
457 for sd in subdirs:
460 for sd in subdirs:
458 ds = os.path.join(nd, sd +'/')
461 ds = os.path.join(nd, sd +'/')
459 if self.ignore(ds) or not match(ds):
462 if self.ignore(ds) or not match(ds):
460 subdirs.remove(sd)
463 subdirs.remove(sd)
461 subdirs.sort()
464 subdirs.sort()
462 fl.sort()
465 fl.sort()
463 for fn in fl:
466 for fn in fl:
464 fn = util.pconvert(os.path.join(d, fn))
467 fn = util.pconvert(os.path.join(d, fn))
465 yield 'f', fn
468 yield 'f', fn
466 else:
469 else:
467 yield 'f', f[len(self.root) + 1:]
470 yield 'f', f[len(self.root) + 1:]
468
471
469 ks = dc.keys()
472 ks = dc.keys()
470 ks.sort()
473 ks.sort()
471 for k in ks:
474 for k in ks:
472 yield 'm', k
475 yield 'm', k
473
476
474 # yield only files that match: all in dirstate, others only if
477 # yield only files that match: all in dirstate, others only if
475 # not in .hgignore
478 # not in .hgignore
476
479
477 for src, fn in util.unique(traverse()):
480 for src, fn in util.unique(traverse()):
478 fn = os.path.normpath(fn)
481 fn = os.path.normpath(fn)
479 if seen(fn): continue
482 if seen(fn): continue
480 if fn in dc:
483 if fn in dc:
481 del dc[fn]
484 del dc[fn]
482 elif self.ignore(fn):
485 elif self.ignore(fn):
483 continue
486 continue
484 if match(fn):
487 if match(fn):
485 yield src, fn
488 yield src, fn
486
489
487 def changes(self, files=None, match=util.always):
490 def changes(self, files=None, match=util.always):
488 self.read()
491 self.read()
489 dc = self.map.copy()
492 dc = self.map.copy()
490 lookup, modified, added, unknown = [], [], [], []
493 lookup, modified, added, unknown = [], [], [], []
491 removed, deleted = [], []
494 removed, deleted = [], []
492
495
493 for src, fn in self.walk(files, match):
496 for src, fn in self.walk(files, match):
494 try:
497 try:
495 s = os.stat(os.path.join(self.root, fn))
498 s = os.stat(os.path.join(self.root, fn))
496 except OSError:
499 except OSError:
497 continue
500 continue
498 if not stat.S_ISREG(s.st_mode):
501 if not stat.S_ISREG(s.st_mode):
499 continue
502 continue
500 c = dc.get(fn)
503 c = dc.get(fn)
501 if c:
504 if c:
502 del dc[fn]
505 del dc[fn]
503 if c[0] == 'm':
506 if c[0] == 'm':
504 modified.append(fn)
507 modified.append(fn)
505 elif c[0] == 'a':
508 elif c[0] == 'a':
506 added.append(fn)
509 added.append(fn)
507 elif c[0] == 'r':
510 elif c[0] == 'r':
508 unknown.append(fn)
511 unknown.append(fn)
509 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
512 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
510 modified.append(fn)
513 modified.append(fn)
511 elif c[3] != s.st_mtime:
514 elif c[3] != s.st_mtime:
512 lookup.append(fn)
515 lookup.append(fn)
513 else:
516 else:
514 unknown.append(fn)
517 unknown.append(fn)
515
518
516 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
519 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
517 if c[0] == 'r':
520 if c[0] == 'r':
518 removed.append(fn)
521 removed.append(fn)
519 else:
522 else:
520 deleted.append(fn)
523 deleted.append(fn)
521 return (lookup, modified, added, removed + deleted, unknown)
524 return (lookup, modified, added, removed + deleted, unknown)
522
525
523 # used to avoid circular references so destructors work
526 # used to avoid circular references so destructors work
524 def opener(base):
527 def opener(base):
525 p = base
528 p = base
526 def o(path, mode="r"):
529 def o(path, mode="r"):
527 if p.startswith("http://"):
530 if p.startswith("http://"):
528 f = os.path.join(p, urllib.quote(path))
531 f = os.path.join(p, urllib.quote(path))
529 return httprangereader.httprangereader(f)
532 return httprangereader.httprangereader(f)
530
533
531 f = os.path.join(p, path)
534 f = os.path.join(p, path)
532
535
533 mode += "b" # for that other OS
536 mode += "b" # for that other OS
534
537
535 if mode[0] != "r":
538 if mode[0] != "r":
536 try:
539 try:
537 s = os.stat(f)
540 s = os.stat(f)
538 except OSError:
541 except OSError:
539 d = os.path.dirname(f)
542 d = os.path.dirname(f)
540 if not os.path.isdir(d):
543 if not os.path.isdir(d):
541 os.makedirs(d)
544 os.makedirs(d)
542 else:
545 else:
543 if s.st_nlink > 1:
546 if s.st_nlink > 1:
544 file(f + ".tmp", "wb").write(file(f, "rb").read())
547 file(f + ".tmp", "wb").write(file(f, "rb").read())
545 util.rename(f+".tmp", f)
548 util.rename(f+".tmp", f)
546
549
547 return file(f, mode)
550 return file(f, mode)
548
551
549 return o
552 return o
550
553
551 class RepoError(Exception): pass
554 class RepoError(Exception): pass
552
555
553 class localrepository:
556 class localrepository:
554 def __init__(self, ui, path=None, create=0):
557 def __init__(self, ui, path=None, create=0):
555 self.remote = 0
558 self.remote = 0
556 if path and path.startswith("http://"):
559 if path and path.startswith("http://"):
557 self.remote = 1
560 self.remote = 1
558 self.path = path
561 self.path = path
559 else:
562 else:
560 if not path:
563 if not path:
561 p = os.getcwd()
564 p = os.getcwd()
562 while not os.path.isdir(os.path.join(p, ".hg")):
565 while not os.path.isdir(os.path.join(p, ".hg")):
563 oldp = p
566 oldp = p
564 p = os.path.dirname(p)
567 p = os.path.dirname(p)
565 if p == oldp: raise RepoError("no repo found")
568 if p == oldp: raise RepoError("no repo found")
566 path = p
569 path = p
567 self.path = os.path.join(path, ".hg")
570 self.path = os.path.join(path, ".hg")
568
571
569 if not create and not os.path.isdir(self.path):
572 if not create and not os.path.isdir(self.path):
570 raise RepoError("repository %s not found" % self.path)
573 raise RepoError("repository %s not found" % self.path)
571
574
572 self.root = path
575 self.root = path
573 self.ui = ui
576 self.ui = ui
574
577
575 if create:
578 if create:
576 os.mkdir(self.path)
579 os.mkdir(self.path)
577 os.mkdir(self.join("data"))
580 os.mkdir(self.join("data"))
578
581
579 self.opener = opener(self.path)
582 self.opener = opener(self.path)
580 self.wopener = opener(self.root)
583 self.wopener = opener(self.root)
581 self.manifest = manifest(self.opener)
584 self.manifest = manifest(self.opener)
582 self.changelog = changelog(self.opener)
585 self.changelog = changelog(self.opener)
583 self.tagscache = None
586 self.tagscache = None
584 self.nodetagscache = None
587 self.nodetagscache = None
585
588
586 if not self.remote:
589 if not self.remote:
587 self.dirstate = dirstate(self.opener, ui, self.root)
590 self.dirstate = dirstate(self.opener, ui, self.root)
588 try:
591 try:
589 self.ui.readconfig(self.opener("hgrc"))
592 self.ui.readconfig(self.opener("hgrc"))
590 except IOError: pass
593 except IOError: pass
591
594
592 def hook(self, name, **args):
595 def hook(self, name, **args):
593 s = self.ui.config("hooks", name)
596 s = self.ui.config("hooks", name)
594 if s:
597 if s:
595 self.ui.note("running hook %s: %s\n" % (name, s))
598 self.ui.note("running hook %s: %s\n" % (name, s))
596 old = {}
599 old = {}
597 for k, v in args.items():
600 for k, v in args.items():
598 k = k.upper()
601 k = k.upper()
599 old[k] = os.environ.get(k, None)
602 old[k] = os.environ.get(k, None)
600 os.environ[k] = v
603 os.environ[k] = v
601
604
602 r = os.system(s)
605 r = os.system(s)
603
606
604 for k, v in old.items():
607 for k, v in old.items():
605 if v != None:
608 if v != None:
606 os.environ[k] = v
609 os.environ[k] = v
607 else:
610 else:
608 del os.environ[k]
611 del os.environ[k]
609
612
610 if r:
613 if r:
611 self.ui.warn("abort: %s hook failed with status %d!\n" %
614 self.ui.warn("abort: %s hook failed with status %d!\n" %
612 (name, r))
615 (name, r))
613 return False
616 return False
614 return True
617 return True
615
618
616 def tags(self):
619 def tags(self):
617 '''return a mapping of tag to node'''
620 '''return a mapping of tag to node'''
618 if not self.tagscache:
621 if not self.tagscache:
619 self.tagscache = {}
622 self.tagscache = {}
620 def addtag(self, k, n):
623 def addtag(self, k, n):
621 try:
624 try:
622 bin_n = bin(n)
625 bin_n = bin(n)
623 except TypeError:
626 except TypeError:
624 bin_n = ''
627 bin_n = ''
625 self.tagscache[k.strip()] = bin_n
628 self.tagscache[k.strip()] = bin_n
626
629
627 try:
630 try:
628 # read each head of the tags file, ending with the tip
631 # read each head of the tags file, ending with the tip
629 # and add each tag found to the map, with "newer" ones
632 # and add each tag found to the map, with "newer" ones
630 # taking precedence
633 # taking precedence
631 fl = self.file(".hgtags")
634 fl = self.file(".hgtags")
632 h = fl.heads()
635 h = fl.heads()
633 h.reverse()
636 h.reverse()
634 for r in h:
637 for r in h:
635 for l in fl.revision(r).splitlines():
638 for l in fl.revision(r).splitlines():
636 if l:
639 if l:
637 n, k = l.split(" ", 1)
640 n, k = l.split(" ", 1)
638 addtag(self, k, n)
641 addtag(self, k, n)
639 except KeyError:
642 except KeyError:
640 pass
643 pass
641
644
642 try:
645 try:
643 f = self.opener("localtags")
646 f = self.opener("localtags")
644 for l in f:
647 for l in f:
645 n, k = l.split(" ", 1)
648 n, k = l.split(" ", 1)
646 addtag(self, k, n)
649 addtag(self, k, n)
647 except IOError:
650 except IOError:
648 pass
651 pass
649
652
650 self.tagscache['tip'] = self.changelog.tip()
653 self.tagscache['tip'] = self.changelog.tip()
651
654
652 return self.tagscache
655 return self.tagscache
653
656
654 def tagslist(self):
657 def tagslist(self):
655 '''return a list of tags ordered by revision'''
658 '''return a list of tags ordered by revision'''
656 l = []
659 l = []
657 for t, n in self.tags().items():
660 for t, n in self.tags().items():
658 try:
661 try:
659 r = self.changelog.rev(n)
662 r = self.changelog.rev(n)
660 except:
663 except:
661 r = -2 # sort to the beginning of the list if unknown
664 r = -2 # sort to the beginning of the list if unknown
662 l.append((r,t,n))
665 l.append((r,t,n))
663 l.sort()
666 l.sort()
664 return [(t,n) for r,t,n in l]
667 return [(t,n) for r,t,n in l]
665
668
666 def nodetags(self, node):
669 def nodetags(self, node):
667 '''return the tags associated with a node'''
670 '''return the tags associated with a node'''
668 if not self.nodetagscache:
671 if not self.nodetagscache:
669 self.nodetagscache = {}
672 self.nodetagscache = {}
670 for t,n in self.tags().items():
673 for t,n in self.tags().items():
671 self.nodetagscache.setdefault(n,[]).append(t)
674 self.nodetagscache.setdefault(n,[]).append(t)
672 return self.nodetagscache.get(node, [])
675 return self.nodetagscache.get(node, [])
673
676
674 def lookup(self, key):
677 def lookup(self, key):
675 try:
678 try:
676 return self.tags()[key]
679 return self.tags()[key]
677 except KeyError:
680 except KeyError:
678 try:
681 try:
679 return self.changelog.lookup(key)
682 return self.changelog.lookup(key)
680 except:
683 except:
681 raise RepoError("unknown revision '%s'" % key)
684 raise RepoError("unknown revision '%s'" % key)
682
685
683 def dev(self):
686 def dev(self):
684 if self.remote: return -1
687 if self.remote: return -1
685 return os.stat(self.path).st_dev
688 return os.stat(self.path).st_dev
686
689
687 def join(self, f):
690 def join(self, f):
688 return os.path.join(self.path, f)
691 return os.path.join(self.path, f)
689
692
690 def wjoin(self, f):
693 def wjoin(self, f):
691 return os.path.join(self.root, f)
694 return os.path.join(self.root, f)
692
695
693 def file(self, f):
696 def file(self, f):
694 if f[0] == '/': f = f[1:]
697 if f[0] == '/': f = f[1:]
695 return filelog(self.opener, f)
698 return filelog(self.opener, f)
696
699
697 def getcwd(self):
700 def getcwd(self):
698 cwd = os.getcwd()
701 cwd = os.getcwd()
699 if cwd == self.root: return ''
702 if cwd == self.root: return ''
700 return cwd[len(self.root) + 1:]
703 return cwd[len(self.root) + 1:]
701
704
702 def wfile(self, f, mode='r'):
705 def wfile(self, f, mode='r'):
703 return self.wopener(f, mode)
706 return self.wopener(f, mode)
704
707
705 def transaction(self):
708 def transaction(self):
706 # save dirstate for undo
709 # save dirstate for undo
707 try:
710 try:
708 ds = self.opener("dirstate").read()
711 ds = self.opener("dirstate").read()
709 except IOError:
712 except IOError:
710 ds = ""
713 ds = ""
711 self.opener("journal.dirstate", "w").write(ds)
714 self.opener("journal.dirstate", "w").write(ds)
712
715
713 def after():
716 def after():
714 util.rename(self.join("journal"), self.join("undo"))
717 util.rename(self.join("journal"), self.join("undo"))
715 util.rename(self.join("journal.dirstate"),
718 util.rename(self.join("journal.dirstate"),
716 self.join("undo.dirstate"))
719 self.join("undo.dirstate"))
717
720
718 return transaction.transaction(self.ui.warn, self.opener,
721 return transaction.transaction(self.ui.warn, self.opener,
719 self.join("journal"), after)
722 self.join("journal"), after)
720
723
721 def recover(self):
724 def recover(self):
722 lock = self.lock()
725 lock = self.lock()
723 if os.path.exists(self.join("journal")):
726 if os.path.exists(self.join("journal")):
724 self.ui.status("rolling back interrupted transaction\n")
727 self.ui.status("rolling back interrupted transaction\n")
725 return transaction.rollback(self.opener, self.join("journal"))
728 return transaction.rollback(self.opener, self.join("journal"))
726 else:
729 else:
727 self.ui.warn("no interrupted transaction available\n")
730 self.ui.warn("no interrupted transaction available\n")
728
731
729 def undo(self):
732 def undo(self):
730 lock = self.lock()
733 lock = self.lock()
731 if os.path.exists(self.join("undo")):
734 if os.path.exists(self.join("undo")):
732 self.ui.status("rolling back last transaction\n")
735 self.ui.status("rolling back last transaction\n")
733 transaction.rollback(self.opener, self.join("undo"))
736 transaction.rollback(self.opener, self.join("undo"))
734 self.dirstate = None
737 self.dirstate = None
735 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
738 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
736 self.dirstate = dirstate(self.opener, self.ui, self.root)
739 self.dirstate = dirstate(self.opener, self.ui, self.root)
737 else:
740 else:
738 self.ui.warn("no undo information available\n")
741 self.ui.warn("no undo information available\n")
739
742
740 def lock(self, wait = 1):
743 def lock(self, wait = 1):
741 try:
744 try:
742 return lock.lock(self.join("lock"), 0)
745 return lock.lock(self.join("lock"), 0)
743 except lock.LockHeld, inst:
746 except lock.LockHeld, inst:
744 if wait:
747 if wait:
745 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
748 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
746 return lock.lock(self.join("lock"), wait)
749 return lock.lock(self.join("lock"), wait)
747 raise inst
750 raise inst
748
751
749 def rawcommit(self, files, text, user, date, p1=None, p2=None):
752 def rawcommit(self, files, text, user, date, p1=None, p2=None):
750 orig_parent = self.dirstate.parents()[0] or nullid
753 orig_parent = self.dirstate.parents()[0] or nullid
751 p1 = p1 or self.dirstate.parents()[0] or nullid
754 p1 = p1 or self.dirstate.parents()[0] or nullid
752 p2 = p2 or self.dirstate.parents()[1] or nullid
755 p2 = p2 or self.dirstate.parents()[1] or nullid
753 c1 = self.changelog.read(p1)
756 c1 = self.changelog.read(p1)
754 c2 = self.changelog.read(p2)
757 c2 = self.changelog.read(p2)
755 m1 = self.manifest.read(c1[0])
758 m1 = self.manifest.read(c1[0])
756 mf1 = self.manifest.readflags(c1[0])
759 mf1 = self.manifest.readflags(c1[0])
757 m2 = self.manifest.read(c2[0])
760 m2 = self.manifest.read(c2[0])
758
761
759 if orig_parent == p1:
762 if orig_parent == p1:
760 update_dirstate = 1
763 update_dirstate = 1
761 else:
764 else:
762 update_dirstate = 0
765 update_dirstate = 0
763
766
764 tr = self.transaction()
767 tr = self.transaction()
765 mm = m1.copy()
768 mm = m1.copy()
766 mfm = mf1.copy()
769 mfm = mf1.copy()
767 linkrev = self.changelog.count()
770 linkrev = self.changelog.count()
768 for f in files:
771 for f in files:
769 try:
772 try:
770 t = self.wfile(f).read()
773 t = self.wfile(f).read()
771 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
774 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
772 r = self.file(f)
775 r = self.file(f)
773 mfm[f] = tm
776 mfm[f] = tm
774 mm[f] = r.add(t, {}, tr, linkrev,
777 mm[f] = r.add(t, {}, tr, linkrev,
775 m1.get(f, nullid), m2.get(f, nullid))
778 m1.get(f, nullid), m2.get(f, nullid))
776 if update_dirstate:
779 if update_dirstate:
777 self.dirstate.update([f], "n")
780 self.dirstate.update([f], "n")
778 except IOError:
781 except IOError:
779 try:
782 try:
780 del mm[f]
783 del mm[f]
781 del mfm[f]
784 del mfm[f]
782 if update_dirstate:
785 if update_dirstate:
783 self.dirstate.forget([f])
786 self.dirstate.forget([f])
784 except:
787 except:
785 # deleted from p2?
788 # deleted from p2?
786 pass
789 pass
787
790
788 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
791 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
789 user = user or self.ui.username()
792 user = user or self.ui.username()
790 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
793 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
791 tr.close()
794 tr.close()
792 if update_dirstate:
795 if update_dirstate:
793 self.dirstate.setparents(n, nullid)
796 self.dirstate.setparents(n, nullid)
794
797
795 def commit(self, files = None, text = "", user = None, date = None,
798 def commit(self, files = None, text = "", user = None, date = None,
796 match = util.always):
799 match = util.always):
797 commit = []
800 commit = []
798 remove = []
801 remove = []
799 if files:
802 if files:
800 for f in files:
803 for f in files:
801 s = self.dirstate.state(f)
804 s = self.dirstate.state(f)
802 if s in 'nmai':
805 if s in 'nmai':
803 commit.append(f)
806 commit.append(f)
804 elif s == 'r':
807 elif s == 'r':
805 remove.append(f)
808 remove.append(f)
806 else:
809 else:
807 self.ui.warn("%s not tracked!\n" % f)
810 self.ui.warn("%s not tracked!\n" % f)
808 else:
811 else:
809 (c, a, d, u) = self.changes(match = match)
812 (c, a, d, u) = self.changes(match = match)
810 commit = c + a
813 commit = c + a
811 remove = d
814 remove = d
812
815
813 if not commit and not remove:
816 if not commit and not remove:
814 self.ui.status("nothing changed\n")
817 self.ui.status("nothing changed\n")
815 return
818 return
816
819
817 if not self.hook("precommit"):
820 if not self.hook("precommit"):
818 return 1
821 return 1
819
822
820 p1, p2 = self.dirstate.parents()
823 p1, p2 = self.dirstate.parents()
821 c1 = self.changelog.read(p1)
824 c1 = self.changelog.read(p1)
822 c2 = self.changelog.read(p2)
825 c2 = self.changelog.read(p2)
823 m1 = self.manifest.read(c1[0])
826 m1 = self.manifest.read(c1[0])
824 mf1 = self.manifest.readflags(c1[0])
827 mf1 = self.manifest.readflags(c1[0])
825 m2 = self.manifest.read(c2[0])
828 m2 = self.manifest.read(c2[0])
826 lock = self.lock()
829 lock = self.lock()
827 tr = self.transaction()
830 tr = self.transaction()
828
831
829 # check in files
832 # check in files
830 new = {}
833 new = {}
831 linkrev = self.changelog.count()
834 linkrev = self.changelog.count()
832 commit.sort()
835 commit.sort()
833 for f in commit:
836 for f in commit:
834 self.ui.note(f + "\n")
837 self.ui.note(f + "\n")
835 try:
838 try:
836 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
839 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
837 t = self.wfile(f).read()
840 t = self.wfile(f).read()
838 except IOError:
841 except IOError:
839 self.ui.warn("trouble committing %s!\n" % f)
842 self.ui.warn("trouble committing %s!\n" % f)
840 raise
843 raise
841
844
842 meta = {}
845 meta = {}
843 cp = self.dirstate.copied(f)
846 cp = self.dirstate.copied(f)
844 if cp:
847 if cp:
845 meta["copy"] = cp
848 meta["copy"] = cp
846 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
849 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
847 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
850 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
848
851
849 r = self.file(f)
852 r = self.file(f)
850 fp1 = m1.get(f, nullid)
853 fp1 = m1.get(f, nullid)
851 fp2 = m2.get(f, nullid)
854 fp2 = m2.get(f, nullid)
852 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
855 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
853
856
854 # update manifest
857 # update manifest
855 m1.update(new)
858 m1.update(new)
856 for f in remove:
859 for f in remove:
857 if f in m1:
860 if f in m1:
858 del m1[f]
861 del m1[f]
859 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
862 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
860 (new, remove))
863 (new, remove))
861
864
862 # add changeset
865 # add changeset
863 new = new.keys()
866 new = new.keys()
864 new.sort()
867 new.sort()
865
868
866 if not text:
869 if not text:
867 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
870 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
868 edittext += "".join(["HG: changed %s\n" % f for f in new])
871 edittext += "".join(["HG: changed %s\n" % f for f in new])
869 edittext += "".join(["HG: removed %s\n" % f for f in remove])
872 edittext += "".join(["HG: removed %s\n" % f for f in remove])
870 edittext = self.ui.edit(edittext)
873 edittext = self.ui.edit(edittext)
871 if not edittext.rstrip():
874 if not edittext.rstrip():
872 return 1
875 return 1
873 text = edittext
876 text = edittext
874
877
875 user = user or self.ui.username()
878 user = user or self.ui.username()
876 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
879 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
877
880
878 tr.close()
881 tr.close()
879
882
880 self.dirstate.setparents(n)
883 self.dirstate.setparents(n)
881 self.dirstate.update(new, "n")
884 self.dirstate.update(new, "n")
882 self.dirstate.forget(remove)
885 self.dirstate.forget(remove)
883
886
884 if not self.hook("commit", node=hex(n)):
887 if not self.hook("commit", node=hex(n)):
885 return 1
888 return 1
886
889
887 def walk(self, node = None, files = [], match = util.always):
890 def walk(self, node = None, files = [], match = util.always):
888 if node:
891 if node:
889 for fn in self.manifest.read(self.changelog.read(node)[0]):
892 for fn in self.manifest.read(self.changelog.read(node)[0]):
890 if match(fn): yield 'm', fn
893 if match(fn): yield 'm', fn
891 else:
894 else:
892 for src, fn in self.dirstate.walk(files, match):
895 for src, fn in self.dirstate.walk(files, match):
893 yield src, fn
896 yield src, fn
894
897
895 def changes(self, node1 = None, node2 = None, files = [],
898 def changes(self, node1 = None, node2 = None, files = [],
896 match = util.always):
899 match = util.always):
897 mf2, u = None, []
900 mf2, u = None, []
898
901
899 def fcmp(fn, mf):
902 def fcmp(fn, mf):
900 t1 = self.wfile(fn).read()
903 t1 = self.wfile(fn).read()
901 t2 = self.file(fn).revision(mf[fn])
904 t2 = self.file(fn).revision(mf[fn])
902 return cmp(t1, t2)
905 return cmp(t1, t2)
903
906
904 def mfmatches(node):
907 def mfmatches(node):
905 mf = dict(self.manifest.read(node))
908 mf = dict(self.manifest.read(node))
906 for fn in mf.keys():
909 for fn in mf.keys():
907 if not match(fn):
910 if not match(fn):
908 del mf[fn]
911 del mf[fn]
909 return mf
912 return mf
910
913
911 # are we comparing the working directory?
914 # are we comparing the working directory?
912 if not node2:
915 if not node2:
913 l, c, a, d, u = self.dirstate.changes(files, match)
916 l, c, a, d, u = self.dirstate.changes(files, match)
914
917
915 # are we comparing working dir against its parent?
918 # are we comparing working dir against its parent?
916 if not node1:
919 if not node1:
917 if l:
920 if l:
918 # do a full compare of any files that might have changed
921 # do a full compare of any files that might have changed
919 change = self.changelog.read(self.dirstate.parents()[0])
922 change = self.changelog.read(self.dirstate.parents()[0])
920 mf2 = mfmatches(change[0])
923 mf2 = mfmatches(change[0])
921 for f in l:
924 for f in l:
922 if fcmp(f, mf2):
925 if fcmp(f, mf2):
923 c.append(f)
926 c.append(f)
924
927
925 for l in c, a, d, u:
928 for l in c, a, d, u:
926 l.sort()
929 l.sort()
927
930
928 return (c, a, d, u)
931 return (c, a, d, u)
929
932
930 # are we comparing working dir against non-tip?
933 # are we comparing working dir against non-tip?
931 # generate a pseudo-manifest for the working dir
934 # generate a pseudo-manifest for the working dir
932 if not node2:
935 if not node2:
933 if not mf2:
936 if not mf2:
934 change = self.changelog.read(self.dirstate.parents()[0])
937 change = self.changelog.read(self.dirstate.parents()[0])
935 mf2 = mfmatches(change[0])
938 mf2 = mfmatches(change[0])
936 for f in a + c + l:
939 for f in a + c + l:
937 mf2[f] = ""
940 mf2[f] = ""
938 for f in d:
941 for f in d:
939 if f in mf2: del mf2[f]
942 if f in mf2: del mf2[f]
940 else:
943 else:
941 change = self.changelog.read(node2)
944 change = self.changelog.read(node2)
942 mf2 = mfmatches(change[0])
945 mf2 = mfmatches(change[0])
943
946
944 # flush lists from dirstate before comparing manifests
947 # flush lists from dirstate before comparing manifests
945 c, a = [], []
948 c, a = [], []
946
949
947 change = self.changelog.read(node1)
950 change = self.changelog.read(node1)
948 mf1 = mfmatches(change[0])
951 mf1 = mfmatches(change[0])
949
952
950 for fn in mf2:
953 for fn in mf2:
951 if mf1.has_key(fn):
954 if mf1.has_key(fn):
952 if mf1[fn] != mf2[fn]:
955 if mf1[fn] != mf2[fn]:
953 if mf2[fn] != "" or fcmp(fn, mf1):
956 if mf2[fn] != "" or fcmp(fn, mf1):
954 c.append(fn)
957 c.append(fn)
955 del mf1[fn]
958 del mf1[fn]
956 else:
959 else:
957 a.append(fn)
960 a.append(fn)
958
961
959 d = mf1.keys()
962 d = mf1.keys()
960
963
961 for l in c, a, d, u:
964 for l in c, a, d, u:
962 l.sort()
965 l.sort()
963
966
964 return (c, a, d, u)
967 return (c, a, d, u)
965
968
966 def add(self, list):
969 def add(self, list):
967 for f in list:
970 for f in list:
968 p = self.wjoin(f)
971 p = self.wjoin(f)
969 if not os.path.exists(p):
972 if not os.path.exists(p):
970 self.ui.warn("%s does not exist!\n" % f)
973 self.ui.warn("%s does not exist!\n" % f)
971 elif not os.path.isfile(p):
974 elif not os.path.isfile(p):
972 self.ui.warn("%s not added: only files supported currently\n" % f)
975 self.ui.warn("%s not added: only files supported currently\n" % f)
973 elif self.dirstate.state(f) in 'an':
976 elif self.dirstate.state(f) in 'an':
974 self.ui.warn("%s already tracked!\n" % f)
977 self.ui.warn("%s already tracked!\n" % f)
975 else:
978 else:
976 self.dirstate.update([f], "a")
979 self.dirstate.update([f], "a")
977
980
978 def forget(self, list):
981 def forget(self, list):
979 for f in list:
982 for f in list:
980 if self.dirstate.state(f) not in 'ai':
983 if self.dirstate.state(f) not in 'ai':
981 self.ui.warn("%s not added!\n" % f)
984 self.ui.warn("%s not added!\n" % f)
982 else:
985 else:
983 self.dirstate.forget([f])
986 self.dirstate.forget([f])
984
987
985 def remove(self, list):
988 def remove(self, list):
986 for f in list:
989 for f in list:
987 p = self.wjoin(f)
990 p = self.wjoin(f)
988 if os.path.exists(p):
991 if os.path.exists(p):
989 self.ui.warn("%s still exists!\n" % f)
992 self.ui.warn("%s still exists!\n" % f)
990 elif self.dirstate.state(f) == 'a':
993 elif self.dirstate.state(f) == 'a':
991 self.ui.warn("%s never committed!\n" % f)
994 self.ui.warn("%s never committed!\n" % f)
992 self.dirstate.forget([f])
995 self.dirstate.forget([f])
993 elif f not in self.dirstate:
996 elif f not in self.dirstate:
994 self.ui.warn("%s not tracked!\n" % f)
997 self.ui.warn("%s not tracked!\n" % f)
995 else:
998 else:
996 self.dirstate.update([f], "r")
999 self.dirstate.update([f], "r")
997
1000
998 def copy(self, source, dest):
1001 def copy(self, source, dest):
999 p = self.wjoin(dest)
1002 p = self.wjoin(dest)
1000 if not os.path.exists(p):
1003 if not os.path.exists(p):
1001 self.ui.warn("%s does not exist!\n" % dest)
1004 self.ui.warn("%s does not exist!\n" % dest)
1002 elif not os.path.isfile(p):
1005 elif not os.path.isfile(p):
1003 self.ui.warn("copy failed: %s is not a file\n" % dest)
1006 self.ui.warn("copy failed: %s is not a file\n" % dest)
1004 else:
1007 else:
1005 if self.dirstate.state(dest) == '?':
1008 if self.dirstate.state(dest) == '?':
1006 self.dirstate.update([dest], "a")
1009 self.dirstate.update([dest], "a")
1007 self.dirstate.copy(source, dest)
1010 self.dirstate.copy(source, dest)
1008
1011
1009 def heads(self):
1012 def heads(self):
1010 return self.changelog.heads()
1013 return self.changelog.heads()
1011
1014
1012 def branches(self, nodes):
1015 def branches(self, nodes):
1013 if not nodes: nodes = [self.changelog.tip()]
1016 if not nodes: nodes = [self.changelog.tip()]
1014 b = []
1017 b = []
1015 for n in nodes:
1018 for n in nodes:
1016 t = n
1019 t = n
1017 while n:
1020 while n:
1018 p = self.changelog.parents(n)
1021 p = self.changelog.parents(n)
1019 if p[1] != nullid or p[0] == nullid:
1022 if p[1] != nullid or p[0] == nullid:
1020 b.append((t, n, p[0], p[1]))
1023 b.append((t, n, p[0], p[1]))
1021 break
1024 break
1022 n = p[0]
1025 n = p[0]
1023 return b
1026 return b
1024
1027
1025 def between(self, pairs):
1028 def between(self, pairs):
1026 r = []
1029 r = []
1027
1030
1028 for top, bottom in pairs:
1031 for top, bottom in pairs:
1029 n, l, i = top, [], 0
1032 n, l, i = top, [], 0
1030 f = 1
1033 f = 1
1031
1034
1032 while n != bottom:
1035 while n != bottom:
1033 p = self.changelog.parents(n)[0]
1036 p = self.changelog.parents(n)[0]
1034 if i == f:
1037 if i == f:
1035 l.append(n)
1038 l.append(n)
1036 f = f * 2
1039 f = f * 2
1037 n = p
1040 n = p
1038 i += 1
1041 i += 1
1039
1042
1040 r.append(l)
1043 r.append(l)
1041
1044
1042 return r
1045 return r
1043
1046
1044 def newer(self, nodes):
1047 def newer(self, nodes):
1045 m = {}
1048 m = {}
1046 nl = []
1049 nl = []
1047 pm = {}
1050 pm = {}
1048 cl = self.changelog
1051 cl = self.changelog
1049 t = l = cl.count()
1052 t = l = cl.count()
1050
1053
1051 # find the lowest numbered node
1054 # find the lowest numbered node
1052 for n in nodes:
1055 for n in nodes:
1053 l = min(l, cl.rev(n))
1056 l = min(l, cl.rev(n))
1054 m[n] = 1
1057 m[n] = 1
1055
1058
1056 for i in xrange(l, t):
1059 for i in xrange(l, t):
1057 n = cl.node(i)
1060 n = cl.node(i)
1058 if n in m: # explicitly listed
1061 if n in m: # explicitly listed
1059 pm[n] = 1
1062 pm[n] = 1
1060 nl.append(n)
1063 nl.append(n)
1061 continue
1064 continue
1062 for p in cl.parents(n):
1065 for p in cl.parents(n):
1063 if p in pm: # parent listed
1066 if p in pm: # parent listed
1064 pm[n] = 1
1067 pm[n] = 1
1065 nl.append(n)
1068 nl.append(n)
1066 break
1069 break
1067
1070
1068 return nl
1071 return nl
1069
1072
1070 def findincoming(self, remote, base=None, heads=None):
1073 def findincoming(self, remote, base=None, heads=None):
1071 m = self.changelog.nodemap
1074 m = self.changelog.nodemap
1072 search = []
1075 search = []
1073 fetch = []
1076 fetch = []
1074 seen = {}
1077 seen = {}
1075 seenbranch = {}
1078 seenbranch = {}
1076 if base == None:
1079 if base == None:
1077 base = {}
1080 base = {}
1078
1081
1079 # assume we're closer to the tip than the root
1082 # assume we're closer to the tip than the root
1080 # and start by examining the heads
1083 # and start by examining the heads
1081 self.ui.status("searching for changes\n")
1084 self.ui.status("searching for changes\n")
1082
1085
1083 if not heads:
1086 if not heads:
1084 heads = remote.heads()
1087 heads = remote.heads()
1085
1088
1086 unknown = []
1089 unknown = []
1087 for h in heads:
1090 for h in heads:
1088 if h not in m:
1091 if h not in m:
1089 unknown.append(h)
1092 unknown.append(h)
1090 else:
1093 else:
1091 base[h] = 1
1094 base[h] = 1
1092
1095
1093 if not unknown:
1096 if not unknown:
1094 return None
1097 return None
1095
1098
1096 rep = {}
1099 rep = {}
1097 reqcnt = 0
1100 reqcnt = 0
1098
1101
1099 # search through remote branches
1102 # search through remote branches
1100 # a 'branch' here is a linear segment of history, with four parts:
1103 # a 'branch' here is a linear segment of history, with four parts:
1101 # head, root, first parent, second parent
1104 # head, root, first parent, second parent
1102 # (a branch always has two parents (or none) by definition)
1105 # (a branch always has two parents (or none) by definition)
1103 unknown = remote.branches(unknown)
1106 unknown = remote.branches(unknown)
1104 while unknown:
1107 while unknown:
1105 r = []
1108 r = []
1106 while unknown:
1109 while unknown:
1107 n = unknown.pop(0)
1110 n = unknown.pop(0)
1108 if n[0] in seen:
1111 if n[0] in seen:
1109 continue
1112 continue
1110
1113
1111 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1114 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1112 if n[0] == nullid:
1115 if n[0] == nullid:
1113 break
1116 break
1114 if n in seenbranch:
1117 if n in seenbranch:
1115 self.ui.debug("branch already found\n")
1118 self.ui.debug("branch already found\n")
1116 continue
1119 continue
1117 if n[1] and n[1] in m: # do we know the base?
1120 if n[1] and n[1] in m: # do we know the base?
1118 self.ui.debug("found incomplete branch %s:%s\n"
1121 self.ui.debug("found incomplete branch %s:%s\n"
1119 % (short(n[0]), short(n[1])))
1122 % (short(n[0]), short(n[1])))
1120 search.append(n) # schedule branch range for scanning
1123 search.append(n) # schedule branch range for scanning
1121 seenbranch[n] = 1
1124 seenbranch[n] = 1
1122 else:
1125 else:
1123 if n[1] not in seen and n[1] not in fetch:
1126 if n[1] not in seen and n[1] not in fetch:
1124 if n[2] in m and n[3] in m:
1127 if n[2] in m and n[3] in m:
1125 self.ui.debug("found new changeset %s\n" %
1128 self.ui.debug("found new changeset %s\n" %
1126 short(n[1]))
1129 short(n[1]))
1127 fetch.append(n[1]) # earliest unknown
1130 fetch.append(n[1]) # earliest unknown
1128 base[n[2]] = 1 # latest known
1131 base[n[2]] = 1 # latest known
1129 continue
1132 continue
1130
1133
1131 for a in n[2:4]:
1134 for a in n[2:4]:
1132 if a not in rep:
1135 if a not in rep:
1133 r.append(a)
1136 r.append(a)
1134 rep[a] = 1
1137 rep[a] = 1
1135
1138
1136 seen[n[0]] = 1
1139 seen[n[0]] = 1
1137
1140
1138 if r:
1141 if r:
1139 reqcnt += 1
1142 reqcnt += 1
1140 self.ui.debug("request %d: %s\n" %
1143 self.ui.debug("request %d: %s\n" %
1141 (reqcnt, " ".join(map(short, r))))
1144 (reqcnt, " ".join(map(short, r))))
1142 for p in range(0, len(r), 10):
1145 for p in range(0, len(r), 10):
1143 for b in remote.branches(r[p:p+10]):
1146 for b in remote.branches(r[p:p+10]):
1144 self.ui.debug("received %s:%s\n" %
1147 self.ui.debug("received %s:%s\n" %
1145 (short(b[0]), short(b[1])))
1148 (short(b[0]), short(b[1])))
1146 if b[0] not in m and b[0] not in seen:
1149 if b[0] not in m and b[0] not in seen:
1147 unknown.append(b)
1150 unknown.append(b)
1148
1151
1149 # do binary search on the branches we found
1152 # do binary search on the branches we found
1150 while search:
1153 while search:
1151 n = search.pop(0)
1154 n = search.pop(0)
1152 reqcnt += 1
1155 reqcnt += 1
1153 l = remote.between([(n[0], n[1])])[0]
1156 l = remote.between([(n[0], n[1])])[0]
1154 l.append(n[1])
1157 l.append(n[1])
1155 p = n[0]
1158 p = n[0]
1156 f = 1
1159 f = 1
1157 for i in l:
1160 for i in l:
1158 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1161 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1159 if i in m:
1162 if i in m:
1160 if f <= 2:
1163 if f <= 2:
1161 self.ui.debug("found new branch changeset %s\n" %
1164 self.ui.debug("found new branch changeset %s\n" %
1162 short(p))
1165 short(p))
1163 fetch.append(p)
1166 fetch.append(p)
1164 base[i] = 1
1167 base[i] = 1
1165 else:
1168 else:
1166 self.ui.debug("narrowed branch search to %s:%s\n"
1169 self.ui.debug("narrowed branch search to %s:%s\n"
1167 % (short(p), short(i)))
1170 % (short(p), short(i)))
1168 search.append((p, i))
1171 search.append((p, i))
1169 break
1172 break
1170 p, f = i, f * 2
1173 p, f = i, f * 2
1171
1174
1172 # sanity check our fetch list
1175 # sanity check our fetch list
1173 for f in fetch:
1176 for f in fetch:
1174 if f in m:
1177 if f in m:
1175 raise RepoError("already have changeset " + short(f))
1178 raise RepoError("already have changeset " + short(f))
1176
1179
1177 if base.keys() == [nullid]:
1180 if base.keys() == [nullid]:
1178 self.ui.warn("warning: pulling from an unrelated repository!\n")
1181 self.ui.warn("warning: pulling from an unrelated repository!\n")
1179
1182
1180 self.ui.note("adding new changesets starting at " +
1183 self.ui.note("adding new changesets starting at " +
1181 " ".join([short(f) for f in fetch]) + "\n")
1184 " ".join([short(f) for f in fetch]) + "\n")
1182
1185
1183 self.ui.debug("%d total queries\n" % reqcnt)
1186 self.ui.debug("%d total queries\n" % reqcnt)
1184
1187
1185 return fetch
1188 return fetch
1186
1189
1187 def findoutgoing(self, remote, base=None, heads=None):
1190 def findoutgoing(self, remote, base=None, heads=None):
1188 if base == None:
1191 if base == None:
1189 base = {}
1192 base = {}
1190 self.findincoming(remote, base, heads)
1193 self.findincoming(remote, base, heads)
1191
1194
1192 remain = dict.fromkeys(self.changelog.nodemap)
1195 remain = dict.fromkeys(self.changelog.nodemap)
1193
1196
1194 # prune everything remote has from the tree
1197 # prune everything remote has from the tree
1195 del remain[nullid]
1198 del remain[nullid]
1196 remove = base.keys()
1199 remove = base.keys()
1197 while remove:
1200 while remove:
1198 n = remove.pop(0)
1201 n = remove.pop(0)
1199 if n in remain:
1202 if n in remain:
1200 del remain[n]
1203 del remain[n]
1201 for p in self.changelog.parents(n):
1204 for p in self.changelog.parents(n):
1202 remove.append(p)
1205 remove.append(p)
1203
1206
1204 # find every node whose parents have been pruned
1207 # find every node whose parents have been pruned
1205 subset = []
1208 subset = []
1206 for n in remain:
1209 for n in remain:
1207 p1, p2 = self.changelog.parents(n)
1210 p1, p2 = self.changelog.parents(n)
1208 if p1 not in remain and p2 not in remain:
1211 if p1 not in remain and p2 not in remain:
1209 subset.append(n)
1212 subset.append(n)
1210
1213
1211 # this is the set of all roots we have to push
1214 # this is the set of all roots we have to push
1212 return subset
1215 return subset
1213
1216
1214 def pull(self, remote):
1217 def pull(self, remote):
1215 lock = self.lock()
1218 lock = self.lock()
1216
1219
1217 # if we have an empty repo, fetch everything
1220 # if we have an empty repo, fetch everything
1218 if self.changelog.tip() == nullid:
1221 if self.changelog.tip() == nullid:
1219 self.ui.status("requesting all changes\n")
1222 self.ui.status("requesting all changes\n")
1220 fetch = [nullid]
1223 fetch = [nullid]
1221 else:
1224 else:
1222 fetch = self.findincoming(remote)
1225 fetch = self.findincoming(remote)
1223
1226
1224 if not fetch:
1227 if not fetch:
1225 self.ui.status("no changes found\n")
1228 self.ui.status("no changes found\n")
1226 return 1
1229 return 1
1227
1230
1228 cg = remote.changegroup(fetch)
1231 cg = remote.changegroup(fetch)
1229 return self.addchangegroup(cg)
1232 return self.addchangegroup(cg)
1230
1233
1231 def push(self, remote, force=False):
1234 def push(self, remote, force=False):
1232 lock = remote.lock()
1235 lock = remote.lock()
1233
1236
1234 base = {}
1237 base = {}
1235 heads = remote.heads()
1238 heads = remote.heads()
1236 inc = self.findincoming(remote, base, heads)
1239 inc = self.findincoming(remote, base, heads)
1237 if not force and inc:
1240 if not force and inc:
1238 self.ui.warn("abort: unsynced remote changes!\n")
1241 self.ui.warn("abort: unsynced remote changes!\n")
1239 self.ui.status("(did you forget to sync? use push -f to force)\n")
1242 self.ui.status("(did you forget to sync? use push -f to force)\n")
1240 return 1
1243 return 1
1241
1244
1242 update = self.findoutgoing(remote, base)
1245 update = self.findoutgoing(remote, base)
1243 if not update:
1246 if not update:
1244 self.ui.status("no changes found\n")
1247 self.ui.status("no changes found\n")
1245 return 1
1248 return 1
1246 elif not force:
1249 elif not force:
1247 if len(heads) < len(self.changelog.heads()):
1250 if len(heads) < len(self.changelog.heads()):
1248 self.ui.warn("abort: push creates new remote branches!\n")
1251 self.ui.warn("abort: push creates new remote branches!\n")
1249 self.ui.status("(did you forget to merge?" +
1252 self.ui.status("(did you forget to merge?" +
1250 " use push -f to force)\n")
1253 " use push -f to force)\n")
1251 return 1
1254 return 1
1252
1255
1253 cg = self.changegroup(update)
1256 cg = self.changegroup(update)
1254 return remote.addchangegroup(cg)
1257 return remote.addchangegroup(cg)
1255
1258
1256 def changegroup(self, basenodes):
1259 def changegroup(self, basenodes):
1257 class genread:
1260 class genread:
1258 def __init__(self, generator):
1261 def __init__(self, generator):
1259 self.g = generator
1262 self.g = generator
1260 self.buf = ""
1263 self.buf = ""
1261 def read(self, l):
1264 def read(self, l):
1262 while l > len(self.buf):
1265 while l > len(self.buf):
1263 try:
1266 try:
1264 self.buf += self.g.next()
1267 self.buf += self.g.next()
1265 except StopIteration:
1268 except StopIteration:
1266 break
1269 break
1267 d, self.buf = self.buf[:l], self.buf[l:]
1270 d, self.buf = self.buf[:l], self.buf[l:]
1268 return d
1271 return d
1269
1272
1270 def gengroup():
1273 def gengroup():
1271 nodes = self.newer(basenodes)
1274 nodes = self.newer(basenodes)
1272
1275
1273 # construct the link map
1276 # construct the link map
1274 linkmap = {}
1277 linkmap = {}
1275 for n in nodes:
1278 for n in nodes:
1276 linkmap[self.changelog.rev(n)] = n
1279 linkmap[self.changelog.rev(n)] = n
1277
1280
1278 # construct a list of all changed files
1281 # construct a list of all changed files
1279 changed = {}
1282 changed = {}
1280 for n in nodes:
1283 for n in nodes:
1281 c = self.changelog.read(n)
1284 c = self.changelog.read(n)
1282 for f in c[3]:
1285 for f in c[3]:
1283 changed[f] = 1
1286 changed[f] = 1
1284 changed = changed.keys()
1287 changed = changed.keys()
1285 changed.sort()
1288 changed.sort()
1286
1289
1287 # the changegroup is changesets + manifests + all file revs
1290 # the changegroup is changesets + manifests + all file revs
1288 revs = [ self.changelog.rev(n) for n in nodes ]
1291 revs = [ self.changelog.rev(n) for n in nodes ]
1289
1292
1290 for y in self.changelog.group(linkmap): yield y
1293 for y in self.changelog.group(linkmap): yield y
1291 for y in self.manifest.group(linkmap): yield y
1294 for y in self.manifest.group(linkmap): yield y
1292 for f in changed:
1295 for f in changed:
1293 yield struct.pack(">l", len(f) + 4) + f
1296 yield struct.pack(">l", len(f) + 4) + f
1294 g = self.file(f).group(linkmap)
1297 g = self.file(f).group(linkmap)
1295 for y in g:
1298 for y in g:
1296 yield y
1299 yield y
1297
1300
1298 yield struct.pack(">l", 0)
1301 yield struct.pack(">l", 0)
1299
1302
1300 return genread(gengroup())
1303 return genread(gengroup())
1301
1304
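The stream built by gengroup() above (and unpacked by addchangegroup() below) is a sequence of length-prefixed chunks: a changeset group, a manifest group, then for each changed file a chunk carrying the file name followed by that file's revision group, with a zero length marking the end of each group and of the whole stream. A minimal sketch of just the framing, assuming an in-memory StringIO stream rather than a real changegroup:

    import struct
    from StringIO import StringIO

    def write_chunk(payload):
        # 4-byte big-endian length that counts the 4 length bytes themselves
        return struct.pack(">l", len(payload) + 4) + payload

    def read_chunk(stream):
        d = stream.read(4)
        if not d:
            return ""
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            return ""                     # terminator: end of this group
        return stream.read(l - 4)

    stream = StringIO(write_chunk("hello") + struct.pack(">l", 0))
    assert read_chunk(stream) == "hello"
    assert read_chunk(stream) == ""       # hit the zero-length terminator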
1302 def addchangegroup(self, source):
1305 def addchangegroup(self, source):
1303
1306
1304 def getchunk():
1307 def getchunk():
1305 d = source.read(4)
1308 d = source.read(4)
1306 if not d: return ""
1309 if not d: return ""
1307 l = struct.unpack(">l", d)[0]
1310 l = struct.unpack(">l", d)[0]
1308 if l <= 4: return ""
1311 if l <= 4: return ""
1309 return source.read(l - 4)
1312 return source.read(l - 4)
1310
1313
1311 def getgroup():
1314 def getgroup():
1312 while 1:
1315 while 1:
1313 c = getchunk()
1316 c = getchunk()
1314 if not c: break
1317 if not c: break
1315 yield c
1318 yield c
1316
1319
1317 def csmap(x):
1320 def csmap(x):
1318 self.ui.debug("add changeset %s\n" % short(x))
1321 self.ui.debug("add changeset %s\n" % short(x))
1319 return self.changelog.count()
1322 return self.changelog.count()
1320
1323
1321 def revmap(x):
1324 def revmap(x):
1322 return self.changelog.rev(x)
1325 return self.changelog.rev(x)
1323
1326
1324 if not source: return
1327 if not source: return
1325 changesets = files = revisions = 0
1328 changesets = files = revisions = 0
1326
1329
1327 tr = self.transaction()
1330 tr = self.transaction()
1328
1331
1329 # pull off the changeset group
1332 # pull off the changeset group
1330 self.ui.status("adding changesets\n")
1333 self.ui.status("adding changesets\n")
1331 co = self.changelog.tip()
1334 co = self.changelog.tip()
1332 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1335 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1333 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1336 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1334
1337
1335 # pull off the manifest group
1338 # pull off the manifest group
1336 self.ui.status("adding manifests\n")
1339 self.ui.status("adding manifests\n")
1337 mm = self.manifest.tip()
1340 mm = self.manifest.tip()
1338 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1341 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1339
1342
1340 # process the files
1343 # process the files
1341 self.ui.status("adding file changes\n")
1344 self.ui.status("adding file changes\n")
1342 while 1:
1345 while 1:
1343 f = getchunk()
1346 f = getchunk()
1344 if not f: break
1347 if not f: break
1345 self.ui.debug("adding %s revisions\n" % f)
1348 self.ui.debug("adding %s revisions\n" % f)
1346 fl = self.file(f)
1349 fl = self.file(f)
1347 o = fl.count()
1350 o = fl.count()
1348 n = fl.addgroup(getgroup(), revmap, tr)
1351 n = fl.addgroup(getgroup(), revmap, tr)
1349 revisions += fl.count() - o
1352 revisions += fl.count() - o
1350 files += 1
1353 files += 1
1351
1354
1352 self.ui.status(("added %d changesets" +
1355 self.ui.status(("added %d changesets" +
1353 " with %d changes to %d files\n")
1356 " with %d changes to %d files\n")
1354 % (changesets, revisions, files))
1357 % (changesets, revisions, files))
1355
1358
1356 tr.close()
1359 tr.close()
1357
1360
1358 if not self.hook("changegroup"):
1361 if not self.hook("changegroup"):
1359 return 1
1362 return 1
1360
1363
1361 return
1364 return
1362
1365
1363 def update(self, node, allow=False, force=False, choose=None,
1366 def update(self, node, allow=False, force=False, choose=None,
1364 moddirstate=True):
1367 moddirstate=True):
1365 pl = self.dirstate.parents()
1368 pl = self.dirstate.parents()
1366 if not force and pl[1] != nullid:
1369 if not force and pl[1] != nullid:
1367 self.ui.warn("aborting: outstanding uncommitted merges\n")
1370 self.ui.warn("aborting: outstanding uncommitted merges\n")
1368 return 1
1371 return 1
1369
1372
1370 p1, p2 = pl[0], node
1373 p1, p2 = pl[0], node
1371 pa = self.changelog.ancestor(p1, p2)
1374 pa = self.changelog.ancestor(p1, p2)
1372 m1n = self.changelog.read(p1)[0]
1375 m1n = self.changelog.read(p1)[0]
1373 m2n = self.changelog.read(p2)[0]
1376 m2n = self.changelog.read(p2)[0]
1374 man = self.manifest.ancestor(m1n, m2n)
1377 man = self.manifest.ancestor(m1n, m2n)
1375 m1 = self.manifest.read(m1n)
1378 m1 = self.manifest.read(m1n)
1376 mf1 = self.manifest.readflags(m1n)
1379 mf1 = self.manifest.readflags(m1n)
1377 m2 = self.manifest.read(m2n)
1380 m2 = self.manifest.read(m2n)
1378 mf2 = self.manifest.readflags(m2n)
1381 mf2 = self.manifest.readflags(m2n)
1379 ma = self.manifest.read(man)
1382 ma = self.manifest.read(man)
1380 mfa = self.manifest.readflags(man)
1383 mfa = self.manifest.readflags(man)
1381
1384
1382 (c, a, d, u) = self.changes()
1385 (c, a, d, u) = self.changes()
1383
1386
1384 # is this a jump, or a merge? i.e. is there a linear path
1387 # is this a jump, or a merge? i.e. is there a linear path
1385 # from p1 to p2?
1388 # from p1 to p2?
1386 linear_path = (pa == p1 or pa == p2)
1389 linear_path = (pa == p1 or pa == p2)
1387
1390
1388 # resolve the manifest to determine which files
1391 # resolve the manifest to determine which files
1389 # we care about merging
1392 # we care about merging
1390 self.ui.note("resolving manifests\n")
1393 self.ui.note("resolving manifests\n")
1391 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1394 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1392 (force, allow, moddirstate, linear_path))
1395 (force, allow, moddirstate, linear_path))
1393 self.ui.debug(" ancestor %s local %s remote %s\n" %
1396 self.ui.debug(" ancestor %s local %s remote %s\n" %
1394 (short(man), short(m1n), short(m2n)))
1397 (short(man), short(m1n), short(m2n)))
1395
1398
1396 merge = {}
1399 merge = {}
1397 get = {}
1400 get = {}
1398 remove = []
1401 remove = []
1399 mark = {}
1402 mark = {}
1400
1403
1401 # construct a working dir manifest
1404 # construct a working dir manifest
1402 mw = m1.copy()
1405 mw = m1.copy()
1403 mfw = mf1.copy()
1406 mfw = mf1.copy()
1404 umap = dict.fromkeys(u)
1407 umap = dict.fromkeys(u)
1405
1408
1406 for f in a + c + u:
1409 for f in a + c + u:
1407 mw[f] = ""
1410 mw[f] = ""
1408 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1411 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1409
1412
1410 for f in d:
1413 for f in d:
1411 if f in mw: del mw[f]
1414 if f in mw: del mw[f]
1412
1415
1413 # If we're jumping between revisions (as opposed to merging),
1416 # If we're jumping between revisions (as opposed to merging),
1414 # and if neither the working directory nor the target rev has
1417 # and if neither the working directory nor the target rev has
1415 # the file, then we need to remove it from the dirstate, to
1418 # the file, then we need to remove it from the dirstate, to
1416 # prevent the dirstate from listing the file when it is no
1419 # prevent the dirstate from listing the file when it is no
1417 # longer in the manifest.
1420 # longer in the manifest.
1418 if moddirstate and linear_path and f not in m2:
1421 if moddirstate and linear_path and f not in m2:
1419 self.dirstate.forget((f,))
1422 self.dirstate.forget((f,))
1420
1423
1421 # Compare manifests
1424 # Compare manifests
1422 for f, n in mw.iteritems():
1425 for f, n in mw.iteritems():
1423 if choose and not choose(f): continue
1426 if choose and not choose(f): continue
1424 if f in m2:
1427 if f in m2:
1425 s = 0
1428 s = 0
1426
1429
1427 # is the wfile new since m1, and match m2?
1430 # is the wfile new since m1, and match m2?
1428 if f not in m1:
1431 if f not in m1:
1429 t1 = self.wfile(f).read()
1432 t1 = self.wfile(f).read()
1430 t2 = self.file(f).revision(m2[f])
1433 t2 = self.file(f).revision(m2[f])
1431 if cmp(t1, t2) == 0:
1434 if cmp(t1, t2) == 0:
1432 mark[f] = 1
1435 mark[f] = 1
1433 n = m2[f]
1436 n = m2[f]
1434 del t1, t2
1437 del t1, t2
1435
1438
1436 # are files different?
1439 # are files different?
1437 if n != m2[f]:
1440 if n != m2[f]:
1438 a = ma.get(f, nullid)
1441 a = ma.get(f, nullid)
1439 # are both different from the ancestor?
1442 # are both different from the ancestor?
1440 if n != a and m2[f] != a:
1443 if n != a and m2[f] != a:
1441 self.ui.debug(" %s versions differ, resolve\n" % f)
1444 self.ui.debug(" %s versions differ, resolve\n" % f)
1442 # merge executable bits
1445 # merge executable bits
1443 # "if we changed or they changed, change in merge"
1446 # "if we changed or they changed, change in merge"
1444 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1447 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1445 mode = ((a^b) | (a^c)) ^ a
1448 mode = ((a^b) | (a^c)) ^ a
1446 merge[f] = (m1.get(f, nullid), m2[f], mode)
1449 merge[f] = (m1.get(f, nullid), m2[f], mode)
1447 s = 1
1450 s = 1
1448 # are we clobbering?
1451 # are we clobbering?
1449 # is remote's version newer?
1452 # is remote's version newer?
1450 # or are we going back in time?
1453 # or are we going back in time?
1451 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1454 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1452 self.ui.debug(" remote %s is newer, get\n" % f)
1455 self.ui.debug(" remote %s is newer, get\n" % f)
1453 get[f] = m2[f]
1456 get[f] = m2[f]
1454 s = 1
1457 s = 1
1455 else:
1458 else:
1456 mark[f] = 1
1459 mark[f] = 1
1457 elif f in umap:
1460 elif f in umap:
1458 # this unknown file is the same as the checkout
1461 # this unknown file is the same as the checkout
1459 get[f] = m2[f]
1462 get[f] = m2[f]
1460
1463
1461 if not s and mfw[f] != mf2[f]:
1464 if not s and mfw[f] != mf2[f]:
1462 if force:
1465 if force:
1463 self.ui.debug(" updating permissions for %s\n" % f)
1466 self.ui.debug(" updating permissions for %s\n" % f)
1464 util.set_exec(self.wjoin(f), mf2[f])
1467 util.set_exec(self.wjoin(f), mf2[f])
1465 else:
1468 else:
1466 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1469 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1467 mode = ((a^b) | (a^c)) ^ a
1470 mode = ((a^b) | (a^c)) ^ a
1468 if mode != b:
1471 if mode != b:
1469 self.ui.debug(" updating permissions for %s\n" % f)
1472 self.ui.debug(" updating permissions for %s\n" % f)
1470 util.set_exec(self.wjoin(f), mode)
1473 util.set_exec(self.wjoin(f), mode)
1471 mark[f] = 1
1474 mark[f] = 1
1472 del m2[f]
1475 del m2[f]
1473 elif f in ma:
1476 elif f in ma:
1474 if n != ma[f]:
1477 if n != ma[f]:
1475 r = "d"
1478 r = "d"
1476 if not force and (linear_path or allow):
1479 if not force and (linear_path or allow):
1477 r = self.ui.prompt(
1480 r = self.ui.prompt(
1478 (" local changed %s which remote deleted\n" % f) +
1481 (" local changed %s which remote deleted\n" % f) +
1479 "(k)eep or (d)elete?", "[kd]", "k")
1482 "(k)eep or (d)elete?", "[kd]", "k")
1480 if r == "d":
1483 if r == "d":
1481 remove.append(f)
1484 remove.append(f)
1482 else:
1485 else:
1483 self.ui.debug("other deleted %s\n" % f)
1486 self.ui.debug("other deleted %s\n" % f)
1484 remove.append(f) # other deleted it
1487 remove.append(f) # other deleted it
1485 else:
1488 else:
1486 if n == m1.get(f, nullid): # same as parent
1489 if n == m1.get(f, nullid): # same as parent
1487 if p2 == pa: # going backwards?
1490 if p2 == pa: # going backwards?
1488 self.ui.debug("remote deleted %s\n" % f)
1491 self.ui.debug("remote deleted %s\n" % f)
1489 remove.append(f)
1492 remove.append(f)
1490 else:
1493 else:
1491 self.ui.debug("local created %s, keeping\n" % f)
1494 self.ui.debug("local created %s, keeping\n" % f)
1492 else:
1495 else:
1493 self.ui.debug("working dir created %s, keeping\n" % f)
1496 self.ui.debug("working dir created %s, keeping\n" % f)
1494
1497
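The expression ((a^b) | (a^c)) ^ a used twice in the loop above implements its comment literally: start from the ancestor's flag and flip it whenever either side flipped it. A few spot checks (merged_exec is only an illustrative name):

    def merged_exec(a, b, c):
        # a = ancestor's exec flag, b = working dir's flag, c = remote's flag
        # "if we changed or they changed, change in merge"
        return ((a ^ b) | (a ^ c)) ^ a

    assert merged_exec(0, 1, 0) == 1   # only we set the bit: keep it set
    assert merged_exec(1, 1, 0) == 0   # only the remote cleared it: clear it
    assert merged_exec(0, 1, 1) == 1   # both sides made the same change
    assert merged_exec(1, 0, 1) == 0   # only we cleared it: stay cleared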
1495 for f, n in m2.iteritems():
1498 for f, n in m2.iteritems():
1496 if choose and not choose(f): continue
1499 if choose and not choose(f): continue
1497 if f[0] == "/": continue
1500 if f[0] == "/": continue
1498 if f in ma and n != ma[f]:
1501 if f in ma and n != ma[f]:
1499 r = "k"
1502 r = "k"
1500 if not force and (linear_path or allow):
1503 if not force and (linear_path or allow):
1501 r = self.ui.prompt(
1504 r = self.ui.prompt(
1502 ("remote changed %s which local deleted\n" % f) +
1505 ("remote changed %s which local deleted\n" % f) +
1503 "(k)eep or (d)elete?", "[kd]", "k")
1506 "(k)eep or (d)elete?", "[kd]", "k")
1504 if r == "k": get[f] = n
1507 if r == "k": get[f] = n
1505 elif f not in ma:
1508 elif f not in ma:
1506 self.ui.debug("remote created %s\n" % f)
1509 self.ui.debug("remote created %s\n" % f)
1507 get[f] = n
1510 get[f] = n
1508 else:
1511 else:
1509 if force or p2 == pa: # going backwards?
1512 if force or p2 == pa: # going backwards?
1510 self.ui.debug("local deleted %s, recreating\n" % f)
1513 self.ui.debug("local deleted %s, recreating\n" % f)
1511 get[f] = n
1514 get[f] = n
1512 else:
1515 else:
1513 self.ui.debug("local deleted %s\n" % f)
1516 self.ui.debug("local deleted %s\n" % f)
1514
1517
1515 del mw, m1, m2, ma
1518 del mw, m1, m2, ma
1516
1519
1517 if force:
1520 if force:
1518 for f in merge:
1521 for f in merge:
1519 get[f] = merge[f][1]
1522 get[f] = merge[f][1]
1520 merge = {}
1523 merge = {}
1521
1524
1522 if linear_path or force:
1525 if linear_path or force:
1523 # we don't need to do any magic, just jump to the new rev
1526 # we don't need to do any magic, just jump to the new rev
1524 mode = 'n'
1527 mode = 'n'
1525 p1, p2 = p2, nullid
1528 p1, p2 = p2, nullid
1526 else:
1529 else:
1527 if not allow:
1530 if not allow:
1528 self.ui.status("this update spans a branch" +
1531 self.ui.status("this update spans a branch" +
1529 " affecting the following files:\n")
1532 " affecting the following files:\n")
1530 fl = merge.keys() + get.keys()
1533 fl = merge.keys() + get.keys()
1531 fl.sort()
1534 fl.sort()
1532 for f in fl:
1535 for f in fl:
1533 cf = ""
1536 cf = ""
1534 if f in merge: cf = " (resolve)"
1537 if f in merge: cf = " (resolve)"
1535 self.ui.status(" %s%s\n" % (f, cf))
1538 self.ui.status(" %s%s\n" % (f, cf))
1536 self.ui.warn("aborting update spanning branches!\n")
1539 self.ui.warn("aborting update spanning branches!\n")
1537 self.ui.status("(use update -m to merge across branches" +
1540 self.ui.status("(use update -m to merge across branches" +
1538 " or -C to lose changes)\n")
1541 " or -C to lose changes)\n")
1539 return 1
1542 return 1
1540 # we have to remember what files we needed to get/change
1543 # we have to remember what files we needed to get/change
1541 # because any file that's different from either one of its
1544 # because any file that's different from either one of its
1542 # parents must be in the changeset
1545 # parents must be in the changeset
1543 mode = 'm'
1546 mode = 'm'
1544 if moddirstate:
1547 if moddirstate:
1545 self.dirstate.update(mark.keys(), "m")
1548 self.dirstate.update(mark.keys(), "m")
1546
1549
1547 if moddirstate:
1550 if moddirstate:
1548 self.dirstate.setparents(p1, p2)
1551 self.dirstate.setparents(p1, p2)
1549
1552
1550 # get the files we don't need to change
1553 # get the files we don't need to change
1551 files = get.keys()
1554 files = get.keys()
1552 files.sort()
1555 files.sort()
1553 for f in files:
1556 for f in files:
1554 if f[0] == "/": continue
1557 if f[0] == "/": continue
1555 self.ui.note("getting %s\n" % f)
1558 self.ui.note("getting %s\n" % f)
1556 t = self.file(f).read(get[f])
1559 t = self.file(f).read(get[f])
1557 try:
1560 try:
1558 self.wfile(f, "w").write(t)
1561 self.wfile(f, "w").write(t)
1559 except IOError:
1562 except IOError:
1560 os.makedirs(os.path.dirname(self.wjoin(f)))
1563 os.makedirs(os.path.dirname(self.wjoin(f)))
1561 self.wfile(f, "w").write(t)
1564 self.wfile(f, "w").write(t)
1562 util.set_exec(self.wjoin(f), mf2[f])
1565 util.set_exec(self.wjoin(f), mf2[f])
1563 if moddirstate:
1566 if moddirstate:
1564 self.dirstate.update([f], mode)
1567 self.dirstate.update([f], mode)
1565
1568
1566 # merge the tricky bits
1569 # merge the tricky bits
1567 files = merge.keys()
1570 files = merge.keys()
1568 files.sort()
1571 files.sort()
1569 for f in files:
1572 for f in files:
1570 self.ui.status("merging %s\n" % f)
1573 self.ui.status("merging %s\n" % f)
1571 m, o, flag = merge[f]
1574 m, o, flag = merge[f]
1572 self.merge3(f, m, o)
1575 self.merge3(f, m, o)
1573 util.set_exec(self.wjoin(f), flag)
1576 util.set_exec(self.wjoin(f), flag)
1574 if moddirstate and mode == 'm':
1577 if moddirstate:
1575 # only update dirstate on branch merge, otherwise we
1578 if mode == 'm':
1576 # could mark files with changes as unchanged
1579 # only update dirstate on branch merge, otherwise we
1577 self.dirstate.update([f], mode)
1580 # could mark files with changes as unchanged
1581 self.dirstate.update([f], mode)
1582 elif p2 == nullid:
1583 # update dirstate from parent1's manifest
1584 m1n = self.changelog.read(p1)[0]
1585 m1 = self.manifest.read(m1n)
1586 file_ = self.file(f)
1587 f_len = file_.length(file_.rev(m1[f]))
1588 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1589 else:
1590 self.ui.warn("Second parent without branch merge!?\n"
1591 "Dirstate for file %s may be wrong.\n" % f)
1578
1592
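The new elif branch above is the point of this change: when the file merge happens during a linear update, the second parent is null, so the entry cannot be left in the merge state; instead it is refreshed from parent1's manifest with that revision's length and a zeroed mtime, which keeps the dirstate from treating the freshly written merge result as unmodified. A simplified sketch of why that value pair is safe (the tuple below only mimics what a dirstate entry records, not the on-disk format):

    import os, tempfile

    def looks_clean(entry, path):
        # dirstate-style shortcut: trust the stat cache only on an exact match
        state, mode, size, mtime = entry
        st = os.stat(path)
        return st.st_size == size and int(st.st_mtime) == mtime

    fd, path = tempfile.mkstemp()
    os.write(fd, "merged result\n")
    os.close(fd)
    # the size from parent1's manifest may even match the merged file,
    # but the zeroed mtime guarantees the stat cache is never trusted,
    # so a later 'hg status' compares the file's actual contents
    entry = ('n', 0, len("merged result\n"), 0)
    assert not looks_clean(entry, path)
    os.unlink(path)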
1579 remove.sort()
1593 remove.sort()
1580 for f in remove:
1594 for f in remove:
1581 self.ui.note("removing %s\n" % f)
1595 self.ui.note("removing %s\n" % f)
1582 try:
1596 try:
1583 os.unlink(f)
1597 os.unlink(f)
1584 except OSError, inst:
1598 except OSError, inst:
1585 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1599 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1586 # try removing directories that might now be empty
1600 # try removing directories that might now be empty
1587 try: os.removedirs(os.path.dirname(f))
1601 try: os.removedirs(os.path.dirname(f))
1588 except: pass
1602 except: pass
1589 if moddirstate:
1603 if moddirstate:
1590 if mode == 'n':
1604 if mode == 'n':
1591 self.dirstate.forget(remove)
1605 self.dirstate.forget(remove)
1592 else:
1606 else:
1593 self.dirstate.update(remove, 'r')
1607 self.dirstate.update(remove, 'r')
1594
1608
1595 def merge3(self, fn, my, other):
1609 def merge3(self, fn, my, other):
1596 """perform a 3-way merge in the working directory"""
1610 """perform a 3-way merge in the working directory"""
1597
1611
1598 def temp(prefix, node):
1612 def temp(prefix, node):
1599 pre = "%s~%s." % (os.path.basename(fn), prefix)
1613 pre = "%s~%s." % (os.path.basename(fn), prefix)
1600 (fd, name) = tempfile.mkstemp("", pre)
1614 (fd, name) = tempfile.mkstemp("", pre)
1601 f = os.fdopen(fd, "wb")
1615 f = os.fdopen(fd, "wb")
1602 f.write(fl.revision(node))
1616 f.write(fl.revision(node))
1603 f.close()
1617 f.close()
1604 return name
1618 return name
1605
1619
1606 fl = self.file(fn)
1620 fl = self.file(fn)
1607 base = fl.ancestor(my, other)
1621 base = fl.ancestor(my, other)
1608 a = self.wjoin(fn)
1622 a = self.wjoin(fn)
1609 b = temp("base", base)
1623 b = temp("base", base)
1610 c = temp("other", other)
1624 c = temp("other", other)
1611
1625
1612 self.ui.note("resolving %s\n" % fn)
1626 self.ui.note("resolving %s\n" % fn)
1613 self.ui.debug("file %s: other %s ancestor %s\n" %
1627 self.ui.debug("file %s: other %s ancestor %s\n" %
1614 (fn, short(other), short(base)))
1628 (fn, short(other), short(base)))
1615
1629
1616 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1630 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1617 or "hgmerge")
1631 or "hgmerge")
1618 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1632 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1619 if r:
1633 if r:
1620 self.ui.warn("merging %s failed!\n" % fn)
1634 self.ui.warn("merging %s failed!\n" % fn)
1621
1635
1622 os.unlink(b)
1636 os.unlink(b)
1623 os.unlink(c)
1637 os.unlink(c)
1624
1638
1625 def verify(self):
1639 def verify(self):
1626 filelinkrevs = {}
1640 filelinkrevs = {}
1627 filenodes = {}
1641 filenodes = {}
1628 changesets = revisions = files = 0
1642 changesets = revisions = files = 0
1629 errors = 0
1643 errors = 0
1630
1644
1631 seen = {}
1645 seen = {}
1632 self.ui.status("checking changesets\n")
1646 self.ui.status("checking changesets\n")
1633 for i in range(self.changelog.count()):
1647 for i in range(self.changelog.count()):
1634 changesets += 1
1648 changesets += 1
1635 n = self.changelog.node(i)
1649 n = self.changelog.node(i)
1636 if n in seen:
1650 if n in seen:
1637 self.ui.warn("duplicate changeset at revision %d\n" % i)
1651 self.ui.warn("duplicate changeset at revision %d\n" % i)
1638 errors += 1
1652 errors += 1
1639 seen[n] = 1
1653 seen[n] = 1
1640
1654
1641 for p in self.changelog.parents(n):
1655 for p in self.changelog.parents(n):
1642 if p not in self.changelog.nodemap:
1656 if p not in self.changelog.nodemap:
1643 self.ui.warn("changeset %s has unknown parent %s\n" %
1657 self.ui.warn("changeset %s has unknown parent %s\n" %
1644 (short(n), short(p)))
1658 (short(n), short(p)))
1645 errors += 1
1659 errors += 1
1646 try:
1660 try:
1647 changes = self.changelog.read(n)
1661 changes = self.changelog.read(n)
1648 except Exception, inst:
1662 except Exception, inst:
1649 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1663 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1650 errors += 1
1664 errors += 1
1651
1665
1652 for f in changes[3]:
1666 for f in changes[3]:
1653 filelinkrevs.setdefault(f, []).append(i)
1667 filelinkrevs.setdefault(f, []).append(i)
1654
1668
1655 seen = {}
1669 seen = {}
1656 self.ui.status("checking manifests\n")
1670 self.ui.status("checking manifests\n")
1657 for i in range(self.manifest.count()):
1671 for i in range(self.manifest.count()):
1658 n = self.manifest.node(i)
1672 n = self.manifest.node(i)
1659 if n in seen:
1673 if n in seen:
1660 self.ui.warn("duplicate manifest at revision %d\n" % i)
1674 self.ui.warn("duplicate manifest at revision %d\n" % i)
1661 errors += 1
1675 errors += 1
1662 seen[n] = 1
1676 seen[n] = 1
1663
1677
1664 for p in self.manifest.parents(n):
1678 for p in self.manifest.parents(n):
1665 if p not in self.manifest.nodemap:
1679 if p not in self.manifest.nodemap:
1666 self.ui.warn("manifest %s has unknown parent %s\n" %
1680 self.ui.warn("manifest %s has unknown parent %s\n" %
1667 (short(n), short(p)))
1681 (short(n), short(p)))
1668 errors += 1
1682 errors += 1
1669
1683
1670 try:
1684 try:
1671 delta = mdiff.patchtext(self.manifest.delta(n))
1685 delta = mdiff.patchtext(self.manifest.delta(n))
1672 except KeyboardInterrupt:
1686 except KeyboardInterrupt:
1673 self.ui.warn("aborted")
1687 self.ui.warn("aborted")
1674 sys.exit(0)
1688 sys.exit(0)
1675 except Exception, inst:
1689 except Exception, inst:
1676 self.ui.warn("unpacking manifest %s: %s\n"
1690 self.ui.warn("unpacking manifest %s: %s\n"
1677 % (short(n), inst))
1691 % (short(n), inst))
1678 errors += 1
1692 errors += 1
1679
1693
1680 ff = [ l.split('\0') for l in delta.splitlines() ]
1694 ff = [ l.split('\0') for l in delta.splitlines() ]
1681 for f, fn in ff:
1695 for f, fn in ff:
1682 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1696 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1683
1697
1684 self.ui.status("crosschecking files in changesets and manifests\n")
1698 self.ui.status("crosschecking files in changesets and manifests\n")
1685 for f in filenodes:
1699 for f in filenodes:
1686 if f not in filelinkrevs:
1700 if f not in filelinkrevs:
1687 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1701 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1688 errors += 1
1702 errors += 1
1689
1703
1690 for f in filelinkrevs:
1704 for f in filelinkrevs:
1691 if f not in filenodes:
1705 if f not in filenodes:
1692 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1706 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1693 errors += 1
1707 errors += 1
1694
1708
1695 self.ui.status("checking files\n")
1709 self.ui.status("checking files\n")
1696 ff = filenodes.keys()
1710 ff = filenodes.keys()
1697 ff.sort()
1711 ff.sort()
1698 for f in ff:
1712 for f in ff:
1699 if f == "/dev/null": continue
1713 if f == "/dev/null": continue
1700 files += 1
1714 files += 1
1701 fl = self.file(f)
1715 fl = self.file(f)
1702 nodes = { nullid: 1 }
1716 nodes = { nullid: 1 }
1703 seen = {}
1717 seen = {}
1704 for i in range(fl.count()):
1718 for i in range(fl.count()):
1705 revisions += 1
1719 revisions += 1
1706 n = fl.node(i)
1720 n = fl.node(i)
1707
1721
1708 if n in seen:
1722 if n in seen:
1709 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1723 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1710 errors += 1
1724 errors += 1
1711
1725
1712 if n not in filenodes[f]:
1726 if n not in filenodes[f]:
1713 self.ui.warn("%s: %d:%s not in manifests\n"
1727 self.ui.warn("%s: %d:%s not in manifests\n"
1714 % (f, i, short(n)))
1728 % (f, i, short(n)))
1715 errors += 1
1729 errors += 1
1716 else:
1730 else:
1717 del filenodes[f][n]
1731 del filenodes[f][n]
1718
1732
1719 flr = fl.linkrev(n)
1733 flr = fl.linkrev(n)
1720 if flr not in filelinkrevs[f]:
1734 if flr not in filelinkrevs[f]:
1721 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1735 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1722 % (f, short(n), fl.linkrev(n)))
1736 % (f, short(n), fl.linkrev(n)))
1723 errors += 1
1737 errors += 1
1724 else:
1738 else:
1725 filelinkrevs[f].remove(flr)
1739 filelinkrevs[f].remove(flr)
1726
1740
1727 # verify contents
1741 # verify contents
1728 try:
1742 try:
1729 t = fl.read(n)
1743 t = fl.read(n)
1730 except Exception, inst:
1744 except Exception, inst:
1731 self.ui.warn("unpacking file %s %s: %s\n"
1745 self.ui.warn("unpacking file %s %s: %s\n"
1732 % (f, short(n), inst))
1746 % (f, short(n), inst))
1733 errors += 1
1747 errors += 1
1734
1748
1735 # verify parents
1749 # verify parents
1736 (p1, p2) = fl.parents(n)
1750 (p1, p2) = fl.parents(n)
1737 if p1 not in nodes:
1751 if p1 not in nodes:
1738 self.ui.warn("file %s:%s unknown parent 1 %s" %
1752 self.ui.warn("file %s:%s unknown parent 1 %s" %
1739 (f, short(n), short(p1)))
1753 (f, short(n), short(p1)))
1740 errors += 1
1754 errors += 1
1741 if p2 not in nodes:
1755 if p2 not in nodes:
1742 self.ui.warn("file %s:%s unknown parent 2 %s" %
1756 self.ui.warn("file %s:%s unknown parent 2 %s" %
1743 (f, short(n), short(p2)))
1757 (f, short(n), short(p2)))
1744 errors += 1
1758 errors += 1
1745 nodes[n] = 1
1759 nodes[n] = 1
1746
1760
1747 # cross-check
1761 # cross-check
1748 for node in filenodes[f]:
1762 for node in filenodes[f]:
1749 self.ui.warn("node %s in manifests not in %s\n"
1763 self.ui.warn("node %s in manifests not in %s\n"
1750 % (hex(node), f))
1764 % (hex(node), f))
1751 errors += 1
1765 errors += 1
1752
1766
1753 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1767 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1754 (files, changesets, revisions))
1768 (files, changesets, revisions))
1755
1769
1756 if errors:
1770 if errors:
1757 self.ui.warn("%d integrity errors encountered!\n" % errors)
1771 self.ui.warn("%d integrity errors encountered!\n" % errors)
1758 return 1
1772 return 1
1759
1773
1760 class httprepository:
1774 class httprepository:
1761 def __init__(self, ui, path):
1775 def __init__(self, ui, path):
1762 # fix missing / after hostname
1776 # fix missing / after hostname
1763 s = urlparse.urlsplit(path)
1777 s = urlparse.urlsplit(path)
1764 partial = s[2]
1778 partial = s[2]
1765 if not partial: partial = "/"
1779 if not partial: partial = "/"
1766 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1780 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1767 self.ui = ui
1781 self.ui = ui
1768 no_list = [ "localhost", "127.0.0.1" ]
1782 no_list = [ "localhost", "127.0.0.1" ]
1769 host = ui.config("http_proxy", "host")
1783 host = ui.config("http_proxy", "host")
1770 if host is None:
1784 if host is None:
1771 host = os.environ.get("http_proxy")
1785 host = os.environ.get("http_proxy")
1772 if host and host.startswith('http://'):
1786 if host and host.startswith('http://'):
1773 host = host[7:]
1787 host = host[7:]
1774 user = ui.config("http_proxy", "user")
1788 user = ui.config("http_proxy", "user")
1775 passwd = ui.config("http_proxy", "passwd")
1789 passwd = ui.config("http_proxy", "passwd")
1776 no = ui.config("http_proxy", "no")
1790 no = ui.config("http_proxy", "no")
1777 if no is None:
1791 if no is None:
1778 no = os.environ.get("no_proxy")
1792 no = os.environ.get("no_proxy")
1779 if no:
1793 if no:
1780 no_list = no_list + no.split(",")
1794 no_list = no_list + no.split(",")
1781
1795
1782 no_proxy = 0
1796 no_proxy = 0
1783 for h in no_list:
1797 for h in no_list:
1784 if (path.startswith("http://" + h + "/") or
1798 if (path.startswith("http://" + h + "/") or
1785 path.startswith("http://" + h + ":") or
1799 path.startswith("http://" + h + ":") or
1786 path == "http://" + h):
1800 path == "http://" + h):
1787 no_proxy = 1
1801 no_proxy = 1
1788
1802
1789 # Note: urllib2 takes proxy values from the environment and those will
1803 # Note: urllib2 takes proxy values from the environment and those will
1790 # take precedence
1804 # take precedence
1791 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1805 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1792 if os.environ.has_key(env):
1806 if os.environ.has_key(env):
1793 del os.environ[env]
1807 del os.environ[env]
1794
1808
1795 proxy_handler = urllib2.BaseHandler()
1809 proxy_handler = urllib2.BaseHandler()
1796 if host and not no_proxy:
1810 if host and not no_proxy:
1797 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1811 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1798
1812
1799 authinfo = None
1813 authinfo = None
1800 if user and passwd:
1814 if user and passwd:
1801 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1815 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1802 passmgr.add_password(None, host, user, passwd)
1816 passmgr.add_password(None, host, user, passwd)
1803 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1817 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1804
1818
1805 opener = urllib2.build_opener(proxy_handler, authinfo)
1819 opener = urllib2.build_opener(proxy_handler, authinfo)
1806 urllib2.install_opener(opener)
1820 urllib2.install_opener(opener)
1807
1821
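The ui.config() lookups above map to an [http_proxy] section in hgrc, with the http_proxy/no_proxy environment variables as a fallback (and deleted afterwards so urllib2 does not apply them a second time). An illustrative fragment; the host, credentials and bypass list are placeholders, and the "no" entries are matched as literal host prefixes, not patterns:

    [http_proxy]
    host = proxy.example.com:3128
    user = alice
    passwd = secret
    no = localhost,127.0.0.1,intranet.example.com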
1808 def dev(self):
1822 def dev(self):
1809 return -1
1823 return -1
1810
1824
1811 def do_cmd(self, cmd, **args):
1825 def do_cmd(self, cmd, **args):
1812 self.ui.debug("sending %s command\n" % cmd)
1826 self.ui.debug("sending %s command\n" % cmd)
1813 q = {"cmd": cmd}
1827 q = {"cmd": cmd}
1814 q.update(args)
1828 q.update(args)
1815 qs = urllib.urlencode(q)
1829 qs = urllib.urlencode(q)
1816 cu = "%s?%s" % (self.url, qs)
1830 cu = "%s?%s" % (self.url, qs)
1817 resp = urllib2.urlopen(cu)
1831 resp = urllib2.urlopen(cu)
1818 proto = resp.headers['content-type']
1832 proto = resp.headers['content-type']
1819
1833
1820 # accept old "text/plain" and "application/hg-changegroup" for now
1834 # accept old "text/plain" and "application/hg-changegroup" for now
1821 if not proto.startswith('application/mercurial') and \
1835 if not proto.startswith('application/mercurial') and \
1822 not proto.startswith('text/plain') and \
1836 not proto.startswith('text/plain') and \
1823 not proto.startswith('application/hg-changegroup'):
1837 not proto.startswith('application/hg-changegroup'):
1824 raise RepoError("'%s' does not appear to be an hg repository"
1838 raise RepoError("'%s' does not appear to be an hg repository"
1825 % self.url)
1839 % self.url)
1826
1840
1827 if proto.startswith('application/mercurial'):
1841 if proto.startswith('application/mercurial'):
1828 version = proto[22:]
1842 version = proto[22:]
1829 if float(version) > 0.1:
1843 if float(version) > 0.1:
1830 raise RepoError("'%s' uses newer protocol %s" %
1844 raise RepoError("'%s' uses newer protocol %s" %
1831 (self.url, version))
1845 (self.url, version))
1832
1846
1833 return resp
1847 return resp
1834
1848
1835 def heads(self):
1849 def heads(self):
1836 d = self.do_cmd("heads").read()
1850 d = self.do_cmd("heads").read()
1837 try:
1851 try:
1838 return map(bin, d[:-1].split(" "))
1852 return map(bin, d[:-1].split(" "))
1839 except:
1853 except:
1840 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1854 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1841 raise
1855 raise
1842
1856
1843 def branches(self, nodes):
1857 def branches(self, nodes):
1844 n = " ".join(map(hex, nodes))
1858 n = " ".join(map(hex, nodes))
1845 d = self.do_cmd("branches", nodes=n).read()
1859 d = self.do_cmd("branches", nodes=n).read()
1846 try:
1860 try:
1847 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1861 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1848 return br
1862 return br
1849 except:
1863 except:
1850 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1864 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1851 raise
1865 raise
1852
1866
1853 def between(self, pairs):
1867 def between(self, pairs):
1854 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1868 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1855 d = self.do_cmd("between", pairs=n).read()
1869 d = self.do_cmd("between", pairs=n).read()
1856 try:
1870 try:
1857 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1871 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1858 return p
1872 return p
1859 except:
1873 except:
1860 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1874 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1861 raise
1875 raise
1862
1876
1863 def changegroup(self, nodes):
1877 def changegroup(self, nodes):
1864 n = " ".join(map(hex, nodes))
1878 n = " ".join(map(hex, nodes))
1865 f = self.do_cmd("changegroup", roots=n)
1879 f = self.do_cmd("changegroup", roots=n)
1866 bytes = 0
1880 bytes = 0
1867
1881
1868 class zread:
1882 class zread:
1869 def __init__(self, f):
1883 def __init__(self, f):
1870 self.zd = zlib.decompressobj()
1884 self.zd = zlib.decompressobj()
1871 self.f = f
1885 self.f = f
1872 self.buf = ""
1886 self.buf = ""
1873 def read(self, l):
1887 def read(self, l):
1874 while l > len(self.buf):
1888 while l > len(self.buf):
1875 r = self.f.read(4096)
1889 r = self.f.read(4096)
1876 if r:
1890 if r:
1877 self.buf += self.zd.decompress(r)
1891 self.buf += self.zd.decompress(r)
1878 else:
1892 else:
1879 self.buf += self.zd.flush()
1893 self.buf += self.zd.flush()
1880 break
1894 break
1881 d, self.buf = self.buf[:l], self.buf[l:]
1895 d, self.buf = self.buf[:l], self.buf[l:]
1882 return d
1896 return d
1883
1897
1884 return zread(f)
1898 return zread(f)
1885
1899
1886 class remotelock:
1900 class remotelock:
1887 def __init__(self, repo):
1901 def __init__(self, repo):
1888 self.repo = repo
1902 self.repo = repo
1889 def release(self):
1903 def release(self):
1890 self.repo.unlock()
1904 self.repo.unlock()
1891 self.repo = None
1905 self.repo = None
1892 def __del__(self):
1906 def __del__(self):
1893 if self.repo:
1907 if self.repo:
1894 self.release()
1908 self.release()
1895
1909
1896 class sshrepository:
1910 class sshrepository:
1897 def __init__(self, ui, path):
1911 def __init__(self, ui, path):
1898 self.url = path
1912 self.url = path
1899 self.ui = ui
1913 self.ui = ui
1900
1914
1901 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
1915 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
1902 if not m:
1916 if not m:
1903 raise RepoError("couldn't parse destination %s" % path)
1917 raise RepoError("couldn't parse destination %s" % path)
1904
1918
1905 self.user = m.group(2)
1919 self.user = m.group(2)
1906 self.host = m.group(3)
1920 self.host = m.group(3)
1907 self.port = m.group(5)
1921 self.port = m.group(5)
1908 self.path = m.group(7)
1922 self.path = m.group(7)
1909
1923
1910 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1924 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1911 args = self.port and ("%s -p %s") % (args, self.port) or args
1925 args = self.port and ("%s -p %s") % (args, self.port) or args
1912 path = self.path or ""
1926 path = self.path or ""
1913
1927
1914 if not path:
1928 if not path:
1915 raise RepoError("no remote repository path specified")
1929 raise RepoError("no remote repository path specified")
1916
1930
1917 cmd = "ssh %s 'hg -R %s serve --stdio'"
1931 cmd = "ssh %s 'hg -R %s serve --stdio'"
1918 cmd = cmd % (args, path)
1932 cmd = cmd % (args, path)
1919
1933
1920 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1934 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1921
1935
1922 def readerr(self):
1936 def readerr(self):
1923 while 1:
1937 while 1:
1924 r,w,x = select.select([self.pipee], [], [], 0)
1938 r,w,x = select.select([self.pipee], [], [], 0)
1925 if not r: break
1939 if not r: break
1926 l = self.pipee.readline()
1940 l = self.pipee.readline()
1927 if not l: break
1941 if not l: break
1928 self.ui.status("remote: ", l)
1942 self.ui.status("remote: ", l)
1929
1943
1930 def __del__(self):
1944 def __del__(self):
1931 try:
1945 try:
1932 self.pipeo.close()
1946 self.pipeo.close()
1933 self.pipei.close()
1947 self.pipei.close()
1934 for l in self.pipee:
1948 for l in self.pipee:
1935 self.ui.status("remote: ", l)
1949 self.ui.status("remote: ", l)
1936 self.pipee.close()
1950 self.pipee.close()
1937 except:
1951 except:
1938 pass
1952 pass
1939
1953
1940 def dev(self):
1954 def dev(self):
1941 return -1
1955 return -1
1942
1956
1943 def do_cmd(self, cmd, **args):
1957 def do_cmd(self, cmd, **args):
1944 self.ui.debug("sending %s command\n" % cmd)
1958 self.ui.debug("sending %s command\n" % cmd)
1945 self.pipeo.write("%s\n" % cmd)
1959 self.pipeo.write("%s\n" % cmd)
1946 for k, v in args.items():
1960 for k, v in args.items():
1947 self.pipeo.write("%s %d\n" % (k, len(v)))
1961 self.pipeo.write("%s %d\n" % (k, len(v)))
1948 self.pipeo.write(v)
1962 self.pipeo.write(v)
1949 self.pipeo.flush()
1963 self.pipeo.flush()
1950
1964
1951 return self.pipei
1965 return self.pipei
1952
1966
1953 def call(self, cmd, **args):
1967 def call(self, cmd, **args):
1954 r = self.do_cmd(cmd, **args)
1968 r = self.do_cmd(cmd, **args)
1955 l = r.readline()
1969 l = r.readline()
1956 self.readerr()
1970 self.readerr()
1957 try:
1971 try:
1958 l = int(l)
1972 l = int(l)
1959 except:
1973 except:
1960 raise RepoError("unexpected response '%s'" % l)
1974 raise RepoError("unexpected response '%s'" % l)
1961 return r.read(l)
1975 return r.read(l)
1962
1976
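do_cmd() and call() above define a tiny stdio protocol: the command name on its own line, then for every argument a "name length" header line followed by exactly that many bytes of value; the server answers with a decimal byte count on one line and then the payload itself. A sketch of the bytes a "between" request puts on the wire (the hex pair is a shortened placeholder, not a real node hash):

    pairs = "a1b2c3-d4e5f6"            # "-".join(map(hex, pair)) in the real call
    request = "between\n" + "pairs %d\n" % len(pairs) + pairs
    # the server replies with something like:
    #   "40\n"          <- decimal length of the payload
    #   <40 bytes>      <- the payload parsed by between()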
1963 def lock(self):
1977 def lock(self):
1964 self.call("lock")
1978 self.call("lock")
1965 return remotelock(self)
1979 return remotelock(self)
1966
1980
1967 def unlock(self):
1981 def unlock(self):
1968 self.call("unlock")
1982 self.call("unlock")
1969
1983
1970 def heads(self):
1984 def heads(self):
1971 d = self.call("heads")
1985 d = self.call("heads")
1972 try:
1986 try:
1973 return map(bin, d[:-1].split(" "))
1987 return map(bin, d[:-1].split(" "))
1974 except:
1988 except:
1975 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1989 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1976
1990
1977 def branches(self, nodes):
1991 def branches(self, nodes):
1978 n = " ".join(map(hex, nodes))
1992 n = " ".join(map(hex, nodes))
1979 d = self.call("branches", nodes=n)
1993 d = self.call("branches", nodes=n)
1980 try:
1994 try:
1981 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1995 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1982 return br
1996 return br
1983 except:
1997 except:
1984 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1998 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1985
1999
1986 def between(self, pairs):
2000 def between(self, pairs):
1987 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2001 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1988 d = self.call("between", pairs=n)
2002 d = self.call("between", pairs=n)
1989 try:
2003 try:
1990 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2004 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1991 return p
2005 return p
1992 except:
2006 except:
1993 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
2007 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1994
2008
1995 def changegroup(self, nodes):
2009 def changegroup(self, nodes):
1996 n = " ".join(map(hex, nodes))
2010 n = " ".join(map(hex, nodes))
1997 f = self.do_cmd("changegroup", roots=n)
2011 f = self.do_cmd("changegroup", roots=n)
1998 return self.pipei
2012 return self.pipei
1999
2013
2000 def addchangegroup(self, cg):
2014 def addchangegroup(self, cg):
2001 d = self.call("addchangegroup")
2015 d = self.call("addchangegroup")
2002 if d:
2016 if d:
2003 raise RepoError("push refused: %s" % d)
2017 raise RepoError("push refused: %s" % d)
2004
2018
2005 while 1:
2019 while 1:
2006 d = cg.read(4096)
2020 d = cg.read(4096)
2007 if not d: break
2021 if not d: break
2008 self.pipeo.write(d)
2022 self.pipeo.write(d)
2009 self.readerr()
2023 self.readerr()
2010
2024
2011 self.pipeo.flush()
2025 self.pipeo.flush()
2012
2026
2013 self.readerr()
2027 self.readerr()
2014 l = int(self.pipei.readline())
2028 l = int(self.pipei.readline())
2015 return self.pipei.read(l) != ""
2029 return self.pipei.read(l) != ""
2016
2030
2017 def repository(ui, path=None, create=0):
2031 def repository(ui, path=None, create=0):
2018 if path:
2032 if path:
2019 if path.startswith("http://"):
2033 if path.startswith("http://"):
2020 return httprepository(ui, path)
2034 return httprepository(ui, path)
2021 if path.startswith("hg://"):
2035 if path.startswith("hg://"):
2022 return httprepository(ui, path.replace("hg://", "http://"))
2036 return httprepository(ui, path.replace("hg://", "http://"))
2023 if path.startswith("old-http://"):
2037 if path.startswith("old-http://"):
2024 return localrepository(ui, path.replace("old-http://", "http://"))
2038 return localrepository(ui, path.replace("old-http://", "http://"))
2025 if path.startswith("ssh://"):
2039 if path.startswith("ssh://"):
2026 return sshrepository(ui, path)
2040 return sshrepository(ui, path)
2027
2041
2028 return localrepository(ui, path, create)
2042 return localrepository(ui, path, create)
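A short usage sketch of the factory above. The import path is an assumption (depending on installation this may be "import hg, ui" instead of importing from the mercurial package), and all URLs are placeholders:

    from mercurial import hg, ui

    u = ui.ui()
    local = hg.repository(u, ".")                               # localrepository
    web   = hg.repository(u, "http://hg.example.com/repo")      # httprepository
    shell = hg.repository(u, "ssh://user@hg.example.com/repo")  # sshrepository
    fresh = hg.repository(u, "/tmp/newrepo", create=1)          # make a new local repo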
@@ -1,47 +1,46 b''
1 + hg init
1 + hg init
2 + hg add file1 file2
2 + hg add file1 file2
3 + hg commit -m added file1 and file2 -d 0 0 -u user
3 + hg commit -m added file1 and file2 -d 0 0 -u user
4 + hg commit -m changed file1 -d 0 0 -u user
4 + hg commit -m changed file1 -d 0 0 -u user
5 + hg -q log
5 + hg -q log
6 1:3aa14bbc23d90e3f8b5b639b4a43d76509bae76c
6 1:3aa14bbc23d90e3f8b5b639b4a43d76509bae76c
7 0:8633637036c18f021d771208e16ae3508ab81d28
7 0:8633637036c18f021d771208e16ae3508ab81d28
8 + hg id
8 + hg id
9 3aa14bbc23d9 tip
9 3aa14bbc23d9 tip
10 + hg update -C 0
10 + hg update -C 0
11 + hg id
11 + hg id
12 8633637036c1
12 8633637036c1
13 + hg id
13 + hg id
14 8633637036c1+
14 8633637036c1+
15 + hg revert
15 + hg revert
16 + hg diff
16 + hg diff
17 + hg status
17 + hg status
18 + hg id
18 + hg id
19 8633637036c1
19 8633637036c1
20 + hg update
20 + hg update
21 + hg diff
21 + hg diff
22 + hg status
22 + hg status
23 + hg id
23 + hg id
24 3aa14bbc23d9 tip
24 3aa14bbc23d9 tip
25 + hg update -C 0
25 + hg update -C 0
26 + hg update
26 + hg update
27 merging file1
27 merging file1
28 + hg diff
28 + hg diff
29 FIXME: This is a known bug:
30 + hg status
29 + hg status
31 + hg id
30 + hg id
32 3aa14bbc23d9 tip
31 3aa14bbc23d9 tip
33 + hg revert
32 + hg revert
34 + hg diff
33 + hg diff
35 + hg status
34 + hg status
36 + hg id
35 + hg id
37 3aa14bbc23d9 tip
36 3aa14bbc23d9 tip
38 + hg revert -r tip
37 + hg revert -r tip
39 + hg diff
38 + hg diff
40 + hg status
39 + hg status
41 + hg id
40 + hg id
42 3aa14bbc23d9 tip
41 3aa14bbc23d9 tip
43 + hg update -C
42 + hg update -C
44 + hg diff
43 + hg diff
45 + hg status
44 + hg status
46 + hg id
45 + hg id
47 3aa14bbc23d9 tip
46 3aa14bbc23d9 tip
@@ -1,60 +1,59 b''
1 + hg init
1 + hg init
2 + hg add file1 file2
2 + hg add file1 file2
3 + hg commit -m added file1 and file2 -d 0 0 -u user
3 + hg commit -m added file1 and file2 -d 0 0 -u user
4 + hg commit -m changed file1 -d 0 0 -u user
4 + hg commit -m changed file1 -d 0 0 -u user
5 + hg -q log
5 + hg -q log
6 1:3aa14bbc23d90e3f8b5b639b4a43d76509bae76c
6 1:3aa14bbc23d90e3f8b5b639b4a43d76509bae76c
7 0:8633637036c18f021d771208e16ae3508ab81d28
7 0:8633637036c18f021d771208e16ae3508ab81d28
8 + hg id
8 + hg id
9 3aa14bbc23d9 tip
9 3aa14bbc23d9 tip
10 + hg update -C 0
10 + hg update -C 0
11 + hg id
11 + hg id
12 8633637036c1
12 8633637036c1
13 + hg id
13 + hg id
14 8633637036c1+
14 8633637036c1+
15 + hg revert
15 + hg revert
16 + hg diff
16 + hg diff
17 + hg status
17 + hg status
18 + hg id
18 + hg id
19 8633637036c1
19 8633637036c1
20 + hg update
20 + hg update
21 + hg diff
21 + hg diff
22 + hg status
22 + hg status
23 + hg id
23 + hg id
24 3aa14bbc23d9 tip
24 3aa14bbc23d9 tip
25 + hg update -C 0
25 + hg update -C 0
26 + hg update
26 + hg update
27 merge: warning: conflicts during merge
27 merge: warning: conflicts during merge
28 merging file1
28 merging file1
29 merging file1 failed!
29 merging file1 failed!
30 + hg diff
30 + hg diff
31 diff -r 3aa14bbc23d9 file1
31 diff -r 3aa14bbc23d9 file1
32 --- a/file1
32 --- a/file1
33 +++ b/file1
33 +++ b/file1
34 @@ -1,2 +1,6 @@
34 @@ -1,2 +1,6 @@
35 added file1
35 added file1
36 +<<<<<<<
36 +<<<<<<<
37 +changed file1 different
37 +changed file1 different
38 +=======
38 +=======
39 changed file1
39 changed file1
40 +>>>>>>>
40 +>>>>>>>
41 + hg status
41 + hg status
42 M file1
42 M file1
43 + hg id
43 + hg id
44 3aa14bbc23d9+ tip
44 3aa14bbc23d9+ tip
45 + hg revert
45 + hg revert
46 + hg diff
46 + hg diff
47 FIXME: This is a known bug:
48 + hg status
47 + hg status
49 + hg id
48 + hg id
50 3aa14bbc23d9 tip
49 3aa14bbc23d9 tip
51 + hg revert -r tip
50 + hg revert -r tip
52 + hg diff
51 + hg diff
53 + hg status
52 + hg status
54 + hg id
53 + hg id
55 3aa14bbc23d9 tip
54 3aa14bbc23d9 tip
56 + hg update -C
55 + hg update -C
57 + hg diff
56 + hg diff
58 + hg status
57 + hg status
59 + hg id
58 + hg id
60 3aa14bbc23d9 tip
59 3aa14bbc23d9 tip