Deal with repos with missing timezones
mpm@selenic.com
r1013:2e8b8da9 default
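The fix is small: changelog.extract() now tolerates changesets whose date field was recorded by older tools as a bare timestamp, without the usual " <offset>" suffix, by defaulting the timezone offset to 0 (UTC). A minimal standalone sketch of that normalization (the helper name normalize_date is hypothetical; in the patch the check lives inline in extract(), at new lines 275-276):

def normalize_date(date):
    # Mercurial stores a changeset date as "<unix timestamp> <tz offset>".
    # Some tools used -d with only a timestamp, so old changesets may lack
    # the offset entirely; assume UTC in that case.
    if " " not in date:
        date += " 0"
    return date

# normalize_date("1123456789")        -> "1123456789 0"
# normalize_date("1123456789 -25200") -> unchanged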
@@ -1,2277 +1,2279 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect errno select stat")
14 demandload(globals(), "bisect errno select stat")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 if " " not in date:
276 date += " 0" # some tools used -d without a timezone
275 files = l[3:]
277 files = l[3:]
276 return (manifest, user, date, files, desc)
278 return (manifest, user, date, files, desc)
277
279
278 def read(self, node):
280 def read(self, node):
279 return self.extract(self.revision(node))
281 return self.extract(self.revision(node))
280
282
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
283 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
284 user=None, date=None):
283 if not date:
285 if not date:
284 if time.daylight: offset = time.altzone
286 if time.daylight: offset = time.altzone
285 else: offset = time.timezone
287 else: offset = time.timezone
286 date = "%d %d" % (time.time(), offset)
288 date = "%d %d" % (time.time(), offset)
287 list.sort()
289 list.sort()
288 l = [hex(manifest), user, date] + list + ["", desc]
290 l = [hex(manifest), user, date] + list + ["", desc]
289 text = "\n".join(l)
291 text = "\n".join(l)
290 return self.addrevision(text, transaction, self.count(), p1, p2)
292 return self.addrevision(text, transaction, self.count(), p1, p2)
291
293
292 class dirstate:
294 class dirstate:
293 def __init__(self, opener, ui, root):
295 def __init__(self, opener, ui, root):
294 self.opener = opener
296 self.opener = opener
295 self.root = root
297 self.root = root
296 self.dirty = 0
298 self.dirty = 0
297 self.ui = ui
299 self.ui = ui
298 self.map = None
300 self.map = None
299 self.pl = None
301 self.pl = None
300 self.copies = {}
302 self.copies = {}
301 self.ignorefunc = None
303 self.ignorefunc = None
302
304
303 def wjoin(self, f):
305 def wjoin(self, f):
304 return os.path.join(self.root, f)
306 return os.path.join(self.root, f)
305
307
306 def getcwd(self):
308 def getcwd(self):
307 cwd = os.getcwd()
309 cwd = os.getcwd()
308 if cwd == self.root: return ''
310 if cwd == self.root: return ''
309 return cwd[len(self.root) + 1:]
311 return cwd[len(self.root) + 1:]
310
312
311 def ignore(self, f):
313 def ignore(self, f):
312 if not self.ignorefunc:
314 if not self.ignorefunc:
313 bigpat = []
315 bigpat = []
314 try:
316 try:
315 l = file(self.wjoin(".hgignore"))
317 l = file(self.wjoin(".hgignore"))
316 for pat in l:
318 for pat in l:
317 p = pat.rstrip()
319 p = pat.rstrip()
318 if p:
320 if p:
319 try:
321 try:
320 re.compile(p)
322 re.compile(p)
321 except:
323 except:
322 self.ui.warn("ignoring invalid ignore"
324 self.ui.warn("ignoring invalid ignore"
323 + " regular expression '%s'\n" % p)
325 + " regular expression '%s'\n" % p)
324 else:
326 else:
325 bigpat.append(p)
327 bigpat.append(p)
326 except IOError: pass
328 except IOError: pass
327
329
328 if bigpat:
330 if bigpat:
329 s = "(?:%s)" % (")|(?:".join(bigpat))
331 s = "(?:%s)" % (")|(?:".join(bigpat))
330 r = re.compile(s)
332 r = re.compile(s)
331 self.ignorefunc = r.search
333 self.ignorefunc = r.search
332 else:
334 else:
333 self.ignorefunc = util.never
335 self.ignorefunc = util.never
334
336
335 return self.ignorefunc(f)
337 return self.ignorefunc(f)
336
338
337 def __del__(self):
339 def __del__(self):
338 if self.dirty:
340 if self.dirty:
339 self.write()
341 self.write()
340
342
341 def __getitem__(self, key):
343 def __getitem__(self, key):
342 try:
344 try:
343 return self.map[key]
345 return self.map[key]
344 except TypeError:
346 except TypeError:
345 self.read()
347 self.read()
346 return self[key]
348 return self[key]
347
349
348 def __contains__(self, key):
350 def __contains__(self, key):
349 if not self.map: self.read()
351 if not self.map: self.read()
350 return key in self.map
352 return key in self.map
351
353
352 def parents(self):
354 def parents(self):
353 if not self.pl:
355 if not self.pl:
354 self.read()
356 self.read()
355 return self.pl
357 return self.pl
356
358
357 def markdirty(self):
359 def markdirty(self):
358 if not self.dirty:
360 if not self.dirty:
359 self.dirty = 1
361 self.dirty = 1
360
362
361 def setparents(self, p1, p2 = nullid):
363 def setparents(self, p1, p2 = nullid):
362 self.markdirty()
364 self.markdirty()
363 self.pl = p1, p2
365 self.pl = p1, p2
364
366
365 def state(self, key):
367 def state(self, key):
366 try:
368 try:
367 return self[key][0]
369 return self[key][0]
368 except KeyError:
370 except KeyError:
369 return "?"
371 return "?"
370
372
371 def read(self):
373 def read(self):
372 if self.map is not None: return self.map
374 if self.map is not None: return self.map
373
375
374 self.map = {}
376 self.map = {}
375 self.pl = [nullid, nullid]
377 self.pl = [nullid, nullid]
376 try:
378 try:
377 st = self.opener("dirstate").read()
379 st = self.opener("dirstate").read()
378 if not st: return
380 if not st: return
379 except: return
381 except: return
380
382
381 self.pl = [st[:20], st[20: 40]]
383 self.pl = [st[:20], st[20: 40]]
382
384
383 pos = 40
385 pos = 40
384 while pos < len(st):
386 while pos < len(st):
385 e = struct.unpack(">cllll", st[pos:pos+17])
387 e = struct.unpack(">cllll", st[pos:pos+17])
386 l = e[4]
388 l = e[4]
387 pos += 17
389 pos += 17
388 f = st[pos:pos + l]
390 f = st[pos:pos + l]
389 if '\0' in f:
391 if '\0' in f:
390 f, c = f.split('\0')
392 f, c = f.split('\0')
391 self.copies[f] = c
393 self.copies[f] = c
392 self.map[f] = e[:4]
394 self.map[f] = e[:4]
393 pos += l
395 pos += l
394
396
395 def copy(self, source, dest):
397 def copy(self, source, dest):
396 self.read()
398 self.read()
397 self.markdirty()
399 self.markdirty()
398 self.copies[dest] = source
400 self.copies[dest] = source
399
401
400 def copied(self, file):
402 def copied(self, file):
401 return self.copies.get(file, None)
403 return self.copies.get(file, None)
402
404
403 def update(self, files, state, **kw):
405 def update(self, files, state, **kw):
404 ''' current states:
406 ''' current states:
405 n normal
407 n normal
406 m needs merging
408 m needs merging
407 r marked for removal
409 r marked for removal
408 a marked for addition'''
410 a marked for addition'''
409
411
410 if not files: return
412 if not files: return
411 self.read()
413 self.read()
412 self.markdirty()
414 self.markdirty()
413 for f in files:
415 for f in files:
414 if state == "r":
416 if state == "r":
415 self.map[f] = ('r', 0, 0, 0)
417 self.map[f] = ('r', 0, 0, 0)
416 else:
418 else:
417 s = os.stat(os.path.join(self.root, f))
419 s = os.stat(os.path.join(self.root, f))
418 st_size = kw.get('st_size', s.st_size)
420 st_size = kw.get('st_size', s.st_size)
419 st_mtime = kw.get('st_mtime', s.st_mtime)
421 st_mtime = kw.get('st_mtime', s.st_mtime)
420 self.map[f] = (state, s.st_mode, st_size, st_mtime)
422 self.map[f] = (state, s.st_mode, st_size, st_mtime)
421
423
422 def forget(self, files):
424 def forget(self, files):
423 if not files: return
425 if not files: return
424 self.read()
426 self.read()
425 self.markdirty()
427 self.markdirty()
426 for f in files:
428 for f in files:
427 try:
429 try:
428 del self.map[f]
430 del self.map[f]
429 except KeyError:
431 except KeyError:
430 self.ui.warn("not in dirstate: %s!\n" % f)
432 self.ui.warn("not in dirstate: %s!\n" % f)
431 pass
433 pass
432
434
433 def clear(self):
435 def clear(self):
434 self.map = {}
436 self.map = {}
435 self.markdirty()
437 self.markdirty()
436
438
437 def write(self):
439 def write(self):
438 st = self.opener("dirstate", "w")
440 st = self.opener("dirstate", "w")
439 st.write("".join(self.pl))
441 st.write("".join(self.pl))
440 for f, e in self.map.items():
442 for f, e in self.map.items():
441 c = self.copied(f)
443 c = self.copied(f)
442 if c:
444 if c:
443 f = f + "\0" + c
445 f = f + "\0" + c
444 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
446 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
445 st.write(e + f)
447 st.write(e + f)
446 self.dirty = 0
448 self.dirty = 0
447
449
448 def filterfiles(self, files):
450 def filterfiles(self, files):
449 ret = {}
451 ret = {}
450 unknown = []
452 unknown = []
451
453
452 for x in files:
454 for x in files:
453 if x is '.':
455 if x is '.':
454 return self.map.copy()
456 return self.map.copy()
455 if x not in self.map:
457 if x not in self.map:
456 unknown.append(x)
458 unknown.append(x)
457 else:
459 else:
458 ret[x] = self.map[x]
460 ret[x] = self.map[x]
459
461
460 if not unknown:
462 if not unknown:
461 return ret
463 return ret
462
464
463 b = self.map.keys()
465 b = self.map.keys()
464 b.sort()
466 b.sort()
465 blen = len(b)
467 blen = len(b)
466
468
467 for x in unknown:
469 for x in unknown:
468 bs = bisect.bisect(b, x)
470 bs = bisect.bisect(b, x)
469 if bs != 0 and b[bs-1] == x:
471 if bs != 0 and b[bs-1] == x:
470 ret[x] = self.map[x]
472 ret[x] = self.map[x]
471 continue
473 continue
472 while bs < blen:
474 while bs < blen:
473 s = b[bs]
475 s = b[bs]
474 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
476 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
475 ret[s] = self.map[s]
477 ret[s] = self.map[s]
476 else:
478 else:
477 break
479 break
478 bs += 1
480 bs += 1
479 return ret
481 return ret
480
482
481 def walk(self, files = None, match = util.always, dc=None):
483 def walk(self, files = None, match = util.always, dc=None):
482 self.read()
484 self.read()
483
485
484 # walk all files by default
486 # walk all files by default
485 if not files:
487 if not files:
486 files = [self.root]
488 files = [self.root]
487 if not dc:
489 if not dc:
488 dc = self.map.copy()
490 dc = self.map.copy()
489 elif not dc:
491 elif not dc:
490 dc = self.filterfiles(files)
492 dc = self.filterfiles(files)
491
493
492 known = {'.hg': 1}
494 known = {'.hg': 1}
493 def seen(fn):
495 def seen(fn):
494 if fn in known: return True
496 if fn in known: return True
495 known[fn] = 1
497 known[fn] = 1
496 def traverse():
498 def traverse():
497 for ff in util.unique(files):
499 for ff in util.unique(files):
498 f = os.path.join(self.root, ff)
500 f = os.path.join(self.root, ff)
499 try:
501 try:
500 st = os.stat(f)
502 st = os.stat(f)
501 except OSError, inst:
503 except OSError, inst:
502 if ff not in dc: self.ui.warn('%s: %s\n' % (
504 if ff not in dc: self.ui.warn('%s: %s\n' % (
503 util.pathto(self.getcwd(), ff),
505 util.pathto(self.getcwd(), ff),
504 inst.strerror))
506 inst.strerror))
505 continue
507 continue
506 if stat.S_ISDIR(st.st_mode):
508 if stat.S_ISDIR(st.st_mode):
507 for dir, subdirs, fl in os.walk(f):
509 for dir, subdirs, fl in os.walk(f):
508 d = dir[len(self.root) + 1:]
510 d = dir[len(self.root) + 1:]
509 nd = util.normpath(d)
511 nd = util.normpath(d)
510 if nd == '.': nd = ''
512 if nd == '.': nd = ''
511 if seen(nd):
513 if seen(nd):
512 subdirs[:] = []
514 subdirs[:] = []
513 continue
515 continue
514 for sd in subdirs:
516 for sd in subdirs:
515 ds = os.path.join(nd, sd +'/')
517 ds = os.path.join(nd, sd +'/')
516 if self.ignore(ds) or not match(ds):
518 if self.ignore(ds) or not match(ds):
517 subdirs.remove(sd)
519 subdirs.remove(sd)
518 subdirs.sort()
520 subdirs.sort()
519 fl.sort()
521 fl.sort()
520 for fn in fl:
522 for fn in fl:
521 fn = util.pconvert(os.path.join(d, fn))
523 fn = util.pconvert(os.path.join(d, fn))
522 yield 'f', fn
524 yield 'f', fn
523 elif stat.S_ISREG(st.st_mode):
525 elif stat.S_ISREG(st.st_mode):
524 yield 'f', ff
526 yield 'f', ff
525 else:
527 else:
526 kind = 'unknown'
528 kind = 'unknown'
527 if stat.S_ISCHR(st.st_mode): kind = 'character device'
529 if stat.S_ISCHR(st.st_mode): kind = 'character device'
528 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
530 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
529 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
531 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
530 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
532 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
531 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
533 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
532 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
534 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
533 util.pathto(self.getcwd(), ff),
535 util.pathto(self.getcwd(), ff),
534 kind))
536 kind))
535
537
536 ks = dc.keys()
538 ks = dc.keys()
537 ks.sort()
539 ks.sort()
538 for k in ks:
540 for k in ks:
539 yield 'm', k
541 yield 'm', k
540
542
541 # yield only files that match: all in dirstate, others only if
543 # yield only files that match: all in dirstate, others only if
542 # not in .hgignore
544 # not in .hgignore
543
545
544 for src, fn in util.unique(traverse()):
546 for src, fn in util.unique(traverse()):
545 fn = util.normpath(fn)
547 fn = util.normpath(fn)
546 if seen(fn): continue
548 if seen(fn): continue
547 if fn not in dc and self.ignore(fn):
549 if fn not in dc and self.ignore(fn):
548 continue
550 continue
549 if match(fn):
551 if match(fn):
550 yield src, fn
552 yield src, fn
551
553
552 def changes(self, files=None, match=util.always):
554 def changes(self, files=None, match=util.always):
553 self.read()
555 self.read()
554 if not files:
556 if not files:
555 dc = self.map.copy()
557 dc = self.map.copy()
556 else:
558 else:
557 dc = self.filterfiles(files)
559 dc = self.filterfiles(files)
558 lookup, modified, added, unknown = [], [], [], []
560 lookup, modified, added, unknown = [], [], [], []
559 removed, deleted = [], []
561 removed, deleted = [], []
560
562
561 for src, fn in self.walk(files, match, dc=dc):
563 for src, fn in self.walk(files, match, dc=dc):
562 try:
564 try:
563 s = os.stat(os.path.join(self.root, fn))
565 s = os.stat(os.path.join(self.root, fn))
564 except OSError:
566 except OSError:
565 continue
567 continue
566 if not stat.S_ISREG(s.st_mode):
568 if not stat.S_ISREG(s.st_mode):
567 continue
569 continue
568 c = dc.get(fn)
570 c = dc.get(fn)
569 if c:
571 if c:
570 del dc[fn]
572 del dc[fn]
571 if c[0] == 'm':
573 if c[0] == 'm':
572 modified.append(fn)
574 modified.append(fn)
573 elif c[0] == 'a':
575 elif c[0] == 'a':
574 added.append(fn)
576 added.append(fn)
575 elif c[0] == 'r':
577 elif c[0] == 'r':
576 unknown.append(fn)
578 unknown.append(fn)
577 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
579 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
578 modified.append(fn)
580 modified.append(fn)
579 elif c[3] != s.st_mtime:
581 elif c[3] != s.st_mtime:
580 lookup.append(fn)
582 lookup.append(fn)
581 else:
583 else:
582 unknown.append(fn)
584 unknown.append(fn)
583
585
584 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
586 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
585 if c[0] == 'r':
587 if c[0] == 'r':
586 removed.append(fn)
588 removed.append(fn)
587 else:
589 else:
588 deleted.append(fn)
590 deleted.append(fn)
589 return (lookup, modified, added, removed + deleted, unknown)
591 return (lookup, modified, added, removed + deleted, unknown)
590
592
591 # used to avoid circular references so destructors work
593 # used to avoid circular references so destructors work
592 def opener(base):
594 def opener(base):
593 p = base
595 p = base
594 def o(path, mode="r"):
596 def o(path, mode="r"):
595 if p.startswith("http://"):
597 if p.startswith("http://"):
596 f = os.path.join(p, urllib.quote(path))
598 f = os.path.join(p, urllib.quote(path))
597 return httprangereader.httprangereader(f)
599 return httprangereader.httprangereader(f)
598
600
599 f = os.path.join(p, path)
601 f = os.path.join(p, path)
600
602
601 mode += "b" # for that other OS
603 mode += "b" # for that other OS
602
604
603 if mode[0] != "r":
605 if mode[0] != "r":
604 try:
606 try:
605 s = os.stat(f)
607 s = os.stat(f)
606 except OSError:
608 except OSError:
607 d = os.path.dirname(f)
609 d = os.path.dirname(f)
608 if not os.path.isdir(d):
610 if not os.path.isdir(d):
609 os.makedirs(d)
611 os.makedirs(d)
610 else:
612 else:
611 if s.st_nlink > 1:
613 if s.st_nlink > 1:
612 file(f + ".tmp", "wb").write(file(f, "rb").read())
614 file(f + ".tmp", "wb").write(file(f, "rb").read())
613 util.rename(f+".tmp", f)
615 util.rename(f+".tmp", f)
614
616
615 return file(f, mode)
617 return file(f, mode)
616
618
617 return o
619 return o
618
620
619 class RepoError(Exception): pass
621 class RepoError(Exception): pass
620
622
621 class localrepository:
623 class localrepository:
622 def __init__(self, ui, path=None, create=0):
624 def __init__(self, ui, path=None, create=0):
623 self.remote = 0
625 self.remote = 0
624 if path and path.startswith("http://"):
626 if path and path.startswith("http://"):
625 self.remote = 1
627 self.remote = 1
626 self.path = path
628 self.path = path
627 else:
629 else:
628 if not path:
630 if not path:
629 p = os.getcwd()
631 p = os.getcwd()
630 while not os.path.isdir(os.path.join(p, ".hg")):
632 while not os.path.isdir(os.path.join(p, ".hg")):
631 oldp = p
633 oldp = p
632 p = os.path.dirname(p)
634 p = os.path.dirname(p)
633 if p == oldp: raise RepoError("no repo found")
635 if p == oldp: raise RepoError("no repo found")
634 path = p
636 path = p
635 self.path = os.path.join(path, ".hg")
637 self.path = os.path.join(path, ".hg")
636
638
637 if not create and not os.path.isdir(self.path):
639 if not create and not os.path.isdir(self.path):
638 raise RepoError("repository %s not found" % self.path)
640 raise RepoError("repository %s not found" % self.path)
639
641
640 self.root = os.path.abspath(path)
642 self.root = os.path.abspath(path)
641 self.ui = ui
643 self.ui = ui
642
644
643 if create:
645 if create:
644 os.mkdir(self.path)
646 os.mkdir(self.path)
645 os.mkdir(self.join("data"))
647 os.mkdir(self.join("data"))
646
648
647 self.opener = opener(self.path)
649 self.opener = opener(self.path)
648 self.wopener = opener(self.root)
650 self.wopener = opener(self.root)
649 self.manifest = manifest(self.opener)
651 self.manifest = manifest(self.opener)
650 self.changelog = changelog(self.opener)
652 self.changelog = changelog(self.opener)
651 self.tagscache = None
653 self.tagscache = None
652 self.nodetagscache = None
654 self.nodetagscache = None
653
655
654 if not self.remote:
656 if not self.remote:
655 self.dirstate = dirstate(self.opener, ui, self.root)
657 self.dirstate = dirstate(self.opener, ui, self.root)
656 try:
658 try:
657 self.ui.readconfig(self.opener("hgrc"))
659 self.ui.readconfig(self.opener("hgrc"))
658 except IOError: pass
660 except IOError: pass
659
661
660 def hook(self, name, **args):
662 def hook(self, name, **args):
661 s = self.ui.config("hooks", name)
663 s = self.ui.config("hooks", name)
662 if s:
664 if s:
663 self.ui.note("running hook %s: %s\n" % (name, s))
665 self.ui.note("running hook %s: %s\n" % (name, s))
664 old = {}
666 old = {}
665 for k, v in args.items():
667 for k, v in args.items():
666 k = k.upper()
668 k = k.upper()
667 old[k] = os.environ.get(k, None)
669 old[k] = os.environ.get(k, None)
668 os.environ[k] = v
670 os.environ[k] = v
669
671
670 r = os.system(s)
672 r = os.system(s)
671
673
672 for k, v in old.items():
674 for k, v in old.items():
673 if v != None:
675 if v != None:
674 os.environ[k] = v
676 os.environ[k] = v
675 else:
677 else:
676 del os.environ[k]
678 del os.environ[k]
677
679
678 if r:
680 if r:
679 self.ui.warn("abort: %s hook failed with status %d!\n" %
681 self.ui.warn("abort: %s hook failed with status %d!\n" %
680 (name, r))
682 (name, r))
681 return False
683 return False
682 return True
684 return True
683
685
684 def tags(self):
686 def tags(self):
685 '''return a mapping of tag to node'''
687 '''return a mapping of tag to node'''
686 if not self.tagscache:
688 if not self.tagscache:
687 self.tagscache = {}
689 self.tagscache = {}
688 def addtag(self, k, n):
690 def addtag(self, k, n):
689 try:
691 try:
690 bin_n = bin(n)
692 bin_n = bin(n)
691 except TypeError:
693 except TypeError:
692 bin_n = ''
694 bin_n = ''
693 self.tagscache[k.strip()] = bin_n
695 self.tagscache[k.strip()] = bin_n
694
696
695 try:
697 try:
696 # read each head of the tags file, ending with the tip
698 # read each head of the tags file, ending with the tip
697 # and add each tag found to the map, with "newer" ones
699 # and add each tag found to the map, with "newer" ones
698 # taking precedence
700 # taking precedence
699 fl = self.file(".hgtags")
701 fl = self.file(".hgtags")
700 h = fl.heads()
702 h = fl.heads()
701 h.reverse()
703 h.reverse()
702 for r in h:
704 for r in h:
703 for l in fl.read(r).splitlines():
705 for l in fl.read(r).splitlines():
704 if l:
706 if l:
705 n, k = l.split(" ", 1)
707 n, k = l.split(" ", 1)
706 addtag(self, k, n)
708 addtag(self, k, n)
707 except KeyError:
709 except KeyError:
708 pass
710 pass
709
711
710 try:
712 try:
711 f = self.opener("localtags")
713 f = self.opener("localtags")
712 for l in f:
714 for l in f:
713 n, k = l.split(" ", 1)
715 n, k = l.split(" ", 1)
714 addtag(self, k, n)
716 addtag(self, k, n)
715 except IOError:
717 except IOError:
716 pass
718 pass
717
719
718 self.tagscache['tip'] = self.changelog.tip()
720 self.tagscache['tip'] = self.changelog.tip()
719
721
720 return self.tagscache
722 return self.tagscache
721
723
722 def tagslist(self):
724 def tagslist(self):
723 '''return a list of tags ordered by revision'''
725 '''return a list of tags ordered by revision'''
724 l = []
726 l = []
725 for t, n in self.tags().items():
727 for t, n in self.tags().items():
726 try:
728 try:
727 r = self.changelog.rev(n)
729 r = self.changelog.rev(n)
728 except:
730 except:
729 r = -2 # sort to the beginning of the list if unknown
731 r = -2 # sort to the beginning of the list if unknown
730 l.append((r,t,n))
732 l.append((r,t,n))
731 l.sort()
733 l.sort()
732 return [(t,n) for r,t,n in l]
734 return [(t,n) for r,t,n in l]
733
735
734 def nodetags(self, node):
736 def nodetags(self, node):
735 '''return the tags associated with a node'''
737 '''return the tags associated with a node'''
736 if not self.nodetagscache:
738 if not self.nodetagscache:
737 self.nodetagscache = {}
739 self.nodetagscache = {}
738 for t,n in self.tags().items():
740 for t,n in self.tags().items():
739 self.nodetagscache.setdefault(n,[]).append(t)
741 self.nodetagscache.setdefault(n,[]).append(t)
740 return self.nodetagscache.get(node, [])
742 return self.nodetagscache.get(node, [])
741
743
742 def lookup(self, key):
744 def lookup(self, key):
743 try:
745 try:
744 return self.tags()[key]
746 return self.tags()[key]
745 except KeyError:
747 except KeyError:
746 try:
748 try:
747 return self.changelog.lookup(key)
749 return self.changelog.lookup(key)
748 except:
750 except:
749 raise RepoError("unknown revision '%s'" % key)
751 raise RepoError("unknown revision '%s'" % key)
750
752
751 def dev(self):
753 def dev(self):
752 if self.remote: return -1
754 if self.remote: return -1
753 return os.stat(self.path).st_dev
755 return os.stat(self.path).st_dev
754
756
755 def local(self):
757 def local(self):
756 return not self.remote
758 return not self.remote
757
759
758 def join(self, f):
760 def join(self, f):
759 return os.path.join(self.path, f)
761 return os.path.join(self.path, f)
760
762
761 def wjoin(self, f):
763 def wjoin(self, f):
762 return os.path.join(self.root, f)
764 return os.path.join(self.root, f)
763
765
764 def file(self, f):
766 def file(self, f):
765 if f[0] == '/': f = f[1:]
767 if f[0] == '/': f = f[1:]
766 return filelog(self.opener, f)
768 return filelog(self.opener, f)
767
769
768 def getcwd(self):
770 def getcwd(self):
769 return self.dirstate.getcwd()
771 return self.dirstate.getcwd()
770
772
771 def wfile(self, f, mode='r'):
773 def wfile(self, f, mode='r'):
772 return self.wopener(f, mode)
774 return self.wopener(f, mode)
773
775
774 def transaction(self):
776 def transaction(self):
775 # save dirstate for undo
777 # save dirstate for undo
776 try:
778 try:
777 ds = self.opener("dirstate").read()
779 ds = self.opener("dirstate").read()
778 except IOError:
780 except IOError:
779 ds = ""
781 ds = ""
780 self.opener("journal.dirstate", "w").write(ds)
782 self.opener("journal.dirstate", "w").write(ds)
781
783
782 def after():
784 def after():
783 util.rename(self.join("journal"), self.join("undo"))
785 util.rename(self.join("journal"), self.join("undo"))
784 util.rename(self.join("journal.dirstate"),
786 util.rename(self.join("journal.dirstate"),
785 self.join("undo.dirstate"))
787 self.join("undo.dirstate"))
786
788
787 return transaction.transaction(self.ui.warn, self.opener,
789 return transaction.transaction(self.ui.warn, self.opener,
788 self.join("journal"), after)
790 self.join("journal"), after)
789
791
790 def recover(self):
792 def recover(self):
791 lock = self.lock()
793 lock = self.lock()
792 if os.path.exists(self.join("journal")):
794 if os.path.exists(self.join("journal")):
793 self.ui.status("rolling back interrupted transaction\n")
795 self.ui.status("rolling back interrupted transaction\n")
794 return transaction.rollback(self.opener, self.join("journal"))
796 return transaction.rollback(self.opener, self.join("journal"))
795 else:
797 else:
796 self.ui.warn("no interrupted transaction available\n")
798 self.ui.warn("no interrupted transaction available\n")
797
799
798 def undo(self):
800 def undo(self):
799 lock = self.lock()
801 lock = self.lock()
800 if os.path.exists(self.join("undo")):
802 if os.path.exists(self.join("undo")):
801 self.ui.status("rolling back last transaction\n")
803 self.ui.status("rolling back last transaction\n")
802 transaction.rollback(self.opener, self.join("undo"))
804 transaction.rollback(self.opener, self.join("undo"))
803 self.dirstate = None
805 self.dirstate = None
804 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
806 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
805 self.dirstate = dirstate(self.opener, self.ui, self.root)
807 self.dirstate = dirstate(self.opener, self.ui, self.root)
806 else:
808 else:
807 self.ui.warn("no undo information available\n")
809 self.ui.warn("no undo information available\n")
808
810
809 def lock(self, wait = 1):
811 def lock(self, wait = 1):
810 try:
812 try:
811 return lock.lock(self.join("lock"), 0)
813 return lock.lock(self.join("lock"), 0)
812 except lock.LockHeld, inst:
814 except lock.LockHeld, inst:
813 if wait:
815 if wait:
814 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
816 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
815 return lock.lock(self.join("lock"), wait)
817 return lock.lock(self.join("lock"), wait)
816 raise inst
818 raise inst
817
819
818 def rawcommit(self, files, text, user, date, p1=None, p2=None):
820 def rawcommit(self, files, text, user, date, p1=None, p2=None):
819 orig_parent = self.dirstate.parents()[0] or nullid
821 orig_parent = self.dirstate.parents()[0] or nullid
820 p1 = p1 or self.dirstate.parents()[0] or nullid
822 p1 = p1 or self.dirstate.parents()[0] or nullid
821 p2 = p2 or self.dirstate.parents()[1] or nullid
823 p2 = p2 or self.dirstate.parents()[1] or nullid
822 c1 = self.changelog.read(p1)
824 c1 = self.changelog.read(p1)
823 c2 = self.changelog.read(p2)
825 c2 = self.changelog.read(p2)
824 m1 = self.manifest.read(c1[0])
826 m1 = self.manifest.read(c1[0])
825 mf1 = self.manifest.readflags(c1[0])
827 mf1 = self.manifest.readflags(c1[0])
826 m2 = self.manifest.read(c2[0])
828 m2 = self.manifest.read(c2[0])
827 changed = []
829 changed = []
828
830
829 if orig_parent == p1:
831 if orig_parent == p1:
830 update_dirstate = 1
832 update_dirstate = 1
831 else:
833 else:
832 update_dirstate = 0
834 update_dirstate = 0
833
835
834 tr = self.transaction()
836 tr = self.transaction()
835 mm = m1.copy()
837 mm = m1.copy()
836 mfm = mf1.copy()
838 mfm = mf1.copy()
837 linkrev = self.changelog.count()
839 linkrev = self.changelog.count()
838 for f in files:
840 for f in files:
839 try:
841 try:
840 t = self.wfile(f).read()
842 t = self.wfile(f).read()
841 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
843 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
842 r = self.file(f)
844 r = self.file(f)
843 mfm[f] = tm
845 mfm[f] = tm
844
846
845 fp1 = m1.get(f, nullid)
847 fp1 = m1.get(f, nullid)
846 fp2 = m2.get(f, nullid)
848 fp2 = m2.get(f, nullid)
847
849
848 # is the same revision on two branches of a merge?
850 # is the same revision on two branches of a merge?
849 if fp2 == fp1:
851 if fp2 == fp1:
850 fp2 = nullid
852 fp2 = nullid
851
853
852 if fp2 != nullid:
854 if fp2 != nullid:
853 # is one parent an ancestor of the other?
855 # is one parent an ancestor of the other?
854 fpa = r.ancestor(fp1, fp2)
856 fpa = r.ancestor(fp1, fp2)
855 if fpa == fp1:
857 if fpa == fp1:
856 fp1, fp2 = fp2, nullid
858 fp1, fp2 = fp2, nullid
857 elif fpa == fp2:
859 elif fpa == fp2:
858 fp2 = nullid
860 fp2 = nullid
859
861
860 # is the file unmodified from the parent?
862 # is the file unmodified from the parent?
861 if t == r.read(fp1):
863 if t == r.read(fp1):
862 # record the proper existing parent in manifest
864 # record the proper existing parent in manifest
863 # no need to add a revision
865 # no need to add a revision
864 mm[f] = fp1
866 mm[f] = fp1
865 continue
867 continue
866
868
867 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
869 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
868 changed.append(f)
870 changed.append(f)
869 if update_dirstate:
871 if update_dirstate:
870 self.dirstate.update([f], "n")
872 self.dirstate.update([f], "n")
871 except IOError:
873 except IOError:
872 try:
874 try:
873 del mm[f]
875 del mm[f]
874 del mfm[f]
876 del mfm[f]
875 if update_dirstate:
877 if update_dirstate:
876 self.dirstate.forget([f])
878 self.dirstate.forget([f])
877 except:
879 except:
878 # deleted from p2?
880 # deleted from p2?
879 pass
881 pass
880
882
881 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
883 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
882 user = user or self.ui.username()
884 user = user or self.ui.username()
883 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
885 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
884 tr.close()
886 tr.close()
885 if update_dirstate:
887 if update_dirstate:
886 self.dirstate.setparents(n, nullid)
888 self.dirstate.setparents(n, nullid)
887
889
888 def commit(self, files = None, text = "", user = None, date = None,
890 def commit(self, files = None, text = "", user = None, date = None,
889 match = util.always, force=False):
891 match = util.always, force=False):
890 commit = []
892 commit = []
891 remove = []
893 remove = []
892 changed = []
894 changed = []
893
895
894 if files:
896 if files:
895 for f in files:
897 for f in files:
896 s = self.dirstate.state(f)
898 s = self.dirstate.state(f)
897 if s in 'nmai':
899 if s in 'nmai':
898 commit.append(f)
900 commit.append(f)
899 elif s == 'r':
901 elif s == 'r':
900 remove.append(f)
902 remove.append(f)
901 else:
903 else:
902 self.ui.warn("%s not tracked!\n" % f)
904 self.ui.warn("%s not tracked!\n" % f)
903 else:
905 else:
904 (c, a, d, u) = self.changes(match = match)
906 (c, a, d, u) = self.changes(match = match)
905 commit = c + a
907 commit = c + a
906 remove = d
908 remove = d
907
909
908 p1, p2 = self.dirstate.parents()
910 p1, p2 = self.dirstate.parents()
909 c1 = self.changelog.read(p1)
911 c1 = self.changelog.read(p1)
910 c2 = self.changelog.read(p2)
912 c2 = self.changelog.read(p2)
911 m1 = self.manifest.read(c1[0])
913 m1 = self.manifest.read(c1[0])
912 mf1 = self.manifest.readflags(c1[0])
914 mf1 = self.manifest.readflags(c1[0])
913 m2 = self.manifest.read(c2[0])
915 m2 = self.manifest.read(c2[0])
914
916
915 if not commit and not remove and not force and p2 == nullid:
917 if not commit and not remove and not force and p2 == nullid:
916 self.ui.status("nothing changed\n")
918 self.ui.status("nothing changed\n")
917 return None
919 return None
918
920
919 if not self.hook("precommit"):
921 if not self.hook("precommit"):
920 return None
922 return None
921
923
922 lock = self.lock()
924 lock = self.lock()
923 tr = self.transaction()
925 tr = self.transaction()
924
926
925 # check in files
927 # check in files
926 new = {}
928 new = {}
927 linkrev = self.changelog.count()
929 linkrev = self.changelog.count()
928 commit.sort()
930 commit.sort()
929 for f in commit:
931 for f in commit:
930 self.ui.note(f + "\n")
932 self.ui.note(f + "\n")
931 try:
933 try:
932 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
934 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
933 t = self.wfile(f).read()
935 t = self.wfile(f).read()
934 except IOError:
936 except IOError:
935 self.ui.warn("trouble committing %s!\n" % f)
937 self.ui.warn("trouble committing %s!\n" % f)
936 raise
938 raise
937
939
938 meta = {}
940 meta = {}
939 cp = self.dirstate.copied(f)
941 cp = self.dirstate.copied(f)
940 if cp:
942 if cp:
941 meta["copy"] = cp
943 meta["copy"] = cp
942 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
944 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
943 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
945 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
944
946
945 r = self.file(f)
947 r = self.file(f)
946 fp1 = m1.get(f, nullid)
948 fp1 = m1.get(f, nullid)
947 fp2 = m2.get(f, nullid)
949 fp2 = m2.get(f, nullid)
948
950
949 # is the same revision on two branches of a merge?
951 # is the same revision on two branches of a merge?
950 if fp2 == fp1:
952 if fp2 == fp1:
951 fp2 = nullid
953 fp2 = nullid
952
954
953 if fp2 != nullid:
955 if fp2 != nullid:
954 # is one parent an ancestor of the other?
956 # is one parent an ancestor of the other?
955 fpa = r.ancestor(fp1, fp2)
957 fpa = r.ancestor(fp1, fp2)
956 if fpa == fp1:
958 if fpa == fp1:
957 fp1, fp2 = fp2, nullid
959 fp1, fp2 = fp2, nullid
958 elif fpa == fp2:
960 elif fpa == fp2:
959 fp2 = nullid
961 fp2 = nullid
960
962
961 # is the file unmodified from the parent?
963 # is the file unmodified from the parent?
962 if not meta and t == r.read(fp1):
964 if not meta and t == r.read(fp1):
963 # record the proper existing parent in manifest
965 # record the proper existing parent in manifest
964 # no need to add a revision
966 # no need to add a revision
965 new[f] = fp1
967 new[f] = fp1
966 continue
968 continue
967
969
968 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
970 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
969 # remember what we've added so that we can later calculate
971 # remember what we've added so that we can later calculate
970 # the files to pull from a set of changesets
972 # the files to pull from a set of changesets
971 changed.append(f)
973 changed.append(f)
972
974
973 # update manifest
975 # update manifest
974 m1.update(new)
976 m1.update(new)
975 for f in remove:
977 for f in remove:
976 if f in m1:
978 if f in m1:
977 del m1[f]
979 del m1[f]
978 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
980 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
979 (new, remove))
981 (new, remove))
980
982
981 # add changeset
983 # add changeset
982 new = new.keys()
984 new = new.keys()
983 new.sort()
985 new.sort()
984
986
985 if not text:
987 if not text:
986 edittext = ""
988 edittext = ""
987 if p2 != nullid:
989 if p2 != nullid:
988 edittext += "HG: branch merge\n"
990 edittext += "HG: branch merge\n"
989 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
991 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
990 edittext += "".join(["HG: changed %s\n" % f for f in changed])
992 edittext += "".join(["HG: changed %s\n" % f for f in changed])
991 edittext += "".join(["HG: removed %s\n" % f for f in remove])
993 edittext += "".join(["HG: removed %s\n" % f for f in remove])
992 if not changed and not remove:
994 if not changed and not remove:
993 edittext += "HG: no files changed\n"
995 edittext += "HG: no files changed\n"
994 edittext = self.ui.edit(edittext)
996 edittext = self.ui.edit(edittext)
995 if not edittext.rstrip():
997 if not edittext.rstrip():
996 return None
998 return None
997 text = edittext
999 text = edittext
998
1000
999 user = user or self.ui.username()
1001 user = user or self.ui.username()
1000 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
1002 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
1001 tr.close()
1003 tr.close()
1002
1004
1003 self.dirstate.setparents(n)
1005 self.dirstate.setparents(n)
1004 self.dirstate.update(new, "n")
1006 self.dirstate.update(new, "n")
1005 self.dirstate.forget(remove)
1007 self.dirstate.forget(remove)
1006
1008
1007 if not self.hook("commit", node=hex(n)):
1009 if not self.hook("commit", node=hex(n)):
1008 return None
1010 return None
1009 return n
1011 return n
1010
1012
1011 def walk(self, node = None, files = [], match = util.always):
1013 def walk(self, node = None, files = [], match = util.always):
1012 if node:
1014 if node:
1013 for fn in self.manifest.read(self.changelog.read(node)[0]):
1015 for fn in self.manifest.read(self.changelog.read(node)[0]):
1014 if match(fn): yield 'm', fn
1016 if match(fn): yield 'm', fn
1015 else:
1017 else:
1016 for src, fn in self.dirstate.walk(files, match):
1018 for src, fn in self.dirstate.walk(files, match):
1017 yield src, fn
1019 yield src, fn
1018
1020
1019 def changes(self, node1 = None, node2 = None, files = [],
1021 def changes(self, node1 = None, node2 = None, files = [],
1020 match = util.always):
1022 match = util.always):
1021 mf2, u = None, []
1023 mf2, u = None, []
1022
1024
1023 def fcmp(fn, mf):
1025 def fcmp(fn, mf):
1024 t1 = self.wfile(fn).read()
1026 t1 = self.wfile(fn).read()
1025 t2 = self.file(fn).read(mf.get(fn, nullid))
1027 t2 = self.file(fn).read(mf.get(fn, nullid))
1026 return cmp(t1, t2)
1028 return cmp(t1, t2)
1027
1029
1028 def mfmatches(node):
1030 def mfmatches(node):
1029 mf = dict(self.manifest.read(node))
1031 mf = dict(self.manifest.read(node))
1030 for fn in mf.keys():
1032 for fn in mf.keys():
1031 if not match(fn):
1033 if not match(fn):
1032 del mf[fn]
1034 del mf[fn]
1033 return mf
1035 return mf
1034
1036
1035 # are we comparing the working directory?
1037 # are we comparing the working directory?
1036 if not node2:
1038 if not node2:
1037 l, c, a, d, u = self.dirstate.changes(files, match)
1039 l, c, a, d, u = self.dirstate.changes(files, match)
1038
1040
1039 # are we comparing working dir against its parent?
1041 # are we comparing working dir against its parent?
1040 if not node1:
1042 if not node1:
1041 if l:
1043 if l:
1042 # do a full compare of any files that might have changed
1044 # do a full compare of any files that might have changed
1043 change = self.changelog.read(self.dirstate.parents()[0])
1045 change = self.changelog.read(self.dirstate.parents()[0])
1044 mf2 = mfmatches(change[0])
1046 mf2 = mfmatches(change[0])
1045 for f in l:
1047 for f in l:
1046 if fcmp(f, mf2):
1048 if fcmp(f, mf2):
1047 c.append(f)
1049 c.append(f)
1048
1050
1049 for l in c, a, d, u:
1051 for l in c, a, d, u:
1050 l.sort()
1052 l.sort()
1051
1053
1052 return (c, a, d, u)
1054 return (c, a, d, u)
1053
1055
1054 # are we comparing working dir against non-tip?
1056 # are we comparing working dir against non-tip?
1055 # generate a pseudo-manifest for the working dir
1057 # generate a pseudo-manifest for the working dir
1056 if not node2:
1058 if not node2:
1057 if not mf2:
1059 if not mf2:
1058 change = self.changelog.read(self.dirstate.parents()[0])
1060 change = self.changelog.read(self.dirstate.parents()[0])
1059 mf2 = mfmatches(change[0])
1061 mf2 = mfmatches(change[0])
1060 for f in a + c + l:
1062 for f in a + c + l:
1061 mf2[f] = ""
1063 mf2[f] = ""
1062 for f in d:
1064 for f in d:
1063 if f in mf2: del mf2[f]
1065 if f in mf2: del mf2[f]
1064 else:
1066 else:
1065 change = self.changelog.read(node2)
1067 change = self.changelog.read(node2)
1066 mf2 = mfmatches(change[0])
1068 mf2 = mfmatches(change[0])
1067
1069
1068 # flush lists from dirstate before comparing manifests
1070 # flush lists from dirstate before comparing manifests
1069 c, a = [], []
1071 c, a = [], []
1070
1072
1071 change = self.changelog.read(node1)
1073 change = self.changelog.read(node1)
1072 mf1 = mfmatches(change[0])
1074 mf1 = mfmatches(change[0])
1073
1075
1074 for fn in mf2:
1076 for fn in mf2:
1075 if mf1.has_key(fn):
1077 if mf1.has_key(fn):
1076 if mf1[fn] != mf2[fn]:
1078 if mf1[fn] != mf2[fn]:
1077 if mf2[fn] != "" or fcmp(fn, mf1):
1079 if mf2[fn] != "" or fcmp(fn, mf1):
1078 c.append(fn)
1080 c.append(fn)
1079 del mf1[fn]
1081 del mf1[fn]
1080 else:
1082 else:
1081 a.append(fn)
1083 a.append(fn)
1082
1084
1083 d = mf1.keys()
1085 d = mf1.keys()
1084
1086
1085 for l in c, a, d, u:
1087 for l in c, a, d, u:
1086 l.sort()
1088 l.sort()
1087
1089
1088 return (c, a, d, u)
1090 return (c, a, d, u)

    def add(self, list):
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn("%s does not exist!\n" % f)
            elif not os.path.isfile(p):
                self.ui.warn("%s not added: only files supported currently\n" % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not os.path.exists(p):
            self.ui.warn("%s does not exist!\n" % dest)
        elif not os.path.isfile(p):
            self.ui.warn("copy failed: %s is not a file\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        return self.changelog.heads()

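    # Note on the single-letter dirstate states used above (a descriptive
    # summary, not new behaviour): they include 'n' (normal/tracked),
    # 'a' (added), 'r' (removed), 'm' (merged, used by update() below) and
    # '?' (untracked).  add() moves a file to 'a', remove() to 'r', and
    # forget() simply drops it from the dirstate again.
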
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                         aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
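
    # Illustrative call (not in the original module), matching the graph in
    # the comment above; the tag names are hypothetical:
    #
    #   branches = repo.branchlookup(repo.heads())
    #   # -> roughly { <node for e>: ['2.6.13'], ... }
    #
    # i.e. each head maps to the tags that best describe its branch, with
    # tags that are reachable from another returned tag filtered out.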

    def branches(self, nodes):
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def newer(self, nodes):
        m = {}
        nl = []
        pm = {}
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl

    def findincoming(self, remote, base=None, heads=None):
        m = self.changelog.nodemap
        search = []
        fetch = []
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status("searching for changes\n")

        if not heads:
            heads = remote.heads()

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return None

        rep = {}
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.append(n[1]) # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                              (reqcnt, " ".join(map(short, r))))
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        if b[0] not in m and b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.append(p)
                        base[i] = 1
                    else:
                        self.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                raise RepoError("already have changeset " + short(f[:4]))

        if base.keys() == [nullid]:
            self.ui.warn("warning: pulling from an unrelated repository!\n")

        self.ui.note("adding new changesets starting at " +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug("%d total queries\n" % reqcnt)

        return fetch
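
    # A sketch of the discovery exchange driven above (illustrative only;
    # remote stands for any remote repository object such as an
    # httprepository or sshrepository instance defined later in this file):
    #
    #   fetch = repo.findincoming(remote)
    #   # None  -> nothing new on the remote side
    #   # list  -> roots of the missing changesets, discovered via
    #   #          remote.heads(), remote.branches() and a binary search
    #   #          over linear ranges with remote.between()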

    def findoutgoing(self, remote, base=None, heads=None):
        if base == None:
            base = {}
            self.findincoming(remote, base, heads)

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)

        # this is the set of all roots we have to push
        return subset

    def pull(self, remote):
        lock = self.lock()

        # if we have an empty repo, fetch everything
        if self.changelog.tip() == nullid:
            self.ui.status("requesting all changes\n")
            fetch = [nullid]
        else:
            fetch = self.findincoming(remote)

        if not fetch:
            self.ui.status("no changes found\n")
            return 1

        cg = remote.changegroup(fetch)
        return self.addchangegroup(cg)

    def push(self, remote, force=False):
        lock = remote.lock()

        base = {}
        heads = remote.heads()
        inc = self.findincoming(remote, base, heads)
        if not force and inc:
            self.ui.warn("abort: unsynced remote changes!\n")
            self.ui.status("(did you forget to sync? use push -f to force)\n")
            return 1

        update = self.findoutgoing(remote, base)
        if not update:
            self.ui.status("no changes found\n")
            return 1
        elif not force:
            if len(heads) < len(self.changelog.heads()):
                self.ui.warn("abort: push creates new remote branches!\n")
                self.ui.status("(did you forget to merge?" +
                               " use push -f to force)\n")
                return 1

        cg = self.changegroup(update)
        return remote.addchangegroup(cg)

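    # The two operations above are symmetric (a descriptive note, not new
    # code): pull asks the remote for a changegroup rooted at the changesets
    # we are missing and feeds it to self.addchangegroup(), while push
    # builds the changegroup locally from findoutgoing() and feeds it to
    # remote.addchangegroup().
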
    def changegroup(self, basenodes):
        class genread:
            def __init__(self, generator):
                self.g = generator
                self.buf = ""
            def fillbuf(self):
                self.buf += "".join(self.g)

            def read(self, l):
                while l > len(self.buf):
                    try:
                        self.buf += self.g.next()
                    except StopIteration:
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        def gengroup():
            nodes = self.newer(basenodes)

            # construct the link map
            linkmap = {}
            for n in nodes:
                linkmap[self.changelog.rev(n)] = n

            # construct a list of all changed files
            changed = {}
            for n in nodes:
                c = self.changelog.read(n)
                for f in c[3]:
                    changed[f] = 1
            changed = changed.keys()
            changed.sort()

            # the changegroup is changesets + manifests + all file revs
            revs = [ self.changelog.rev(n) for n in nodes ]

            for y in self.changelog.group(linkmap): yield y
            for y in self.manifest.group(linkmap): yield y
            for f in changed:
                yield struct.pack(">l", len(f) + 4) + f
                g = self.file(f).group(linkmap)
                for y in g:
                    yield y

            yield struct.pack(">l", 0)

        return genread(gengroup())

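    # Wire framing used by changegroup() above, shown on a hypothetical
    # filename (a sketch, not part of the module): every chunk is preceded
    # by a big-endian 4-byte length that counts the length field itself,
    # and a length of 4 or less acts as a terminator (see getchunk below).
    #
    #   >>> struct.pack(">l", len("foo.c") + 4) + "foo.c"
    #   '\x00\x00\x00\tfoo.c'
    #   >>> struct.pack(">l", 0)      # end-of-group marker
    #   '\x00\x00\x00\x00'
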
    def addchangegroup(self, source):

        def getchunk():
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            return source.read(l - 4)

        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not source: return
        changesets = files = revisions = 0

        tr = self.transaction()

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        changesets = self.changelog.rev(cn) - self.changelog.rev(co)

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file changes\n")
        while 1:
            f = getchunk()
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        self.ui.status(("added %d changesets" +
                        " with %d changes to %d files\n")
                       % (changesets, revisions, files))

        tr.close()

        if not self.hook("changegroup"):
            return 1

        return

    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes()

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wfile(f).read()
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
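                        # The xor expression above picks whichever side
                        # changed the ancestor bit a (an explanatory note):
                        # e.g. with a=0, b=1, c=0 (we added the exec bit,
                        # they did not), ((0^1)|(0^0))^0 == 1, so the merge
                        # keeps the bit; if neither side changed it, the
                        # result is a again.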
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = "d"
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                if n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local created %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = "k"
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "k": get[f] = n
            elif f not in ma:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug("local deleted %s, recreating\n" % f)
                    get[f] = n
                else:
                    self.ui.debug("local deleted %s\n" % f)

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to merge across branches" +
                               " or -C to lose changes)\n")
                return 1
            branch_merge = True

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wfile(f, "w").write(t)
            except IOError:
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wfile(f, "w").write(t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note("removing %s\n" % f)
            try:
                os.unlink(self.wjoin(f))
            except OSError, inst:
                self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
            # try removing directories that might now be empty
            try: os.removedirs(os.path.dirname(self.wjoin(f)))
            except: pass
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

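    # Illustrative calls (not in the original module): a plain checkout of
    # another revision is update(node); crossing onto another branch needs
    # allow=True (update -m) or force=True (update -C), mirroring the
    # messages printed above:
    #
    #   repo.update(node)                # linear jump
    #   repo.update(node, allow=True)    # merge across branches
    #   repo.update(node, force=True)    # discard local changes
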
    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            f.write(fl.read(node))
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: other %s ancestor %s\n" %
                      (fn, short(other), short(base)))

        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)

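    # How the merge helper above is chosen (a configuration sketch; the
    # program name is hypothetical): the HGMERGE environment variable wins,
    # then the hgrc setting
    #
    #   [ui]
    #   merge = kdiff3
    #
    # and finally the "hgmerge" script is used as the fallback.
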
    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = 0

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            if n in seen:
                self.ui.warn("duplicate changeset at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    self.ui.warn("changeset %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
                errors += 1

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            if n in seen:
                self.ui.warn("duplicate manifest at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    self.ui.warn("manifest %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                self.ui.warn("aborted")
                sys.exit(0)
            except Exception, inst:
                self.ui.warn("unpacking manifest %s: %s\n"
                             % (short(n), inst))
                errors += 1

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1

        self.ui.status("crosschecking files in changesets and manifests\n")
        for f in filenodes:
            if f not in filelinkrevs:
                self.ui.warn("file %s in manifest but not in changesets\n" % f)
                errors += 1

        for f in filelinkrevs:
            if f not in filenodes:
                self.ui.warn("file %s in changeset but not in manifest\n" % f)
                errors += 1

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    self.ui.warn("%s: duplicate revision %d\n" % (f, i))
                    errors += 1

                if n not in filenodes[f]:
                    self.ui.warn("%s: %d:%s not in manifests\n"
                                 % (f, i, short(n)))
                    errors += 1
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    self.ui.warn("%s:%s points to unexpected changeset %d\n"
                                 % (f, short(n), fl.linkrev(n)))
                    errors += 1
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    self.ui.warn("unpacking file %s %s: %s\n"
                                 % (f, short(n), inst))
                    errors += 1

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 1 %s" %
                                 (f, short(n), short(p1)))
                    errors += 1
                if p2 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 2 %s" %
                                 (f, short(n), short(p2)))
                    errors += 1
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                self.ui.warn("node %s in manifests not in %s\n"
                             % (hex(node), f))
                errors += 1

        self.ui.status("%d files, %d changesets, %d total revisions\n" %
                       (files, changesets, revisions))

        if errors:
            self.ui.warn("%d integrity errors encountered!\n" % errors)
            return 1

class remoterepository:
    def local(self):
        return False

class httprepository(remoterepository):
    def __init__(self, ui, path):
        # fix missing / after hostname
        s = urlparse.urlsplit(path)
        partial = s[2]
        if not partial: partial = "/"
        self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
        self.ui = ui
        no_list = [ "localhost", "127.0.0.1" ]
        host = ui.config("http_proxy", "host")
        if host is None:
            host = os.environ.get("http_proxy")
        if host and host.startswith('http://'):
            host = host[7:]
        user = ui.config("http_proxy", "user")
        passwd = ui.config("http_proxy", "passwd")
        no = ui.config("http_proxy", "no")
        if no is None:
            no = os.environ.get("no_proxy")
        if no:
            no_list = no_list + no.split(",")

        no_proxy = 0
        for h in no_list:
            if (path.startswith("http://" + h + "/") or
                path.startswith("http://" + h + ":") or
                path == "http://" + h):
                no_proxy = 1

        # Note: urllib2 takes proxy values from the environment and those will
        # take precedence
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if os.environ.has_key(env):
                    del os.environ[env]
            except OSError:
                pass

        proxy_handler = urllib2.BaseHandler()
        if host and not no_proxy:
            proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})

        authinfo = None
        if user and passwd:
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, host, user, passwd)
            authinfo = urllib2.ProxyBasicAuthHandler(passmgr)

        opener = urllib2.build_opener(proxy_handler, authinfo)
        urllib2.install_opener(opener)

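    # The proxy lookups in __init__ above correspond to an hgrc section like
    # the following (illustrative values only); the http_proxy / no_proxy
    # environment variables are consulted when these settings are unset:
    #
    #   [http_proxy]
    #   host = proxy.example.com:3128
    #   user = alice
    #   passwd = secret
    #   no = localhost,127.0.0.1,example.org
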
    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = urllib.urlencode(q)
        cu = "%s?%s" % (self.url, qs)
        resp = urllib2.urlopen(cu)
        proto = resp.headers['content-type']

        # accept old "text/plain" and "application/hg-changegroup" for now
        if not proto.startswith('application/mercurial') and \
           not proto.startswith('text/plain') and \
           not proto.startswith('application/hg-changegroup'):
            raise RepoError("'%s' does not appear to be an hg repository"
                            % self.url)

        if proto.startswith('application/mercurial'):
            version = proto[22:]
            if float(version) > 0.1:
                raise RepoError("'%s' uses newer protocol %s" %
                                (self.url, version))

        return resp

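    # Example of the request URL do_cmd() builds (hypothetical host and
    # arguments): do_cmd("branches", nodes="<hex>...") against a repository
    # at http://example.com/repo fetches roughly
    #
    #   http://example.com/repo?cmd=branches&nodes=<hex>...
    #
    # and then checks the content-type as above before handing back the
    # response object.
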
2077 def heads(self):
2079 def heads(self):
2078 d = self.do_cmd("heads").read()
2080 d = self.do_cmd("heads").read()
2079 try:
2081 try:
2080 return map(bin, d[:-1].split(" "))
2082 return map(bin, d[:-1].split(" "))
2081 except:
2083 except:
2082 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2084 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2083 raise
2085 raise
2084
2086
2085 def branches(self, nodes):
2087 def branches(self, nodes):
2086 n = " ".join(map(hex, nodes))
2088 n = " ".join(map(hex, nodes))
2087 d = self.do_cmd("branches", nodes=n).read()
2089 d = self.do_cmd("branches", nodes=n).read()
2088 try:
2090 try:
2089 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2091 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2090 return br
2092 return br
2091 except:
2093 except:
2092 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2094 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2093 raise
2095 raise
2094
2096
2095 def between(self, pairs):
2097 def between(self, pairs):
2096 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2098 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2097 d = self.do_cmd("between", pairs=n).read()
2099 d = self.do_cmd("between", pairs=n).read()
2098 try:
2100 try:
2099 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2101 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2100 return p
2102 return p
2101 except:
2103 except:
2102 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2104 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2103 raise
2105 raise
2104
2106
    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0

        class zread:
            def __init__(self, f):
                self.zd = zlib.decompressobj()
                self.f = f
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    r = self.f.read(4096)
                    if r:
                        self.buf += self.zd.decompress(r)
                    else:
                        self.buf += self.zd.flush()
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        return zread(f)

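# A remotelock ties the lifetime of a lock held on a remote repository to a
# local object: releasing it (or letting it be garbage-collected) calls
# unlock() on the remote side.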
class remotelock:
    def __init__(self, repo):
        self.repo = repo
    def release(self):
        self.repo.unlock()
        self.repo = None
    def __del__(self):
        if self.repo:
            self.release()

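# The ssh transport runs "hg -R <path> serve --stdio" on the remote host and
# speaks the command protocol over the resulting stdin/stdout/stderr pipes.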
class sshrepository(remoterepository):
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

        m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
        if not m:
            raise RepoError("couldn't parse destination %s" % path)

        self.user = m.group(2)
        self.host = m.group(3)
        self.port = m.group(5)
        self.path = m.group(7)

        args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
        args = self.port and ("%s -p %s" % (args, self.port)) or args
        path = self.path or ""

        if not path:
            raise RepoError("no remote repository path specified")

        sshcmd = self.ui.config("ui", "ssh", "ssh")
        remotecmd = self.ui.config("ui", "remotecmd", "hg")
        cmd = "%s %s '%s -R %s serve --stdio'"
        cmd = cmd % (sshcmd, args, remotecmd, path)

        self.pipeo, self.pipei, self.pipee = os.popen3(cmd)

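    # Drain anything the remote side has written to stderr without blocking
    # and relay it to the user, prefixed with "remote: ".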
    def readerr(self):
        while 1:
            r, w, x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        try:
            self.pipeo.close()
            self.pipei.close()
            for l in self.pipee:
                self.ui.status("remote: ", l)
            self.pipee.close()
        except:
            pass

    def dev(self):
        return -1

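    # Commands are framed as the command name on its own line, followed by
    # "<argname> <length>\n" and then exactly <length> bytes of argument data
    # for each keyword argument.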
    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

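    # call() reads a decimal length line from the server and then that many
    # bytes of payload; anything else is treated as a protocol error.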
    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)

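    # lock() acquires a lock on the remote repository and hands back a
    # remotelock handle whose release() calls unlock() here.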
    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

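    # Pushing: the server first answers the "addchangegroup" request with an
    # empty string (anything else is a refusal), then the changegroup stream
    # is copied across in 4k chunks while stderr is drained, and a final
    # length-prefixed reply is read to report success or failure.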
    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""

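# https URLs reuse the plain HTTP implementation unchanged; urllib2 selects
# the transport from the URL scheme, provided Python was built with SSL
# support.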
class httpsrepository(httprepository):
    pass

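# repository() is the factory that maps a URL scheme to a repository class; a
# path with no recognized scheme (or no path at all) falls through to a
# localrepository.
#
# A rough usage sketch (hypothetical caller code, assuming "u" is a ui object
# constructed elsewhere in Mercurial):
#
#   remote = repository(u, "ssh://user@host/path/to/repo")
#   print len(remote.heads())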
def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("https://"):
            return httpsrepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)