Fixed --repository option when handling relative path...
tksoh@users.sf.net
r933:9c43d68a default
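The visible change in this diff is at line 637 of hg.py, where localrepository.__init__ now stores os.path.abspath(path) instead of the raw path, so a repository given as a relative path (for example through the --repository option) stays valid regardless of the current working directory. A minimal sketch of the effect, assuming a hypothetical relative path "../myrepo":

    import os

    path = "../myrepo"                  # hypothetical relative repository root
    root_before = path                  # old behaviour: stored relative, cwd-dependent
    root_after = os.path.abspath(path)  # new behaviour: stored absolute and stable
    # later joins such as os.path.join(root, ".hg") now resolve consistently
    print(root_before, root_after)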
@@ -1,2225 +1,2225 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect errno select stat")
14 demandload(globals(), "bisect errno select stat")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
284 list.sort()
284 list.sort()
285 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
286 text = "\n".join(l)
286 text = "\n".join(l)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
288
288
289 class dirstate:
289 class dirstate:
290 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
291 self.opener = opener
291 self.opener = opener
292 self.root = root
292 self.root = root
293 self.dirty = 0
293 self.dirty = 0
294 self.ui = ui
294 self.ui = ui
295 self.map = None
295 self.map = None
296 self.pl = None
296 self.pl = None
297 self.copies = {}
297 self.copies = {}
298 self.ignorefunc = None
298 self.ignorefunc = None
299
299
300 def wjoin(self, f):
300 def wjoin(self, f):
301 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
302
302
303 def getcwd(self):
303 def getcwd(self):
304 cwd = os.getcwd()
304 cwd = os.getcwd()
305 if cwd == self.root: return ''
305 if cwd == self.root: return ''
306 return cwd[len(self.root) + 1:]
306 return cwd[len(self.root) + 1:]
307
307
308 def ignore(self, f):
308 def ignore(self, f):
309 if not self.ignorefunc:
309 if not self.ignorefunc:
310 bigpat = []
310 bigpat = []
311 try:
311 try:
312 l = file(self.wjoin(".hgignore"))
312 l = file(self.wjoin(".hgignore"))
313 for pat in l:
313 for pat in l:
314 p = pat.rstrip()
314 p = pat.rstrip()
315 if p:
315 if p:
316 try:
316 try:
317 re.compile(p)
317 re.compile(p)
318 except:
318 except:
319 self.ui.warn("ignoring invalid ignore"
319 self.ui.warn("ignoring invalid ignore"
320 + " regular expression '%s'\n" % p)
320 + " regular expression '%s'\n" % p)
321 else:
321 else:
322 bigpat.append(p)
322 bigpat.append(p)
323 except IOError: pass
323 except IOError: pass
324
324
325 if bigpat:
325 if bigpat:
326 s = "(?:%s)" % (")|(?:".join(bigpat))
326 s = "(?:%s)" % (")|(?:".join(bigpat))
327 r = re.compile(s)
327 r = re.compile(s)
328 self.ignorefunc = r.search
328 self.ignorefunc = r.search
329 else:
329 else:
330 self.ignorefunc = util.never
330 self.ignorefunc = util.never
331
331
332 return self.ignorefunc(f)
332 return self.ignorefunc(f)
333
333
334 def __del__(self):
334 def __del__(self):
335 if self.dirty:
335 if self.dirty:
336 self.write()
336 self.write()
337
337
338 def __getitem__(self, key):
338 def __getitem__(self, key):
339 try:
339 try:
340 return self.map[key]
340 return self.map[key]
341 except TypeError:
341 except TypeError:
342 self.read()
342 self.read()
343 return self[key]
343 return self[key]
344
344
345 def __contains__(self, key):
345 def __contains__(self, key):
346 if not self.map: self.read()
346 if not self.map: self.read()
347 return key in self.map
347 return key in self.map
348
348
349 def parents(self):
349 def parents(self):
350 if not self.pl:
350 if not self.pl:
351 self.read()
351 self.read()
352 return self.pl
352 return self.pl
353
353
354 def markdirty(self):
354 def markdirty(self):
355 if not self.dirty:
355 if not self.dirty:
356 self.dirty = 1
356 self.dirty = 1
357
357
358 def setparents(self, p1, p2 = nullid):
358 def setparents(self, p1, p2 = nullid):
359 self.markdirty()
359 self.markdirty()
360 self.pl = p1, p2
360 self.pl = p1, p2
361
361
362 def state(self, key):
362 def state(self, key):
363 try:
363 try:
364 return self[key][0]
364 return self[key][0]
365 except KeyError:
365 except KeyError:
366 return "?"
366 return "?"
367
367
368 def read(self):
368 def read(self):
369 if self.map is not None: return self.map
369 if self.map is not None: return self.map
370
370
371 self.map = {}
371 self.map = {}
372 self.pl = [nullid, nullid]
372 self.pl = [nullid, nullid]
373 try:
373 try:
374 st = self.opener("dirstate").read()
374 st = self.opener("dirstate").read()
375 if not st: return
375 if not st: return
376 except: return
376 except: return
377
377
378 self.pl = [st[:20], st[20: 40]]
378 self.pl = [st[:20], st[20: 40]]
379
379
380 pos = 40
380 pos = 40
381 while pos < len(st):
381 while pos < len(st):
382 e = struct.unpack(">cllll", st[pos:pos+17])
382 e = struct.unpack(">cllll", st[pos:pos+17])
383 l = e[4]
383 l = e[4]
384 pos += 17
384 pos += 17
385 f = st[pos:pos + l]
385 f = st[pos:pos + l]
386 if '\0' in f:
386 if '\0' in f:
387 f, c = f.split('\0')
387 f, c = f.split('\0')
388 self.copies[f] = c
388 self.copies[f] = c
389 self.map[f] = e[:4]
389 self.map[f] = e[:4]
390 pos += l
390 pos += l
391
391
392 def copy(self, source, dest):
392 def copy(self, source, dest):
393 self.read()
393 self.read()
394 self.markdirty()
394 self.markdirty()
395 self.copies[dest] = source
395 self.copies[dest] = source
396
396
397 def copied(self, file):
397 def copied(self, file):
398 return self.copies.get(file, None)
398 return self.copies.get(file, None)
399
399
400 def update(self, files, state, **kw):
400 def update(self, files, state, **kw):
401 ''' current states:
401 ''' current states:
402 n normal
402 n normal
403 m needs merging
403 m needs merging
404 r marked for removal
404 r marked for removal
405 a marked for addition'''
405 a marked for addition'''
406
406
407 if not files: return
407 if not files: return
408 self.read()
408 self.read()
409 self.markdirty()
409 self.markdirty()
410 for f in files:
410 for f in files:
411 if state == "r":
411 if state == "r":
412 self.map[f] = ('r', 0, 0, 0)
412 self.map[f] = ('r', 0, 0, 0)
413 else:
413 else:
414 s = os.stat(os.path.join(self.root, f))
414 s = os.stat(os.path.join(self.root, f))
415 st_size = kw.get('st_size', s.st_size)
415 st_size = kw.get('st_size', s.st_size)
416 st_mtime = kw.get('st_mtime', s.st_mtime)
416 st_mtime = kw.get('st_mtime', s.st_mtime)
417 self.map[f] = (state, s.st_mode, st_size, st_mtime)
417 self.map[f] = (state, s.st_mode, st_size, st_mtime)
418
418
419 def forget(self, files):
419 def forget(self, files):
420 if not files: return
420 if not files: return
421 self.read()
421 self.read()
422 self.markdirty()
422 self.markdirty()
423 for f in files:
423 for f in files:
424 try:
424 try:
425 del self.map[f]
425 del self.map[f]
426 except KeyError:
426 except KeyError:
427 self.ui.warn("not in dirstate: %s!\n" % f)
427 self.ui.warn("not in dirstate: %s!\n" % f)
428 pass
428 pass
429
429
430 def clear(self):
430 def clear(self):
431 self.map = {}
431 self.map = {}
432 self.markdirty()
432 self.markdirty()
433
433
434 def write(self):
434 def write(self):
435 st = self.opener("dirstate", "w")
435 st = self.opener("dirstate", "w")
436 st.write("".join(self.pl))
436 st.write("".join(self.pl))
437 for f, e in self.map.items():
437 for f, e in self.map.items():
438 c = self.copied(f)
438 c = self.copied(f)
439 if c:
439 if c:
440 f = f + "\0" + c
440 f = f + "\0" + c
441 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
441 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
442 st.write(e + f)
442 st.write(e + f)
443 self.dirty = 0
443 self.dirty = 0
444
444
445 def filterfiles(self, files):
445 def filterfiles(self, files):
446 ret = {}
446 ret = {}
447 unknown = []
447 unknown = []
448
448
449 for x in files:
449 for x in files:
450 if x is '.':
450 if x is '.':
451 return self.map.copy()
451 return self.map.copy()
452 if x not in self.map:
452 if x not in self.map:
453 unknown.append(x)
453 unknown.append(x)
454 else:
454 else:
455 ret[x] = self.map[x]
455 ret[x] = self.map[x]
456
456
457 if not unknown:
457 if not unknown:
458 return ret
458 return ret
459
459
460 b = self.map.keys()
460 b = self.map.keys()
461 b.sort()
461 b.sort()
462 blen = len(b)
462 blen = len(b)
463
463
464 for x in unknown:
464 for x in unknown:
465 bs = bisect.bisect(b, x)
465 bs = bisect.bisect(b, x)
466 if bs != 0 and b[bs-1] == x:
466 if bs != 0 and b[bs-1] == x:
467 ret[x] = self.map[x]
467 ret[x] = self.map[x]
468 continue
468 continue
469 while bs < blen:
469 while bs < blen:
470 s = b[bs]
470 s = b[bs]
471 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
471 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
472 ret[s] = self.map[s]
472 ret[s] = self.map[s]
473 else:
473 else:
474 break
474 break
475 bs += 1
475 bs += 1
476 return ret
476 return ret
477
477
478 def walk(self, files = None, match = util.always, dc=None):
478 def walk(self, files = None, match = util.always, dc=None):
479 self.read()
479 self.read()
480
480
481 # walk all files by default
481 # walk all files by default
482 if not files:
482 if not files:
483 files = [self.root]
483 files = [self.root]
484 if not dc:
484 if not dc:
485 dc = self.map.copy()
485 dc = self.map.copy()
486 elif not dc:
486 elif not dc:
487 dc = self.filterfiles(files)
487 dc = self.filterfiles(files)
488
488
489 known = {'.hg': 1}
489 known = {'.hg': 1}
490 def seen(fn):
490 def seen(fn):
491 if fn in known: return True
491 if fn in known: return True
492 known[fn] = 1
492 known[fn] = 1
493 def traverse():
493 def traverse():
494 for ff in util.unique(files):
494 for ff in util.unique(files):
495 f = os.path.join(self.root, ff)
495 f = os.path.join(self.root, ff)
496 try:
496 try:
497 st = os.stat(f)
497 st = os.stat(f)
498 except OSError, inst:
498 except OSError, inst:
499 if ff not in dc: self.ui.warn('%s: %s\n' % (
499 if ff not in dc: self.ui.warn('%s: %s\n' % (
500 util.pathto(self.getcwd(), ff),
500 util.pathto(self.getcwd(), ff),
501 inst.strerror))
501 inst.strerror))
502 continue
502 continue
503 if stat.S_ISDIR(st.st_mode):
503 if stat.S_ISDIR(st.st_mode):
504 for dir, subdirs, fl in os.walk(f):
504 for dir, subdirs, fl in os.walk(f):
505 d = dir[len(self.root) + 1:]
505 d = dir[len(self.root) + 1:]
506 nd = util.normpath(d)
506 nd = util.normpath(d)
507 if nd == '.': nd = ''
507 if nd == '.': nd = ''
508 if seen(nd):
508 if seen(nd):
509 subdirs[:] = []
509 subdirs[:] = []
510 continue
510 continue
511 for sd in subdirs:
511 for sd in subdirs:
512 ds = os.path.join(nd, sd +'/')
512 ds = os.path.join(nd, sd +'/')
513 if self.ignore(ds) or not match(ds):
513 if self.ignore(ds) or not match(ds):
514 subdirs.remove(sd)
514 subdirs.remove(sd)
515 subdirs.sort()
515 subdirs.sort()
516 fl.sort()
516 fl.sort()
517 for fn in fl:
517 for fn in fl:
518 fn = util.pconvert(os.path.join(d, fn))
518 fn = util.pconvert(os.path.join(d, fn))
519 yield 'f', fn
519 yield 'f', fn
520 elif stat.S_ISREG(st.st_mode):
520 elif stat.S_ISREG(st.st_mode):
521 yield 'f', ff
521 yield 'f', ff
522 else:
522 else:
523 kind = 'unknown'
523 kind = 'unknown'
524 if stat.S_ISCHR(st.st_mode): kind = 'character device'
524 if stat.S_ISCHR(st.st_mode): kind = 'character device'
525 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
525 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
526 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
526 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
527 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
527 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
528 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
528 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
529 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
529 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
530 util.pathto(self.getcwd(), ff),
530 util.pathto(self.getcwd(), ff),
531 kind))
531 kind))
532
532
533 ks = dc.keys()
533 ks = dc.keys()
534 ks.sort()
534 ks.sort()
535 for k in ks:
535 for k in ks:
536 yield 'm', k
536 yield 'm', k
537
537
538 # yield only files that match: all in dirstate, others only if
538 # yield only files that match: all in dirstate, others only if
539 # not in .hgignore
539 # not in .hgignore
540
540
541 for src, fn in util.unique(traverse()):
541 for src, fn in util.unique(traverse()):
542 fn = util.normpath(fn)
542 fn = util.normpath(fn)
543 if seen(fn): continue
543 if seen(fn): continue
544 if fn not in dc and self.ignore(fn):
544 if fn not in dc and self.ignore(fn):
545 continue
545 continue
546 if match(fn):
546 if match(fn):
547 yield src, fn
547 yield src, fn
548
548
549 def changes(self, files=None, match=util.always):
549 def changes(self, files=None, match=util.always):
550 self.read()
550 self.read()
551 if not files:
551 if not files:
552 dc = self.map.copy()
552 dc = self.map.copy()
553 else:
553 else:
554 dc = self.filterfiles(files)
554 dc = self.filterfiles(files)
555 lookup, modified, added, unknown = [], [], [], []
555 lookup, modified, added, unknown = [], [], [], []
556 removed, deleted = [], []
556 removed, deleted = [], []
557
557
558 for src, fn in self.walk(files, match, dc=dc):
558 for src, fn in self.walk(files, match, dc=dc):
559 try:
559 try:
560 s = os.stat(os.path.join(self.root, fn))
560 s = os.stat(os.path.join(self.root, fn))
561 except OSError:
561 except OSError:
562 continue
562 continue
563 if not stat.S_ISREG(s.st_mode):
563 if not stat.S_ISREG(s.st_mode):
564 continue
564 continue
565 c = dc.get(fn)
565 c = dc.get(fn)
566 if c:
566 if c:
567 del dc[fn]
567 del dc[fn]
568 if c[0] == 'm':
568 if c[0] == 'm':
569 modified.append(fn)
569 modified.append(fn)
570 elif c[0] == 'a':
570 elif c[0] == 'a':
571 added.append(fn)
571 added.append(fn)
572 elif c[0] == 'r':
572 elif c[0] == 'r':
573 unknown.append(fn)
573 unknown.append(fn)
574 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
574 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
575 modified.append(fn)
575 modified.append(fn)
576 elif c[3] != s.st_mtime:
576 elif c[3] != s.st_mtime:
577 lookup.append(fn)
577 lookup.append(fn)
578 else:
578 else:
579 unknown.append(fn)
579 unknown.append(fn)
580
580
581 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
581 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
582 if c[0] == 'r':
582 if c[0] == 'r':
583 removed.append(fn)
583 removed.append(fn)
584 else:
584 else:
585 deleted.append(fn)
585 deleted.append(fn)
586 return (lookup, modified, added, removed + deleted, unknown)
586 return (lookup, modified, added, removed + deleted, unknown)
587
587
588 # used to avoid circular references so destructors work
588 # used to avoid circular references so destructors work
589 def opener(base):
589 def opener(base):
590 p = base
590 p = base
591 def o(path, mode="r"):
591 def o(path, mode="r"):
592 if p.startswith("http://"):
592 if p.startswith("http://"):
593 f = os.path.join(p, urllib.quote(path))
593 f = os.path.join(p, urllib.quote(path))
594 return httprangereader.httprangereader(f)
594 return httprangereader.httprangereader(f)
595
595
596 f = os.path.join(p, path)
596 f = os.path.join(p, path)
597
597
598 mode += "b" # for that other OS
598 mode += "b" # for that other OS
599
599
600 if mode[0] != "r":
600 if mode[0] != "r":
601 try:
601 try:
602 s = os.stat(f)
602 s = os.stat(f)
603 except OSError:
603 except OSError:
604 d = os.path.dirname(f)
604 d = os.path.dirname(f)
605 if not os.path.isdir(d):
605 if not os.path.isdir(d):
606 os.makedirs(d)
606 os.makedirs(d)
607 else:
607 else:
608 if s.st_nlink > 1:
608 if s.st_nlink > 1:
609 file(f + ".tmp", "wb").write(file(f, "rb").read())
609 file(f + ".tmp", "wb").write(file(f, "rb").read())
610 util.rename(f+".tmp", f)
610 util.rename(f+".tmp", f)
611
611
612 return file(f, mode)
612 return file(f, mode)
613
613
614 return o
614 return o
615
615
616 class RepoError(Exception): pass
616 class RepoError(Exception): pass
617
617
618 class localrepository:
618 class localrepository:
619 def __init__(self, ui, path=None, create=0):
619 def __init__(self, ui, path=None, create=0):
620 self.remote = 0
620 self.remote = 0
621 if path and path.startswith("http://"):
621 if path and path.startswith("http://"):
622 self.remote = 1
622 self.remote = 1
623 self.path = path
623 self.path = path
624 else:
624 else:
625 if not path:
625 if not path:
626 p = os.getcwd()
626 p = os.getcwd()
627 while not os.path.isdir(os.path.join(p, ".hg")):
627 while not os.path.isdir(os.path.join(p, ".hg")):
628 oldp = p
628 oldp = p
629 p = os.path.dirname(p)
629 p = os.path.dirname(p)
630 if p == oldp: raise RepoError("no repo found")
630 if p == oldp: raise RepoError("no repo found")
631 path = p
631 path = p
632 self.path = os.path.join(path, ".hg")
632 self.path = os.path.join(path, ".hg")
633
633
634 if not create and not os.path.isdir(self.path):
634 if not create and not os.path.isdir(self.path):
635 raise RepoError("repository %s not found" % self.path)
635 raise RepoError("repository %s not found" % self.path)
636
636
637 self.root = path
637 self.root = os.path.abspath(path)
638 self.ui = ui
638 self.ui = ui
639
639
640 if create:
640 if create:
641 os.mkdir(self.path)
641 os.mkdir(self.path)
642 os.mkdir(self.join("data"))
642 os.mkdir(self.join("data"))
643
643
644 self.opener = opener(self.path)
644 self.opener = opener(self.path)
645 self.wopener = opener(self.root)
645 self.wopener = opener(self.root)
646 self.manifest = manifest(self.opener)
646 self.manifest = manifest(self.opener)
647 self.changelog = changelog(self.opener)
647 self.changelog = changelog(self.opener)
648 self.tagscache = None
648 self.tagscache = None
649 self.nodetagscache = None
649 self.nodetagscache = None
650
650
651 if not self.remote:
651 if not self.remote:
652 self.dirstate = dirstate(self.opener, ui, self.root)
652 self.dirstate = dirstate(self.opener, ui, self.root)
653 try:
653 try:
654 self.ui.readconfig(self.opener("hgrc"))
654 self.ui.readconfig(self.opener("hgrc"))
655 except IOError: pass
655 except IOError: pass
656
656
657 def hook(self, name, **args):
657 def hook(self, name, **args):
658 s = self.ui.config("hooks", name)
658 s = self.ui.config("hooks", name)
659 if s:
659 if s:
660 self.ui.note("running hook %s: %s\n" % (name, s))
660 self.ui.note("running hook %s: %s\n" % (name, s))
661 old = {}
661 old = {}
662 for k, v in args.items():
662 for k, v in args.items():
663 k = k.upper()
663 k = k.upper()
664 old[k] = os.environ.get(k, None)
664 old[k] = os.environ.get(k, None)
665 os.environ[k] = v
665 os.environ[k] = v
666
666
667 r = os.system(s)
667 r = os.system(s)
668
668
669 for k, v in old.items():
669 for k, v in old.items():
670 if v != None:
670 if v != None:
671 os.environ[k] = v
671 os.environ[k] = v
672 else:
672 else:
673 del os.environ[k]
673 del os.environ[k]
674
674
675 if r:
675 if r:
676 self.ui.warn("abort: %s hook failed with status %d!\n" %
676 self.ui.warn("abort: %s hook failed with status %d!\n" %
677 (name, r))
677 (name, r))
678 return False
678 return False
679 return True
679 return True
680
680
681 def tags(self):
681 def tags(self):
682 '''return a mapping of tag to node'''
682 '''return a mapping of tag to node'''
683 if not self.tagscache:
683 if not self.tagscache:
684 self.tagscache = {}
684 self.tagscache = {}
685 def addtag(self, k, n):
685 def addtag(self, k, n):
686 try:
686 try:
687 bin_n = bin(n)
687 bin_n = bin(n)
688 except TypeError:
688 except TypeError:
689 bin_n = ''
689 bin_n = ''
690 self.tagscache[k.strip()] = bin_n
690 self.tagscache[k.strip()] = bin_n
691
691
692 try:
692 try:
693 # read each head of the tags file, ending with the tip
693 # read each head of the tags file, ending with the tip
694 # and add each tag found to the map, with "newer" ones
694 # and add each tag found to the map, with "newer" ones
695 # taking precedence
695 # taking precedence
696 fl = self.file(".hgtags")
696 fl = self.file(".hgtags")
697 h = fl.heads()
697 h = fl.heads()
698 h.reverse()
698 h.reverse()
699 for r in h:
699 for r in h:
700 for l in fl.revision(r).splitlines():
700 for l in fl.revision(r).splitlines():
701 if l:
701 if l:
702 n, k = l.split(" ", 1)
702 n, k = l.split(" ", 1)
703 addtag(self, k, n)
703 addtag(self, k, n)
704 except KeyError:
704 except KeyError:
705 pass
705 pass
706
706
707 try:
707 try:
708 f = self.opener("localtags")
708 f = self.opener("localtags")
709 for l in f:
709 for l in f:
710 n, k = l.split(" ", 1)
710 n, k = l.split(" ", 1)
711 addtag(self, k, n)
711 addtag(self, k, n)
712 except IOError:
712 except IOError:
713 pass
713 pass
714
714
715 self.tagscache['tip'] = self.changelog.tip()
715 self.tagscache['tip'] = self.changelog.tip()
716
716
717 return self.tagscache
717 return self.tagscache
718
718
719 def tagslist(self):
719 def tagslist(self):
720 '''return a list of tags ordered by revision'''
720 '''return a list of tags ordered by revision'''
721 l = []
721 l = []
722 for t, n in self.tags().items():
722 for t, n in self.tags().items():
723 try:
723 try:
724 r = self.changelog.rev(n)
724 r = self.changelog.rev(n)
725 except:
725 except:
726 r = -2 # sort to the beginning of the list if unknown
726 r = -2 # sort to the beginning of the list if unknown
727 l.append((r,t,n))
727 l.append((r,t,n))
728 l.sort()
728 l.sort()
729 return [(t,n) for r,t,n in l]
729 return [(t,n) for r,t,n in l]
730
730
731 def nodetags(self, node):
731 def nodetags(self, node):
732 '''return the tags associated with a node'''
732 '''return the tags associated with a node'''
733 if not self.nodetagscache:
733 if not self.nodetagscache:
734 self.nodetagscache = {}
734 self.nodetagscache = {}
735 for t,n in self.tags().items():
735 for t,n in self.tags().items():
736 self.nodetagscache.setdefault(n,[]).append(t)
736 self.nodetagscache.setdefault(n,[]).append(t)
737 return self.nodetagscache.get(node, [])
737 return self.nodetagscache.get(node, [])
738
738
739 def lookup(self, key):
739 def lookup(self, key):
740 try:
740 try:
741 return self.tags()[key]
741 return self.tags()[key]
742 except KeyError:
742 except KeyError:
743 try:
743 try:
744 return self.changelog.lookup(key)
744 return self.changelog.lookup(key)
745 except:
745 except:
746 raise RepoError("unknown revision '%s'" % key)
746 raise RepoError("unknown revision '%s'" % key)
747
747
748 def dev(self):
748 def dev(self):
749 if self.remote: return -1
749 if self.remote: return -1
750 return os.stat(self.path).st_dev
750 return os.stat(self.path).st_dev
751
751
752 def local(self):
752 def local(self):
753 return not self.remote
753 return not self.remote
754
754
755 def join(self, f):
755 def join(self, f):
756 return os.path.join(self.path, f)
756 return os.path.join(self.path, f)
757
757
758 def wjoin(self, f):
758 def wjoin(self, f):
759 return os.path.join(self.root, f)
759 return os.path.join(self.root, f)
760
760
761 def file(self, f):
761 def file(self, f):
762 if f[0] == '/': f = f[1:]
762 if f[0] == '/': f = f[1:]
763 return filelog(self.opener, f)
763 return filelog(self.opener, f)
764
764
765 def getcwd(self):
765 def getcwd(self):
766 return self.dirstate.getcwd()
766 return self.dirstate.getcwd()
767
767
768 def wfile(self, f, mode='r'):
768 def wfile(self, f, mode='r'):
769 return self.wopener(f, mode)
769 return self.wopener(f, mode)
770
770
771 def transaction(self):
771 def transaction(self):
772 # save dirstate for undo
772 # save dirstate for undo
773 try:
773 try:
774 ds = self.opener("dirstate").read()
774 ds = self.opener("dirstate").read()
775 except IOError:
775 except IOError:
776 ds = ""
776 ds = ""
777 self.opener("journal.dirstate", "w").write(ds)
777 self.opener("journal.dirstate", "w").write(ds)
778
778
779 def after():
779 def after():
780 util.rename(self.join("journal"), self.join("undo"))
780 util.rename(self.join("journal"), self.join("undo"))
781 util.rename(self.join("journal.dirstate"),
781 util.rename(self.join("journal.dirstate"),
782 self.join("undo.dirstate"))
782 self.join("undo.dirstate"))
783
783
784 return transaction.transaction(self.ui.warn, self.opener,
784 return transaction.transaction(self.ui.warn, self.opener,
785 self.join("journal"), after)
785 self.join("journal"), after)
786
786
787 def recover(self):
787 def recover(self):
788 lock = self.lock()
788 lock = self.lock()
789 if os.path.exists(self.join("journal")):
789 if os.path.exists(self.join("journal")):
790 self.ui.status("rolling back interrupted transaction\n")
790 self.ui.status("rolling back interrupted transaction\n")
791 return transaction.rollback(self.opener, self.join("journal"))
791 return transaction.rollback(self.opener, self.join("journal"))
792 else:
792 else:
793 self.ui.warn("no interrupted transaction available\n")
793 self.ui.warn("no interrupted transaction available\n")
794
794
795 def undo(self):
795 def undo(self):
796 lock = self.lock()
796 lock = self.lock()
797 if os.path.exists(self.join("undo")):
797 if os.path.exists(self.join("undo")):
798 self.ui.status("rolling back last transaction\n")
798 self.ui.status("rolling back last transaction\n")
799 transaction.rollback(self.opener, self.join("undo"))
799 transaction.rollback(self.opener, self.join("undo"))
800 self.dirstate = None
800 self.dirstate = None
801 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
801 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
802 self.dirstate = dirstate(self.opener, self.ui, self.root)
802 self.dirstate = dirstate(self.opener, self.ui, self.root)
803 else:
803 else:
804 self.ui.warn("no undo information available\n")
804 self.ui.warn("no undo information available\n")
805
805
806 def lock(self, wait = 1):
806 def lock(self, wait = 1):
807 try:
807 try:
808 return lock.lock(self.join("lock"), 0)
808 return lock.lock(self.join("lock"), 0)
809 except lock.LockHeld, inst:
809 except lock.LockHeld, inst:
810 if wait:
810 if wait:
811 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
811 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
812 return lock.lock(self.join("lock"), wait)
812 return lock.lock(self.join("lock"), wait)
813 raise inst
813 raise inst
814
814
815 def rawcommit(self, files, text, user, date, p1=None, p2=None):
815 def rawcommit(self, files, text, user, date, p1=None, p2=None):
816 orig_parent = self.dirstate.parents()[0] or nullid
816 orig_parent = self.dirstate.parents()[0] or nullid
817 p1 = p1 or self.dirstate.parents()[0] or nullid
817 p1 = p1 or self.dirstate.parents()[0] or nullid
818 p2 = p2 or self.dirstate.parents()[1] or nullid
818 p2 = p2 or self.dirstate.parents()[1] or nullid
819 c1 = self.changelog.read(p1)
819 c1 = self.changelog.read(p1)
820 c2 = self.changelog.read(p2)
820 c2 = self.changelog.read(p2)
821 m1 = self.manifest.read(c1[0])
821 m1 = self.manifest.read(c1[0])
822 mf1 = self.manifest.readflags(c1[0])
822 mf1 = self.manifest.readflags(c1[0])
823 m2 = self.manifest.read(c2[0])
823 m2 = self.manifest.read(c2[0])
824
824
825 if orig_parent == p1:
825 if orig_parent == p1:
826 update_dirstate = 1
826 update_dirstate = 1
827 else:
827 else:
828 update_dirstate = 0
828 update_dirstate = 0
829
829
830 tr = self.transaction()
830 tr = self.transaction()
831 mm = m1.copy()
831 mm = m1.copy()
832 mfm = mf1.copy()
832 mfm = mf1.copy()
833 linkrev = self.changelog.count()
833 linkrev = self.changelog.count()
834 for f in files:
834 for f in files:
835 try:
835 try:
836 t = self.wfile(f).read()
836 t = self.wfile(f).read()
837 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
837 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
838 r = self.file(f)
838 r = self.file(f)
839 mfm[f] = tm
839 mfm[f] = tm
840 mm[f] = r.add(t, {}, tr, linkrev,
840 mm[f] = r.add(t, {}, tr, linkrev,
841 m1.get(f, nullid), m2.get(f, nullid))
841 m1.get(f, nullid), m2.get(f, nullid))
842 if update_dirstate:
842 if update_dirstate:
843 self.dirstate.update([f], "n")
843 self.dirstate.update([f], "n")
844 except IOError:
844 except IOError:
845 try:
845 try:
846 del mm[f]
846 del mm[f]
847 del mfm[f]
847 del mfm[f]
848 if update_dirstate:
848 if update_dirstate:
849 self.dirstate.forget([f])
849 self.dirstate.forget([f])
850 except:
850 except:
851 # deleted from p2?
851 # deleted from p2?
852 pass
852 pass
853
853
854 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
854 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
855 user = user or self.ui.username()
855 user = user or self.ui.username()
856 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
856 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
857 tr.close()
857 tr.close()
858 if update_dirstate:
858 if update_dirstate:
859 self.dirstate.setparents(n, nullid)
859 self.dirstate.setparents(n, nullid)
860
860
861 def commit(self, files = None, text = "", user = None, date = None,
861 def commit(self, files = None, text = "", user = None, date = None,
862 match = util.always, force=False):
862 match = util.always, force=False):
863 commit = []
863 commit = []
864 remove = []
864 remove = []
865 if files:
865 if files:
866 for f in files:
866 for f in files:
867 s = self.dirstate.state(f)
867 s = self.dirstate.state(f)
868 if s in 'nmai':
868 if s in 'nmai':
869 commit.append(f)
869 commit.append(f)
870 elif s == 'r':
870 elif s == 'r':
871 remove.append(f)
871 remove.append(f)
872 else:
872 else:
873 self.ui.warn("%s not tracked!\n" % f)
873 self.ui.warn("%s not tracked!\n" % f)
874 else:
874 else:
875 (c, a, d, u) = self.changes(match = match)
875 (c, a, d, u) = self.changes(match = match)
876 commit = c + a
876 commit = c + a
877 remove = d
877 remove = d
878
878
879 if not commit and not remove and not force:
879 if not commit and not remove and not force:
880 self.ui.status("nothing changed\n")
880 self.ui.status("nothing changed\n")
881 return None
881 return None
882
882
883 if not self.hook("precommit"):
883 if not self.hook("precommit"):
884 return None
884 return None
885
885
886 p1, p2 = self.dirstate.parents()
886 p1, p2 = self.dirstate.parents()
887 c1 = self.changelog.read(p1)
887 c1 = self.changelog.read(p1)
888 c2 = self.changelog.read(p2)
888 c2 = self.changelog.read(p2)
889 m1 = self.manifest.read(c1[0])
889 m1 = self.manifest.read(c1[0])
890 mf1 = self.manifest.readflags(c1[0])
890 mf1 = self.manifest.readflags(c1[0])
891 m2 = self.manifest.read(c2[0])
891 m2 = self.manifest.read(c2[0])
892 lock = self.lock()
892 lock = self.lock()
893 tr = self.transaction()
893 tr = self.transaction()
894
894
895 # check in files
895 # check in files
896 new = {}
896 new = {}
897 linkrev = self.changelog.count()
897 linkrev = self.changelog.count()
898 commit.sort()
898 commit.sort()
899 for f in commit:
899 for f in commit:
900 self.ui.note(f + "\n")
900 self.ui.note(f + "\n")
901 try:
901 try:
902 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
902 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
903 t = self.wfile(f).read()
903 t = self.wfile(f).read()
904 except IOError:
904 except IOError:
905 self.ui.warn("trouble committing %s!\n" % f)
905 self.ui.warn("trouble committing %s!\n" % f)
906 raise
906 raise
907
907
908 meta = {}
908 meta = {}
909 cp = self.dirstate.copied(f)
909 cp = self.dirstate.copied(f)
910 if cp:
910 if cp:
911 meta["copy"] = cp
911 meta["copy"] = cp
912 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
912 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
913 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
913 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
914
914
915 r = self.file(f)
915 r = self.file(f)
916 fp1 = m1.get(f, nullid)
916 fp1 = m1.get(f, nullid)
917 fp2 = m2.get(f, nullid)
917 fp2 = m2.get(f, nullid)
918 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
918 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
919
919
920 # update manifest
920 # update manifest
921 m1.update(new)
921 m1.update(new)
922 for f in remove:
922 for f in remove:
923 if f in m1:
923 if f in m1:
924 del m1[f]
924 del m1[f]
925 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
925 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
926 (new, remove))
926 (new, remove))
927
927
928 # add changeset
928 # add changeset
929 new = new.keys()
929 new = new.keys()
930 new.sort()
930 new.sort()
931
931
932 if not text:
932 if not text:
933 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
933 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
934 edittext += "".join(["HG: changed %s\n" % f for f in new])
934 edittext += "".join(["HG: changed %s\n" % f for f in new])
935 edittext += "".join(["HG: removed %s\n" % f for f in remove])
935 edittext += "".join(["HG: removed %s\n" % f for f in remove])
936 edittext = self.ui.edit(edittext)
936 edittext = self.ui.edit(edittext)
937 if not edittext.rstrip():
937 if not edittext.rstrip():
938 return None
938 return None
939 text = edittext
939 text = edittext
940
940
941 user = user or self.ui.username()
941 user = user or self.ui.username()
942 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
942 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
943 tr.close()
943 tr.close()
944
944
945 self.dirstate.setparents(n)
945 self.dirstate.setparents(n)
946 self.dirstate.update(new, "n")
946 self.dirstate.update(new, "n")
947 self.dirstate.forget(remove)
947 self.dirstate.forget(remove)
948
948
949 if not self.hook("commit", node=hex(n)):
949 if not self.hook("commit", node=hex(n)):
950 return None
950 return None
951 return n
951 return n
952
952
953 def walk(self, node = None, files = [], match = util.always):
953 def walk(self, node = None, files = [], match = util.always):
954 if node:
954 if node:
955 for fn in self.manifest.read(self.changelog.read(node)[0]):
955 for fn in self.manifest.read(self.changelog.read(node)[0]):
956 if match(fn): yield 'm', fn
956 if match(fn): yield 'm', fn
957 else:
957 else:
958 for src, fn in self.dirstate.walk(files, match):
958 for src, fn in self.dirstate.walk(files, match):
959 yield src, fn
959 yield src, fn
960
960
961 def changes(self, node1 = None, node2 = None, files = [],
961 def changes(self, node1 = None, node2 = None, files = [],
962 match = util.always):
962 match = util.always):
963 mf2, u = None, []
963 mf2, u = None, []
964
964
965 def fcmp(fn, mf):
965 def fcmp(fn, mf):
966 t1 = self.wfile(fn).read()
966 t1 = self.wfile(fn).read()
967 t2 = self.file(fn).revision(mf[fn])
967 t2 = self.file(fn).revision(mf[fn])
968 return cmp(t1, t2)
968 return cmp(t1, t2)
969
969
970 def mfmatches(node):
970 def mfmatches(node):
971 mf = dict(self.manifest.read(node))
971 mf = dict(self.manifest.read(node))
972 for fn in mf.keys():
972 for fn in mf.keys():
973 if not match(fn):
973 if not match(fn):
974 del mf[fn]
974 del mf[fn]
975 return mf
975 return mf
976
976
977 # are we comparing the working directory?
977 # are we comparing the working directory?
978 if not node2:
978 if not node2:
979 l, c, a, d, u = self.dirstate.changes(files, match)
979 l, c, a, d, u = self.dirstate.changes(files, match)
980
980
981 # are we comparing working dir against its parent?
981 # are we comparing working dir against its parent?
982 if not node1:
982 if not node1:
983 if l:
983 if l:
984 # do a full compare of any files that might have changed
984 # do a full compare of any files that might have changed
985 change = self.changelog.read(self.dirstate.parents()[0])
985 change = self.changelog.read(self.dirstate.parents()[0])
986 mf2 = mfmatches(change[0])
986 mf2 = mfmatches(change[0])
987 for f in l:
987 for f in l:
988 if fcmp(f, mf2):
988 if fcmp(f, mf2):
989 c.append(f)
989 c.append(f)
990
990
991 for l in c, a, d, u:
991 for l in c, a, d, u:
992 l.sort()
992 l.sort()
993
993
994 return (c, a, d, u)
994 return (c, a, d, u)
995
995
996 # are we comparing working dir against non-tip?
996 # are we comparing working dir against non-tip?
997 # generate a pseudo-manifest for the working dir
997 # generate a pseudo-manifest for the working dir
998 if not node2:
998 if not node2:
999 if not mf2:
999 if not mf2:
1000 change = self.changelog.read(self.dirstate.parents()[0])
1000 change = self.changelog.read(self.dirstate.parents()[0])
1001 mf2 = mfmatches(change[0])
1001 mf2 = mfmatches(change[0])
1002 for f in a + c + l:
1002 for f in a + c + l:
1003 mf2[f] = ""
1003 mf2[f] = ""
1004 for f in d:
1004 for f in d:
1005 if f in mf2: del mf2[f]
1005 if f in mf2: del mf2[f]
1006 else:
1006 else:
1007 change = self.changelog.read(node2)
1007 change = self.changelog.read(node2)
1008 mf2 = mfmatches(change[0])
1008 mf2 = mfmatches(change[0])
1009
1009
1010 # flush lists from dirstate before comparing manifests
1010 # flush lists from dirstate before comparing manifests
1011 c, a = [], []
1011 c, a = [], []
1012
1012
1013 change = self.changelog.read(node1)
1013 change = self.changelog.read(node1)
1014 mf1 = mfmatches(change[0])
1014 mf1 = mfmatches(change[0])
1015
1015
1016 for fn in mf2:
1016 for fn in mf2:
1017 if mf1.has_key(fn):
1017 if mf1.has_key(fn):
1018 if mf1[fn] != mf2[fn]:
1018 if mf1[fn] != mf2[fn]:
1019 if mf2[fn] != "" or fcmp(fn, mf1):
1019 if mf2[fn] != "" or fcmp(fn, mf1):
1020 c.append(fn)
1020 c.append(fn)
1021 del mf1[fn]
1021 del mf1[fn]
1022 else:
1022 else:
1023 a.append(fn)
1023 a.append(fn)
1024
1024
1025 d = mf1.keys()
1025 d = mf1.keys()
1026
1026
1027 for l in c, a, d, u:
1027 for l in c, a, d, u:
1028 l.sort()
1028 l.sort()
1029
1029
1030 return (c, a, d, u)
1030 return (c, a, d, u)
1031
1031
1032 def add(self, list):
1032 def add(self, list):
1033 for f in list:
1033 for f in list:
1034 p = self.wjoin(f)
1034 p = self.wjoin(f)
1035 if not os.path.exists(p):
1035 if not os.path.exists(p):
1036 self.ui.warn("%s does not exist!\n" % f)
1036 self.ui.warn("%s does not exist!\n" % f)
1037 elif not os.path.isfile(p):
1037 elif not os.path.isfile(p):
1038 self.ui.warn("%s not added: only files supported currently\n" % f)
1038 self.ui.warn("%s not added: only files supported currently\n" % f)
1039 elif self.dirstate.state(f) in 'an':
1039 elif self.dirstate.state(f) in 'an':
1040 self.ui.warn("%s already tracked!\n" % f)
1040 self.ui.warn("%s already tracked!\n" % f)
1041 else:
1041 else:
1042 self.dirstate.update([f], "a")
1042 self.dirstate.update([f], "a")
1043
1043
1044 def forget(self, list):
1044 def forget(self, list):
1045 for f in list:
1045 for f in list:
1046 if self.dirstate.state(f) not in 'ai':
1046 if self.dirstate.state(f) not in 'ai':
1047 self.ui.warn("%s not added!\n" % f)
1047 self.ui.warn("%s not added!\n" % f)
1048 else:
1048 else:
1049 self.dirstate.forget([f])
1049 self.dirstate.forget([f])
1050
1050
1051 def remove(self, list):
1051 def remove(self, list):
1052 for f in list:
1052 for f in list:
1053 p = self.wjoin(f)
1053 p = self.wjoin(f)
1054 if os.path.exists(p):
1054 if os.path.exists(p):
1055 self.ui.warn("%s still exists!\n" % f)
1055 self.ui.warn("%s still exists!\n" % f)
1056 elif self.dirstate.state(f) == 'a':
1056 elif self.dirstate.state(f) == 'a':
1057 self.ui.warn("%s never committed!\n" % f)
1057 self.ui.warn("%s never committed!\n" % f)
1058 self.dirstate.forget([f])
1058 self.dirstate.forget([f])
1059 elif f not in self.dirstate:
1059 elif f not in self.dirstate:
1060 self.ui.warn("%s not tracked!\n" % f)
1060 self.ui.warn("%s not tracked!\n" % f)
1061 else:
1061 else:
1062 self.dirstate.update([f], "r")
1062 self.dirstate.update([f], "r")
1063
1063
1064 def copy(self, source, dest):
1064 def copy(self, source, dest):
1065 p = self.wjoin(dest)
1065 p = self.wjoin(dest)
1066 if not os.path.exists(p):
1066 if not os.path.exists(p):
1067 self.ui.warn("%s does not exist!\n" % dest)
1067 self.ui.warn("%s does not exist!\n" % dest)
1068 elif not os.path.isfile(p):
1068 elif not os.path.isfile(p):
1069 self.ui.warn("copy failed: %s is not a file\n" % dest)
1069 self.ui.warn("copy failed: %s is not a file\n" % dest)
1070 else:
1070 else:
1071 if self.dirstate.state(dest) == '?':
1071 if self.dirstate.state(dest) == '?':
1072 self.dirstate.update([dest], "a")
1072 self.dirstate.update([dest], "a")
1073 self.dirstate.copy(source, dest)
1073 self.dirstate.copy(source, dest)
1074
1074
1075 def heads(self):
1075 def heads(self):
1076 return self.changelog.heads()
1076 return self.changelog.heads()
1077
1077
1078 # branchlookup returns a dict giving a list of branches for
1078 # branchlookup returns a dict giving a list of branches for
1079 # each head. A branch is defined as the tag of a node or
1079 # each head. A branch is defined as the tag of a node or
1080 # the branch of the node's parents. If a node has multiple
1080 # the branch of the node's parents. If a node has multiple
1081 # branch tags, tags are eliminated if they are visible from other
1081 # branch tags, tags are eliminated if they are visible from other
1082 # branch tags.
1082 # branch tags.
1083 #
1083 #
1084 # So, for this graph: a->b->c->d->e
1084 # So, for this graph: a->b->c->d->e
1085 # \ /
1085 # \ /
1086 # aa -----/
1086 # aa -----/
1087 # a has tag 2.6.12
1087 # a has tag 2.6.12
1088 # d has tag 2.6.13
1088 # d has tag 2.6.13
1089 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1089 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1090 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1090 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1091 # from the list.
1091 # from the list.
1092 #
1092 #
1093 # It is possible that more than one head will have the same branch tag.
1093 # It is possible that more than one head will have the same branch tag.
1094 # callers need to check the result for multiple heads under the same
1094 # callers need to check the result for multiple heads under the same
1095 # branch tag if that is a problem for them (ie checkout of a specific
1095 # branch tag if that is a problem for them (ie checkout of a specific
1096 # branch).
1096 # branch).
1097 #
1097 #
1098 # passing in a specific branch will limit the depth of the search
1098 # passing in a specific branch will limit the depth of the search
1099 # through the parents. It won't limit the branches returned in the
1099 # through the parents. It won't limit the branches returned in the
1100 # result though.
1100 # result though.
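# Added illustration (not original source): a hedged usage sketch of the
# lookup described above; `repo` is an assumed local repository object.
#
#   branches = repo.branchlookup(repo.heads())
#   for head, tags in branches.items():
#       print short(head), tags
#
# For the example graph, the head e would map to ['2.6.13'] only, since
# the 2.6.12 node is reachable from the 2.6.13 node and is pruned.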
1101 def branchlookup(self, heads=None, branch=None):
1101 def branchlookup(self, heads=None, branch=None):
1102 if not heads:
1102 if not heads:
1103 heads = self.heads()
1103 heads = self.heads()
1104 headt = [ h for h in heads ]
1104 headt = [ h for h in heads ]
1105 chlog = self.changelog
1105 chlog = self.changelog
1106 branches = {}
1106 branches = {}
1107 merges = []
1107 merges = []
1108 seenmerge = {}
1108 seenmerge = {}
1109
1109
1110 # traverse the tree once for each head, recording in the branches
1110 # traverse the tree once for each head, recording in the branches
1111 # dict which tags are visible from this head. The branches
1111 # dict which tags are visible from this head. The branches
1112 # dict also records which tags are visible from each tag
1112 # dict also records which tags are visible from each tag
1113 # while we traverse.
1113 # while we traverse.
1114 while headt or merges:
1114 while headt or merges:
1115 if merges:
1115 if merges:
1116 n, found = merges.pop()
1116 n, found = merges.pop()
1117 visit = [n]
1117 visit = [n]
1118 else:
1118 else:
1119 h = headt.pop()
1119 h = headt.pop()
1120 visit = [h]
1120 visit = [h]
1121 found = [h]
1121 found = [h]
1122 seen = {}
1122 seen = {}
1123 while visit:
1123 while visit:
1124 n = visit.pop()
1124 n = visit.pop()
1125 if n in seen:
1125 if n in seen:
1126 continue
1126 continue
1127 pp = chlog.parents(n)
1127 pp = chlog.parents(n)
1128 tags = self.nodetags(n)
1128 tags = self.nodetags(n)
1129 if tags:
1129 if tags:
1130 for x in tags:
1130 for x in tags:
1131 if x == 'tip':
1131 if x == 'tip':
1132 continue
1132 continue
1133 for f in found:
1133 for f in found:
1134 branches.setdefault(f, {})[n] = 1
1134 branches.setdefault(f, {})[n] = 1
1135 branches.setdefault(n, {})[n] = 1
1135 branches.setdefault(n, {})[n] = 1
1136 break
1136 break
1137 if n not in found:
1137 if n not in found:
1138 found.append(n)
1138 found.append(n)
1139 if branch in tags:
1139 if branch in tags:
1140 continue
1140 continue
1141 seen[n] = 1
1141 seen[n] = 1
1142 if pp[1] != nullid and n not in seenmerge:
1142 if pp[1] != nullid and n not in seenmerge:
1143 merges.append((pp[1], [x for x in found]))
1143 merges.append((pp[1], [x for x in found]))
1144 seenmerge[n] = 1
1144 seenmerge[n] = 1
1145 if pp[0] != nullid:
1145 if pp[0] != nullid:
1146 visit.append(pp[0])
1146 visit.append(pp[0])
1147 # traverse the branches dict, eliminating branch tags from each
1147 # traverse the branches dict, eliminating branch tags from each
1148 # head that are visible from another branch tag for that head.
1148 # head that are visible from another branch tag for that head.
1149 out = {}
1149 out = {}
1150 viscache = {}
1150 viscache = {}
1151 for h in heads:
1151 for h in heads:
1152 def visible(node):
1152 def visible(node):
1153 if node in viscache:
1153 if node in viscache:
1154 return viscache[node]
1154 return viscache[node]
1155 ret = {}
1155 ret = {}
1156 visit = [node]
1156 visit = [node]
1157 while visit:
1157 while visit:
1158 x = visit.pop()
1158 x = visit.pop()
1159 if x in viscache:
1159 if x in viscache:
1160 ret.update(viscache[x])
1160 ret.update(viscache[x])
1161 elif x not in ret:
1161 elif x not in ret:
1162 ret[x] = 1
1162 ret[x] = 1
1163 if x in branches:
1163 if x in branches:
1164 visit[len(visit):] = branches[x].keys()
1164 visit[len(visit):] = branches[x].keys()
1165 viscache[node] = ret
1165 viscache[node] = ret
1166 return ret
1166 return ret
1167 if h not in branches:
1167 if h not in branches:
1168 continue
1168 continue
1169 # O(n^2), but somewhat limited. This only searches the
1169 # O(n^2), but somewhat limited. This only searches the
1170 # tags visible from a specific head, not all the tags in the
1170 # tags visible from a specific head, not all the tags in the
1171 # whole repo.
1171 # whole repo.
1172 for b in branches[h]:
1172 for b in branches[h]:
1173 vis = False
1173 vis = False
1174 for bb in branches[h].keys():
1174 for bb in branches[h].keys():
1175 if b != bb:
1175 if b != bb:
1176 if b in visible(bb):
1176 if b in visible(bb):
1177 vis = True
1177 vis = True
1178 break
1178 break
1179 if not vis:
1179 if not vis:
1180 l = out.setdefault(h, [])
1180 l = out.setdefault(h, [])
1181 l[len(l):] = self.nodetags(b)
1181 l[len(l):] = self.nodetags(b)
1182 return out
1182 return out
1183
1183
1184 def branches(self, nodes):
1184 def branches(self, nodes):
1185 if not nodes: nodes = [self.changelog.tip()]
1185 if not nodes: nodes = [self.changelog.tip()]
1186 b = []
1186 b = []
1187 for n in nodes:
1187 for n in nodes:
1188 t = n
1188 t = n
1189 while n:
1189 while n:
1190 p = self.changelog.parents(n)
1190 p = self.changelog.parents(n)
1191 if p[1] != nullid or p[0] == nullid:
1191 if p[1] != nullid or p[0] == nullid:
1192 b.append((t, n, p[0], p[1]))
1192 b.append((t, n, p[0], p[1]))
1193 break
1193 break
1194 n = p[0]
1194 n = p[0]
1195 return b
1195 return b
1196
1196
1197 def between(self, pairs):
1197 def between(self, pairs):
1198 r = []
1198 r = []
1199
1199
1200 for top, bottom in pairs:
1200 for top, bottom in pairs:
1201 n, l, i = top, [], 0
1201 n, l, i = top, [], 0
1202 f = 1
1202 f = 1
1203
1203
1204 while n != bottom:
1204 while n != bottom:
1205 p = self.changelog.parents(n)[0]
1205 p = self.changelog.parents(n)[0]
1206 if i == f:
1206 if i == f:
1207 l.append(n)
1207 l.append(n)
1208 f = f * 2
1208 f = f * 2
1209 n = p
1209 n = p
1210 i += 1
1210 i += 1
1211
1211
1212 r.append(l)
1212 r.append(l)
1213
1213
1214 return r
1214 return r
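# Added note (not original source): the f-doubling above samples the
# first-parent chain at exponentially growing distances from `top`, so a
# later binary search (see findincoming) can narrow the unknown range.
#
#   l = repo.between([(top, bottom)])[0]
#   # l[0] is 1 step below top, l[1] is 2 steps, l[2] is 4 steps, ...
#
# `repo`, `top` and `bottom` are assumed names for a repository object
# and two changelog nodes with a first-parent path between them.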
1215
1215
1216 def newer(self, nodes):
1216 def newer(self, nodes):
1217 m = {}
1217 m = {}
1218 nl = []
1218 nl = []
1219 pm = {}
1219 pm = {}
1220 cl = self.changelog
1220 cl = self.changelog
1221 t = l = cl.count()
1221 t = l = cl.count()
1222
1222
1223 # find the lowest numbered node
1223 # find the lowest numbered node
1224 for n in nodes:
1224 for n in nodes:
1225 l = min(l, cl.rev(n))
1225 l = min(l, cl.rev(n))
1226 m[n] = 1
1226 m[n] = 1
1227
1227
1228 for i in xrange(l, t):
1228 for i in xrange(l, t):
1229 n = cl.node(i)
1229 n = cl.node(i)
1230 if n in m: # explicitly listed
1230 if n in m: # explicitly listed
1231 pm[n] = 1
1231 pm[n] = 1
1232 nl.append(n)
1232 nl.append(n)
1233 continue
1233 continue
1234 for p in cl.parents(n):
1234 for p in cl.parents(n):
1235 if p in pm: # parent listed
1235 if p in pm: # parent listed
1236 pm[n] = 1
1236 pm[n] = 1
1237 nl.append(n)
1237 nl.append(n)
1238 break
1238 break
1239
1239
1240 return nl
1240 return nl
1241
1241
1242 def findincoming(self, remote, base=None, heads=None):
1242 def findincoming(self, remote, base=None, heads=None):
1243 m = self.changelog.nodemap
1243 m = self.changelog.nodemap
1244 search = []
1244 search = []
1245 fetch = []
1245 fetch = []
1246 seen = {}
1246 seen = {}
1247 seenbranch = {}
1247 seenbranch = {}
1248 if base == None:
1248 if base == None:
1249 base = {}
1249 base = {}
1250
1250
1251 # assume we're closer to the tip than the root
1251 # assume we're closer to the tip than the root
1252 # and start by examining the heads
1252 # and start by examining the heads
1253 self.ui.status("searching for changes\n")
1253 self.ui.status("searching for changes\n")
1254
1254
1255 if not heads:
1255 if not heads:
1256 heads = remote.heads()
1256 heads = remote.heads()
1257
1257
1258 unknown = []
1258 unknown = []
1259 for h in heads:
1259 for h in heads:
1260 if h not in m:
1260 if h not in m:
1261 unknown.append(h)
1261 unknown.append(h)
1262 else:
1262 else:
1263 base[h] = 1
1263 base[h] = 1
1264
1264
1265 if not unknown:
1265 if not unknown:
1266 return None
1266 return None
1267
1267
1268 rep = {}
1268 rep = {}
1269 reqcnt = 0
1269 reqcnt = 0
1270
1270
1271 # search through remote branches
1271 # search through remote branches
1272 # a 'branch' here is a linear segment of history, with four parts:
1272 # a 'branch' here is a linear segment of history, with four parts:
1273 # head, root, first parent, second parent
1273 # head, root, first parent, second parent
1274 # (a branch always has two parents (or none) by definition)
1274 # (a branch always has two parents (or none) by definition)
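# Added illustration (not original source): concretely, every element
# returned by branches() is a 4-tuple of nodes
#
#   (head of segment, root of segment, root's first parent, root's second parent)
#
# where the segment is the run of changesets reached by following first
# parents from the head until a merge or the null revision is hit.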
1275 unknown = remote.branches(unknown)
1275 unknown = remote.branches(unknown)
1276 while unknown:
1276 while unknown:
1277 r = []
1277 r = []
1278 while unknown:
1278 while unknown:
1279 n = unknown.pop(0)
1279 n = unknown.pop(0)
1280 if n[0] in seen:
1280 if n[0] in seen:
1281 continue
1281 continue
1282
1282
1283 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1283 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1284 if n[0] == nullid:
1284 if n[0] == nullid:
1285 break
1285 break
1286 if n in seenbranch:
1286 if n in seenbranch:
1287 self.ui.debug("branch already found\n")
1287 self.ui.debug("branch already found\n")
1288 continue
1288 continue
1289 if n[1] and n[1] in m: # do we know the base?
1289 if n[1] and n[1] in m: # do we know the base?
1290 self.ui.debug("found incomplete branch %s:%s\n"
1290 self.ui.debug("found incomplete branch %s:%s\n"
1291 % (short(n[0]), short(n[1])))
1291 % (short(n[0]), short(n[1])))
1292 search.append(n) # schedule branch range for scanning
1292 search.append(n) # schedule branch range for scanning
1293 seenbranch[n] = 1
1293 seenbranch[n] = 1
1294 else:
1294 else:
1295 if n[1] not in seen and n[1] not in fetch:
1295 if n[1] not in seen and n[1] not in fetch:
1296 if n[2] in m and n[3] in m:
1296 if n[2] in m and n[3] in m:
1297 self.ui.debug("found new changeset %s\n" %
1297 self.ui.debug("found new changeset %s\n" %
1298 short(n[1]))
1298 short(n[1]))
1299 fetch.append(n[1]) # earliest unknown
1299 fetch.append(n[1]) # earliest unknown
1300 base[n[2]] = 1 # latest known
1300 base[n[2]] = 1 # latest known
1301 continue
1301 continue
1302
1302
1303 for a in n[2:4]:
1303 for a in n[2:4]:
1304 if a not in rep:
1304 if a not in rep:
1305 r.append(a)
1305 r.append(a)
1306 rep[a] = 1
1306 rep[a] = 1
1307
1307
1308 seen[n[0]] = 1
1308 seen[n[0]] = 1
1309
1309
1310 if r:
1310 if r:
1311 reqcnt += 1
1311 reqcnt += 1
1312 self.ui.debug("request %d: %s\n" %
1312 self.ui.debug("request %d: %s\n" %
1313 (reqcnt, " ".join(map(short, r))))
1313 (reqcnt, " ".join(map(short, r))))
1314 for p in range(0, len(r), 10):
1314 for p in range(0, len(r), 10):
1315 for b in remote.branches(r[p:p+10]):
1315 for b in remote.branches(r[p:p+10]):
1316 self.ui.debug("received %s:%s\n" %
1316 self.ui.debug("received %s:%s\n" %
1317 (short(b[0]), short(b[1])))
1317 (short(b[0]), short(b[1])))
1318 if b[0] not in m and b[0] not in seen:
1318 if b[0] not in m and b[0] not in seen:
1319 unknown.append(b)
1319 unknown.append(b)
1320
1320
1321 # do binary search on the branches we found
1321 # do binary search on the branches we found
1322 while search:
1322 while search:
1323 n = search.pop(0)
1323 n = search.pop(0)
1324 reqcnt += 1
1324 reqcnt += 1
1325 l = remote.between([(n[0], n[1])])[0]
1325 l = remote.between([(n[0], n[1])])[0]
1326 l.append(n[1])
1326 l.append(n[1])
1327 p = n[0]
1327 p = n[0]
1328 f = 1
1328 f = 1
1329 for i in l:
1329 for i in l:
1330 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1330 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1331 if i in m:
1331 if i in m:
1332 if f <= 2:
1332 if f <= 2:
1333 self.ui.debug("found new branch changeset %s\n" %
1333 self.ui.debug("found new branch changeset %s\n" %
1334 short(p))
1334 short(p))
1335 fetch.append(p)
1335 fetch.append(p)
1336 base[i] = 1
1336 base[i] = 1
1337 else:
1337 else:
1338 self.ui.debug("narrowed branch search to %s:%s\n"
1338 self.ui.debug("narrowed branch search to %s:%s\n"
1339 % (short(p), short(i)))
1339 % (short(p), short(i)))
1340 search.append((p, i))
1340 search.append((p, i))
1341 break
1341 break
1342 p, f = i, f * 2
1342 p, f = i, f * 2
1343
1343
1344 # sanity check our fetch list
1344 # sanity check our fetch list
1345 for f in fetch:
1345 for f in fetch:
1346 if f in m:
1346 if f in m:
1347 raise RepoError("already have changeset " + short(f[:4]))
1347 raise RepoError("already have changeset " + short(f[:4]))
1348
1348
1349 if base.keys() == [nullid]:
1349 if base.keys() == [nullid]:
1350 self.ui.warn("warning: pulling from an unrelated repository!\n")
1350 self.ui.warn("warning: pulling from an unrelated repository!\n")
1351
1351
1352 self.ui.note("adding new changesets starting at " +
1352 self.ui.note("adding new changesets starting at " +
1353 " ".join([short(f) for f in fetch]) + "\n")
1353 " ".join([short(f) for f in fetch]) + "\n")
1354
1354
1355 self.ui.debug("%d total queries\n" % reqcnt)
1355 self.ui.debug("%d total queries\n" % reqcnt)
1356
1356
1357 return fetch
1357 return fetch
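# Added usage sketch (not original source): `base` is filled in with
# nodes present on both sides and the return value lists the roots of
# the changesets only the remote has; pull() below does essentially
#
#   fetch = repo.findincoming(remote)
#   if not fetch:
#       repo.ui.status("no changes found\n")
#   else:
#       repo.addchangegroup(remote.changegroup(fetch))
#
# (minus the locking and the empty-repository shortcut), with `repo`
# and `remote` standing for a local and a remote repository object.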
1358
1358
1359 def findoutgoing(self, remote, base=None, heads=None):
1359 def findoutgoing(self, remote, base=None, heads=None):
1360 if base == None:
1360 if base == None:
1361 base = {}
1361 base = {}
1362 self.findincoming(remote, base, heads)
1362 self.findincoming(remote, base, heads)
1363
1363
1364 remain = dict.fromkeys(self.changelog.nodemap)
1364 remain = dict.fromkeys(self.changelog.nodemap)
1365
1365
1366 # prune everything remote has from the tree
1366 # prune everything remote has from the tree
1367 del remain[nullid]
1367 del remain[nullid]
1368 remove = base.keys()
1368 remove = base.keys()
1369 while remove:
1369 while remove:
1370 n = remove.pop(0)
1370 n = remove.pop(0)
1371 if n in remain:
1371 if n in remain:
1372 del remain[n]
1372 del remain[n]
1373 for p in self.changelog.parents(n):
1373 for p in self.changelog.parents(n):
1374 remove.append(p)
1374 remove.append(p)
1375
1375
1376 # find every node whose parents have been pruned
1376 # find every node whose parents have been pruned
1377 subset = []
1377 subset = []
1378 for n in remain:
1378 for n in remain:
1379 p1, p2 = self.changelog.parents(n)
1379 p1, p2 = self.changelog.parents(n)
1380 if p1 not in remain and p2 not in remain:
1380 if p1 not in remain and p2 not in remain:
1381 subset.append(n)
1381 subset.append(n)
1382
1382
1383 # this is the set of all roots we have to push
1383 # this is the set of all roots we have to push
1384 return subset
1384 return subset
1385
1385
1386 def pull(self, remote):
1386 def pull(self, remote):
1387 lock = self.lock()
1387 lock = self.lock()
1388
1388
1389 # if we have an empty repo, fetch everything
1389 # if we have an empty repo, fetch everything
1390 if self.changelog.tip() == nullid:
1390 if self.changelog.tip() == nullid:
1391 self.ui.status("requesting all changes\n")
1391 self.ui.status("requesting all changes\n")
1392 fetch = [nullid]
1392 fetch = [nullid]
1393 else:
1393 else:
1394 fetch = self.findincoming(remote)
1394 fetch = self.findincoming(remote)
1395
1395
1396 if not fetch:
1396 if not fetch:
1397 self.ui.status("no changes found\n")
1397 self.ui.status("no changes found\n")
1398 return 1
1398 return 1
1399
1399
1400 cg = remote.changegroup(fetch)
1400 cg = remote.changegroup(fetch)
1401 return self.addchangegroup(cg)
1401 return self.addchangegroup(cg)
1402
1402
1403 def push(self, remote, force=False):
1403 def push(self, remote, force=False):
1404 lock = remote.lock()
1404 lock = remote.lock()
1405
1405
1406 base = {}
1406 base = {}
1407 heads = remote.heads()
1407 heads = remote.heads()
1408 inc = self.findincoming(remote, base, heads)
1408 inc = self.findincoming(remote, base, heads)
1409 if not force and inc:
1409 if not force and inc:
1410 self.ui.warn("abort: unsynced remote changes!\n")
1410 self.ui.warn("abort: unsynced remote changes!\n")
1411 self.ui.status("(did you forget to sync? use push -f to force)\n")
1411 self.ui.status("(did you forget to sync? use push -f to force)\n")
1412 return 1
1412 return 1
1413
1413
1414 update = self.findoutgoing(remote, base)
1414 update = self.findoutgoing(remote, base)
1415 if not update:
1415 if not update:
1416 self.ui.status("no changes found\n")
1416 self.ui.status("no changes found\n")
1417 return 1
1417 return 1
1418 elif not force:
1418 elif not force:
1419 if len(heads) < len(self.changelog.heads()):
1419 if len(heads) < len(self.changelog.heads()):
1420 self.ui.warn("abort: push creates new remote branches!\n")
1420 self.ui.warn("abort: push creates new remote branches!\n")
1421 self.ui.status("(did you forget to merge?" +
1421 self.ui.status("(did you forget to merge?" +
1422 " use push -f to force)\n")
1422 " use push -f to force)\n")
1423 return 1
1423 return 1
1424
1424
1425 cg = self.changegroup(update)
1425 cg = self.changegroup(update)
1426 return remote.addchangegroup(cg)
1426 return remote.addchangegroup(cg)
1427
1427
1428 def changegroup(self, basenodes):
1428 def changegroup(self, basenodes):
1429 class genread:
1429 class genread:
1430 def __init__(self, generator):
1430 def __init__(self, generator):
1431 self.g = generator
1431 self.g = generator
1432 self.buf = ""
1432 self.buf = ""
1433 def fillbuf(self):
1433 def fillbuf(self):
1434 self.buf += "".join(self.g)
1434 self.buf += "".join(self.g)
1435
1435
1436 def read(self, l):
1436 def read(self, l):
1437 while l > len(self.buf):
1437 while l > len(self.buf):
1438 try:
1438 try:
1439 self.buf += self.g.next()
1439 self.buf += self.g.next()
1440 except StopIteration:
1440 except StopIteration:
1441 break
1441 break
1442 d, self.buf = self.buf[:l], self.buf[l:]
1442 d, self.buf = self.buf[:l], self.buf[l:]
1443 return d
1443 return d
1444
1444
1445 def gengroup():
1445 def gengroup():
1446 nodes = self.newer(basenodes)
1446 nodes = self.newer(basenodes)
1447
1447
1448 # construct the link map
1448 # construct the link map
1449 linkmap = {}
1449 linkmap = {}
1450 for n in nodes:
1450 for n in nodes:
1451 linkmap[self.changelog.rev(n)] = n
1451 linkmap[self.changelog.rev(n)] = n
1452
1452
1453 # construct a list of all changed files
1453 # construct a list of all changed files
1454 changed = {}
1454 changed = {}
1455 for n in nodes:
1455 for n in nodes:
1456 c = self.changelog.read(n)
1456 c = self.changelog.read(n)
1457 for f in c[3]:
1457 for f in c[3]:
1458 changed[f] = 1
1458 changed[f] = 1
1459 changed = changed.keys()
1459 changed = changed.keys()
1460 changed.sort()
1460 changed.sort()
1461
1461
1462 # the changegroup is changesets + manifests + all file revs
1462 # the changegroup is changesets + manifests + all file revs
1463 revs = [ self.changelog.rev(n) for n in nodes ]
1463 revs = [ self.changelog.rev(n) for n in nodes ]
1464
1464
1465 for y in self.changelog.group(linkmap): yield y
1465 for y in self.changelog.group(linkmap): yield y
1466 for y in self.manifest.group(linkmap): yield y
1466 for y in self.manifest.group(linkmap): yield y
1467 for f in changed:
1467 for f in changed:
1468 yield struct.pack(">l", len(f) + 4) + f
1468 yield struct.pack(">l", len(f) + 4) + f
1469 g = self.file(f).group(linkmap)
1469 g = self.file(f).group(linkmap)
1470 for y in g:
1470 for y in g:
1471 yield y
1471 yield y
1472
1472
1473 yield struct.pack(">l", 0)
1473 yield struct.pack(">l", 0)
1474
1474
1475 return genread(gengroup())
1475 return genread(gengroup())
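# Added note (not original source): the stream built above is a sequence
# of length-prefixed chunks. After the changelog and manifest groups,
# each changed file contributes a name chunk
#
#   struct.pack(">l", len(f) + 4) + f
#
# i.e. a 4-byte big-endian length that counts itself, followed by the
# payload, and struct.pack(">l", 0) terminates the stream. getchunk()
# in addchangegroup() below undoes exactly this framing.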
1476
1476
1477 def addchangegroup(self, source):
1477 def addchangegroup(self, source):
1478
1478
1479 def getchunk():
1479 def getchunk():
1480 d = source.read(4)
1480 d = source.read(4)
1481 if not d: return ""
1481 if not d: return ""
1482 l = struct.unpack(">l", d)[0]
1482 l = struct.unpack(">l", d)[0]
1483 if l <= 4: return ""
1483 if l <= 4: return ""
1484 return source.read(l - 4)
1484 return source.read(l - 4)
1485
1485
1486 def getgroup():
1486 def getgroup():
1487 while 1:
1487 while 1:
1488 c = getchunk()
1488 c = getchunk()
1489 if not c: break
1489 if not c: break
1490 yield c
1490 yield c
1491
1491
1492 def csmap(x):
1492 def csmap(x):
1493 self.ui.debug("add changeset %s\n" % short(x))
1493 self.ui.debug("add changeset %s\n" % short(x))
1494 return self.changelog.count()
1494 return self.changelog.count()
1495
1495
1496 def revmap(x):
1496 def revmap(x):
1497 return self.changelog.rev(x)
1497 return self.changelog.rev(x)
1498
1498
1499 if not source: return
1499 if not source: return
1500 changesets = files = revisions = 0
1500 changesets = files = revisions = 0
1501
1501
1502 tr = self.transaction()
1502 tr = self.transaction()
1503
1503
1504 # pull off the changeset group
1504 # pull off the changeset group
1505 self.ui.status("adding changesets\n")
1505 self.ui.status("adding changesets\n")
1506 co = self.changelog.tip()
1506 co = self.changelog.tip()
1507 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1507 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1508 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1508 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1509
1509
1510 # pull off the manifest group
1510 # pull off the manifest group
1511 self.ui.status("adding manifests\n")
1511 self.ui.status("adding manifests\n")
1512 mm = self.manifest.tip()
1512 mm = self.manifest.tip()
1513 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1513 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1514
1514
1515 # process the files
1515 # process the files
1516 self.ui.status("adding file changes\n")
1516 self.ui.status("adding file changes\n")
1517 while 1:
1517 while 1:
1518 f = getchunk()
1518 f = getchunk()
1519 if not f: break
1519 if not f: break
1520 self.ui.debug("adding %s revisions\n" % f)
1520 self.ui.debug("adding %s revisions\n" % f)
1521 fl = self.file(f)
1521 fl = self.file(f)
1522 o = fl.count()
1522 o = fl.count()
1523 n = fl.addgroup(getgroup(), revmap, tr)
1523 n = fl.addgroup(getgroup(), revmap, tr)
1524 revisions += fl.count() - o
1524 revisions += fl.count() - o
1525 files += 1
1525 files += 1
1526
1526
1527 self.ui.status(("added %d changesets" +
1527 self.ui.status(("added %d changesets" +
1528 " with %d changes to %d files\n")
1528 " with %d changes to %d files\n")
1529 % (changesets, revisions, files))
1529 % (changesets, revisions, files))
1530
1530
1531 tr.close()
1531 tr.close()
1532
1532
1533 if not self.hook("changegroup"):
1533 if not self.hook("changegroup"):
1534 return 1
1534 return 1
1535
1535
1536 return
1536 return
1537
1537
1538 def update(self, node, allow=False, force=False, choose=None,
1538 def update(self, node, allow=False, force=False, choose=None,
1539 moddirstate=True):
1539 moddirstate=True):
1540 pl = self.dirstate.parents()
1540 pl = self.dirstate.parents()
1541 if not force and pl[1] != nullid:
1541 if not force and pl[1] != nullid:
1542 self.ui.warn("aborting: outstanding uncommitted merges\n")
1542 self.ui.warn("aborting: outstanding uncommitted merges\n")
1543 return 1
1543 return 1
1544
1544
1545 p1, p2 = pl[0], node
1545 p1, p2 = pl[0], node
1546 pa = self.changelog.ancestor(p1, p2)
1546 pa = self.changelog.ancestor(p1, p2)
1547 m1n = self.changelog.read(p1)[0]
1547 m1n = self.changelog.read(p1)[0]
1548 m2n = self.changelog.read(p2)[0]
1548 m2n = self.changelog.read(p2)[0]
1549 man = self.manifest.ancestor(m1n, m2n)
1549 man = self.manifest.ancestor(m1n, m2n)
1550 m1 = self.manifest.read(m1n)
1550 m1 = self.manifest.read(m1n)
1551 mf1 = self.manifest.readflags(m1n)
1551 mf1 = self.manifest.readflags(m1n)
1552 m2 = self.manifest.read(m2n)
1552 m2 = self.manifest.read(m2n)
1553 mf2 = self.manifest.readflags(m2n)
1553 mf2 = self.manifest.readflags(m2n)
1554 ma = self.manifest.read(man)
1554 ma = self.manifest.read(man)
1555 mfa = self.manifest.readflags(man)
1555 mfa = self.manifest.readflags(man)
1556
1556
1557 (c, a, d, u) = self.changes()
1557 (c, a, d, u) = self.changes()
1558
1558
1559 # is this a jump, or a merge? i.e. is there a linear path
1559 # is this a jump, or a merge? i.e. is there a linear path
1560 # from p1 to p2?
1560 # from p1 to p2?
1561 linear_path = (pa == p1 or pa == p2)
1561 linear_path = (pa == p1 or pa == p2)
1562
1562
1563 # resolve the manifest to determine which files
1563 # resolve the manifest to determine which files
1564 # we care about merging
1564 # we care about merging
1565 self.ui.note("resolving manifests\n")
1565 self.ui.note("resolving manifests\n")
1566 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1566 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1567 (force, allow, moddirstate, linear_path))
1567 (force, allow, moddirstate, linear_path))
1568 self.ui.debug(" ancestor %s local %s remote %s\n" %
1568 self.ui.debug(" ancestor %s local %s remote %s\n" %
1569 (short(man), short(m1n), short(m2n)))
1569 (short(man), short(m1n), short(m2n)))
1570
1570
1571 merge = {}
1571 merge = {}
1572 get = {}
1572 get = {}
1573 remove = []
1573 remove = []
1574 mark = {}
1574 mark = {}
1575
1575
1576 # construct a working dir manifest
1576 # construct a working dir manifest
1577 mw = m1.copy()
1577 mw = m1.copy()
1578 mfw = mf1.copy()
1578 mfw = mf1.copy()
1579 umap = dict.fromkeys(u)
1579 umap = dict.fromkeys(u)
1580
1580
1581 for f in a + c + u:
1581 for f in a + c + u:
1582 mw[f] = ""
1582 mw[f] = ""
1583 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1583 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1584
1584
1585 for f in d:
1585 for f in d:
1586 if f in mw: del mw[f]
1586 if f in mw: del mw[f]
1587
1587
1588 # If we're jumping between revisions (as opposed to merging),
1588 # If we're jumping between revisions (as opposed to merging),
1589 # and if neither the working directory nor the target rev has
1589 # and if neither the working directory nor the target rev has
1590 # the file, then we need to remove it from the dirstate, to
1590 # the file, then we need to remove it from the dirstate, to
1591 # prevent the dirstate from listing the file when it is no
1591 # prevent the dirstate from listing the file when it is no
1592 # longer in the manifest.
1592 # longer in the manifest.
1593 if moddirstate and linear_path and f not in m2:
1593 if moddirstate and linear_path and f not in m2:
1594 self.dirstate.forget((f,))
1594 self.dirstate.forget((f,))
1595
1595
1596 # Compare manifests
1596 # Compare manifests
1597 for f, n in mw.iteritems():
1597 for f, n in mw.iteritems():
1598 if choose and not choose(f): continue
1598 if choose and not choose(f): continue
1599 if f in m2:
1599 if f in m2:
1600 s = 0
1600 s = 0
1601
1601
1602 # is the wfile new since m1, and match m2?
1602 # is the wfile new since m1, and match m2?
1603 if f not in m1:
1603 if f not in m1:
1604 t1 = self.wfile(f).read()
1604 t1 = self.wfile(f).read()
1605 t2 = self.file(f).revision(m2[f])
1605 t2 = self.file(f).revision(m2[f])
1606 if cmp(t1, t2) == 0:
1606 if cmp(t1, t2) == 0:
1607 mark[f] = 1
1607 mark[f] = 1
1608 n = m2[f]
1608 n = m2[f]
1609 del t1, t2
1609 del t1, t2
1610
1610
1611 # are files different?
1611 # are files different?
1612 if n != m2[f]:
1612 if n != m2[f]:
1613 a = ma.get(f, nullid)
1613 a = ma.get(f, nullid)
1614 # are both different from the ancestor?
1614 # are both different from the ancestor?
1615 if n != a and m2[f] != a:
1615 if n != a and m2[f] != a:
1616 self.ui.debug(" %s versions differ, resolve\n" % f)
1616 self.ui.debug(" %s versions differ, resolve\n" % f)
1617 # merge executable bits
1617 # merge executable bits
1618 # "if we changed or they changed, change in merge"
1618 # "if we changed or they changed, change in merge"
1619 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1619 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1620 mode = ((a^b) | (a^c)) ^ a
1620 mode = ((a^b) | (a^c)) ^ a
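# Added note (not original source): a worked example of the flag merge
# above, with a = ancestor's exec bit, b = working copy's, c = remote's;
# ((a^b) | (a^c)) ^ a flips the ancestor bit if either side changed it:
#
#   a=0, b=1, c=0  ->  ((0^1)|(0^0))^0 = 1   (we set +x, keep it)
#   a=1, b=1, c=0  ->  ((1^1)|(1^0))^1 = 0   (they cleared it, clear it)
#   a=0, b=0, c=0  ->  ((0^0)|(0^0))^0 = 0   (nobody changed it)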
1621 merge[f] = (m1.get(f, nullid), m2[f], mode)
1621 merge[f] = (m1.get(f, nullid), m2[f], mode)
1622 s = 1
1622 s = 1
1623 # are we clobbering?
1623 # are we clobbering?
1624 # is remote's version newer?
1624 # is remote's version newer?
1625 # or are we going back in time?
1625 # or are we going back in time?
1626 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1626 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1627 self.ui.debug(" remote %s is newer, get\n" % f)
1627 self.ui.debug(" remote %s is newer, get\n" % f)
1628 get[f] = m2[f]
1628 get[f] = m2[f]
1629 s = 1
1629 s = 1
1630 else:
1630 else:
1631 mark[f] = 1
1631 mark[f] = 1
1632 elif f in umap:
1632 elif f in umap:
1633 # this unknown file is the same as the checkout
1633 # this unknown file is the same as the checkout
1634 get[f] = m2[f]
1634 get[f] = m2[f]
1635
1635
1636 if not s and mfw[f] != mf2[f]:
1636 if not s and mfw[f] != mf2[f]:
1637 if force:
1637 if force:
1638 self.ui.debug(" updating permissions for %s\n" % f)
1638 self.ui.debug(" updating permissions for %s\n" % f)
1639 util.set_exec(self.wjoin(f), mf2[f])
1639 util.set_exec(self.wjoin(f), mf2[f])
1640 else:
1640 else:
1641 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1641 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1642 mode = ((a^b) | (a^c)) ^ a
1642 mode = ((a^b) | (a^c)) ^ a
1643 if mode != b:
1643 if mode != b:
1644 self.ui.debug(" updating permissions for %s\n" % f)
1644 self.ui.debug(" updating permissions for %s\n" % f)
1645 util.set_exec(self.wjoin(f), mode)
1645 util.set_exec(self.wjoin(f), mode)
1646 mark[f] = 1
1646 mark[f] = 1
1647 del m2[f]
1647 del m2[f]
1648 elif f in ma:
1648 elif f in ma:
1649 if n != ma[f]:
1649 if n != ma[f]:
1650 r = "d"
1650 r = "d"
1651 if not force and (linear_path or allow):
1651 if not force and (linear_path or allow):
1652 r = self.ui.prompt(
1652 r = self.ui.prompt(
1653 (" local changed %s which remote deleted\n" % f) +
1653 (" local changed %s which remote deleted\n" % f) +
1654 "(k)eep or (d)elete?", "[kd]", "k")
1654 "(k)eep or (d)elete?", "[kd]", "k")
1655 if r == "d":
1655 if r == "d":
1656 remove.append(f)
1656 remove.append(f)
1657 else:
1657 else:
1658 self.ui.debug("other deleted %s\n" % f)
1658 self.ui.debug("other deleted %s\n" % f)
1659 remove.append(f) # other deleted it
1659 remove.append(f) # other deleted it
1660 else:
1660 else:
1661 if n == m1.get(f, nullid): # same as parent
1661 if n == m1.get(f, nullid): # same as parent
1662 if p2 == pa: # going backwards?
1662 if p2 == pa: # going backwards?
1663 self.ui.debug("remote deleted %s\n" % f)
1663 self.ui.debug("remote deleted %s\n" % f)
1664 remove.append(f)
1664 remove.append(f)
1665 else:
1665 else:
1666 self.ui.debug("local created %s, keeping\n" % f)
1666 self.ui.debug("local created %s, keeping\n" % f)
1667 else:
1667 else:
1668 self.ui.debug("working dir created %s, keeping\n" % f)
1668 self.ui.debug("working dir created %s, keeping\n" % f)
1669
1669
1670 for f, n in m2.iteritems():
1670 for f, n in m2.iteritems():
1671 if choose and not choose(f): continue
1671 if choose and not choose(f): continue
1672 if f[0] == "/": continue
1672 if f[0] == "/": continue
1673 if f in ma and n != ma[f]:
1673 if f in ma and n != ma[f]:
1674 r = "k"
1674 r = "k"
1675 if not force and (linear_path or allow):
1675 if not force and (linear_path or allow):
1676 r = self.ui.prompt(
1676 r = self.ui.prompt(
1677 ("remote changed %s which local deleted\n" % f) +
1677 ("remote changed %s which local deleted\n" % f) +
1678 "(k)eep or (d)elete?", "[kd]", "k")
1678 "(k)eep or (d)elete?", "[kd]", "k")
1679 if r == "k": get[f] = n
1679 if r == "k": get[f] = n
1680 elif f not in ma:
1680 elif f not in ma:
1681 self.ui.debug("remote created %s\n" % f)
1681 self.ui.debug("remote created %s\n" % f)
1682 get[f] = n
1682 get[f] = n
1683 else:
1683 else:
1684 if force or p2 == pa: # going backwards?
1684 if force or p2 == pa: # going backwards?
1685 self.ui.debug("local deleted %s, recreating\n" % f)
1685 self.ui.debug("local deleted %s, recreating\n" % f)
1686 get[f] = n
1686 get[f] = n
1687 else:
1687 else:
1688 self.ui.debug("local deleted %s\n" % f)
1688 self.ui.debug("local deleted %s\n" % f)
1689
1689
1690 del mw, m1, m2, ma
1690 del mw, m1, m2, ma
1691
1691
1692 if force:
1692 if force:
1693 for f in merge:
1693 for f in merge:
1694 get[f] = merge[f][1]
1694 get[f] = merge[f][1]
1695 merge = {}
1695 merge = {}
1696
1696
1697 if linear_path or force:
1697 if linear_path or force:
1698 # we don't need to do any magic, just jump to the new rev
1698 # we don't need to do any magic, just jump to the new rev
1699 mode = 'n'
1699 mode = 'n'
1700 p1, p2 = p2, nullid
1700 p1, p2 = p2, nullid
1701 else:
1701 else:
1702 if not allow:
1702 if not allow:
1703 self.ui.status("this update spans a branch" +
1703 self.ui.status("this update spans a branch" +
1704 " affecting the following files:\n")
1704 " affecting the following files:\n")
1705 fl = merge.keys() + get.keys()
1705 fl = merge.keys() + get.keys()
1706 fl.sort()
1706 fl.sort()
1707 for f in fl:
1707 for f in fl:
1708 cf = ""
1708 cf = ""
1709 if f in merge: cf = " (resolve)"
1709 if f in merge: cf = " (resolve)"
1710 self.ui.status(" %s%s\n" % (f, cf))
1710 self.ui.status(" %s%s\n" % (f, cf))
1711 self.ui.warn("aborting update spanning branches!\n")
1711 self.ui.warn("aborting update spanning branches!\n")
1712 self.ui.status("(use update -m to merge across branches" +
1712 self.ui.status("(use update -m to merge across branches" +
1713 " or -C to lose changes)\n")
1713 " or -C to lose changes)\n")
1714 return 1
1714 return 1
1715 # we have to remember what files we needed to get/change
1715 # we have to remember what files we needed to get/change
1716 # because any file that's different from either one of its
1716 # because any file that's different from either one of its
1717 # parents must be in the changeset
1717 # parents must be in the changeset
1718 mode = 'm'
1718 mode = 'm'
1719 if moddirstate:
1719 if moddirstate:
1720 self.dirstate.update(mark.keys(), "m")
1720 self.dirstate.update(mark.keys(), "m")
1721
1721
1722 if moddirstate:
1722 if moddirstate:
1723 self.dirstate.setparents(p1, p2)
1723 self.dirstate.setparents(p1, p2)
1724
1724
1725 # get the files we don't need to change
1725 # get the files we don't need to change
1726 files = get.keys()
1726 files = get.keys()
1727 files.sort()
1727 files.sort()
1728 for f in files:
1728 for f in files:
1729 if f[0] == "/": continue
1729 if f[0] == "/": continue
1730 self.ui.note("getting %s\n" % f)
1730 self.ui.note("getting %s\n" % f)
1731 t = self.file(f).read(get[f])
1731 t = self.file(f).read(get[f])
1732 try:
1732 try:
1733 self.wfile(f, "w").write(t)
1733 self.wfile(f, "w").write(t)
1734 except IOError:
1734 except IOError:
1735 os.makedirs(os.path.dirname(self.wjoin(f)))
1735 os.makedirs(os.path.dirname(self.wjoin(f)))
1736 self.wfile(f, "w").write(t)
1736 self.wfile(f, "w").write(t)
1737 util.set_exec(self.wjoin(f), mf2[f])
1737 util.set_exec(self.wjoin(f), mf2[f])
1738 if moddirstate:
1738 if moddirstate:
1739 self.dirstate.update([f], mode)
1739 self.dirstate.update([f], mode)
1740
1740
1741 # merge the tricky bits
1741 # merge the tricky bits
1742 files = merge.keys()
1742 files = merge.keys()
1743 files.sort()
1743 files.sort()
1744 for f in files:
1744 for f in files:
1745 self.ui.status("merging %s\n" % f)
1745 self.ui.status("merging %s\n" % f)
1746 m, o, flag = merge[f]
1746 m, o, flag = merge[f]
1747 self.merge3(f, m, o)
1747 self.merge3(f, m, o)
1748 util.set_exec(self.wjoin(f), flag)
1748 util.set_exec(self.wjoin(f), flag)
1749 if moddirstate:
1749 if moddirstate:
1750 if mode == 'm':
1750 if mode == 'm':
1751 # only update dirstate on branch merge, otherwise we
1751 # only update dirstate on branch merge, otherwise we
1752 # could mark files with changes as unchanged
1752 # could mark files with changes as unchanged
1753 self.dirstate.update([f], mode)
1753 self.dirstate.update([f], mode)
1754 elif p2 == nullid:
1754 elif p2 == nullid:
1755 # update dirstate from parent1's manifest
1755 # update dirstate from parent1's manifest
1756 m1n = self.changelog.read(p1)[0]
1756 m1n = self.changelog.read(p1)[0]
1757 m1 = self.manifest.read(m1n)
1757 m1 = self.manifest.read(m1n)
1758 f_len = len(self.file(f).read(m1[f]))
1758 f_len = len(self.file(f).read(m1[f]))
1759 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1759 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1760 else:
1760 else:
1761 self.ui.warn("Second parent without branch merge!?\n"
1761 self.ui.warn("Second parent without branch merge!?\n"
1762 "Dirstate for file %s may be wrong.\n" % f)
1762 "Dirstate for file %s may be wrong.\n" % f)
1763
1763
1764 remove.sort()
1764 remove.sort()
1765 for f in remove:
1765 for f in remove:
1766 self.ui.note("removing %s\n" % f)
1766 self.ui.note("removing %s\n" % f)
1767 try:
1767 try:
1768 os.unlink(self.wjoin(f))
1768 os.unlink(self.wjoin(f))
1769 except OSError, inst:
1769 except OSError, inst:
1770 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1770 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1771 # try removing directories that might now be empty
1771 # try removing directories that might now be empty
1772 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1772 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1773 except: pass
1773 except: pass
1774 if moddirstate:
1774 if moddirstate:
1775 if mode == 'n':
1775 if mode == 'n':
1776 self.dirstate.forget(remove)
1776 self.dirstate.forget(remove)
1777 else:
1777 else:
1778 self.dirstate.update(remove, 'r')
1778 self.dirstate.update(remove, 'r')
1779
1779
1780 def merge3(self, fn, my, other):
1780 def merge3(self, fn, my, other):
1781 """perform a 3-way merge in the working directory"""
1781 """perform a 3-way merge in the working directory"""
1782
1782
1783 def temp(prefix, node):
1783 def temp(prefix, node):
1784 pre = "%s~%s." % (os.path.basename(fn), prefix)
1784 pre = "%s~%s." % (os.path.basename(fn), prefix)
1785 (fd, name) = tempfile.mkstemp("", pre)
1785 (fd, name) = tempfile.mkstemp("", pre)
1786 f = os.fdopen(fd, "wb")
1786 f = os.fdopen(fd, "wb")
1787 f.write(fl.revision(node))
1787 f.write(fl.revision(node))
1788 f.close()
1788 f.close()
1789 return name
1789 return name
1790
1790
1791 fl = self.file(fn)
1791 fl = self.file(fn)
1792 base = fl.ancestor(my, other)
1792 base = fl.ancestor(my, other)
1793 a = self.wjoin(fn)
1793 a = self.wjoin(fn)
1794 b = temp("base", base)
1794 b = temp("base", base)
1795 c = temp("other", other)
1795 c = temp("other", other)
1796
1796
1797 self.ui.note("resolving %s\n" % fn)
1797 self.ui.note("resolving %s\n" % fn)
1798 self.ui.debug("file %s: other %s ancestor %s\n" %
1798 self.ui.debug("file %s: other %s ancestor %s\n" %
1799 (fn, short(other), short(base)))
1799 (fn, short(other), short(base)))
1800
1800
1801 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1801 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1802 or "hgmerge")
1802 or "hgmerge")
1803 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1803 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1804 if r:
1804 if r:
1805 self.ui.warn("merging %s failed!\n" % fn)
1805 self.ui.warn("merging %s failed!\n" % fn)
1806
1806
1807 os.unlink(b)
1807 os.unlink(b)
1808 os.unlink(c)
1808 os.unlink(c)
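# Added note (not original source): the external merge program selected
# via HGMERGE, the ui.merge setting or the bundled "hgmerge" script is
# invoked positionally as
#
#   <program> <local working file> <ancestor copy> <other copy>
#
# (see the os.system() call above) and is expected to leave the merged
# result in the first file and exit 0, otherwise the "merging ... failed"
# warning is printed.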
1809
1809
1810 def verify(self):
1810 def verify(self):
1811 filelinkrevs = {}
1811 filelinkrevs = {}
1812 filenodes = {}
1812 filenodes = {}
1813 changesets = revisions = files = 0
1813 changesets = revisions = files = 0
1814 errors = 0
1814 errors = 0
1815
1815
1816 seen = {}
1816 seen = {}
1817 self.ui.status("checking changesets\n")
1817 self.ui.status("checking changesets\n")
1818 for i in range(self.changelog.count()):
1818 for i in range(self.changelog.count()):
1819 changesets += 1
1819 changesets += 1
1820 n = self.changelog.node(i)
1820 n = self.changelog.node(i)
1821 if n in seen:
1821 if n in seen:
1822 self.ui.warn("duplicate changeset at revision %d\n" % i)
1822 self.ui.warn("duplicate changeset at revision %d\n" % i)
1823 errors += 1
1823 errors += 1
1824 seen[n] = 1
1824 seen[n] = 1
1825
1825
1826 for p in self.changelog.parents(n):
1826 for p in self.changelog.parents(n):
1827 if p not in self.changelog.nodemap:
1827 if p not in self.changelog.nodemap:
1828 self.ui.warn("changeset %s has unknown parent %s\n" %
1828 self.ui.warn("changeset %s has unknown parent %s\n" %
1829 (short(n), short(p)))
1829 (short(n), short(p)))
1830 errors += 1
1830 errors += 1
1831 try:
1831 try:
1832 changes = self.changelog.read(n)
1832 changes = self.changelog.read(n)
1833 except Exception, inst:
1833 except Exception, inst:
1834 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1834 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1835 errors += 1
1835 errors += 1
1836
1836
1837 for f in changes[3]:
1837 for f in changes[3]:
1838 filelinkrevs.setdefault(f, []).append(i)
1838 filelinkrevs.setdefault(f, []).append(i)
1839
1839
1840 seen = {}
1840 seen = {}
1841 self.ui.status("checking manifests\n")
1841 self.ui.status("checking manifests\n")
1842 for i in range(self.manifest.count()):
1842 for i in range(self.manifest.count()):
1843 n = self.manifest.node(i)
1843 n = self.manifest.node(i)
1844 if n in seen:
1844 if n in seen:
1845 self.ui.warn("duplicate manifest at revision %d\n" % i)
1845 self.ui.warn("duplicate manifest at revision %d\n" % i)
1846 errors += 1
1846 errors += 1
1847 seen[n] = 1
1847 seen[n] = 1
1848
1848
1849 for p in self.manifest.parents(n):
1849 for p in self.manifest.parents(n):
1850 if p not in self.manifest.nodemap:
1850 if p not in self.manifest.nodemap:
1851 self.ui.warn("manifest %s has unknown parent %s\n" %
1851 self.ui.warn("manifest %s has unknown parent %s\n" %
1852 (short(n), short(p)))
1852 (short(n), short(p)))
1853 errors += 1
1853 errors += 1
1854
1854
1855 try:
1855 try:
1856 delta = mdiff.patchtext(self.manifest.delta(n))
1856 delta = mdiff.patchtext(self.manifest.delta(n))
1857 except KeyboardInterrupt:
1857 except KeyboardInterrupt:
1858 self.ui.warn("aborted")
1858 self.ui.warn("aborted")
1859 sys.exit(0)
1859 sys.exit(0)
1860 except Exception, inst:
1860 except Exception, inst:
1861 self.ui.warn("unpacking manifest %s: %s\n"
1861 self.ui.warn("unpacking manifest %s: %s\n"
1862 % (short(n), inst))
1862 % (short(n), inst))
1863 errors += 1
1863 errors += 1
1864
1864
1865 ff = [ l.split('\0') for l in delta.splitlines() ]
1865 ff = [ l.split('\0') for l in delta.splitlines() ]
1866 for f, fn in ff:
1866 for f, fn in ff:
1867 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1867 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1868
1868
1869 self.ui.status("crosschecking files in changesets and manifests\n")
1869 self.ui.status("crosschecking files in changesets and manifests\n")
1870 for f in filenodes:
1870 for f in filenodes:
1871 if f not in filelinkrevs:
1871 if f not in filelinkrevs:
1872 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1872 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1873 errors += 1
1873 errors += 1
1874
1874
1875 for f in filelinkrevs:
1875 for f in filelinkrevs:
1876 if f not in filenodes:
1876 if f not in filenodes:
1877 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1877 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1878 errors += 1
1878 errors += 1
1879
1879
1880 self.ui.status("checking files\n")
1880 self.ui.status("checking files\n")
1881 ff = filenodes.keys()
1881 ff = filenodes.keys()
1882 ff.sort()
1882 ff.sort()
1883 for f in ff:
1883 for f in ff:
1884 if f == "/dev/null": continue
1884 if f == "/dev/null": continue
1885 files += 1
1885 files += 1
1886 fl = self.file(f)
1886 fl = self.file(f)
1887 nodes = { nullid: 1 }
1887 nodes = { nullid: 1 }
1888 seen = {}
1888 seen = {}
1889 for i in range(fl.count()):
1889 for i in range(fl.count()):
1890 revisions += 1
1890 revisions += 1
1891 n = fl.node(i)
1891 n = fl.node(i)
1892
1892
1893 if n in seen:
1893 if n in seen:
1894 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1894 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1895 errors += 1
1895 errors += 1
1896
1896
1897 if n not in filenodes[f]:
1897 if n not in filenodes[f]:
1898 self.ui.warn("%s: %d:%s not in manifests\n"
1898 self.ui.warn("%s: %d:%s not in manifests\n"
1899 % (f, i, short(n)))
1899 % (f, i, short(n)))
1900 errors += 1
1900 errors += 1
1901 else:
1901 else:
1902 del filenodes[f][n]
1902 del filenodes[f][n]
1903
1903
1904 flr = fl.linkrev(n)
1904 flr = fl.linkrev(n)
1905 if flr not in filelinkrevs[f]:
1905 if flr not in filelinkrevs[f]:
1906 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1906 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1907 % (f, short(n), fl.linkrev(n)))
1907 % (f, short(n), fl.linkrev(n)))
1908 errors += 1
1908 errors += 1
1909 else:
1909 else:
1910 filelinkrevs[f].remove(flr)
1910 filelinkrevs[f].remove(flr)
1911
1911
1912 # verify contents
1912 # verify contents
1913 try:
1913 try:
1914 t = fl.read(n)
1914 t = fl.read(n)
1915 except Exception, inst:
1915 except Exception, inst:
1916 self.ui.warn("unpacking file %s %s: %s\n"
1916 self.ui.warn("unpacking file %s %s: %s\n"
1917 % (f, short(n), inst))
1917 % (f, short(n), inst))
1918 errors += 1
1918 errors += 1
1919
1919
1920 # verify parents
1920 # verify parents
1921 (p1, p2) = fl.parents(n)
1921 (p1, p2) = fl.parents(n)
1922 if p1 not in nodes:
1922 if p1 not in nodes:
1923 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1923 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1924 (f, short(n), short(p1)))
1924 (f, short(n), short(p1)))
1925 errors += 1
1925 errors += 1
1926 if p2 not in nodes:
1926 if p2 not in nodes:
1927 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1927 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1928 (f, short(n), short(p2)))
1928 (f, short(n), short(p2)))
1929 errors += 1
1929 errors += 1
1930 nodes[n] = 1
1930 nodes[n] = 1
1931
1931
1932 # cross-check
1932 # cross-check
1933 for node in filenodes[f]:
1933 for node in filenodes[f]:
1934 self.ui.warn("node %s in manifests not in %s\n"
1934 self.ui.warn("node %s in manifests not in %s\n"
1935 % (hex(node), f))
1935 % (hex(node), f))
1936 errors += 1
1936 errors += 1
1937
1937
1938 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1938 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1939 (files, changesets, revisions))
1939 (files, changesets, revisions))
1940
1940
1941 if errors:
1941 if errors:
1942 self.ui.warn("%d integrity errors encountered!\n" % errors)
1942 self.ui.warn("%d integrity errors encountered!\n" % errors)
1943 return 1
1943 return 1
1944
1944
1945 class remoterepository:
1945 class remoterepository:
1946 def local(self):
1946 def local(self):
1947 return False
1947 return False
1948
1948
1949 class httprepository(remoterepository):
1949 class httprepository(remoterepository):
1950 def __init__(self, ui, path):
1950 def __init__(self, ui, path):
1951 # fix missing / after hostname
1951 # fix missing / after hostname
1952 s = urlparse.urlsplit(path)
1952 s = urlparse.urlsplit(path)
1953 partial = s[2]
1953 partial = s[2]
1954 if not partial: partial = "/"
1954 if not partial: partial = "/"
1955 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1955 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1956 self.ui = ui
1956 self.ui = ui
1957 no_list = [ "localhost", "127.0.0.1" ]
1957 no_list = [ "localhost", "127.0.0.1" ]
1958 host = ui.config("http_proxy", "host")
1958 host = ui.config("http_proxy", "host")
1959 if host is None:
1959 if host is None:
1960 host = os.environ.get("http_proxy")
1960 host = os.environ.get("http_proxy")
1961 if host and host.startswith('http://'):
1961 if host and host.startswith('http://'):
1962 host = host[7:]
1962 host = host[7:]
1963 user = ui.config("http_proxy", "user")
1963 user = ui.config("http_proxy", "user")
1964 passwd = ui.config("http_proxy", "passwd")
1964 passwd = ui.config("http_proxy", "passwd")
1965 no = ui.config("http_proxy", "no")
1965 no = ui.config("http_proxy", "no")
1966 if no is None:
1966 if no is None:
1967 no = os.environ.get("no_proxy")
1967 no = os.environ.get("no_proxy")
1968 if no:
1968 if no:
1969 no_list = no_list + no.split(",")
1969 no_list = no_list + no.split(",")
1970
1970
1971 no_proxy = 0
1971 no_proxy = 0
1972 for h in no_list:
1972 for h in no_list:
1973 if (path.startswith("http://" + h + "/") or
1973 if (path.startswith("http://" + h + "/") or
1974 path.startswith("http://" + h + ":") or
1974 path.startswith("http://" + h + ":") or
1975 path == "http://" + h):
1975 path == "http://" + h):
1976 no_proxy = 1
1976 no_proxy = 1
1977
1977
1978 # Note: urllib2 takes proxy values from the environment and those will
1978 # Note: urllib2 takes proxy values from the environment and those will
1979 # take precedence
1979 # take precedence
1980 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1980 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1981 try:
1981 try:
1982 if os.environ.has_key(env):
1982 if os.environ.has_key(env):
1983 del os.environ[env]
1983 del os.environ[env]
1984 except OSError:
1984 except OSError:
1985 pass
1985 pass
1986
1986
1987 proxy_handler = urllib2.BaseHandler()
1987 proxy_handler = urllib2.BaseHandler()
1988 if host and not no_proxy:
1988 if host and not no_proxy:
1989 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1989 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1990
1990
1991 authinfo = None
1991 authinfo = None
1992 if user and passwd:
1992 if user and passwd:
1993 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1993 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1994 passmgr.add_password(None, host, user, passwd)
1994 passmgr.add_password(None, host, user, passwd)
1995 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1995 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1996
1996
1997 opener = urllib2.build_opener(proxy_handler, authinfo)
1997 opener = urllib2.build_opener(proxy_handler, authinfo)
1998 urllib2.install_opener(opener)
1998 urllib2.install_opener(opener)
1999
1999
2000 def dev(self):
2000 def dev(self):
2001 return -1
2001 return -1
2002
2002
2003 def do_cmd(self, cmd, **args):
2003 def do_cmd(self, cmd, **args):
2004 self.ui.debug("sending %s command\n" % cmd)
2004 self.ui.debug("sending %s command\n" % cmd)
2005 q = {"cmd": cmd}
2005 q = {"cmd": cmd}
2006 q.update(args)
2006 q.update(args)
2007 qs = urllib.urlencode(q)
2007 qs = urllib.urlencode(q)
2008 cu = "%s?%s" % (self.url, qs)
2008 cu = "%s?%s" % (self.url, qs)
2009 resp = urllib2.urlopen(cu)
2009 resp = urllib2.urlopen(cu)
2010 proto = resp.headers['content-type']
2010 proto = resp.headers['content-type']
2011
2011
2012 # accept old "text/plain" and "application/hg-changegroup" for now
2012 # accept old "text/plain" and "application/hg-changegroup" for now
2013 if not proto.startswith('application/mercurial') and \
2013 if not proto.startswith('application/mercurial') and \
2014 not proto.startswith('text/plain') and \
2014 not proto.startswith('text/plain') and \
2015 not proto.startswith('application/hg-changegroup'):
2015 not proto.startswith('application/hg-changegroup'):
2016 raise RepoError("'%s' does not appear to be an hg repository"
2016 raise RepoError("'%s' does not appear to be an hg repository"
2017 % self.url)
2017 % self.url)
2018
2018
2019 if proto.startswith('application/mercurial'):
2019 if proto.startswith('application/mercurial'):
2020 version = proto[22:]
2020 version = proto[22:]
2021 if float(version) > 0.1:
2021 if float(version) > 0.1:
2022 raise RepoError("'%s' uses newer protocol %s" %
2022 raise RepoError("'%s' uses newer protocol %s" %
2023 (self.url, version))
2023 (self.url, version))
2024
2024
2025 return resp
2025 return resp
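# Added illustration (not original source): each call becomes a plain GET
# against the repository URL; for instance (hostname and path made up)
#
#   self.do_cmd("branches", nodes=hex(node))
#   # -> http://example.com/repo?cmd=branches&nodes=<40 hex chars>
#
# and the urllib2 response object is returned after its content-type has
# been checked for a supported protocol version.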
2026
2026
2027 def heads(self):
2027 def heads(self):
2028 d = self.do_cmd("heads").read()
2028 d = self.do_cmd("heads").read()
2029 try:
2029 try:
2030 return map(bin, d[:-1].split(" "))
2030 return map(bin, d[:-1].split(" "))
2031 except:
2031 except:
2032 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2032 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2033 raise
2033 raise
2034
2034
2035 def branches(self, nodes):
2035 def branches(self, nodes):
2036 n = " ".join(map(hex, nodes))
2036 n = " ".join(map(hex, nodes))
2037 d = self.do_cmd("branches", nodes=n).read()
2037 d = self.do_cmd("branches", nodes=n).read()
2038 try:
2038 try:
2039 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2039 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2040 return br
2040 return br
2041 except:
2041 except:
2042 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2042 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2043 raise
2043 raise
2044
2044
2045 def between(self, pairs):
2045 def between(self, pairs):
2046 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2046 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2047 d = self.do_cmd("between", pairs=n).read()
2047 d = self.do_cmd("between", pairs=n).read()
2048 try:
2048 try:
2049 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2049 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2050 return p
2050 return p
2051 except:
2051 except:
2052 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2052 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2053 raise
2053 raise
2054
2054
2055 def changegroup(self, nodes):
2055 def changegroup(self, nodes):
2056 n = " ".join(map(hex, nodes))
2056 n = " ".join(map(hex, nodes))
2057 f = self.do_cmd("changegroup", roots=n)
2057 f = self.do_cmd("changegroup", roots=n)
2058 bytes = 0
2058 bytes = 0
2059
2059
2060 class zread:
2060 class zread:
2061 def __init__(self, f):
2061 def __init__(self, f):
2062 self.zd = zlib.decompressobj()
2062 self.zd = zlib.decompressobj()
2063 self.f = f
2063 self.f = f
2064 self.buf = ""
2064 self.buf = ""
2065 def read(self, l):
2065 def read(self, l):
2066 while l > len(self.buf):
2066 while l > len(self.buf):
2067 r = self.f.read(4096)
2067 r = self.f.read(4096)
2068 if r:
2068 if r:
2069 self.buf += self.zd.decompress(r)
2069 self.buf += self.zd.decompress(r)
2070 else:
2070 else:
2071 self.buf += self.zd.flush()
2071 self.buf += self.zd.flush()
2072 break
2072 break
2073 d, self.buf = self.buf[:l], self.buf[l:]
2073 d, self.buf = self.buf[:l], self.buf[l:]
2074 return d
2074 return d
2075
2075
2076 return zread(f)
2076 return zread(f)
2077
2077
2078 class remotelock:
2078 class remotelock:
2079 def __init__(self, repo):
2079 def __init__(self, repo):
2080 self.repo = repo
2080 self.repo = repo
2081 def release(self):
2081 def release(self):
2082 self.repo.unlock()
2082 self.repo.unlock()
2083 self.repo = None
2083 self.repo = None
2084 def __del__(self):
2084 def __del__(self):
2085 if self.repo:
2085 if self.repo:
2086 self.release()
2086 self.release()
2087
2087
2088 class sshrepository(remoterepository):
2088 class sshrepository(remoterepository):
2089 def __init__(self, ui, path):
2089 def __init__(self, ui, path):
2090 self.url = path
2090 self.url = path
2091 self.ui = ui
2091 self.ui = ui
2092
2092
2093 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2093 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2094 if not m:
2094 if not m:
2095 raise RepoError("couldn't parse destination %s" % path)
2095 raise RepoError("couldn't parse destination %s" % path)
2096
2096
2097 self.user = m.group(2)
2097 self.user = m.group(2)
2098 self.host = m.group(3)
2098 self.host = m.group(3)
2099 self.port = m.group(5)
2099 self.port = m.group(5)
2100 self.path = m.group(7)
2100 self.path = m.group(7)
2101
2101
2102 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2102 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2103 args = self.port and ("%s -p %s") % (args, self.port) or args
2103 args = self.port and ("%s -p %s") % (args, self.port) or args
2104 path = self.path or ""
2104 path = self.path or ""
2105
2105
2106 if not path:
2106 if not path:
2107 raise RepoError("no remote repository path specified")
2107 raise RepoError("no remote repository path specified")
2108
2108
2109 cmd = "ssh %s 'hg -R %s serve --stdio'"
2109 cmd = "ssh %s 'hg -R %s serve --stdio'"
2110 cmd = cmd % (args, path)
2110 cmd = cmd % (args, path)
2111
2111
2112 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2112 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
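# Added illustration (not original source): for a URL such as
# ssh://user@example.com:2222/path/to/repo (user, host and path made up),
# the regular expression and the two format lines above yield
#
#   ssh user@example.com -p 2222 'hg -R path/to/repo serve --stdio'
#
# and the three pipes are the child's stdin, stdout and stderr.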

    def readerr(self):
        while 1:
            r, w, x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        try:
            self.pipeo.close()
            self.pipei.close()
            for l in self.pipee:
                self.ui.status("remote: ", l)
            self.pipee.close()
        except:
            pass

    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei
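
    # Request framing sketch (descriptive comment, values hypothetical): a
    # call such as do_cmd("branches", nodes="<40-char hex> <40-char hex>")
    # writes
    #   branches\n
    #   nodes 81\n
    #   <the 81 argument bytes>
    # i.e. the command name, then one "key length" header per argument
    # followed by the raw value, and finally a flush; the server's stdout
    # (pipei) is handed back to the caller for the reply.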

    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)
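
    # Reply framing sketch (descriptive comment, example values hypothetical):
    # every call is answered with a decimal length on its own line followed by
    # exactly that many bytes, e.g. "4\nabcd" for a four-byte payload; stderr
    # is drained via readerr() so remote noise is reported instead of being
    # mistaken for protocol data, and a non-integer length line is treated as
    # an error.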

    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            # use % formatting so the refusal reason is actually interpolated
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""
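
    # Push sequence sketch (descriptive comment): "addchangegroup" is sent
    # first and any non-empty reply means the push was refused; otherwise the
    # changegroup stream is copied to the server in 4k chunks with stderr
    # drained along the way, and a final length-prefixed status reply is read
    # back and returned as a flag.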

class httpsrepository(httprepository):
    pass

def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("https://"):
            return httpsrepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)
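
# Usage sketch (hypothetical, not part of the original module): repository()
# dispatches on the URL scheme, so the same entry point covers local paths as
# well as http, https, hg, old-http and ssh URLs. 'someui' stands for a
# mercurial ui instance; the helper below is illustrative and never called.
def _example_open_repositories(someui):
    local = repository(someui, "/some/local/repo")                  # localrepository
    web = repository(someui, "http://example.com/hg/proj")          # httprepository
    shell = repository(someui, "ssh://hg@example.com/repos/proj")   # sshrepository
    return local, web, shell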