Add a local() method to repository classes
mpm@selenic.com
r926:b765e970 default
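The new local() method lets callers ask a repository object whether it is backed by the local filesystem (and therefore has a working directory and dirstate) rather than by a remote http source, instead of peeking at the remote attribute directly. A minimal usage sketch, assuming a ui object and a repository path are already at hand (the surrounding code is illustrative only, not part of this change):

    repo = localrepository(ui, path)
    if repo.local():
        # local repositories expose the dirstate and working directory
        print repo.dirstate.getcwd()
    else:
        # remote (http) repositories only expose revision data
        print "remote repository at", repo.path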
@@ -1,2218 +1,2225 @@
# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff urlparse")
demandload(globals(), "bisect errno select stat")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", self.encodedir(path + ".i")),
                        os.path.join("data", self.encodedir(path + ".d")))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        # directly generate the mdiff delta from the data collected during
        # the bisect loop below
        def gendelta(delta):
            i = 0
            result = []
            while i < len(delta):
                start = delta[i][2]
                end = delta[i][3]
                l = delta[i][4]
                if l == None:
                    l = ""
                while i < len(delta) - 1 and start <= delta[i+1][2] \
                          and end >= delta[i+1][2]:
                    if delta[i+1][3] > end:
                        end = delta[i+1][3]
                    if delta[i+1][4]:
                        l += delta[i+1][4]
                    i += 1
                result.append(struct.pack(">lll", start, end, len(l)) + l)
                i += 1
            return result

        # apply the changes collected during the bisect loop to our addlist
        def addlistdelta(addlist, delta):
            # apply the deltas to the addlist. start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(delta)
            while i > 0:
                i -= 1
                start = delta[i][0]
                end = delta[i][1]
                if delta[i][4]:
                    addlist[start:end] = [delta[i][4]]
                else:
                    del addlist[start:end]
            return addlist

        # calculate the byte offset of the start of each line in the
        # manifest
        def calcoffsets(addlist):
            offsets = [0] * (len(addlist) + 1)
            offset = 0
            i = 0
            while i < len(addlist):
                offsets[i] = offset
                offset += len(addlist[i])
                i += 1
            offsets[i] = offset
            return offsets

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            self.addlist = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                            for f in files]
            cachedelta = None
        else:
            addlist = self.listcache[1]

            # find the starting offset for each line in the add list
            offsets = calcoffsets(addlist)

            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            bs = 0

            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                bs = bisect.bisect(addlist, f, bs)
                if bs < len(addlist):
                    fn = addlist[bs][:addlist[bs].index('\0')]
                else:
                    fn = None
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = None
                start = bs
                if fn != f:
                    # item not found, insert a new one
                    end = bs
                    if w[1] == 1:
                        sys.stderr.write("failed to remove %s from manifest\n"
                                         % f)
                        sys.exit(1)
                else:
                    # item is found, replace/delete the existing line
                    end = bs + 1
                delta.append([start, end, offsets[start], offsets[end], l])

            self.addlist = addlistdelta(addlist, delta)
            if self.mapcache[0] == self.tip():
                cachedelta = "".join(gendelta(delta))
            else:
                cachedelta = None

        text = "".join(self.addlist)
        if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
            sys.stderr.write("manifest delta failure\n")
            sys.exit(1)
        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        date = date or "%d %d" % (time.time(), time.timezone)
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        self.map = None
        self.pl = None
        self.copies = {}
        self.ignorefunc = None

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self.root: return ''
        return cwd[len(self.root) + 1:]

    def ignore(self, f):
        if not self.ignorefunc:
            bigpat = []
            try:
                l = file(self.wjoin(".hgignore"))
                for pat in l:
                    p = pat.rstrip()
                    if p:
                        try:
                            re.compile(p)
                        except:
                            self.ui.warn("ignoring invalid ignore"
                                         + " regular expression '%s'\n" % p)
                        else:
                            bigpat.append(p)
            except IOError: pass

            if bigpat:
                s = "(?:%s)" % (")|(?:".join(bigpat))
                r = re.compile(s)
                self.ignorefunc = r.search
            else:
                self.ignorefunc = util.never

        return self.ignorefunc(f)

    def __del__(self):
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def parents(self):
        if not self.pl:
            self.read()
        return self.pl

    def markdirty(self):
        if not self.dirty:
            self.dirty = 1

    def setparents(self, p1, p2 = nullid):
        self.markdirty()
        self.pl = p1, p2

    def state(self, key):
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        if self.map is not None: return self.map

        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if not st: return
        except: return

        self.pl = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
            self.map[f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.read()
        self.markdirty()
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state, **kw):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                s = os.stat(os.path.join(self.root, f))
                st_size = kw.get('st_size', s.st_size)
                st_mtime = kw.get('st_mtime', s.st_mtime)
                self.map[f] = (state, s.st_mode, st_size, st_mtime)

    def forget(self, files):
        if not files: return
        self.read()
        self.markdirty()
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)
                pass

    def clear(self):
        self.map = {}
        self.markdirty()

    def write(self):
        st = self.opener("dirstate", "w")
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def filterfiles(self, files):
        ret = {}
        unknown = []

        for x in files:
            if x is '.':
                return self.map.copy()
            if x not in self.map:
                unknown.append(x)
            else:
                ret[x] = self.map[x]

        if not unknown:
            return ret

        b = self.map.keys()
        b.sort()
        blen = len(b)

        for x in unknown:
            bs = bisect.bisect(b, x)
            if bs != 0 and b[bs-1] == x:
                ret[x] = self.map[x]
                continue
            while bs < blen:
                s = b[bs]
                if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
                    ret[s] = self.map[s]
                else:
                    break
                bs += 1
        return ret

    def walk(self, files = None, match = util.always, dc=None):
        self.read()

        # walk all files by default
        if not files:
            files = [self.root]
            if not dc:
                dc = self.map.copy()
        elif not dc:
            dc = self.filterfiles(files)

        known = {'.hg': 1}
        def seen(fn):
            if fn in known: return True
            known[fn] = 1
        def traverse():
            for ff in util.unique(files):
                f = os.path.join(self.root, ff)
                try:
                    st = os.stat(f)
                except OSError, inst:
                    if ff not in dc: self.ui.warn('%s: %s\n' % (
                        util.pathto(self.getcwd(), ff),
                        inst.strerror))
                    continue
                if stat.S_ISDIR(st.st_mode):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        nd = util.normpath(d)
                        if nd == '.': nd = ''
                        if seen(nd):
                            subdirs[:] = []
                            continue
                        for sd in subdirs:
                            ds = os.path.join(nd, sd +'/')
                            if self.ignore(ds) or not match(ds):
                                subdirs.remove(sd)
                        subdirs.sort()
                        fl.sort()
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield 'f', fn
                elif stat.S_ISREG(st.st_mode):
                    yield 'f', ff
                else:
                    kind = 'unknown'
                    if stat.S_ISCHR(st.st_mode): kind = 'character device'
                    elif stat.S_ISBLK(st.st_mode): kind = 'block device'
                    elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
                    elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
                    elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
                    self.ui.warn('%s: unsupported file type (type is %s)\n' % (
                        util.pathto(self.getcwd(), ff),
                        kind))

            ks = dc.keys()
            ks.sort()
            for k in ks:
                yield 'm', k

        # yield only files that match: all in dirstate, others only if
        # not in .hgignore

        for src, fn in util.unique(traverse()):
            fn = util.normpath(fn)
            if seen(fn): continue
            if fn not in dc and self.ignore(fn):
                continue
            if match(fn):
                yield src, fn

    def changes(self, files=None, match=util.always):
        self.read()
        if not files:
            dc = self.map.copy()
        else:
            dc = self.filterfiles(files)
        lookup, modified, added, unknown = [], [], [], []
        removed, deleted = [], []

        for src, fn in self.walk(files, match, dc=dc):
            try:
                s = os.stat(os.path.join(self.root, fn))
            except OSError:
                continue
            if not stat.S_ISREG(s.st_mode):
                continue
            c = dc.get(fn)
            if c:
                del dc[fn]
                if c[0] == 'm':
                    modified.append(fn)
                elif c[0] == 'a':
                    added.append(fn)
                elif c[0] == 'r':
                    unknown.append(fn)
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                    modified.append(fn)
                elif c[3] != s.st_mtime:
                    lookup.append(fn)
            else:
                unknown.append(fn)

        for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
            if c[0] == 'r':
                removed.append(fn)
            else:
                deleted.append(fn)
        return (lookup, modified, added, removed + deleted, unknown)

# used to avoid circular references so destructors work
def opener(base):
    p = base
    def o(path, mode="r"):
        if p.startswith("http://"):
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

class RepoError(Exception): pass

class localrepository:
    def __init__(self, ui, path=None, create=0):
        self.remote = 0
        if path and path.startswith("http://"):
            self.remote = 1
            self.path = path
        else:
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = path
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            try:
                self.ui.readconfig(self.opener("hgrc"))
            except IOError: pass

    def hook(self, name, **args):
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            r = os.system(s)

            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True

    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.revision(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                pass

            try:
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r,t,n))
        l.sort()
        return [(t,n) for r,t,n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t,n in self.tags().items():
                self.nodetagscache.setdefault(n,[]).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                raise RepoError("unknown revision '%s'" % key)

    def dev(self):
        if self.remote: return -1
        return os.stat(self.path).st_dev

    def local(self):
        return not self.remote

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/': f = f[1:]
        return filelog(self.opener, f)

    def getcwd(self):
        return self.dirstate.getcwd()

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def transaction(self):
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        def after():
            util.rename(self.join("journal"), self.join("undo"))
            util.rename(self.join("journal.dirstate"),
                        self.join("undo.dirstate"))

        return transaction.transaction(self.ui.warn, self.opener,
                                       self.join("journal"), after)

    def recover(self):
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

    def lock(self, wait = 1):
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm
                mm[f] = r.add(t, {}, tr, linkrev,
                              m1.get(f, nullid), m2.get(f, nullid))
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

    def commit(self, files = None, text = "", user = None, date = None,
               match = util.always, force=False):
        commit = []
        remove = []
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes(match = match)
            commit = c + a
            remove = d

        if not commit and not remove and not force:
            self.ui.status("nothing changed\n")
            return None

        if not self.hook("precommit"):
            return None

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)
            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in new])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return None
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return None
        return n

950 def walk(self, node = None, files = [], match = util.always):
953 def walk(self, node = None, files = [], match = util.always):
951 if node:
954 if node:
952 for fn in self.manifest.read(self.changelog.read(node)[0]):
955 for fn in self.manifest.read(self.changelog.read(node)[0]):
953 if match(fn): yield 'm', fn
956 if match(fn): yield 'm', fn
954 else:
957 else:
955 for src, fn in self.dirstate.walk(files, match):
958 for src, fn in self.dirstate.walk(files, match):
956 yield src, fn
959 yield src, fn
957
960
958 def changes(self, node1 = None, node2 = None, files = [],
961 def changes(self, node1 = None, node2 = None, files = [],
959 match = util.always):
962 match = util.always):
960 mf2, u = None, []
963 mf2, u = None, []
961
964
962 def fcmp(fn, mf):
965 def fcmp(fn, mf):
963 t1 = self.wfile(fn).read()
966 t1 = self.wfile(fn).read()
964 t2 = self.file(fn).revision(mf[fn])
967 t2 = self.file(fn).revision(mf[fn])
965 return cmp(t1, t2)
968 return cmp(t1, t2)
966
969
967 def mfmatches(node):
970 def mfmatches(node):
968 mf = dict(self.manifest.read(node))
971 mf = dict(self.manifest.read(node))
969 for fn in mf.keys():
972 for fn in mf.keys():
970 if not match(fn):
973 if not match(fn):
971 del mf[fn]
974 del mf[fn]
972 return mf
975 return mf
973
976
974 # are we comparing the working directory?
977 # are we comparing the working directory?
975 if not node2:
978 if not node2:
976 l, c, a, d, u = self.dirstate.changes(files, match)
979 l, c, a, d, u = self.dirstate.changes(files, match)
977
980
978 # are we comparing working dir against its parent?
981 # are we comparing working dir against its parent?
979 if not node1:
982 if not node1:
980 if l:
983 if l:
981 # do a full compare of any files that might have changed
984 # do a full compare of any files that might have changed
982 change = self.changelog.read(self.dirstate.parents()[0])
985 change = self.changelog.read(self.dirstate.parents()[0])
983 mf2 = mfmatches(change[0])
986 mf2 = mfmatches(change[0])
984 for f in l:
987 for f in l:
985 if fcmp(f, mf2):
988 if fcmp(f, mf2):
986 c.append(f)
989 c.append(f)
987
990
988 for l in c, a, d, u:
991 for l in c, a, d, u:
989 l.sort()
992 l.sort()
990
993
991 return (c, a, d, u)
994 return (c, a, d, u)
992
995
993 # are we comparing working dir against non-tip?
996 # are we comparing working dir against non-tip?
994 # generate a pseudo-manifest for the working dir
997 # generate a pseudo-manifest for the working dir
995 if not node2:
998 if not node2:
996 if not mf2:
999 if not mf2:
997 change = self.changelog.read(self.dirstate.parents()[0])
1000 change = self.changelog.read(self.dirstate.parents()[0])
998 mf2 = mfmatches(change[0])
1001 mf2 = mfmatches(change[0])
999 for f in a + c + l:
1002 for f in a + c + l:
1000 mf2[f] = ""
1003 mf2[f] = ""
1001 for f in d:
1004 for f in d:
1002 if f in mf2: del mf2[f]
1005 if f in mf2: del mf2[f]
1003 else:
1006 else:
1004 change = self.changelog.read(node2)
1007 change = self.changelog.read(node2)
1005 mf2 = mfmatches(change[0])
1008 mf2 = mfmatches(change[0])
1006
1009
1007 # flush lists from dirstate before comparing manifests
1010 # flush lists from dirstate before comparing manifests
1008 c, a = [], []
1011 c, a = [], []
1009
1012
1010 change = self.changelog.read(node1)
1013 change = self.changelog.read(node1)
1011 mf1 = mfmatches(change[0])
1014 mf1 = mfmatches(change[0])
1012
1015
1013 for fn in mf2:
1016 for fn in mf2:
1014 if mf1.has_key(fn):
1017 if mf1.has_key(fn):
1015 if mf1[fn] != mf2[fn]:
1018 if mf1[fn] != mf2[fn]:
1016 if mf2[fn] != "" or fcmp(fn, mf1):
1019 if mf2[fn] != "" or fcmp(fn, mf1):
1017 c.append(fn)
1020 c.append(fn)
1018 del mf1[fn]
1021 del mf1[fn]
1019 else:
1022 else:
1020 a.append(fn)
1023 a.append(fn)
1021
1024
1022 d = mf1.keys()
1025 d = mf1.keys()
1023
1026
1024 for l in c, a, d, u:
1027 for l in c, a, d, u:
1025 l.sort()
1028 l.sort()
1026
1029
1027 return (c, a, d, u)
1030 return (c, a, d, u)
1028
1031
1029 def add(self, list):
1032 def add(self, list):
1030 for f in list:
1033 for f in list:
1031 p = self.wjoin(f)
1034 p = self.wjoin(f)
1032 if not os.path.exists(p):
1035 if not os.path.exists(p):
1033 self.ui.warn("%s does not exist!\n" % f)
1036 self.ui.warn("%s does not exist!\n" % f)
1034 elif not os.path.isfile(p):
1037 elif not os.path.isfile(p):
1035 self.ui.warn("%s not added: only files supported currently\n" % f)
1038 self.ui.warn("%s not added: only files supported currently\n" % f)
1036 elif self.dirstate.state(f) in 'an':
1039 elif self.dirstate.state(f) in 'an':
1037 self.ui.warn("%s already tracked!\n" % f)
1040 self.ui.warn("%s already tracked!\n" % f)
1038 else:
1041 else:
1039 self.dirstate.update([f], "a")
1042 self.dirstate.update([f], "a")
1040
1043
1041 def forget(self, list):
1044 def forget(self, list):
1042 for f in list:
1045 for f in list:
1043 if self.dirstate.state(f) not in 'ai':
1046 if self.dirstate.state(f) not in 'ai':
1044 self.ui.warn("%s not added!\n" % f)
1047 self.ui.warn("%s not added!\n" % f)
1045 else:
1048 else:
1046 self.dirstate.forget([f])
1049 self.dirstate.forget([f])
1047
1050
1048 def remove(self, list):
1051 def remove(self, list):
1049 for f in list:
1052 for f in list:
1050 p = self.wjoin(f)
1053 p = self.wjoin(f)
1051 if os.path.exists(p):
1054 if os.path.exists(p):
1052 self.ui.warn("%s still exists!\n" % f)
1055 self.ui.warn("%s still exists!\n" % f)
1053 elif self.dirstate.state(f) == 'a':
1056 elif self.dirstate.state(f) == 'a':
1054 self.ui.warn("%s never committed!\n" % f)
1057 self.ui.warn("%s never committed!\n" % f)
1055 self.dirstate.forget([f])
1058 self.dirstate.forget([f])
1056 elif f not in self.dirstate:
1059 elif f not in self.dirstate:
1057 self.ui.warn("%s not tracked!\n" % f)
1060 self.ui.warn("%s not tracked!\n" % f)
1058 else:
1061 else:
1059 self.dirstate.update([f], "r")
1062 self.dirstate.update([f], "r")
1060
1063
1061 def copy(self, source, dest):
1064 def copy(self, source, dest):
1062 p = self.wjoin(dest)
1065 p = self.wjoin(dest)
1063 if not os.path.exists(p):
1066 if not os.path.exists(p):
1064 self.ui.warn("%s does not exist!\n" % dest)
1067 self.ui.warn("%s does not exist!\n" % dest)
1065 elif not os.path.isfile(p):
1068 elif not os.path.isfile(p):
1066 self.ui.warn("copy failed: %s is not a file\n" % dest)
1069 self.ui.warn("copy failed: %s is not a file\n" % dest)
1067 else:
1070 else:
1068 if self.dirstate.state(dest) == '?':
1071 if self.dirstate.state(dest) == '?':
1069 self.dirstate.update([dest], "a")
1072 self.dirstate.update([dest], "a")
1070 self.dirstate.copy(source, dest)
1073 self.dirstate.copy(source, dest)
1071
1074
1072 def heads(self):
1075 def heads(self):
1073 return self.changelog.heads()
1076 return self.changelog.heads()
1074
1077
1075 # branchlookup returns a dict giving a list of branches for
1078 # branchlookup returns a dict giving a list of branches for
1076 # each head. A branch is defined as the tag of a node or
1079 # each head. A branch is defined as the tag of a node or
1077 # the branch of the node's parents. If a node has multiple
1080 # the branch of the node's parents. If a node has multiple
1078 # branch tags, tags are eliminated if they are visible from other
1081 # branch tags, tags are eliminated if they are visible from other
1079 # branch tags.
1082 # branch tags.
1080 #
1083 #
1081 # So, for this graph: a->b->c->d->e
1084 # So, for this graph: a->b->c->d->e
1082 # \ /
1085 # \ /
1083 # aa -----/
1086 # aa -----/
1084 # a has tag 2.6.12
1087 # a has tag 2.6.12
1085 # d has tag 2.6.13
1088 # d has tag 2.6.13
1086 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1089 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1087 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1090 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1088 # from the list.
1091 # from the list.
1089 #
1092 #
1090 # It is possible that more than one head will have the same branch tag.
1093 # It is possible that more than one head will have the same branch tag.
1091 # Callers need to check the result for multiple heads under the same
1094 # Callers need to check the result for multiple heads under the same
1092 # branch tag if that is a problem for them (i.e. checkout of a specific
1095 # branch tag if that is a problem for them (i.e. checkout of a specific
1093 # branch).
1096 # branch).
1094 #
1097 #
1095 # passing in a specific branch will limit the depth of the search
1098 # passing in a specific branch will limit the depth of the search
1096 # through the parents. It won't limit the branches returned in the
1099 # through the parents. It won't limit the branches returned in the
1097 # result though.
1100 # result though.
1098 def branchlookup(self, heads=None, branch=None):
1101 def branchlookup(self, heads=None, branch=None):
1099 if not heads:
1102 if not heads:
1100 heads = self.heads()
1103 heads = self.heads()
1101 headt = [ h for h in heads ]
1104 headt = [ h for h in heads ]
1102 chlog = self.changelog
1105 chlog = self.changelog
1103 branches = {}
1106 branches = {}
1104 merges = []
1107 merges = []
1105 seenmerge = {}
1108 seenmerge = {}
1106
1109
1107 # traverse the tree once for each head, recording in the branches
1110 # traverse the tree once for each head, recording in the branches
1108 # dict which tags are visible from this head. The branches
1111 # dict which tags are visible from this head. The branches
1109 # dict also records which tags are visible from each tag
1112 # dict also records which tags are visible from each tag
1110 # while we traverse.
1113 # while we traverse.
1111 while headt or merges:
1114 while headt or merges:
1112 if merges:
1115 if merges:
1113 n, found = merges.pop()
1116 n, found = merges.pop()
1114 visit = [n]
1117 visit = [n]
1115 else:
1118 else:
1116 h = headt.pop()
1119 h = headt.pop()
1117 visit = [h]
1120 visit = [h]
1118 found = [h]
1121 found = [h]
1119 seen = {}
1122 seen = {}
1120 while visit:
1123 while visit:
1121 n = visit.pop()
1124 n = visit.pop()
1122 if n in seen:
1125 if n in seen:
1123 continue
1126 continue
1124 pp = chlog.parents(n)
1127 pp = chlog.parents(n)
1125 tags = self.nodetags(n)
1128 tags = self.nodetags(n)
1126 if tags:
1129 if tags:
1127 for x in tags:
1130 for x in tags:
1128 if x == 'tip':
1131 if x == 'tip':
1129 continue
1132 continue
1130 for f in found:
1133 for f in found:
1131 branches.setdefault(f, {})[n] = 1
1134 branches.setdefault(f, {})[n] = 1
1132 branches.setdefault(n, {})[n] = 1
1135 branches.setdefault(n, {})[n] = 1
1133 break
1136 break
1134 if n not in found:
1137 if n not in found:
1135 found.append(n)
1138 found.append(n)
1136 if branch in tags:
1139 if branch in tags:
1137 continue
1140 continue
1138 seen[n] = 1
1141 seen[n] = 1
1139 if pp[1] != nullid and n not in seenmerge:
1142 if pp[1] != nullid and n not in seenmerge:
1140 merges.append((pp[1], [x for x in found]))
1143 merges.append((pp[1], [x for x in found]))
1141 seenmerge[n] = 1
1144 seenmerge[n] = 1
1142 if pp[0] != nullid:
1145 if pp[0] != nullid:
1143 visit.append(pp[0])
1146 visit.append(pp[0])
1144 # traverse the branches dict, eliminating branch tags from each
1147 # traverse the branches dict, eliminating branch tags from each
1145 # head that are visible from another branch tag for that head.
1148 # head that are visible from another branch tag for that head.
1146 out = {}
1149 out = {}
1147 viscache = {}
1150 viscache = {}
1148 for h in heads:
1151 for h in heads:
1149 def visible(node):
1152 def visible(node):
1150 if node in viscache:
1153 if node in viscache:
1151 return viscache[node]
1154 return viscache[node]
1152 ret = {}
1155 ret = {}
1153 visit = [node]
1156 visit = [node]
1154 while visit:
1157 while visit:
1155 x = visit.pop()
1158 x = visit.pop()
1156 if x in viscache:
1159 if x in viscache:
1157 ret.update(viscache[x])
1160 ret.update(viscache[x])
1158 elif x not in ret:
1161 elif x not in ret:
1159 ret[x] = 1
1162 ret[x] = 1
1160 if x in branches:
1163 if x in branches:
1161 visit[len(visit):] = branches[x].keys()
1164 visit[len(visit):] = branches[x].keys()
1162 viscache[node] = ret
1165 viscache[node] = ret
1163 return ret
1166 return ret
1164 if h not in branches:
1167 if h not in branches:
1165 continue
1168 continue
1166 # O(n^2), but somewhat limited. This only searches the
1169 # O(n^2), but somewhat limited. This only searches the
1167 # tags visible from a specific head, not all the tags in the
1170 # tags visible from a specific head, not all the tags in the
1168 # whole repo.
1171 # whole repo.
1169 for b in branches[h]:
1172 for b in branches[h]:
1170 vis = False
1173 vis = False
1171 for bb in branches[h].keys():
1174 for bb in branches[h].keys():
1172 if b != bb:
1175 if b != bb:
1173 if b in visible(bb):
1176 if b in visible(bb):
1174 vis = True
1177 vis = True
1175 break
1178 break
1176 if not vis:
1179 if not vis:
1177 l = out.setdefault(h, [])
1180 l = out.setdefault(h, [])
1178 l[len(l):] = self.nodetags(b)
1181 l[len(l):] = self.nodetags(b)
1179 return out
1182 return out
1180
1183
1181 def branches(self, nodes):
1184 def branches(self, nodes):
1182 if not nodes: nodes = [self.changelog.tip()]
1185 if not nodes: nodes = [self.changelog.tip()]
1183 b = []
1186 b = []
1184 for n in nodes:
1187 for n in nodes:
1185 t = n
1188 t = n
1186 while n:
1189 while n:
1187 p = self.changelog.parents(n)
1190 p = self.changelog.parents(n)
1188 if p[1] != nullid or p[0] == nullid:
1191 if p[1] != nullid or p[0] == nullid:
1189 b.append((t, n, p[0], p[1]))
1192 b.append((t, n, p[0], p[1]))
1190 break
1193 break
1191 n = p[0]
1194 n = p[0]
1192 return b
1195 return b
1193
1196
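# between() samples the linear history between each (top, bottom) pair at
# exponentially growing gaps: it walks first parents from top towards bottom
# and records the nodes that are 1, 2, 4, 8, ... steps away.  findincoming
# below uses these samples to narrow down the boundary between known and
# unknown history with only a logarithmic number of round trips per branch.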
1194 def between(self, pairs):
1197 def between(self, pairs):
1195 r = []
1198 r = []
1196
1199
1197 for top, bottom in pairs:
1200 for top, bottom in pairs:
1198 n, l, i = top, [], 0
1201 n, l, i = top, [], 0
1199 f = 1
1202 f = 1
1200
1203
1201 while n != bottom:
1204 while n != bottom:
1202 p = self.changelog.parents(n)[0]
1205 p = self.changelog.parents(n)[0]
1203 if i == f:
1206 if i == f:
1204 l.append(n)
1207 l.append(n)
1205 f = f * 2
1208 f = f * 2
1206 n = p
1209 n = p
1207 i += 1
1210 i += 1
1208
1211
1209 r.append(l)
1212 r.append(l)
1210
1213
1211 return r
1214 return r
1212
1215
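# newer() returns every changeset that descends (through either parent) from
# the given nodes, including the nodes themselves: it scans revisions upward
# from the lowest listed revision and keeps any changeset whose parent was
# already kept.  changegroup() uses this to collect everything that has to
# be bundled on top of the requested base nodes.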
1213 def newer(self, nodes):
1216 def newer(self, nodes):
1214 m = {}
1217 m = {}
1215 nl = []
1218 nl = []
1216 pm = {}
1219 pm = {}
1217 cl = self.changelog
1220 cl = self.changelog
1218 t = l = cl.count()
1221 t = l = cl.count()
1219
1222
1220 # find the lowest numbered node
1223 # find the lowest numbered node
1221 for n in nodes:
1224 for n in nodes:
1222 l = min(l, cl.rev(n))
1225 l = min(l, cl.rev(n))
1223 m[n] = 1
1226 m[n] = 1
1224
1227
1225 for i in xrange(l, t):
1228 for i in xrange(l, t):
1226 n = cl.node(i)
1229 n = cl.node(i)
1227 if n in m: # explicitly listed
1230 if n in m: # explicitly listed
1228 pm[n] = 1
1231 pm[n] = 1
1229 nl.append(n)
1232 nl.append(n)
1230 continue
1233 continue
1231 for p in cl.parents(n):
1234 for p in cl.parents(n):
1232 if p in pm: # parent listed
1235 if p in pm: # parent listed
1233 pm[n] = 1
1236 pm[n] = 1
1234 nl.append(n)
1237 nl.append(n)
1235 break
1238 break
1236
1239
1237 return nl
1240 return nl
1238
1241
1239 def findincoming(self, remote, base=None, heads=None):
1242 def findincoming(self, remote, base=None, heads=None):
1240 m = self.changelog.nodemap
1243 m = self.changelog.nodemap
1241 search = []
1244 search = []
1242 fetch = []
1245 fetch = []
1243 seen = {}
1246 seen = {}
1244 seenbranch = {}
1247 seenbranch = {}
1245 if base == None:
1248 if base == None:
1246 base = {}
1249 base = {}
1247
1250
1248 # assume we're closer to the tip than the root
1251 # assume we're closer to the tip than the root
1249 # and start by examining the heads
1252 # and start by examining the heads
1250 self.ui.status("searching for changes\n")
1253 self.ui.status("searching for changes\n")
1251
1254
1252 if not heads:
1255 if not heads:
1253 heads = remote.heads()
1256 heads = remote.heads()
1254
1257
1255 unknown = []
1258 unknown = []
1256 for h in heads:
1259 for h in heads:
1257 if h not in m:
1260 if h not in m:
1258 unknown.append(h)
1261 unknown.append(h)
1259 else:
1262 else:
1260 base[h] = 1
1263 base[h] = 1
1261
1264
1262 if not unknown:
1265 if not unknown:
1263 return None
1266 return None
1264
1267
1265 rep = {}
1268 rep = {}
1266 reqcnt = 0
1269 reqcnt = 0
1267
1270
1268 # search through remote branches
1271 # search through remote branches
1269 # a 'branch' here is a linear segment of history, with four parts:
1272 # a 'branch' here is a linear segment of history, with four parts:
1270 # head, root, first parent, second parent
1273 # head, root, first parent, second parent
1271 # (a branch always has two parents (or none) by definition)
1274 # (a branch always has two parents (or none) by definition)
1272 unknown = remote.branches(unknown)
1275 unknown = remote.branches(unknown)
1273 while unknown:
1276 while unknown:
1274 r = []
1277 r = []
1275 while unknown:
1278 while unknown:
1276 n = unknown.pop(0)
1279 n = unknown.pop(0)
1277 if n[0] in seen:
1280 if n[0] in seen:
1278 continue
1281 continue
1279
1282
1280 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1283 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1281 if n[0] == nullid:
1284 if n[0] == nullid:
1282 break
1285 break
1283 if n in seenbranch:
1286 if n in seenbranch:
1284 self.ui.debug("branch already found\n")
1287 self.ui.debug("branch already found\n")
1285 continue
1288 continue
1286 if n[1] and n[1] in m: # do we know the base?
1289 if n[1] and n[1] in m: # do we know the base?
1287 self.ui.debug("found incomplete branch %s:%s\n"
1290 self.ui.debug("found incomplete branch %s:%s\n"
1288 % (short(n[0]), short(n[1])))
1291 % (short(n[0]), short(n[1])))
1289 search.append(n) # schedule branch range for scanning
1292 search.append(n) # schedule branch range for scanning
1290 seenbranch[n] = 1
1293 seenbranch[n] = 1
1291 else:
1294 else:
1292 if n[1] not in seen and n[1] not in fetch:
1295 if n[1] not in seen and n[1] not in fetch:
1293 if n[2] in m and n[3] in m:
1296 if n[2] in m and n[3] in m:
1294 self.ui.debug("found new changeset %s\n" %
1297 self.ui.debug("found new changeset %s\n" %
1295 short(n[1]))
1298 short(n[1]))
1296 fetch.append(n[1]) # earliest unknown
1299 fetch.append(n[1]) # earliest unknown
1297 base[n[2]] = 1 # latest known
1300 base[n[2]] = 1 # latest known
1298 continue
1301 continue
1299
1302
1300 for a in n[2:4]:
1303 for a in n[2:4]:
1301 if a not in rep:
1304 if a not in rep:
1302 r.append(a)
1305 r.append(a)
1303 rep[a] = 1
1306 rep[a] = 1
1304
1307
1305 seen[n[0]] = 1
1308 seen[n[0]] = 1
1306
1309
1307 if r:
1310 if r:
1308 reqcnt += 1
1311 reqcnt += 1
1309 self.ui.debug("request %d: %s\n" %
1312 self.ui.debug("request %d: %s\n" %
1310 (reqcnt, " ".join(map(short, r))))
1313 (reqcnt, " ".join(map(short, r))))
1311 for p in range(0, len(r), 10):
1314 for p in range(0, len(r), 10):
1312 for b in remote.branches(r[p:p+10]):
1315 for b in remote.branches(r[p:p+10]):
1313 self.ui.debug("received %s:%s\n" %
1316 self.ui.debug("received %s:%s\n" %
1314 (short(b[0]), short(b[1])))
1317 (short(b[0]), short(b[1])))
1315 if b[0] not in m and b[0] not in seen:
1318 if b[0] not in m and b[0] not in seen:
1316 unknown.append(b)
1319 unknown.append(b)
1317
1320
1318 # do binary search on the branches we found
1321 # do binary search on the branches we found
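# each entry in 'search' spans an unknown head and a known root.  The remote
# returns its between() samples for that span; walking them from the head,
# the first sample that is already known locally either pins down the
# earliest unknown changeset (when the step size f is still small) or yields
# a narrower (unknown, known) span that is pushed back for another round.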
1319 while search:
1322 while search:
1320 n = search.pop(0)
1323 n = search.pop(0)
1321 reqcnt += 1
1324 reqcnt += 1
1322 l = remote.between([(n[0], n[1])])[0]
1325 l = remote.between([(n[0], n[1])])[0]
1323 l.append(n[1])
1326 l.append(n[1])
1324 p = n[0]
1327 p = n[0]
1325 f = 1
1328 f = 1
1326 for i in l:
1329 for i in l:
1327 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1330 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1328 if i in m:
1331 if i in m:
1329 if f <= 2:
1332 if f <= 2:
1330 self.ui.debug("found new branch changeset %s\n" %
1333 self.ui.debug("found new branch changeset %s\n" %
1331 short(p))
1334 short(p))
1332 fetch.append(p)
1335 fetch.append(p)
1333 base[i] = 1
1336 base[i] = 1
1334 else:
1337 else:
1335 self.ui.debug("narrowed branch search to %s:%s\n"
1338 self.ui.debug("narrowed branch search to %s:%s\n"
1336 % (short(p), short(i)))
1339 % (short(p), short(i)))
1337 search.append((p, i))
1340 search.append((p, i))
1338 break
1341 break
1339 p, f = i, f * 2
1342 p, f = i, f * 2
1340
1343
1341 # sanity check our fetch list
1344 # sanity check our fetch list
1342 for f in fetch:
1345 for f in fetch:
1343 if f in m:
1346 if f in m:
1344 raise RepoError("already have changeset " + short(f))
1347 raise RepoError("already have changeset " + short(f))
1345
1348
1346 if base.keys() == [nullid]:
1349 if base.keys() == [nullid]:
1347 self.ui.warn("warning: pulling from an unrelated repository!\n")
1350 self.ui.warn("warning: pulling from an unrelated repository!\n")
1348
1351
1349 self.ui.note("adding new changesets starting at " +
1352 self.ui.note("adding new changesets starting at " +
1350 " ".join([short(f) for f in fetch]) + "\n")
1353 " ".join([short(f) for f in fetch]) + "\n")
1351
1354
1352 self.ui.debug("%d total queries\n" % reqcnt)
1355 self.ui.debug("%d total queries\n" % reqcnt)
1353
1356
1354 return fetch
1357 return fetch
1355
1358
1356 def findoutgoing(self, remote, base=None, heads=None):
1359 def findoutgoing(self, remote, base=None, heads=None):
1357 if base == None:
1360 if base == None:
1358 base = {}
1361 base = {}
1359 self.findincoming(remote, base, heads)
1362 self.findincoming(remote, base, heads)
1360
1363
1361 remain = dict.fromkeys(self.changelog.nodemap)
1364 remain = dict.fromkeys(self.changelog.nodemap)
1362
1365
1363 # prune everything remote has from the tree
1366 # prune everything remote has from the tree
1364 del remain[nullid]
1367 del remain[nullid]
1365 remove = base.keys()
1368 remove = base.keys()
1366 while remove:
1369 while remove:
1367 n = remove.pop(0)
1370 n = remove.pop(0)
1368 if n in remain:
1371 if n in remain:
1369 del remain[n]
1372 del remain[n]
1370 for p in self.changelog.parents(n):
1373 for p in self.changelog.parents(n):
1371 remove.append(p)
1374 remove.append(p)
1372
1375
1373 # find every node whose parents have been pruned
1376 # find every node whose parents have been pruned
1374 subset = []
1377 subset = []
1375 for n in remain:
1378 for n in remain:
1376 p1, p2 = self.changelog.parents(n)
1379 p1, p2 = self.changelog.parents(n)
1377 if p1 not in remain and p2 not in remain:
1380 if p1 not in remain and p2 not in remain:
1378 subset.append(n)
1381 subset.append(n)
1379
1382
1380 # this is the set of all roots we have to push
1383 # this is the set of all roots we have to push
1381 return subset
1384 return subset
1382
1385
1383 def pull(self, remote):
1386 def pull(self, remote):
1384 lock = self.lock()
1387 lock = self.lock()
1385
1388
1386 # if we have an empty repo, fetch everything
1389 # if we have an empty repo, fetch everything
1387 if self.changelog.tip() == nullid:
1390 if self.changelog.tip() == nullid:
1388 self.ui.status("requesting all changes\n")
1391 self.ui.status("requesting all changes\n")
1389 fetch = [nullid]
1392 fetch = [nullid]
1390 else:
1393 else:
1391 fetch = self.findincoming(remote)
1394 fetch = self.findincoming(remote)
1392
1395
1393 if not fetch:
1396 if not fetch:
1394 self.ui.status("no changes found\n")
1397 self.ui.status("no changes found\n")
1395 return 1
1398 return 1
1396
1399
1397 cg = remote.changegroup(fetch)
1400 cg = remote.changegroup(fetch)
1398 return self.addchangegroup(cg)
1401 return self.addchangegroup(cg)
1399
1402
1400 def push(self, remote, force=False):
1403 def push(self, remote, force=False):
1401 lock = remote.lock()
1404 lock = remote.lock()
1402
1405
1403 base = {}
1406 base = {}
1404 heads = remote.heads()
1407 heads = remote.heads()
1405 inc = self.findincoming(remote, base, heads)
1408 inc = self.findincoming(remote, base, heads)
1406 if not force and inc:
1409 if not force and inc:
1407 self.ui.warn("abort: unsynced remote changes!\n")
1410 self.ui.warn("abort: unsynced remote changes!\n")
1408 self.ui.status("(did you forget to sync? use push -f to force)\n")
1411 self.ui.status("(did you forget to sync? use push -f to force)\n")
1409 return 1
1412 return 1
1410
1413
1411 update = self.findoutgoing(remote, base)
1414 update = self.findoutgoing(remote, base)
1412 if not update:
1415 if not update:
1413 self.ui.status("no changes found\n")
1416 self.ui.status("no changes found\n")
1414 return 1
1417 return 1
1415 elif not force:
1418 elif not force:
1416 if len(heads) < len(self.changelog.heads()):
1419 if len(heads) < len(self.changelog.heads()):
1417 self.ui.warn("abort: push creates new remote branches!\n")
1420 self.ui.warn("abort: push creates new remote branches!\n")
1418 self.ui.status("(did you forget to merge?" +
1421 self.ui.status("(did you forget to merge?" +
1419 " use push -f to force)\n")
1422 " use push -f to force)\n")
1420 return 1
1423 return 1
1421
1424
1422 cg = self.changegroup(update)
1425 cg = self.changegroup(update)
1423 return remote.addchangegroup(cg)
1426 return remote.addchangegroup(cg)
1424
1427
1425 def changegroup(self, basenodes):
1428 def changegroup(self, basenodes):
1426 class genread:
1429 class genread:
1427 def __init__(self, generator):
1430 def __init__(self, generator):
1428 self.g = generator
1431 self.g = generator
1429 self.buf = ""
1432 self.buf = ""
1430 def fillbuf(self):
1433 def fillbuf(self):
1431 self.buf += "".join(self.g)
1434 self.buf += "".join(self.g)
1432
1435
1433 def read(self, l):
1436 def read(self, l):
1434 while l > len(self.buf):
1437 while l > len(self.buf):
1435 try:
1438 try:
1436 self.buf += self.g.next()
1439 self.buf += self.g.next()
1437 except StopIteration:
1440 except StopIteration:
1438 break
1441 break
1439 d, self.buf = self.buf[:l], self.buf[l:]
1442 d, self.buf = self.buf[:l], self.buf[l:]
1440 return d
1443 return d
1441
1444
1442 def gengroup():
1445 def gengroup():
1443 nodes = self.newer(basenodes)
1446 nodes = self.newer(basenodes)
1444
1447
1445 # construct the link map
1448 # construct the link map
1446 linkmap = {}
1449 linkmap = {}
1447 for n in nodes:
1450 for n in nodes:
1448 linkmap[self.changelog.rev(n)] = n
1451 linkmap[self.changelog.rev(n)] = n
1449
1452
1450 # construct a list of all changed files
1453 # construct a list of all changed files
1451 changed = {}
1454 changed = {}
1452 for n in nodes:
1455 for n in nodes:
1453 c = self.changelog.read(n)
1456 c = self.changelog.read(n)
1454 for f in c[3]:
1457 for f in c[3]:
1455 changed[f] = 1
1458 changed[f] = 1
1456 changed = changed.keys()
1459 changed = changed.keys()
1457 changed.sort()
1460 changed.sort()
1458
1461
1459 # the changegroup is changesets + manifests + all file revs
1462 # the changegroup is changesets + manifests + all file revs
1460 revs = [ self.changelog.rev(n) for n in nodes ]
1463 revs = [ self.changelog.rev(n) for n in nodes ]
1461
1464
1462 for y in self.changelog.group(linkmap): yield y
1465 for y in self.changelog.group(linkmap): yield y
1463 for y in self.manifest.group(linkmap): yield y
1466 for y in self.manifest.group(linkmap): yield y
1464 for f in changed:
1467 for f in changed:
1465 yield struct.pack(">l", len(f) + 4) + f
1468 yield struct.pack(">l", len(f) + 4) + f
1466 g = self.file(f).group(linkmap)
1469 g = self.file(f).group(linkmap)
1467 for y in g:
1470 for y in g:
1468 yield y
1471 yield y
1469
1472
1470 yield struct.pack(">l", 0)
1473 yield struct.pack(">l", 0)
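# stream layout produced above: a delta group for the changelog, one for the
# manifest, then for every changed file a length-prefixed filename followed
# by that file's delta group, and finally a zero length-word as terminator.
# Each length prefix is a big-endian 32-bit word that counts its own four
# bytes, which is why the filename chunk is packed as len(f) + 4.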
1471
1474
1472 return genread(gengroup())
1475 return genread(gengroup())
1473
1476
1474 def addchangegroup(self, source):
1477 def addchangegroup(self, source):
1475
1478
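# getchunk() reverses the framing used by changegroup(): it reads a 4-byte
# big-endian length word (which includes its own four bytes) and returns the
# payload; an empty read or a length of four or less marks the end of the
# current group.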
1476 def getchunk():
1479 def getchunk():
1477 d = source.read(4)
1480 d = source.read(4)
1478 if not d: return ""
1481 if not d: return ""
1479 l = struct.unpack(">l", d)[0]
1482 l = struct.unpack(">l", d)[0]
1480 if l <= 4: return ""
1483 if l <= 4: return ""
1481 return source.read(l - 4)
1484 return source.read(l - 4)
1482
1485
1483 def getgroup():
1486 def getgroup():
1484 while 1:
1487 while 1:
1485 c = getchunk()
1488 c = getchunk()
1486 if not c: break
1489 if not c: break
1487 yield c
1490 yield c
1488
1491
1489 def csmap(x):
1492 def csmap(x):
1490 self.ui.debug("add changeset %s\n" % short(x))
1493 self.ui.debug("add changeset %s\n" % short(x))
1491 return self.changelog.count()
1494 return self.changelog.count()
1492
1495
1493 def revmap(x):
1496 def revmap(x):
1494 return self.changelog.rev(x)
1497 return self.changelog.rev(x)
1495
1498
1496 if not source: return
1499 if not source: return
1497 changesets = files = revisions = 0
1500 changesets = files = revisions = 0
1498
1501
1499 tr = self.transaction()
1502 tr = self.transaction()
1500
1503
1501 # pull off the changeset group
1504 # pull off the changeset group
1502 self.ui.status("adding changesets\n")
1505 self.ui.status("adding changesets\n")
1503 co = self.changelog.tip()
1506 co = self.changelog.tip()
1504 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1507 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1505 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1508 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1506
1509
1507 # pull off the manifest group
1510 # pull off the manifest group
1508 self.ui.status("adding manifests\n")
1511 self.ui.status("adding manifests\n")
1509 mm = self.manifest.tip()
1512 mm = self.manifest.tip()
1510 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1513 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1511
1514
1512 # process the files
1515 # process the files
1513 self.ui.status("adding file changes\n")
1516 self.ui.status("adding file changes\n")
1514 while 1:
1517 while 1:
1515 f = getchunk()
1518 f = getchunk()
1516 if not f: break
1519 if not f: break
1517 self.ui.debug("adding %s revisions\n" % f)
1520 self.ui.debug("adding %s revisions\n" % f)
1518 fl = self.file(f)
1521 fl = self.file(f)
1519 o = fl.count()
1522 o = fl.count()
1520 n = fl.addgroup(getgroup(), revmap, tr)
1523 n = fl.addgroup(getgroup(), revmap, tr)
1521 revisions += fl.count() - o
1524 revisions += fl.count() - o
1522 files += 1
1525 files += 1
1523
1526
1524 self.ui.status(("added %d changesets" +
1527 self.ui.status(("added %d changesets" +
1525 " with %d changes to %d files\n")
1528 " with %d changes to %d files\n")
1526 % (changesets, revisions, files))
1529 % (changesets, revisions, files))
1527
1530
1528 tr.close()
1531 tr.close()
1529
1532
1530 if not self.hook("changegroup"):
1533 if not self.hook("changegroup"):
1531 return 1
1534 return 1
1532
1535
1533 return
1536 return
1534
1537
1535 def update(self, node, allow=False, force=False, choose=None,
1538 def update(self, node, allow=False, force=False, choose=None,
1536 moddirstate=True):
1539 moddirstate=True):
1537 pl = self.dirstate.parents()
1540 pl = self.dirstate.parents()
1538 if not force and pl[1] != nullid:
1541 if not force and pl[1] != nullid:
1539 self.ui.warn("aborting: outstanding uncommitted merges\n")
1542 self.ui.warn("aborting: outstanding uncommitted merges\n")
1540 return 1
1543 return 1
1541
1544
1542 p1, p2 = pl[0], node
1545 p1, p2 = pl[0], node
1543 pa = self.changelog.ancestor(p1, p2)
1546 pa = self.changelog.ancestor(p1, p2)
1544 m1n = self.changelog.read(p1)[0]
1547 m1n = self.changelog.read(p1)[0]
1545 m2n = self.changelog.read(p2)[0]
1548 m2n = self.changelog.read(p2)[0]
1546 man = self.manifest.ancestor(m1n, m2n)
1549 man = self.manifest.ancestor(m1n, m2n)
1547 m1 = self.manifest.read(m1n)
1550 m1 = self.manifest.read(m1n)
1548 mf1 = self.manifest.readflags(m1n)
1551 mf1 = self.manifest.readflags(m1n)
1549 m2 = self.manifest.read(m2n)
1552 m2 = self.manifest.read(m2n)
1550 mf2 = self.manifest.readflags(m2n)
1553 mf2 = self.manifest.readflags(m2n)
1551 ma = self.manifest.read(man)
1554 ma = self.manifest.read(man)
1552 mfa = self.manifest.readflags(man)
1555 mfa = self.manifest.readflags(man)
1553
1556
1554 (c, a, d, u) = self.changes()
1557 (c, a, d, u) = self.changes()
1555
1558
1556 # is this a jump, or a merge? i.e. is there a linear path
1559 # is this a jump, or a merge? i.e. is there a linear path
1557 # from p1 to p2?
1560 # from p1 to p2?
1558 linear_path = (pa == p1 or pa == p2)
1561 linear_path = (pa == p1 or pa == p2)
1559
1562
1560 # resolve the manifest to determine which files
1563 # resolve the manifest to determine which files
1561 # we care about merging
1564 # we care about merging
1562 self.ui.note("resolving manifests\n")
1565 self.ui.note("resolving manifests\n")
1563 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1566 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1564 (force, allow, moddirstate, linear_path))
1567 (force, allow, moddirstate, linear_path))
1565 self.ui.debug(" ancestor %s local %s remote %s\n" %
1568 self.ui.debug(" ancestor %s local %s remote %s\n" %
1566 (short(man), short(m1n), short(m2n)))
1569 (short(man), short(m1n), short(m2n)))
1567
1570
1568 merge = {}
1571 merge = {}
1569 get = {}
1572 get = {}
1570 remove = []
1573 remove = []
1571 mark = {}
1574 mark = {}
1572
1575
1573 # construct a working dir manifest
1576 # construct a working dir manifest
1574 mw = m1.copy()
1577 mw = m1.copy()
1575 mfw = mf1.copy()
1578 mfw = mf1.copy()
1576 umap = dict.fromkeys(u)
1579 umap = dict.fromkeys(u)
1577
1580
1578 for f in a + c + u:
1581 for f in a + c + u:
1579 mw[f] = ""
1582 mw[f] = ""
1580 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1583 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1581
1584
1582 for f in d:
1585 for f in d:
1583 if f in mw: del mw[f]
1586 if f in mw: del mw[f]
1584
1587
1585 # If we're jumping between revisions (as opposed to merging),
1588 # If we're jumping between revisions (as opposed to merging),
1586 # and if neither the working directory nor the target rev has
1589 # and if neither the working directory nor the target rev has
1587 # the file, then we need to remove it from the dirstate, to
1590 # the file, then we need to remove it from the dirstate, to
1588 # prevent the dirstate from listing the file when it is no
1591 # prevent the dirstate from listing the file when it is no
1589 # longer in the manifest.
1592 # longer in the manifest.
1590 if moddirstate and linear_path and f not in m2:
1593 if moddirstate and linear_path and f not in m2:
1591 self.dirstate.forget((f,))
1594 self.dirstate.forget((f,))
1592
1595
1593 # Compare manifests
1596 # Compare manifests
1594 for f, n in mw.iteritems():
1597 for f, n in mw.iteritems():
1595 if choose and not choose(f): continue
1598 if choose and not choose(f): continue
1596 if f in m2:
1599 if f in m2:
1597 s = 0
1600 s = 0
1598
1601
1599 # is the wfile new since m1, and match m2?
1602 # is the wfile new since m1, and match m2?
1600 if f not in m1:
1603 if f not in m1:
1601 t1 = self.wfile(f).read()
1604 t1 = self.wfile(f).read()
1602 t2 = self.file(f).revision(m2[f])
1605 t2 = self.file(f).revision(m2[f])
1603 if cmp(t1, t2) == 0:
1606 if cmp(t1, t2) == 0:
1604 mark[f] = 1
1607 mark[f] = 1
1605 n = m2[f]
1608 n = m2[f]
1606 del t1, t2
1609 del t1, t2
1607
1610
1608 # are files different?
1611 # are files different?
1609 if n != m2[f]:
1612 if n != m2[f]:
1610 a = ma.get(f, nullid)
1613 a = ma.get(f, nullid)
1611 # are both different from the ancestor?
1614 # are both different from the ancestor?
1612 if n != a and m2[f] != a:
1615 if n != a and m2[f] != a:
1613 self.ui.debug(" %s versions differ, resolve\n" % f)
1616 self.ui.debug(" %s versions differ, resolve\n" % f)
1614 # merge executable bits
1617 # merge executable bits
1615 # "if we changed or they changed, change in merge"
1618 # "if we changed or they changed, change in merge"
1616 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1619 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1617 mode = ((a^b) | (a^c)) ^ a
1620 mode = ((a^b) | (a^c)) ^ a
1618 merge[f] = (m1.get(f, nullid), m2[f], mode)
1621 merge[f] = (m1.get(f, nullid), m2[f], mode)
1619 s = 1
1622 s = 1
1620 # are we clobbering?
1623 # are we clobbering?
1621 # is remote's version newer?
1624 # is remote's version newer?
1622 # or are we going back in time?
1625 # or are we going back in time?
1623 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1626 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1624 self.ui.debug(" remote %s is newer, get\n" % f)
1627 self.ui.debug(" remote %s is newer, get\n" % f)
1625 get[f] = m2[f]
1628 get[f] = m2[f]
1626 s = 1
1629 s = 1
1627 else:
1630 else:
1628 mark[f] = 1
1631 mark[f] = 1
1629 elif f in umap:
1632 elif f in umap:
1630 # this unknown file is the same as the checkout
1633 # this unknown file is the same as the checkout
1631 get[f] = m2[f]
1634 get[f] = m2[f]
1632
1635
1633 if not s and mfw[f] != mf2[f]:
1636 if not s and mfw[f] != mf2[f]:
1634 if force:
1637 if force:
1635 self.ui.debug(" updating permissions for %s\n" % f)
1638 self.ui.debug(" updating permissions for %s\n" % f)
1636 util.set_exec(self.wjoin(f), mf2[f])
1639 util.set_exec(self.wjoin(f), mf2[f])
1637 else:
1640 else:
1638 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1641 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1639 mode = ((a^b) | (a^c)) ^ a
1642 mode = ((a^b) | (a^c)) ^ a
1640 if mode != b:
1643 if mode != b:
1641 self.ui.debug(" updating permissions for %s\n" % f)
1644 self.ui.debug(" updating permissions for %s\n" % f)
1642 util.set_exec(self.wjoin(f), mode)
1645 util.set_exec(self.wjoin(f), mode)
1643 mark[f] = 1
1646 mark[f] = 1
1644 del m2[f]
1647 del m2[f]
1645 elif f in ma:
1648 elif f in ma:
1646 if n != ma[f]:
1649 if n != ma[f]:
1647 r = "d"
1650 r = "d"
1648 if not force and (linear_path or allow):
1651 if not force and (linear_path or allow):
1649 r = self.ui.prompt(
1652 r = self.ui.prompt(
1650 (" local changed %s which remote deleted\n" % f) +
1653 (" local changed %s which remote deleted\n" % f) +
1651 "(k)eep or (d)elete?", "[kd]", "k")
1654 "(k)eep or (d)elete?", "[kd]", "k")
1652 if r == "d":
1655 if r == "d":
1653 remove.append(f)
1656 remove.append(f)
1654 else:
1657 else:
1655 self.ui.debug("other deleted %s\n" % f)
1658 self.ui.debug("other deleted %s\n" % f)
1656 remove.append(f) # other deleted it
1659 remove.append(f) # other deleted it
1657 else:
1660 else:
1658 if n == m1.get(f, nullid): # same as parent
1661 if n == m1.get(f, nullid): # same as parent
1659 if p2 == pa: # going backwards?
1662 if p2 == pa: # going backwards?
1660 self.ui.debug("remote deleted %s\n" % f)
1663 self.ui.debug("remote deleted %s\n" % f)
1661 remove.append(f)
1664 remove.append(f)
1662 else:
1665 else:
1663 self.ui.debug("local created %s, keeping\n" % f)
1666 self.ui.debug("local created %s, keeping\n" % f)
1664 else:
1667 else:
1665 self.ui.debug("working dir created %s, keeping\n" % f)
1668 self.ui.debug("working dir created %s, keeping\n" % f)
1666
1669
1667 for f, n in m2.iteritems():
1670 for f, n in m2.iteritems():
1668 if choose and not choose(f): continue
1671 if choose and not choose(f): continue
1669 if f[0] == "/": continue
1672 if f[0] == "/": continue
1670 if f in ma and n != ma[f]:
1673 if f in ma and n != ma[f]:
1671 r = "k"
1674 r = "k"
1672 if not force and (linear_path or allow):
1675 if not force and (linear_path or allow):
1673 r = self.ui.prompt(
1676 r = self.ui.prompt(
1674 ("remote changed %s which local deleted\n" % f) +
1677 ("remote changed %s which local deleted\n" % f) +
1675 "(k)eep or (d)elete?", "[kd]", "k")
1678 "(k)eep or (d)elete?", "[kd]", "k")
1676 if r == "k": get[f] = n
1679 if r == "k": get[f] = n
1677 elif f not in ma:
1680 elif f not in ma:
1678 self.ui.debug("remote created %s\n" % f)
1681 self.ui.debug("remote created %s\n" % f)
1679 get[f] = n
1682 get[f] = n
1680 else:
1683 else:
1681 if force or p2 == pa: # going backwards?
1684 if force or p2 == pa: # going backwards?
1682 self.ui.debug("local deleted %s, recreating\n" % f)
1685 self.ui.debug("local deleted %s, recreating\n" % f)
1683 get[f] = n
1686 get[f] = n
1684 else:
1687 else:
1685 self.ui.debug("local deleted %s\n" % f)
1688 self.ui.debug("local deleted %s\n" % f)
1686
1689
1687 del mw, m1, m2, ma
1690 del mw, m1, m2, ma
1688
1691
1689 if force:
1692 if force:
1690 for f in merge:
1693 for f in merge:
1691 get[f] = merge[f][1]
1694 get[f] = merge[f][1]
1692 merge = {}
1695 merge = {}
1693
1696
1694 if linear_path or force:
1697 if linear_path or force:
1695 # we don't need to do any magic, just jump to the new rev
1698 # we don't need to do any magic, just jump to the new rev
1696 mode = 'n'
1699 mode = 'n'
1697 p1, p2 = p2, nullid
1700 p1, p2 = p2, nullid
1698 else:
1701 else:
1699 if not allow:
1702 if not allow:
1700 self.ui.status("this update spans a branch" +
1703 self.ui.status("this update spans a branch" +
1701 " affecting the following files:\n")
1704 " affecting the following files:\n")
1702 fl = merge.keys() + get.keys()
1705 fl = merge.keys() + get.keys()
1703 fl.sort()
1706 fl.sort()
1704 for f in fl:
1707 for f in fl:
1705 cf = ""
1708 cf = ""
1706 if f in merge: cf = " (resolve)"
1709 if f in merge: cf = " (resolve)"
1707 self.ui.status(" %s%s\n" % (f, cf))
1710 self.ui.status(" %s%s\n" % (f, cf))
1708 self.ui.warn("aborting update spanning branches!\n")
1711 self.ui.warn("aborting update spanning branches!\n")
1709 self.ui.status("(use update -m to merge across branches" +
1712 self.ui.status("(use update -m to merge across branches" +
1710 " or -C to lose changes)\n")
1713 " or -C to lose changes)\n")
1711 return 1
1714 return 1
1712 # we have to remember what files we needed to get/change
1715 # we have to remember what files we needed to get/change
1713 # because any file that's different from either one of its
1716 # because any file that's different from either one of its
1714 # parents must be in the changeset
1717 # parents must be in the changeset
1715 mode = 'm'
1718 mode = 'm'
1716 if moddirstate:
1719 if moddirstate:
1717 self.dirstate.update(mark.keys(), "m")
1720 self.dirstate.update(mark.keys(), "m")
1718
1721
1719 if moddirstate:
1722 if moddirstate:
1720 self.dirstate.setparents(p1, p2)
1723 self.dirstate.setparents(p1, p2)
1721
1724
1722 # get the files we don't need to change
1725 # get the files we don't need to change
1723 files = get.keys()
1726 files = get.keys()
1724 files.sort()
1727 files.sort()
1725 for f in files:
1728 for f in files:
1726 if f[0] == "/": continue
1729 if f[0] == "/": continue
1727 self.ui.note("getting %s\n" % f)
1730 self.ui.note("getting %s\n" % f)
1728 t = self.file(f).read(get[f])
1731 t = self.file(f).read(get[f])
1729 try:
1732 try:
1730 self.wfile(f, "w").write(t)
1733 self.wfile(f, "w").write(t)
1731 except IOError:
1734 except IOError:
1732 os.makedirs(os.path.dirname(self.wjoin(f)))
1735 os.makedirs(os.path.dirname(self.wjoin(f)))
1733 self.wfile(f, "w").write(t)
1736 self.wfile(f, "w").write(t)
1734 util.set_exec(self.wjoin(f), mf2[f])
1737 util.set_exec(self.wjoin(f), mf2[f])
1735 if moddirstate:
1738 if moddirstate:
1736 self.dirstate.update([f], mode)
1739 self.dirstate.update([f], mode)
1737
1740
1738 # merge the tricky bits
1741 # merge the tricky bits
1739 files = merge.keys()
1742 files = merge.keys()
1740 files.sort()
1743 files.sort()
1741 for f in files:
1744 for f in files:
1742 self.ui.status("merging %s\n" % f)
1745 self.ui.status("merging %s\n" % f)
1743 m, o, flag = merge[f]
1746 m, o, flag = merge[f]
1744 self.merge3(f, m, o)
1747 self.merge3(f, m, o)
1745 util.set_exec(self.wjoin(f), flag)
1748 util.set_exec(self.wjoin(f), flag)
1746 if moddirstate:
1749 if moddirstate:
1747 if mode == 'm':
1750 if mode == 'm':
1748 # only update dirstate on branch merge, otherwise we
1751 # only update dirstate on branch merge, otherwise we
1749 # could mark files with changes as unchanged
1752 # could mark files with changes as unchanged
1750 self.dirstate.update([f], mode)
1753 self.dirstate.update([f], mode)
1751 elif p2 == nullid:
1754 elif p2 == nullid:
1752 # update dirstate from parent1's manifest
1755 # update dirstate from parent1's manifest
1753 m1n = self.changelog.read(p1)[0]
1756 m1n = self.changelog.read(p1)[0]
1754 m1 = self.manifest.read(m1n)
1757 m1 = self.manifest.read(m1n)
1755 f_len = len(self.file(f).read(m1[f]))
1758 f_len = len(self.file(f).read(m1[f]))
1756 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1759 self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
1757 else:
1760 else:
1758 self.ui.warn("Second parent without branch merge!?\n"
1761 self.ui.warn("Second parent without branch merge!?\n"
1759 "Dirstate for file %s may be wrong.\n" % f)
1762 "Dirstate for file %s may be wrong.\n" % f)
1760
1763
1761 remove.sort()
1764 remove.sort()
1762 for f in remove:
1765 for f in remove:
1763 self.ui.note("removing %s\n" % f)
1766 self.ui.note("removing %s\n" % f)
1764 try:
1767 try:
1765 os.unlink(f)
1768 os.unlink(f)
1766 except OSError, inst:
1769 except OSError, inst:
1767 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1770 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1768 # try removing directories that might now be empty
1771 # try removing directories that might now be empty
1769 try: os.removedirs(os.path.dirname(f))
1772 try: os.removedirs(os.path.dirname(f))
1770 except: pass
1773 except: pass
1771 if moddirstate:
1774 if moddirstate:
1772 if mode == 'n':
1775 if mode == 'n':
1773 self.dirstate.forget(remove)
1776 self.dirstate.forget(remove)
1774 else:
1777 else:
1775 self.dirstate.update(remove, 'r')
1778 self.dirstate.update(remove, 'r')
1776
1779
1777 def merge3(self, fn, my, other):
1780 def merge3(self, fn, my, other):
1778 """perform a 3-way merge in the working directory"""
1781 """perform a 3-way merge in the working directory"""
1779
1782
1780 def temp(prefix, node):
1783 def temp(prefix, node):
1781 pre = "%s~%s." % (os.path.basename(fn), prefix)
1784 pre = "%s~%s." % (os.path.basename(fn), prefix)
1782 (fd, name) = tempfile.mkstemp("", pre)
1785 (fd, name) = tempfile.mkstemp("", pre)
1783 f = os.fdopen(fd, "wb")
1786 f = os.fdopen(fd, "wb")
1784 f.write(fl.revision(node))
1787 f.write(fl.revision(node))
1785 f.close()
1788 f.close()
1786 return name
1789 return name
1787
1790
1788 fl = self.file(fn)
1791 fl = self.file(fn)
1789 base = fl.ancestor(my, other)
1792 base = fl.ancestor(my, other)
1790 a = self.wjoin(fn)
1793 a = self.wjoin(fn)
1791 b = temp("base", base)
1794 b = temp("base", base)
1792 c = temp("other", other)
1795 c = temp("other", other)
1793
1796
1794 self.ui.note("resolving %s\n" % fn)
1797 self.ui.note("resolving %s\n" % fn)
1795 self.ui.debug("file %s: other %s ancestor %s\n" %
1798 self.ui.debug("file %s: other %s ancestor %s\n" %
1796 (fn, short(other), short(base)))
1799 (fn, short(other), short(base)))
1797
1800
1798 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1801 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1799 or "hgmerge")
1802 or "hgmerge")
1800 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1803 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1801 if r:
1804 if r:
1802 self.ui.warn("merging %s failed!\n" % fn)
1805 self.ui.warn("merging %s failed!\n" % fn)
1803
1806
1804 os.unlink(b)
1807 os.unlink(b)
1805 os.unlink(c)
1808 os.unlink(c)
1806
1809
1807 def verify(self):
1810 def verify(self):
1808 filelinkrevs = {}
1811 filelinkrevs = {}
1809 filenodes = {}
1812 filenodes = {}
1810 changesets = revisions = files = 0
1813 changesets = revisions = files = 0
1811 errors = 0
1814 errors = 0
1812
1815
1813 seen = {}
1816 seen = {}
1814 self.ui.status("checking changesets\n")
1817 self.ui.status("checking changesets\n")
1815 for i in range(self.changelog.count()):
1818 for i in range(self.changelog.count()):
1816 changesets += 1
1819 changesets += 1
1817 n = self.changelog.node(i)
1820 n = self.changelog.node(i)
1818 if n in seen:
1821 if n in seen:
1819 self.ui.warn("duplicate changeset at revision %d\n" % i)
1822 self.ui.warn("duplicate changeset at revision %d\n" % i)
1820 errors += 1
1823 errors += 1
1821 seen[n] = 1
1824 seen[n] = 1
1822
1825
1823 for p in self.changelog.parents(n):
1826 for p in self.changelog.parents(n):
1824 if p not in self.changelog.nodemap:
1827 if p not in self.changelog.nodemap:
1825 self.ui.warn("changeset %s has unknown parent %s\n" %
1828 self.ui.warn("changeset %s has unknown parent %s\n" %
1826 (short(n), short(p)))
1829 (short(n), short(p)))
1827 errors += 1
1830 errors += 1
1828 try:
1831 try:
1829 changes = self.changelog.read(n)
1832 changes = self.changelog.read(n)
1830 except Exception, inst:
1833 except Exception, inst:
1831 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1834 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1832 errors += 1
1835 errors += 1
1833
1836
1834 for f in changes[3]:
1837 for f in changes[3]:
1835 filelinkrevs.setdefault(f, []).append(i)
1838 filelinkrevs.setdefault(f, []).append(i)
1836
1839
1837 seen = {}
1840 seen = {}
1838 self.ui.status("checking manifests\n")
1841 self.ui.status("checking manifests\n")
1839 for i in range(self.manifest.count()):
1842 for i in range(self.manifest.count()):
1840 n = self.manifest.node(i)
1843 n = self.manifest.node(i)
1841 if n in seen:
1844 if n in seen:
1842 self.ui.warn("duplicate manifest at revision %d\n" % i)
1845 self.ui.warn("duplicate manifest at revision %d\n" % i)
1843 errors += 1
1846 errors += 1
1844 seen[n] = 1
1847 seen[n] = 1
1845
1848
1846 for p in self.manifest.parents(n):
1849 for p in self.manifest.parents(n):
1847 if p not in self.manifest.nodemap:
1850 if p not in self.manifest.nodemap:
1848 self.ui.warn("manifest %s has unknown parent %s\n" %
1851 self.ui.warn("manifest %s has unknown parent %s\n" %
1849 (short(n), short(p)))
1852 (short(n), short(p)))
1850 errors += 1
1853 errors += 1
1851
1854
1852 try:
1855 try:
1853 delta = mdiff.patchtext(self.manifest.delta(n))
1856 delta = mdiff.patchtext(self.manifest.delta(n))
1854 except KeyboardInterrupt:
1857 except KeyboardInterrupt:
1855 self.ui.warn("aborted")
1858 self.ui.warn("aborted")
1856 sys.exit(0)
1859 sys.exit(0)
1857 except Exception, inst:
1860 except Exception, inst:
1858 self.ui.warn("unpacking manifest %s: %s\n"
1861 self.ui.warn("unpacking manifest %s: %s\n"
1859 % (short(n), inst))
1862 % (short(n), inst))
1860 errors += 1
1863 errors += 1
1861
1864
1862 ff = [ l.split('\0') for l in delta.splitlines() ]
1865 ff = [ l.split('\0') for l in delta.splitlines() ]
1863 for f, fn in ff:
1866 for f, fn in ff:
1864 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1867 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1865
1868
1866 self.ui.status("crosschecking files in changesets and manifests\n")
1869 self.ui.status("crosschecking files in changesets and manifests\n")
1867 for f in filenodes:
1870 for f in filenodes:
1868 if f not in filelinkrevs:
1871 if f not in filelinkrevs:
1869 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1872 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1870 errors += 1
1873 errors += 1
1871
1874
1872 for f in filelinkrevs:
1875 for f in filelinkrevs:
1873 if f not in filenodes:
1876 if f not in filenodes:
1874 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1877 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1875 errors += 1
1878 errors += 1
1876
1879
1877 self.ui.status("checking files\n")
1880 self.ui.status("checking files\n")
1878 ff = filenodes.keys()
1881 ff = filenodes.keys()
1879 ff.sort()
1882 ff.sort()
1880 for f in ff:
1883 for f in ff:
1881 if f == "/dev/null": continue
1884 if f == "/dev/null": continue
1882 files += 1
1885 files += 1
1883 fl = self.file(f)
1886 fl = self.file(f)
1884 nodes = { nullid: 1 }
1887 nodes = { nullid: 1 }
1885 seen = {}
1888 seen = {}
1886 for i in range(fl.count()):
1889 for i in range(fl.count()):
1887 revisions += 1
1890 revisions += 1
1888 n = fl.node(i)
1891 n = fl.node(i)
1889
1892
1890 if n in seen:
1893 if n in seen:
1891 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1894 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1892 errors += 1
1895 errors += 1
1893
1896
1894 if n not in filenodes[f]:
1897 if n not in filenodes[f]:
1895 self.ui.warn("%s: %d:%s not in manifests\n"
1898 self.ui.warn("%s: %d:%s not in manifests\n"
1896 % (f, i, short(n)))
1899 % (f, i, short(n)))
1897 errors += 1
1900 errors += 1
1898 else:
1901 else:
1899 del filenodes[f][n]
1902 del filenodes[f][n]
1900
1903
1901 flr = fl.linkrev(n)
1904 flr = fl.linkrev(n)
1902 if flr not in filelinkrevs[f]:
1905 if flr not in filelinkrevs[f]:
1903 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1906 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1904 % (f, short(n), fl.linkrev(n)))
1907 % (f, short(n), fl.linkrev(n)))
1905 errors += 1
1908 errors += 1
1906 else:
1909 else:
1907 filelinkrevs[f].remove(flr)
1910 filelinkrevs[f].remove(flr)
1908
1911
1909 # verify contents
1912 # verify contents
1910 try:
1913 try:
1911 t = fl.read(n)
1914 t = fl.read(n)
1912 except Exception, inst:
1915 except Exception, inst:
1913 self.ui.warn("unpacking file %s %s: %s\n"
1916 self.ui.warn("unpacking file %s %s: %s\n"
1914 % (f, short(n), inst))
1917 % (f, short(n), inst))
1915 errors += 1
1918 errors += 1
1916
1919
1917 # verify parents
1920 # verify parents
1918 (p1, p2) = fl.parents(n)
1921 (p1, p2) = fl.parents(n)
1919 if p1 not in nodes:
1922 if p1 not in nodes:
1920 self.ui.warn("file %s:%s unknown parent 1 %s" %
1923 self.ui.warn("file %s:%s unknown parent 1 %s" %
1921 (f, short(n), short(p1)))
1924 (f, short(n), short(p1)))
1922 errors += 1
1925 errors += 1
1923 if p2 not in nodes:
1926 if p2 not in nodes:
1924 self.ui.warn("file %s:%s unknown parent 2 %s" %
1927 self.ui.warn("file %s:%s unknown parent 2 %s" %
1925 (f, short(n), short(p1)))
1928 (f, short(n), short(p1)))
1926 errors += 1
1929 errors += 1
1927 nodes[n] = 1
1930 nodes[n] = 1
1928
1931
1929 # cross-check
1932 # cross-check
1930 for node in filenodes[f]:
1933 for node in filenodes[f]:
1931 self.ui.warn("node %s in manifests not in %s\n"
1934 self.ui.warn("node %s in manifests not in %s\n"
1932 % (hex(node), f))
1935 % (hex(node), f))
1933 errors += 1
1936 errors += 1
1934
1937
1935 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1938 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1936 (files, changesets, revisions))
1939 (files, changesets, revisions))
1937
1940
1938 if errors:
1941 if errors:
1939 self.ui.warn("%d integrity errors encountered!\n" % errors)
1942 self.ui.warn("%d integrity errors encountered!\n" % errors)
1940 return 1
1943 return 1
1941
1944
1942 class httprepository:
1945 class remoterepository:
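# remote repository types answer False here, so callers can use local() to
# tell an in-process repository apart from the network stubs below (the
# local repository class presumably returns True, but its definition lies
# outside this hunk).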
1946 def local(self):
1947 return False
1948
1949 class httprepository(remoterepository):
1943 def __init__(self, ui, path):
1950 def __init__(self, ui, path):
1944 # fix missing / after hostname
1951 # fix missing / after hostname
1945 s = urlparse.urlsplit(path)
1952 s = urlparse.urlsplit(path)
1946 partial = s[2]
1953 partial = s[2]
1947 if not partial: partial = "/"
1954 if not partial: partial = "/"
1948 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1955 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1949 self.ui = ui
1956 self.ui = ui
1950 no_list = [ "localhost", "127.0.0.1" ]
1957 no_list = [ "localhost", "127.0.0.1" ]
1951 host = ui.config("http_proxy", "host")
1958 host = ui.config("http_proxy", "host")
1952 if host is None:
1959 if host is None:
1953 host = os.environ.get("http_proxy")
1960 host = os.environ.get("http_proxy")
1954 if host and host.startswith('http://'):
1961 if host and host.startswith('http://'):
1955 host = host[7:]
1962 host = host[7:]
1956 user = ui.config("http_proxy", "user")
1963 user = ui.config("http_proxy", "user")
1957 passwd = ui.config("http_proxy", "passwd")
1964 passwd = ui.config("http_proxy", "passwd")
1958 no = ui.config("http_proxy", "no")
1965 no = ui.config("http_proxy", "no")
1959 if no is None:
1966 if no is None:
1960 no = os.environ.get("no_proxy")
1967 no = os.environ.get("no_proxy")
1961 if no:
1968 if no:
1962 no_list = no_list + no.split(",")
1969 no_list = no_list + no.split(",")
1963
1970
1964 no_proxy = 0
1971 no_proxy = 0
1965 for h in no_list:
1972 for h in no_list:
1966 if (path.startswith("http://" + h + "/") or
1973 if (path.startswith("http://" + h + "/") or
1967 path.startswith("http://" + h + ":") or
1974 path.startswith("http://" + h + ":") or
1968 path == "http://" + h):
1975 path == "http://" + h):
1969 no_proxy = 1
1976 no_proxy = 1
1970
1977
1971 # Note: urllib2 takes proxy values from the environment and those will
1978 # Note: urllib2 takes proxy values from the environment and those will
1972 # take precedence
1979 # take precedence
1973 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1980 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1974 try:
1981 try:
1975 if os.environ.has_key(env):
1982 if os.environ.has_key(env):
1976 del os.environ[env]
1983 del os.environ[env]
1977 except OSError:
1984 except OSError:
1978 pass
1985 pass
1979
1986
1980 proxy_handler = urllib2.BaseHandler()
1987 proxy_handler = urllib2.BaseHandler()
1981 if host and not no_proxy:
1988 if host and not no_proxy:
1982 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1989 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1983
1990
1984 authinfo = None
1991 authinfo = None
1985 if user and passwd:
1992 if user and passwd:
1986 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1993 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1987 passmgr.add_password(None, host, user, passwd)
1994 passmgr.add_password(None, host, user, passwd)
1988 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1995 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1989
1996
1990 opener = urllib2.build_opener(proxy_handler, authinfo)
1997 opener = urllib2.build_opener(proxy_handler, authinfo)
1991 urllib2.install_opener(opener)
1998 urllib2.install_opener(opener)
1992
1999
1993 def dev(self):
2000 def dev(self):
1994 return -1
2001 return -1
1995
2002
1996 def do_cmd(self, cmd, **args):
2003 def do_cmd(self, cmd, **args):
1997 self.ui.debug("sending %s command\n" % cmd)
2004 self.ui.debug("sending %s command\n" % cmd)
1998 q = {"cmd": cmd}
2005 q = {"cmd": cmd}
1999 q.update(args)
2006 q.update(args)
2000 qs = urllib.urlencode(q)
2007 qs = urllib.urlencode(q)
2001 cu = "%s?%s" % (self.url, qs)
2008 cu = "%s?%s" % (self.url, qs)
2002 resp = urllib2.urlopen(cu)
2009 resp = urllib2.urlopen(cu)
2003 proto = resp.headers['content-type']
2010 proto = resp.headers['content-type']
2004
2011
2005 # accept old "text/plain" and "application/hg-changegroup" for now
2012 # accept old "text/plain" and "application/hg-changegroup" for now
2006 if not proto.startswith('application/mercurial') and \
2013 if not proto.startswith('application/mercurial') and \
2007 not proto.startswith('text/plain') and \
2014 not proto.startswith('text/plain') and \
2008 not proto.startswith('application/hg-changegroup'):
2015 not proto.startswith('application/hg-changegroup'):
2009 raise RepoError("'%s' does not appear to be an hg repository"
2016 raise RepoError("'%s' does not appear to be an hg repository"
2010 % self.url)
2017 % self.url)
2011
2018
2012 if proto.startswith('application/mercurial'):
2019 if proto.startswith('application/mercurial'):
2013 version = proto[22:]
2020 version = proto[22:]
2014 if float(version) > 0.1:
2021 if float(version) > 0.1:
2015 raise RepoError("'%s' uses newer protocol %s" %
2022 raise RepoError("'%s' uses newer protocol %s" %
2016 (self.url, version))
2023 (self.url, version))
2017
2024
2018 return resp
2025 return resp
2019
2026
2020 def heads(self):
2027 def heads(self):
2021 d = self.do_cmd("heads").read()
2028 d = self.do_cmd("heads").read()
2022 try:
2029 try:
2023 return map(bin, d[:-1].split(" "))
2030 return map(bin, d[:-1].split(" "))
2024 except:
2031 except:
2025 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2032 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2026 raise
2033 raise
2027
2034
2028 def branches(self, nodes):
2035 def branches(self, nodes):
2029 n = " ".join(map(hex, nodes))
2036 n = " ".join(map(hex, nodes))
2030 d = self.do_cmd("branches", nodes=n).read()
2037 d = self.do_cmd("branches", nodes=n).read()
2031 try:
2038 try:
2032 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2039 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
2033 return br
2040 return br
2034 except:
2041 except:
2035 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2042 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2036 raise
2043 raise
2037
2044
2038 def between(self, pairs):
2045 def between(self, pairs):
2039 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2046 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
2040 d = self.do_cmd("between", pairs=n).read()
2047 d = self.do_cmd("between", pairs=n).read()
2041 try:
2048 try:
2042 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2049 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
2043 return p
2050 return p
2044 except:
2051 except:
2045 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2052 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
2046 raise
2053 raise
2047
2054
2048 def changegroup(self, nodes):
2055 def changegroup(self, nodes):
2049 n = " ".join(map(hex, nodes))
2056 n = " ".join(map(hex, nodes))
2050 f = self.do_cmd("changegroup", roots=n)
2057 f = self.do_cmd("changegroup", roots=n)
2051 bytes = 0
2058 bytes = 0
2052
2059
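# the changegroup body fetched over HTTP arrives zlib-compressed; zread
# wraps the response with a file-like read(l) that decompresses just enough
# data to satisfy each request, so addchangegroup() can consume it like any
# other chunk source.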
2053 class zread:
2060 class zread:
2054 def __init__(self, f):
2061 def __init__(self, f):
2055 self.zd = zlib.decompressobj()
2062 self.zd = zlib.decompressobj()
2056 self.f = f
2063 self.f = f
2057 self.buf = ""
2064 self.buf = ""
2058 def read(self, l):
2065 def read(self, l):
2059 while l > len(self.buf):
2066 while l > len(self.buf):
2060 r = self.f.read(4096)
2067 r = self.f.read(4096)
2061 if r:
2068 if r:
2062 self.buf += self.zd.decompress(r)
2069 self.buf += self.zd.decompress(r)
2063 else:
2070 else:
2064 self.buf += self.zd.flush()
2071 self.buf += self.zd.flush()
2065 break
2072 break
2066 d, self.buf = self.buf[:l], self.buf[l:]
2073 d, self.buf = self.buf[:l], self.buf[l:]
2067 return d
2074 return d
2068
2075
2069 return zread(f)
2076 return zread(f)
2070
2077
2071 class remotelock:
2078 class remotelock:
2072 def __init__(self, repo):
2079 def __init__(self, repo):
2073 self.repo = repo
2080 self.repo = repo
2074 def release(self):
2081 def release(self):
2075 self.repo.unlock()
2082 self.repo.unlock()
2076 self.repo = None
2083 self.repo = None
2077 def __del__(self):
2084 def __del__(self):
2078 if self.repo:
2085 if self.repo:
2079 self.release()
2086 self.release()
2080
2087
2081 class sshrepository:
2088 class sshrepository(remoterepository):
2082 def __init__(self, ui, path):
2089 def __init__(self, ui, path):
2083 self.url = path
2090 self.url = path
2084 self.ui = ui
2091 self.ui = ui
2085
2092
2086 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2093 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2087 if not m:
2094 if not m:
2088 raise RepoError("couldn't parse destination %s" % path)
2095 raise RepoError("couldn't parse destination %s" % path)
2089
2096
2090 self.user = m.group(2)
2097 self.user = m.group(2)
2091 self.host = m.group(3)
2098 self.host = m.group(3)
2092 self.port = m.group(5)
2099 self.port = m.group(5)
2093 self.path = m.group(7)
2100 self.path = m.group(7)
2094
2101
2095 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2102 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2096 args = self.port and ("%s -p %s") % (args, self.port) or args
2103 args = self.port and ("%s -p %s") % (args, self.port) or args
2097 path = self.path or ""
2104 path = self.path or ""
2098
2105
2099 if not path:
2106 if not path:
2100 raise RepoError("no remote repository path specified")
2107 raise RepoError("no remote repository path specified")
2101
2108
2102 cmd = "ssh %s 'hg -R %s serve --stdio'"
2109 cmd = "ssh %s 'hg -R %s serve --stdio'"
2103 cmd = cmd % (args, path)
2110 cmd = cmd % (args, path)
2104
2111
2105 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2112 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2106
2113
    def readerr(self):
        while 1:
            r,w,x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        try:
            self.pipeo.close()
            self.pipei.close()
            for l in self.pipee:
                self.ui.status("remote: ", l)
            self.pipee.close()
        except:
            pass

    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)

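do_cmd() and call() above define the entire framing of the --stdio protocol: the command name on its own line, then one "key length" header line followed by the raw value for each keyword argument, with the reply coming back as a decimal byte count on one line plus that many bytes of body. A small sketch of the request side; the frame_request name and the sample values are invented here for illustration.

def frame_request(cmd, **args):
    # mirrors do_cmd(): command name, then "key length\n" + raw value per argument
    out = "%s\n" % cmd
    for k, v in args.items():
        out += "%s %d\n" % (k, len(v))
        out += v
    return out

assert frame_request("heads") == "heads\n"
assert frame_request("between", pairs="a" * 10) == "between\npairs 10\naaaaaaaaaa"

call() then reads the length line, drains remote stderr via readerr(), and returns exactly that many bytes of reply.
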
    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

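heads(), branches() and between() all speak the same textual encoding: 20-byte binary node IDs travel as 40-character hex strings, separated by spaces within a record and by newlines between records, with hex() and bin() converting between the two forms. The round-trip sketch below uses binascii directly in place of those helpers; the node values are fabricated, and Python 2 string semantics are assumed.

import binascii

hexn = binascii.hexlify      # stands in for the hex() helper used in the file
binn = binascii.unhexlify    # stands in for bin()

nodes = ["\x11" * 20, "\x22" * 20]                 # fake 20-byte node IDs
wire = " ".join(map(hexn, nodes))                  # the form heads()/branches() parse
assert map(binn, wire.split(" ")) == nodes

pairs = [(nodes[0], nodes[1])]
req = "\n".join(["-".join(map(hexn, p)) for p in pairs])   # between() request body
assert req == hexn(nodes[0]) + "-" + hexn(nodes[1])
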
    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""

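Pushing over ssh is therefore a three-step exchange: an empty reply to the addchangegroup command means the push is accepted, the changegroup is streamed across in 4096-byte slices (draining remote stderr between writes), and the final length-prefixed reply is read back, a non-empty body making addchangegroup() return true. The copy loop is the only moving part; a toy, self-contained version, with StringIO standing in for both the changegroup stream and the outgoing pipe (names and data invented, Python 2):

from StringIO import StringIO

cg = StringIO("changegroup bytes " * 1000)   # stand-in for a real changegroup stream
pipeo = StringIO()                           # stand-in for self.pipeo

while 1:
    d = cg.read(4096)
    if not d: break
    pipeo.write(d)

assert pipeo.getvalue() == cg.getvalue()
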
class httpsrepository(httprepository):
    pass

def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("https://"):
            return httpsrepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)
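
The factory keys purely off the URL scheme, so callers never name a repository class directly: http, https and the legacy hg scheme go to the HTTP classes, old-http is rewritten and served through localrepository, ssh URLs get the stdio client above, and anything else is treated as a local path (only that case honours create). A hypothetical driver, with the import layout and the paths being assumptions for illustration:

# hypothetical usage of the factory; imports and paths are assumed, not from this file
from mercurial import hg, ui

u = ui.ui()
local = hg.repository(u, "/home/user/src/project")                # localrepository
ssh   = hg.repository(u, "ssh://hg@example.com/repos/project")    # sshrepository
web   = hg.repository(u, "http://example.com/repos/project")      # httprepository
fresh = hg.repository(u, "/tmp/newrepo", create=1)                # new local repository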