Change repo.commit to return None on error or the new revision number on...
mason@suse.com -
r901:120cba94 default
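For context, a minimal sketch of the calling convention this commit introduces: commit() now returns None when no changeset is created (nothing changed, an empty commit message, or a hook refused the commit) and the new changeset node otherwise. The sketch assumes an existing localrepository object named repo and the hex() helper imported from revlog, as in hg.py itself; the names are illustrative, not part of the commit.

# Illustrative only: assumes `repo` is an existing localrepository and
# `hex` is the node-to-string helper imported from revlog (as in hg.py).
n = repo.commit(files=None, text="example commit message")
if n is None:
    # nothing changed, empty commit message, or a hook refused the commit
    print "commit did not create a changeset"
else:
    print "created changeset %s" % hex(n)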
@@ -1,2210 +1,2210 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 demandload(globals(), "bisect errno select stat")
14 demandload(globals(), "bisect errno select stat")
15
15
16 class filelog(revlog):
16 class filelog(revlog):
17 def __init__(self, opener, path):
17 def __init__(self, opener, path):
18 revlog.__init__(self, opener,
18 revlog.__init__(self, opener,
19 os.path.join("data", self.encodedir(path + ".i")),
19 os.path.join("data", self.encodedir(path + ".i")),
20 os.path.join("data", self.encodedir(path + ".d")))
20 os.path.join("data", self.encodedir(path + ".d")))
21
21
22 # This avoids a collision between a file named foo and a dir named
22 # This avoids a collision between a file named foo and a dir named
23 # foo.i or foo.d
23 # foo.i or foo.d
24 def encodedir(self, path):
24 def encodedir(self, path):
25 return (path
25 return (path
26 .replace(".hg/", ".hg.hg/")
26 .replace(".hg/", ".hg.hg/")
27 .replace(".i/", ".i.hg/")
27 .replace(".i/", ".i.hg/")
28 .replace(".d/", ".d.hg/"))
28 .replace(".d/", ".d.hg/"))
29
29
30 def decodedir(self, path):
30 def decodedir(self, path):
31 return (path
31 return (path
32 .replace(".d.hg/", ".d/")
32 .replace(".d.hg/", ".d/")
33 .replace(".i.hg/", ".i/")
33 .replace(".i.hg/", ".i/")
34 .replace(".hg.hg/", ".hg/"))
34 .replace(".hg.hg/", ".hg/"))
35
35
36 def read(self, node):
36 def read(self, node):
37 t = self.revision(node)
37 t = self.revision(node)
38 if not t.startswith('\1\n'):
38 if not t.startswith('\1\n'):
39 return t
39 return t
40 s = t.find('\1\n', 2)
40 s = t.find('\1\n', 2)
41 return t[s+2:]
41 return t[s+2:]
42
42
43 def readmeta(self, node):
43 def readmeta(self, node):
44 t = self.revision(node)
44 t = self.revision(node)
45 if not t.startswith('\1\n'):
45 if not t.startswith('\1\n'):
46 return t
46 return t
47 s = t.find('\1\n', 2)
47 s = t.find('\1\n', 2)
48 mt = t[2:s]
48 mt = t[2:s]
49 for l in mt.splitlines():
49 for l in mt.splitlines():
50 k, v = l.split(": ", 1)
50 k, v = l.split(": ", 1)
51 m[k] = v
51 m[k] = v
52 return m
52 return m
53
53
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 if meta or text.startswith('\1\n'):
55 if meta or text.startswith('\1\n'):
56 mt = ""
56 mt = ""
57 if meta:
57 if meta:
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 text = "\1\n" + "".join(mt) + "\1\n" + text
59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 return self.addrevision(text, transaction, link, p1, p2)
60 return self.addrevision(text, transaction, link, p1, p2)
61
61
62 def annotate(self, node):
62 def annotate(self, node):
63
63
64 def decorate(text, rev):
64 def decorate(text, rev):
65 return ([rev] * len(text.splitlines()), text)
65 return ([rev] * len(text.splitlines()), text)
66
66
67 def pair(parent, child):
67 def pair(parent, child):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 child[0][b1:b2] = parent[0][a1:a2]
69 child[0][b1:b2] = parent[0][a1:a2]
70 return child
70 return child
71
71
72 # find all ancestors
72 # find all ancestors
73 needed = {node:1}
73 needed = {node:1}
74 visit = [node]
74 visit = [node]
75 while visit:
75 while visit:
76 n = visit.pop(0)
76 n = visit.pop(0)
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p not in needed:
78 if p not in needed:
79 needed[p] = 1
79 needed[p] = 1
80 visit.append(p)
80 visit.append(p)
81 else:
81 else:
82 # count how many times we'll use this
82 # count how many times we'll use this
83 needed[p] += 1
83 needed[p] += 1
84
84
85 # sort by revision which is a topological order
85 # sort by revision which is a topological order
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 visit.sort()
87 visit.sort()
88 hist = {}
88 hist = {}
89
89
90 for r,n in visit:
90 for r,n in visit:
91 curr = decorate(self.read(n), self.linkrev(n))
91 curr = decorate(self.read(n), self.linkrev(n))
92 for p in self.parents(n):
92 for p in self.parents(n):
93 if p != nullid:
93 if p != nullid:
94 curr = pair(hist[p], curr)
94 curr = pair(hist[p], curr)
95 # trim the history of unneeded revs
95 # trim the history of unneeded revs
96 needed[p] -= 1
96 needed[p] -= 1
97 if not needed[p]:
97 if not needed[p]:
98 del hist[p]
98 del hist[p]
99 hist[n] = curr
99 hist[n] = curr
100
100
101 return zip(hist[n][0], hist[n][1].splitlines(1))
101 return zip(hist[n][0], hist[n][1].splitlines(1))
102
102
103 class manifest(revlog):
103 class manifest(revlog):
104 def __init__(self, opener):
104 def __init__(self, opener):
105 self.mapcache = None
105 self.mapcache = None
106 self.listcache = None
106 self.listcache = None
107 self.addlist = None
107 self.addlist = None
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109
109
110 def read(self, node):
110 def read(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if self.mapcache and self.mapcache[0] == node:
112 if self.mapcache and self.mapcache[0] == node:
113 return self.mapcache[1]
113 return self.mapcache[1]
114 text = self.revision(node)
114 text = self.revision(node)
115 map = {}
115 map = {}
116 flag = {}
116 flag = {}
117 self.listcache = (text, text.splitlines(1))
117 self.listcache = (text, text.splitlines(1))
118 for l in self.listcache[1]:
118 for l in self.listcache[1]:
119 (f, n) = l.split('\0')
119 (f, n) = l.split('\0')
120 map[f] = bin(n[:40])
120 map[f] = bin(n[:40])
121 flag[f] = (n[40:-1] == "x")
121 flag[f] = (n[40:-1] == "x")
122 self.mapcache = (node, map, flag)
122 self.mapcache = (node, map, flag)
123 return map
123 return map
124
124
125 def readflags(self, node):
125 def readflags(self, node):
126 if node == nullid: return {} # don't upset local cache
126 if node == nullid: return {} # don't upset local cache
127 if not self.mapcache or self.mapcache[0] != node:
127 if not self.mapcache or self.mapcache[0] != node:
128 self.read(node)
128 self.read(node)
129 return self.mapcache[2]
129 return self.mapcache[2]
130
130
131 def diff(self, a, b):
131 def diff(self, a, b):
132 # this is sneaky, as we're not actually using a and b
132 # this is sneaky, as we're not actually using a and b
133 if self.listcache and self.addlist and self.listcache[0] == a:
133 if self.listcache and self.addlist and self.listcache[0] == a:
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 if mdiff.patch(a, d) != b:
135 if mdiff.patch(a, d) != b:
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 return mdiff.textdiff(a, b)
137 return mdiff.textdiff(a, b)
138 return d
138 return d
139 else:
139 else:
140 return mdiff.textdiff(a, b)
140 return mdiff.textdiff(a, b)
141
141
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 changed=None):
143 changed=None):
144 # directly generate the mdiff delta from the data collected during
144 # directly generate the mdiff delta from the data collected during
145 # the bisect loop below
145 # the bisect loop below
146 def gendelta(delta):
146 def gendelta(delta):
147 i = 0
147 i = 0
148 result = []
148 result = []
149 while i < len(delta):
149 while i < len(delta):
150 start = delta[i][2]
150 start = delta[i][2]
151 end = delta[i][3]
151 end = delta[i][3]
152 l = delta[i][4]
152 l = delta[i][4]
153 if l == None:
153 if l == None:
154 l = ""
154 l = ""
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 and end >= delta[i+1][2]:
156 and end >= delta[i+1][2]:
157 if delta[i+1][3] > end:
157 if delta[i+1][3] > end:
158 end = delta[i+1][3]
158 end = delta[i+1][3]
159 if delta[i+1][4]:
159 if delta[i+1][4]:
160 l += delta[i+1][4]
160 l += delta[i+1][4]
161 i += 1
161 i += 1
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 i += 1
163 i += 1
164 return result
164 return result
165
165
166 # apply the changes collected during the bisect loop to our addlist
166 # apply the changes collected during the bisect loop to our addlist
167 def addlistdelta(addlist, delta):
167 def addlistdelta(addlist, delta):
168 # apply the deltas to the addlist. start from the bottom up
168 # apply the deltas to the addlist. start from the bottom up
169 # so changes to the offsets don't mess things up.
169 # so changes to the offsets don't mess things up.
170 i = len(delta)
170 i = len(delta)
171 while i > 0:
171 while i > 0:
172 i -= 1
172 i -= 1
173 start = delta[i][0]
173 start = delta[i][0]
174 end = delta[i][1]
174 end = delta[i][1]
175 if delta[i][4]:
175 if delta[i][4]:
176 addlist[start:end] = [delta[i][4]]
176 addlist[start:end] = [delta[i][4]]
177 else:
177 else:
178 del addlist[start:end]
178 del addlist[start:end]
179 return addlist
179 return addlist
180
180
181 # calculate the byte offset of the start of each line in the
181 # calculate the byte offset of the start of each line in the
182 # manifest
182 # manifest
183 def calcoffsets(addlist):
183 def calcoffsets(addlist):
184 offsets = [0] * (len(addlist) + 1)
184 offsets = [0] * (len(addlist) + 1)
185 offset = 0
185 offset = 0
186 i = 0
186 i = 0
187 while i < len(addlist):
187 while i < len(addlist):
188 offsets[i] = offset
188 offsets[i] = offset
189 offset += len(addlist[i])
189 offset += len(addlist[i])
190 i += 1
190 i += 1
191 offsets[i] = offset
191 offsets[i] = offset
192 return offsets
192 return offsets
193
193
194 # if we're using the listcache, make sure it is valid and
194 # if we're using the listcache, make sure it is valid and
195 # parented by the same node we're diffing against
195 # parented by the same node we're diffing against
196 if not changed or not self.listcache or not p1 or \
196 if not changed or not self.listcache or not p1 or \
197 self.mapcache[0] != p1:
197 self.mapcache[0] != p1:
198 files = map.keys()
198 files = map.keys()
199 files.sort()
199 files.sort()
200
200
201 self.addlist = ["%s\000%s%s\n" %
201 self.addlist = ["%s\000%s%s\n" %
202 (f, hex(map[f]), flags[f] and "x" or '')
202 (f, hex(map[f]), flags[f] and "x" or '')
203 for f in files]
203 for f in files]
204 cachedelta = None
204 cachedelta = None
205 else:
205 else:
206 addlist = self.listcache[1]
206 addlist = self.listcache[1]
207
207
208 # find the starting offset for each line in the add list
208 # find the starting offset for each line in the add list
209 offsets = calcoffsets(addlist)
209 offsets = calcoffsets(addlist)
210
210
211 # combine the changed lists into one list for sorting
211 # combine the changed lists into one list for sorting
212 work = [[x, 0] for x in changed[0]]
212 work = [[x, 0] for x in changed[0]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
213 work[len(work):] = [[x, 1] for x in changed[1]]
214 work.sort()
214 work.sort()
215
215
216 delta = []
216 delta = []
217 bs = 0
217 bs = 0
218
218
219 for w in work:
219 for w in work:
220 f = w[0]
220 f = w[0]
221 # bs will either be the index of the item or the insert point
221 # bs will either be the index of the item or the insert point
222 bs = bisect.bisect(addlist, f, bs)
222 bs = bisect.bisect(addlist, f, bs)
223 if bs < len(addlist):
223 if bs < len(addlist):
224 fn = addlist[bs][:addlist[bs].index('\0')]
224 fn = addlist[bs][:addlist[bs].index('\0')]
225 else:
225 else:
226 fn = None
226 fn = None
227 if w[1] == 0:
227 if w[1] == 0:
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 flags[f] and "x" or '')
229 flags[f] and "x" or '')
230 else:
230 else:
231 l = None
231 l = None
232 start = bs
232 start = bs
233 if fn != f:
233 if fn != f:
234 # item not found, insert a new one
234 # item not found, insert a new one
235 end = bs
235 end = bs
236 if w[1] == 1:
236 if w[1] == 1:
237 sys.stderr.write("failed to remove %s from manifest\n"
237 sys.stderr.write("failed to remove %s from manifest\n"
238 % f)
238 % f)
239 sys.exit(1)
239 sys.exit(1)
240 else:
240 else:
241 # item is found, replace/delete the existing line
241 # item is found, replace/delete the existing line
242 end = bs + 1
242 end = bs + 1
243 delta.append([start, end, offsets[start], offsets[end], l])
243 delta.append([start, end, offsets[start], offsets[end], l])
244
244
245 self.addlist = addlistdelta(addlist, delta)
245 self.addlist = addlistdelta(addlist, delta)
246 if self.mapcache[0] == self.tip():
246 if self.mapcache[0] == self.tip():
247 cachedelta = "".join(gendelta(delta))
247 cachedelta = "".join(gendelta(delta))
248 else:
248 else:
249 cachedelta = None
249 cachedelta = None
250
250
251 text = "".join(self.addlist)
251 text = "".join(self.addlist)
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 sys.stderr.write("manifest delta failure\n")
253 sys.stderr.write("manifest delta failure\n")
254 sys.exit(1)
254 sys.exit(1)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 self.mapcache = (n, map, flags)
256 self.mapcache = (n, map, flags)
257 self.listcache = (text, self.addlist)
257 self.listcache = (text, self.addlist)
258 self.addlist = None
258 self.addlist = None
259
259
260 return n
260 return n
261
261
262 class changelog(revlog):
262 class changelog(revlog):
263 def __init__(self, opener):
263 def __init__(self, opener):
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265
265
266 def extract(self, text):
266 def extract(self, text):
267 if not text:
267 if not text:
268 return (nullid, "", "0", [], "")
268 return (nullid, "", "0", [], "")
269 last = text.index("\n\n")
269 last = text.index("\n\n")
270 desc = text[last + 2:]
270 desc = text[last + 2:]
271 l = text[:last].splitlines()
271 l = text[:last].splitlines()
272 manifest = bin(l[0])
272 manifest = bin(l[0])
273 user = l[1]
273 user = l[1]
274 date = l[2]
274 date = l[2]
275 files = l[3:]
275 files = l[3:]
276 return (manifest, user, date, files, desc)
276 return (manifest, user, date, files, desc)
277
277
278 def read(self, node):
278 def read(self, node):
279 return self.extract(self.revision(node))
279 return self.extract(self.revision(node))
280
280
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 user=None, date=None):
282 user=None, date=None):
283 date = date or "%d %d" % (time.time(), time.timezone)
283 date = date or "%d %d" % (time.time(), time.timezone)
284 list.sort()
284 list.sort()
285 l = [hex(manifest), user, date] + list + ["", desc]
285 l = [hex(manifest), user, date] + list + ["", desc]
286 text = "\n".join(l)
286 text = "\n".join(l)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
287 return self.addrevision(text, transaction, self.count(), p1, p2)
288
288
289 class dirstate:
289 class dirstate:
290 def __init__(self, opener, ui, root):
290 def __init__(self, opener, ui, root):
291 self.opener = opener
291 self.opener = opener
292 self.root = root
292 self.root = root
293 self.dirty = 0
293 self.dirty = 0
294 self.ui = ui
294 self.ui = ui
295 self.map = None
295 self.map = None
296 self.pl = None
296 self.pl = None
297 self.copies = {}
297 self.copies = {}
298 self.ignorefunc = None
298 self.ignorefunc = None
299
299
300 def wjoin(self, f):
300 def wjoin(self, f):
301 return os.path.join(self.root, f)
301 return os.path.join(self.root, f)
302
302
303 def getcwd(self):
303 def getcwd(self):
304 cwd = os.getcwd()
304 cwd = os.getcwd()
305 if cwd == self.root: return ''
305 if cwd == self.root: return ''
306 return cwd[len(self.root) + 1:]
306 return cwd[len(self.root) + 1:]
307
307
308 def ignore(self, f):
308 def ignore(self, f):
309 if not self.ignorefunc:
309 if not self.ignorefunc:
310 bigpat = []
310 bigpat = []
311 try:
311 try:
312 l = file(self.wjoin(".hgignore"))
312 l = file(self.wjoin(".hgignore"))
313 for pat in l:
313 for pat in l:
314 if pat != "\n":
314 if pat != "\n":
315 p = pat[:-1]
315 p = pat[:-1]
316 try:
316 try:
317 re.compile(p)
317 re.compile(p)
318 except:
318 except:
319 self.ui.warn("ignoring invalid ignore"
319 self.ui.warn("ignoring invalid ignore"
320 + " regular expression '%s'\n" % p)
320 + " regular expression '%s'\n" % p)
321 else:
321 else:
322 bigpat.append(p)
322 bigpat.append(p)
323 except IOError: pass
323 except IOError: pass
324
324
325 if bigpat:
325 if bigpat:
326 s = "(?:%s)" % (")|(?:".join(bigpat))
326 s = "(?:%s)" % (")|(?:".join(bigpat))
327 r = re.compile(s)
327 r = re.compile(s)
328 self.ignorefunc = r.search
328 self.ignorefunc = r.search
329 else:
329 else:
330 self.ignorefunc = util.never
330 self.ignorefunc = util.never
331
331
332 return self.ignorefunc(f)
332 return self.ignorefunc(f)
333
333
334 def __del__(self):
334 def __del__(self):
335 if self.dirty:
335 if self.dirty:
336 self.write()
336 self.write()
337
337
338 def __getitem__(self, key):
338 def __getitem__(self, key):
339 try:
339 try:
340 return self.map[key]
340 return self.map[key]
341 except TypeError:
341 except TypeError:
342 self.read()
342 self.read()
343 return self[key]
343 return self[key]
344
344
345 def __contains__(self, key):
345 def __contains__(self, key):
346 if not self.map: self.read()
346 if not self.map: self.read()
347 return key in self.map
347 return key in self.map
348
348
349 def parents(self):
349 def parents(self):
350 if not self.pl:
350 if not self.pl:
351 self.read()
351 self.read()
352 return self.pl
352 return self.pl
353
353
354 def markdirty(self):
354 def markdirty(self):
355 if not self.dirty:
355 if not self.dirty:
356 self.dirty = 1
356 self.dirty = 1
357
357
358 def setparents(self, p1, p2 = nullid):
358 def setparents(self, p1, p2 = nullid):
359 self.markdirty()
359 self.markdirty()
360 self.pl = p1, p2
360 self.pl = p1, p2
361
361
362 def state(self, key):
362 def state(self, key):
363 try:
363 try:
364 return self[key][0]
364 return self[key][0]
365 except KeyError:
365 except KeyError:
366 return "?"
366 return "?"
367
367
368 def read(self):
368 def read(self):
369 if self.map is not None: return self.map
369 if self.map is not None: return self.map
370
370
371 self.map = {}
371 self.map = {}
372 self.pl = [nullid, nullid]
372 self.pl = [nullid, nullid]
373 try:
373 try:
374 st = self.opener("dirstate").read()
374 st = self.opener("dirstate").read()
375 if not st: return
375 if not st: return
376 except: return
376 except: return
377
377
378 self.pl = [st[:20], st[20: 40]]
378 self.pl = [st[:20], st[20: 40]]
379
379
380 pos = 40
380 pos = 40
381 while pos < len(st):
381 while pos < len(st):
382 e = struct.unpack(">cllll", st[pos:pos+17])
382 e = struct.unpack(">cllll", st[pos:pos+17])
383 l = e[4]
383 l = e[4]
384 pos += 17
384 pos += 17
385 f = st[pos:pos + l]
385 f = st[pos:pos + l]
386 if '\0' in f:
386 if '\0' in f:
387 f, c = f.split('\0')
387 f, c = f.split('\0')
388 self.copies[f] = c
388 self.copies[f] = c
389 self.map[f] = e[:4]
389 self.map[f] = e[:4]
390 pos += l
390 pos += l
391
391
392 def copy(self, source, dest):
392 def copy(self, source, dest):
393 self.read()
393 self.read()
394 self.markdirty()
394 self.markdirty()
395 self.copies[dest] = source
395 self.copies[dest] = source
396
396
397 def copied(self, file):
397 def copied(self, file):
398 return self.copies.get(file, None)
398 return self.copies.get(file, None)
399
399
400 def update(self, files, state, **kw):
400 def update(self, files, state, **kw):
401 ''' current states:
401 ''' current states:
402 n normal
402 n normal
403 m needs merging
403 m needs merging
404 r marked for removal
404 r marked for removal
405 a marked for addition'''
405 a marked for addition'''
406
406
407 if not files: return
407 if not files: return
408 self.read()
408 self.read()
409 self.markdirty()
409 self.markdirty()
410 for f in files:
410 for f in files:
411 if state == "r":
411 if state == "r":
412 self.map[f] = ('r', 0, 0, 0)
412 self.map[f] = ('r', 0, 0, 0)
413 else:
413 else:
414 s = os.stat(os.path.join(self.root, f))
414 s = os.stat(os.path.join(self.root, f))
415 st_size = kw.get('st_size', s.st_size)
415 st_size = kw.get('st_size', s.st_size)
416 st_mtime = kw.get('st_mtime', s.st_mtime)
416 st_mtime = kw.get('st_mtime', s.st_mtime)
417 self.map[f] = (state, s.st_mode, st_size, st_mtime)
417 self.map[f] = (state, s.st_mode, st_size, st_mtime)
418
418
419 def forget(self, files):
419 def forget(self, files):
420 if not files: return
420 if not files: return
421 self.read()
421 self.read()
422 self.markdirty()
422 self.markdirty()
423 for f in files:
423 for f in files:
424 try:
424 try:
425 del self.map[f]
425 del self.map[f]
426 except KeyError:
426 except KeyError:
427 self.ui.warn("not in dirstate: %s!\n" % f)
427 self.ui.warn("not in dirstate: %s!\n" % f)
428 pass
428 pass
429
429
430 def clear(self):
430 def clear(self):
431 self.map = {}
431 self.map = {}
432 self.markdirty()
432 self.markdirty()
433
433
434 def write(self):
434 def write(self):
435 st = self.opener("dirstate", "w")
435 st = self.opener("dirstate", "w")
436 st.write("".join(self.pl))
436 st.write("".join(self.pl))
437 for f, e in self.map.items():
437 for f, e in self.map.items():
438 c = self.copied(f)
438 c = self.copied(f)
439 if c:
439 if c:
440 f = f + "\0" + c
440 f = f + "\0" + c
441 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
441 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
442 st.write(e + f)
442 st.write(e + f)
443 self.dirty = 0
443 self.dirty = 0
444
444
445 def filterfiles(self, files):
445 def filterfiles(self, files):
446 ret = {}
446 ret = {}
447 unknown = []
447 unknown = []
448
448
449 for x in files:
449 for x in files:
450 if x is '.':
450 if x is '.':
451 return self.map.copy()
451 return self.map.copy()
452 if x not in self.map:
452 if x not in self.map:
453 unknown.append(x)
453 unknown.append(x)
454 else:
454 else:
455 ret[x] = self.map[x]
455 ret[x] = self.map[x]
456
456
457 if not unknown:
457 if not unknown:
458 return ret
458 return ret
459
459
460 b = self.map.keys()
460 b = self.map.keys()
461 b.sort()
461 b.sort()
462 blen = len(b)
462 blen = len(b)
463
463
464 for x in unknown:
464 for x in unknown:
465 bs = bisect.bisect(b, x)
465 bs = bisect.bisect(b, x)
466 if bs != 0 and b[bs-1] == x:
466 if bs != 0 and b[bs-1] == x:
467 ret[x] = self.map[x]
467 ret[x] = self.map[x]
468 continue
468 continue
469 while bs < blen:
469 while bs < blen:
470 s = b[bs]
470 s = b[bs]
471 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
471 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
472 ret[s] = self.map[s]
472 ret[s] = self.map[s]
473 else:
473 else:
474 break
474 break
475 bs += 1
475 bs += 1
476 return ret
476 return ret
477
477
478 def walk(self, files = None, match = util.always, dc=None):
478 def walk(self, files = None, match = util.always, dc=None):
479 self.read()
479 self.read()
480
480
481 # walk all files by default
481 # walk all files by default
482 if not files:
482 if not files:
483 files = [self.root]
483 files = [self.root]
484 if not dc:
484 if not dc:
485 dc = self.map.copy()
485 dc = self.map.copy()
486 elif not dc:
486 elif not dc:
487 dc = self.filterfiles(files)
487 dc = self.filterfiles(files)
488
488
489 known = {'.hg': 1}
489 known = {'.hg': 1}
490 def seen(fn):
490 def seen(fn):
491 if fn in known: return True
491 if fn in known: return True
492 known[fn] = 1
492 known[fn] = 1
493 def traverse():
493 def traverse():
494 for ff in util.unique(files):
494 for ff in util.unique(files):
495 f = os.path.join(self.root, ff)
495 f = os.path.join(self.root, ff)
496 try:
496 try:
497 st = os.stat(f)
497 st = os.stat(f)
498 except OSError, inst:
498 except OSError, inst:
499 if ff not in dc: self.ui.warn('%s: %s\n' % (
499 if ff not in dc: self.ui.warn('%s: %s\n' % (
500 util.pathto(self.getcwd(), ff),
500 util.pathto(self.getcwd(), ff),
501 inst.strerror))
501 inst.strerror))
502 continue
502 continue
503 if stat.S_ISDIR(st.st_mode):
503 if stat.S_ISDIR(st.st_mode):
504 for dir, subdirs, fl in os.walk(f):
504 for dir, subdirs, fl in os.walk(f):
505 d = dir[len(self.root) + 1:]
505 d = dir[len(self.root) + 1:]
506 nd = util.normpath(d)
506 nd = util.normpath(d)
507 if nd == '.': nd = ''
507 if nd == '.': nd = ''
508 if seen(nd):
508 if seen(nd):
509 subdirs[:] = []
509 subdirs[:] = []
510 continue
510 continue
511 for sd in subdirs:
511 for sd in subdirs:
512 ds = os.path.join(nd, sd +'/')
512 ds = os.path.join(nd, sd +'/')
513 if self.ignore(ds) or not match(ds):
513 if self.ignore(ds) or not match(ds):
514 subdirs.remove(sd)
514 subdirs.remove(sd)
515 subdirs.sort()
515 subdirs.sort()
516 fl.sort()
516 fl.sort()
517 for fn in fl:
517 for fn in fl:
518 fn = util.pconvert(os.path.join(d, fn))
518 fn = util.pconvert(os.path.join(d, fn))
519 yield 'f', fn
519 yield 'f', fn
520 elif stat.S_ISREG(st.st_mode):
520 elif stat.S_ISREG(st.st_mode):
521 yield 'f', ff
521 yield 'f', ff
522 else:
522 else:
523 kind = 'unknown'
523 kind = 'unknown'
524 if stat.S_ISCHR(st.st_mode): kind = 'character device'
524 if stat.S_ISCHR(st.st_mode): kind = 'character device'
525 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
525 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
526 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
526 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
527 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
527 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
528 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
528 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
529 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
529 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
530 util.pathto(self.getcwd(), ff),
530 util.pathto(self.getcwd(), ff),
531 kind))
531 kind))
532
532
533 ks = dc.keys()
533 ks = dc.keys()
534 ks.sort()
534 ks.sort()
535 for k in ks:
535 for k in ks:
536 yield 'm', k
536 yield 'm', k
537
537
538 # yield only files that match: all in dirstate, others only if
538 # yield only files that match: all in dirstate, others only if
539 # not in .hgignore
539 # not in .hgignore
540
540
541 for src, fn in util.unique(traverse()):
541 for src, fn in util.unique(traverse()):
542 fn = util.normpath(fn)
542 fn = util.normpath(fn)
543 if seen(fn): continue
543 if seen(fn): continue
544 if fn not in dc and self.ignore(fn):
544 if fn not in dc and self.ignore(fn):
545 continue
545 continue
546 if match(fn):
546 if match(fn):
547 yield src, fn
547 yield src, fn
548
548
549 def changes(self, files=None, match=util.always):
549 def changes(self, files=None, match=util.always):
550 self.read()
550 self.read()
551 if not files:
551 if not files:
552 dc = self.map.copy()
552 dc = self.map.copy()
553 else:
553 else:
554 dc = self.filterfiles(files)
554 dc = self.filterfiles(files)
555 lookup, modified, added, unknown = [], [], [], []
555 lookup, modified, added, unknown = [], [], [], []
556 removed, deleted = [], []
556 removed, deleted = [], []
557
557
558 for src, fn in self.walk(files, match, dc=dc):
558 for src, fn in self.walk(files, match, dc=dc):
559 try:
559 try:
560 s = os.stat(os.path.join(self.root, fn))
560 s = os.stat(os.path.join(self.root, fn))
561 except OSError:
561 except OSError:
562 continue
562 continue
563 if not stat.S_ISREG(s.st_mode):
563 if not stat.S_ISREG(s.st_mode):
564 continue
564 continue
565 c = dc.get(fn)
565 c = dc.get(fn)
566 if c:
566 if c:
567 del dc[fn]
567 del dc[fn]
568 if c[0] == 'm':
568 if c[0] == 'm':
569 modified.append(fn)
569 modified.append(fn)
570 elif c[0] == 'a':
570 elif c[0] == 'a':
571 added.append(fn)
571 added.append(fn)
572 elif c[0] == 'r':
572 elif c[0] == 'r':
573 unknown.append(fn)
573 unknown.append(fn)
574 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
574 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
575 modified.append(fn)
575 modified.append(fn)
576 elif c[3] != s.st_mtime:
576 elif c[3] != s.st_mtime:
577 lookup.append(fn)
577 lookup.append(fn)
578 else:
578 else:
579 unknown.append(fn)
579 unknown.append(fn)
580
580
581 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
581 for fn, c in [(fn, c) for fn, c in dc.items() if match(fn)]:
582 if c[0] == 'r':
582 if c[0] == 'r':
583 removed.append(fn)
583 removed.append(fn)
584 else:
584 else:
585 deleted.append(fn)
585 deleted.append(fn)
586 return (lookup, modified, added, removed + deleted, unknown)
586 return (lookup, modified, added, removed + deleted, unknown)
587
587
588 # used to avoid circular references so destructors work
588 # used to avoid circular references so destructors work
589 def opener(base):
589 def opener(base):
590 p = base
590 p = base
591 def o(path, mode="r"):
591 def o(path, mode="r"):
592 if p.startswith("http://"):
592 if p.startswith("http://"):
593 f = os.path.join(p, urllib.quote(path))
593 f = os.path.join(p, urllib.quote(path))
594 return httprangereader.httprangereader(f)
594 return httprangereader.httprangereader(f)
595
595
596 f = os.path.join(p, path)
596 f = os.path.join(p, path)
597
597
598 mode += "b" # for that other OS
598 mode += "b" # for that other OS
599
599
600 if mode[0] != "r":
600 if mode[0] != "r":
601 try:
601 try:
602 s = os.stat(f)
602 s = os.stat(f)
603 except OSError:
603 except OSError:
604 d = os.path.dirname(f)
604 d = os.path.dirname(f)
605 if not os.path.isdir(d):
605 if not os.path.isdir(d):
606 os.makedirs(d)
606 os.makedirs(d)
607 else:
607 else:
608 if s.st_nlink > 1:
608 if s.st_nlink > 1:
609 file(f + ".tmp", "wb").write(file(f, "rb").read())
609 file(f + ".tmp", "wb").write(file(f, "rb").read())
610 util.rename(f+".tmp", f)
610 util.rename(f+".tmp", f)
611
611
612 return file(f, mode)
612 return file(f, mode)
613
613
614 return o
614 return o
615
615
616 class RepoError(Exception): pass
616 class RepoError(Exception): pass
617
617
618 class localrepository:
618 class localrepository:
619 def __init__(self, ui, path=None, create=0):
619 def __init__(self, ui, path=None, create=0):
620 self.remote = 0
620 self.remote = 0
621 if path and path.startswith("http://"):
621 if path and path.startswith("http://"):
622 self.remote = 1
622 self.remote = 1
623 self.path = path
623 self.path = path
624 else:
624 else:
625 if not path:
625 if not path:
626 p = os.getcwd()
626 p = os.getcwd()
627 while not os.path.isdir(os.path.join(p, ".hg")):
627 while not os.path.isdir(os.path.join(p, ".hg")):
628 oldp = p
628 oldp = p
629 p = os.path.dirname(p)
629 p = os.path.dirname(p)
630 if p == oldp: raise RepoError("no repo found")
630 if p == oldp: raise RepoError("no repo found")
631 path = p
631 path = p
632 self.path = os.path.join(path, ".hg")
632 self.path = os.path.join(path, ".hg")
633
633
634 if not create and not os.path.isdir(self.path):
634 if not create and not os.path.isdir(self.path):
635 raise RepoError("repository %s not found" % self.path)
635 raise RepoError("repository %s not found" % self.path)
636
636
637 self.root = path
637 self.root = path
638 self.ui = ui
638 self.ui = ui
639
639
640 if create:
640 if create:
641 os.mkdir(self.path)
641 os.mkdir(self.path)
642 os.mkdir(self.join("data"))
642 os.mkdir(self.join("data"))
643
643
644 self.opener = opener(self.path)
644 self.opener = opener(self.path)
645 self.wopener = opener(self.root)
645 self.wopener = opener(self.root)
646 self.manifest = manifest(self.opener)
646 self.manifest = manifest(self.opener)
647 self.changelog = changelog(self.opener)
647 self.changelog = changelog(self.opener)
648 self.tagscache = None
648 self.tagscache = None
649 self.nodetagscache = None
649 self.nodetagscache = None
650
650
651 if not self.remote:
651 if not self.remote:
652 self.dirstate = dirstate(self.opener, ui, self.root)
652 self.dirstate = dirstate(self.opener, ui, self.root)
653 try:
653 try:
654 self.ui.readconfig(self.opener("hgrc"))
654 self.ui.readconfig(self.opener("hgrc"))
655 except IOError: pass
655 except IOError: pass
656
656
657 def hook(self, name, **args):
657 def hook(self, name, **args):
658 s = self.ui.config("hooks", name)
658 s = self.ui.config("hooks", name)
659 if s:
659 if s:
660 self.ui.note("running hook %s: %s\n" % (name, s))
660 self.ui.note("running hook %s: %s\n" % (name, s))
661 old = {}
661 old = {}
662 for k, v in args.items():
662 for k, v in args.items():
663 k = k.upper()
663 k = k.upper()
664 old[k] = os.environ.get(k, None)
664 old[k] = os.environ.get(k, None)
665 os.environ[k] = v
665 os.environ[k] = v
666
666
667 r = os.system(s)
667 r = os.system(s)
668
668
669 for k, v in old.items():
669 for k, v in old.items():
670 if v != None:
670 if v != None:
671 os.environ[k] = v
671 os.environ[k] = v
672 else:
672 else:
673 del os.environ[k]
673 del os.environ[k]
674
674
675 if r:
675 if r:
676 self.ui.warn("abort: %s hook failed with status %d!\n" %
676 self.ui.warn("abort: %s hook failed with status %d!\n" %
677 (name, r))
677 (name, r))
678 return False
678 return False
679 return True
679 return True
680
680
681 def tags(self):
681 def tags(self):
682 '''return a mapping of tag to node'''
682 '''return a mapping of tag to node'''
683 if not self.tagscache:
683 if not self.tagscache:
684 self.tagscache = {}
684 self.tagscache = {}
685 def addtag(self, k, n):
685 def addtag(self, k, n):
686 try:
686 try:
687 bin_n = bin(n)
687 bin_n = bin(n)
688 except TypeError:
688 except TypeError:
689 bin_n = ''
689 bin_n = ''
690 self.tagscache[k.strip()] = bin_n
690 self.tagscache[k.strip()] = bin_n
691
691
692 try:
692 try:
693 # read each head of the tags file, ending with the tip
693 # read each head of the tags file, ending with the tip
694 # and add each tag found to the map, with "newer" ones
694 # and add each tag found to the map, with "newer" ones
695 # taking precedence
695 # taking precedence
696 fl = self.file(".hgtags")
696 fl = self.file(".hgtags")
697 h = fl.heads()
697 h = fl.heads()
698 h.reverse()
698 h.reverse()
699 for r in h:
699 for r in h:
700 for l in fl.revision(r).splitlines():
700 for l in fl.revision(r).splitlines():
701 if l:
701 if l:
702 n, k = l.split(" ", 1)
702 n, k = l.split(" ", 1)
703 addtag(self, k, n)
703 addtag(self, k, n)
704 except KeyError:
704 except KeyError:
705 pass
705 pass
706
706
707 try:
707 try:
708 f = self.opener("localtags")
708 f = self.opener("localtags")
709 for l in f:
709 for l in f:
710 n, k = l.split(" ", 1)
710 n, k = l.split(" ", 1)
711 addtag(self, k, n)
711 addtag(self, k, n)
712 except IOError:
712 except IOError:
713 pass
713 pass
714
714
715 self.tagscache['tip'] = self.changelog.tip()
715 self.tagscache['tip'] = self.changelog.tip()
716
716
717 return self.tagscache
717 return self.tagscache
718
718
719 def tagslist(self):
719 def tagslist(self):
720 '''return a list of tags ordered by revision'''
720 '''return a list of tags ordered by revision'''
721 l = []
721 l = []
722 for t, n in self.tags().items():
722 for t, n in self.tags().items():
723 try:
723 try:
724 r = self.changelog.rev(n)
724 r = self.changelog.rev(n)
725 except:
725 except:
726 r = -2 # sort to the beginning of the list if unknown
726 r = -2 # sort to the beginning of the list if unknown
727 l.append((r,t,n))
727 l.append((r,t,n))
728 l.sort()
728 l.sort()
729 return [(t,n) for r,t,n in l]
729 return [(t,n) for r,t,n in l]
730
730
731 def nodetags(self, node):
731 def nodetags(self, node):
732 '''return the tags associated with a node'''
732 '''return the tags associated with a node'''
733 if not self.nodetagscache:
733 if not self.nodetagscache:
734 self.nodetagscache = {}
734 self.nodetagscache = {}
735 for t,n in self.tags().items():
735 for t,n in self.tags().items():
736 self.nodetagscache.setdefault(n,[]).append(t)
736 self.nodetagscache.setdefault(n,[]).append(t)
737 return self.nodetagscache.get(node, [])
737 return self.nodetagscache.get(node, [])
738
738
739 def lookup(self, key):
739 def lookup(self, key):
740 try:
740 try:
741 return self.tags()[key]
741 return self.tags()[key]
742 except KeyError:
742 except KeyError:
743 try:
743 try:
744 return self.changelog.lookup(key)
744 return self.changelog.lookup(key)
745 except:
745 except:
746 raise RepoError("unknown revision '%s'" % key)
746 raise RepoError("unknown revision '%s'" % key)
747
747
748 def dev(self):
748 def dev(self):
749 if self.remote: return -1
749 if self.remote: return -1
750 return os.stat(self.path).st_dev
750 return os.stat(self.path).st_dev
751
751
752 def join(self, f):
752 def join(self, f):
753 return os.path.join(self.path, f)
753 return os.path.join(self.path, f)
754
754
755 def wjoin(self, f):
755 def wjoin(self, f):
756 return os.path.join(self.root, f)
756 return os.path.join(self.root, f)
757
757
758 def file(self, f):
758 def file(self, f):
759 if f[0] == '/': f = f[1:]
759 if f[0] == '/': f = f[1:]
760 return filelog(self.opener, f)
760 return filelog(self.opener, f)
761
761
762 def getcwd(self):
762 def getcwd(self):
763 return self.dirstate.getcwd()
763 return self.dirstate.getcwd()
764
764
765 def wfile(self, f, mode='r'):
765 def wfile(self, f, mode='r'):
766 return self.wopener(f, mode)
766 return self.wopener(f, mode)
767
767
768 def transaction(self):
768 def transaction(self):
769 # save dirstate for undo
769 # save dirstate for undo
770 try:
770 try:
771 ds = self.opener("dirstate").read()
771 ds = self.opener("dirstate").read()
772 except IOError:
772 except IOError:
773 ds = ""
773 ds = ""
774 self.opener("journal.dirstate", "w").write(ds)
774 self.opener("journal.dirstate", "w").write(ds)
775
775
776 def after():
776 def after():
777 util.rename(self.join("journal"), self.join("undo"))
777 util.rename(self.join("journal"), self.join("undo"))
778 util.rename(self.join("journal.dirstate"),
778 util.rename(self.join("journal.dirstate"),
779 self.join("undo.dirstate"))
779 self.join("undo.dirstate"))
780
780
781 return transaction.transaction(self.ui.warn, self.opener,
781 return transaction.transaction(self.ui.warn, self.opener,
782 self.join("journal"), after)
782 self.join("journal"), after)
783
783
784 def recover(self):
784 def recover(self):
785 lock = self.lock()
785 lock = self.lock()
786 if os.path.exists(self.join("journal")):
786 if os.path.exists(self.join("journal")):
787 self.ui.status("rolling back interrupted transaction\n")
787 self.ui.status("rolling back interrupted transaction\n")
788 return transaction.rollback(self.opener, self.join("journal"))
788 return transaction.rollback(self.opener, self.join("journal"))
789 else:
789 else:
790 self.ui.warn("no interrupted transaction available\n")
790 self.ui.warn("no interrupted transaction available\n")
791
791
792 def undo(self):
792 def undo(self):
793 lock = self.lock()
793 lock = self.lock()
794 if os.path.exists(self.join("undo")):
794 if os.path.exists(self.join("undo")):
795 self.ui.status("rolling back last transaction\n")
795 self.ui.status("rolling back last transaction\n")
796 transaction.rollback(self.opener, self.join("undo"))
796 transaction.rollback(self.opener, self.join("undo"))
797 self.dirstate = None
797 self.dirstate = None
798 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
798 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
799 self.dirstate = dirstate(self.opener, self.ui, self.root)
799 self.dirstate = dirstate(self.opener, self.ui, self.root)
800 else:
800 else:
801 self.ui.warn("no undo information available\n")
801 self.ui.warn("no undo information available\n")
802
802
803 def lock(self, wait = 1):
803 def lock(self, wait = 1):
804 try:
804 try:
805 return lock.lock(self.join("lock"), 0)
805 return lock.lock(self.join("lock"), 0)
806 except lock.LockHeld, inst:
806 except lock.LockHeld, inst:
807 if wait:
807 if wait:
808 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
808 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
809 return lock.lock(self.join("lock"), wait)
809 return lock.lock(self.join("lock"), wait)
810 raise inst
810 raise inst
811
811
812 def rawcommit(self, files, text, user, date, p1=None, p2=None):
812 def rawcommit(self, files, text, user, date, p1=None, p2=None):
813 orig_parent = self.dirstate.parents()[0] or nullid
813 orig_parent = self.dirstate.parents()[0] or nullid
814 p1 = p1 or self.dirstate.parents()[0] or nullid
814 p1 = p1 or self.dirstate.parents()[0] or nullid
815 p2 = p2 or self.dirstate.parents()[1] or nullid
815 p2 = p2 or self.dirstate.parents()[1] or nullid
816 c1 = self.changelog.read(p1)
816 c1 = self.changelog.read(p1)
817 c2 = self.changelog.read(p2)
817 c2 = self.changelog.read(p2)
818 m1 = self.manifest.read(c1[0])
818 m1 = self.manifest.read(c1[0])
819 mf1 = self.manifest.readflags(c1[0])
819 mf1 = self.manifest.readflags(c1[0])
820 m2 = self.manifest.read(c2[0])
820 m2 = self.manifest.read(c2[0])
821
821
822 if orig_parent == p1:
822 if orig_parent == p1:
823 update_dirstate = 1
823 update_dirstate = 1
824 else:
824 else:
825 update_dirstate = 0
825 update_dirstate = 0
826
826
827 tr = self.transaction()
827 tr = self.transaction()
828 mm = m1.copy()
828 mm = m1.copy()
829 mfm = mf1.copy()
829 mfm = mf1.copy()
830 linkrev = self.changelog.count()
830 linkrev = self.changelog.count()
831 for f in files:
831 for f in files:
832 try:
832 try:
833 t = self.wfile(f).read()
833 t = self.wfile(f).read()
834 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
834 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
835 r = self.file(f)
835 r = self.file(f)
836 mfm[f] = tm
836 mfm[f] = tm
837 mm[f] = r.add(t, {}, tr, linkrev,
837 mm[f] = r.add(t, {}, tr, linkrev,
838 m1.get(f, nullid), m2.get(f, nullid))
838 m1.get(f, nullid), m2.get(f, nullid))
839 if update_dirstate:
839 if update_dirstate:
840 self.dirstate.update([f], "n")
840 self.dirstate.update([f], "n")
841 except IOError:
841 except IOError:
842 try:
842 try:
843 del mm[f]
843 del mm[f]
844 del mfm[f]
844 del mfm[f]
845 if update_dirstate:
845 if update_dirstate:
846 self.dirstate.forget([f])
846 self.dirstate.forget([f])
847 except:
847 except:
848 # deleted from p2?
848 # deleted from p2?
849 pass
849 pass
850
850
851 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
851 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
852 user = user or self.ui.username()
852 user = user or self.ui.username()
853 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
853 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
854 tr.close()
854 tr.close()
855 if update_dirstate:
855 if update_dirstate:
856 self.dirstate.setparents(n, nullid)
856 self.dirstate.setparents(n, nullid)
857
857
858 def commit(self, files = None, text = "", user = None, date = None,
858 def commit(self, files = None, text = "", user = None, date = None,
859 match = util.always, force=False):
859 match = util.always, force=False):
860 commit = []
860 commit = []
861 remove = []
861 remove = []
862 if files:
862 if files:
863 for f in files:
863 for f in files:
864 s = self.dirstate.state(f)
864 s = self.dirstate.state(f)
865 if s in 'nmai':
865 if s in 'nmai':
866 commit.append(f)
866 commit.append(f)
867 elif s == 'r':
867 elif s == 'r':
868 remove.append(f)
868 remove.append(f)
869 else:
869 else:
870 self.ui.warn("%s not tracked!\n" % f)
870 self.ui.warn("%s not tracked!\n" % f)
871 else:
871 else:
872 (c, a, d, u) = self.changes(match = match)
872 (c, a, d, u) = self.changes(match = match)
873 commit = c + a
873 commit = c + a
874 remove = d
874 remove = d
875
875
876 if not commit and not remove and not force:
876 if not commit and not remove and not force:
877 self.ui.status("nothing changed\n")
877 self.ui.status("nothing changed\n")
- 878 return
+ 878 return None
879
879
880 if not self.hook("precommit"):
880 if not self.hook("precommit"):
- 881 return 1
+ 881 return None
882
882
883 p1, p2 = self.dirstate.parents()
883 p1, p2 = self.dirstate.parents()
884 c1 = self.changelog.read(p1)
884 c1 = self.changelog.read(p1)
885 c2 = self.changelog.read(p2)
885 c2 = self.changelog.read(p2)
886 m1 = self.manifest.read(c1[0])
886 m1 = self.manifest.read(c1[0])
887 mf1 = self.manifest.readflags(c1[0])
887 mf1 = self.manifest.readflags(c1[0])
888 m2 = self.manifest.read(c2[0])
888 m2 = self.manifest.read(c2[0])
889 lock = self.lock()
889 lock = self.lock()
890 tr = self.transaction()
890 tr = self.transaction()
891
891
892 # check in files
892 # check in files
893 new = {}
893 new = {}
894 linkrev = self.changelog.count()
894 linkrev = self.changelog.count()
895 commit.sort()
895 commit.sort()
896 for f in commit:
896 for f in commit:
897 self.ui.note(f + "\n")
897 self.ui.note(f + "\n")
898 try:
898 try:
899 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
899 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
900 t = self.wfile(f).read()
900 t = self.wfile(f).read()
901 except IOError:
901 except IOError:
902 self.ui.warn("trouble committing %s!\n" % f)
902 self.ui.warn("trouble committing %s!\n" % f)
903 raise
903 raise
904
904
905 meta = {}
905 meta = {}
906 cp = self.dirstate.copied(f)
906 cp = self.dirstate.copied(f)
907 if cp:
907 if cp:
908 meta["copy"] = cp
908 meta["copy"] = cp
909 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
909 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
910 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
910 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
911
911
912 r = self.file(f)
912 r = self.file(f)
913 fp1 = m1.get(f, nullid)
913 fp1 = m1.get(f, nullid)
914 fp2 = m2.get(f, nullid)
914 fp2 = m2.get(f, nullid)
915 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
915 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
916
916
917 # update manifest
917 # update manifest
918 m1.update(new)
918 m1.update(new)
919 for f in remove:
919 for f in remove:
920 if f in m1:
920 if f in m1:
921 del m1[f]
921 del m1[f]
922 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
922 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
923 (new, remove))
923 (new, remove))
924
924
925 # add changeset
925 # add changeset
926 new = new.keys()
926 new = new.keys()
927 new.sort()
927 new.sort()
928
928
929 if not text:
929 if not text:
930 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
930 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
931 edittext += "".join(["HG: changed %s\n" % f for f in new])
931 edittext += "".join(["HG: changed %s\n" % f for f in new])
932 edittext += "".join(["HG: removed %s\n" % f for f in remove])
932 edittext += "".join(["HG: removed %s\n" % f for f in remove])
933 edittext = self.ui.edit(edittext)
933 edittext = self.ui.edit(edittext)
934 if not edittext.rstrip():
934 if not edittext.rstrip():
- 935 return 1
+ 935 return None
936 text = edittext
936 text = edittext
937
937
938 user = user or self.ui.username()
938 user = user or self.ui.username()
939 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
939 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
- 940
941 tr.close()
940 tr.close()
942
941
943 self.dirstate.setparents(n)
942 self.dirstate.setparents(n)
944 self.dirstate.update(new, "n")
943 self.dirstate.update(new, "n")
945 self.dirstate.forget(remove)
944 self.dirstate.forget(remove)
946
945
947 if not self.hook("commit", node=hex(n)):
946 if not self.hook("commit", node=hex(n)):
- 948 return 1
+ 947 return None
+ 948 return n
949
949
950 def walk(self, node = None, files = [], match = util.always):
950 def walk(self, node = None, files = [], match = util.always):
951 if node:
951 if node:
952 for fn in self.manifest.read(self.changelog.read(node)[0]):
952 for fn in self.manifest.read(self.changelog.read(node)[0]):
953 if match(fn): yield 'm', fn
953 if match(fn): yield 'm', fn
954 else:
954 else:
955 for src, fn in self.dirstate.walk(files, match):
955 for src, fn in self.dirstate.walk(files, match):
956 yield src, fn
956 yield src, fn
957
957
958 def changes(self, node1 = None, node2 = None, files = [],
958 def changes(self, node1 = None, node2 = None, files = [],
959 match = util.always):
959 match = util.always):
960 mf2, u = None, []
960 mf2, u = None, []
961
961
962 def fcmp(fn, mf):
962 def fcmp(fn, mf):
963 t1 = self.wfile(fn).read()
963 t1 = self.wfile(fn).read()
964 t2 = self.file(fn).revision(mf[fn])
964 t2 = self.file(fn).revision(mf[fn])
965 return cmp(t1, t2)
965 return cmp(t1, t2)
966
966
967 def mfmatches(node):
967 def mfmatches(node):
968 mf = dict(self.manifest.read(node))
968 mf = dict(self.manifest.read(node))
969 for fn in mf.keys():
969 for fn in mf.keys():
970 if not match(fn):
970 if not match(fn):
971 del mf[fn]
971 del mf[fn]
972 return mf
972 return mf
973
973
974 # are we comparing the working directory?
974 # are we comparing the working directory?
975 if not node2:
975 if not node2:
976 l, c, a, d, u = self.dirstate.changes(files, match)
976 l, c, a, d, u = self.dirstate.changes(files, match)
977
977
978 # are we comparing working dir against its parent?
978 # are we comparing working dir against its parent?
979 if not node1:
979 if not node1:
980 if l:
980 if l:
981 # do a full compare of any files that might have changed
981 # do a full compare of any files that might have changed
982 change = self.changelog.read(self.dirstate.parents()[0])
982 change = self.changelog.read(self.dirstate.parents()[0])
983 mf2 = mfmatches(change[0])
983 mf2 = mfmatches(change[0])
984 for f in l:
984 for f in l:
985 if fcmp(f, mf2):
985 if fcmp(f, mf2):
986 c.append(f)
986 c.append(f)
987
987
988 for l in c, a, d, u:
988 for l in c, a, d, u:
989 l.sort()
989 l.sort()
990
990
991 return (c, a, d, u)
991 return (c, a, d, u)
992
992
993 # are we comparing working dir against non-tip?
993 # are we comparing working dir against non-tip?
994 # generate a pseudo-manifest for the working dir
994 # generate a pseudo-manifest for the working dir
995 if not node2:
995 if not node2:
996 if not mf2:
996 if not mf2:
997 change = self.changelog.read(self.dirstate.parents()[0])
997 change = self.changelog.read(self.dirstate.parents()[0])
998 mf2 = mfmatches(change[0])
998 mf2 = mfmatches(change[0])
999 for f in a + c + l:
999 for f in a + c + l:
1000 mf2[f] = ""
1000 mf2[f] = ""
1001 for f in d:
1001 for f in d:
1002 if f in mf2: del mf2[f]
1002 if f in mf2: del mf2[f]
1003 else:
1003 else:
1004 change = self.changelog.read(node2)
1004 change = self.changelog.read(node2)
1005 mf2 = mfmatches(change[0])
1005 mf2 = mfmatches(change[0])
1006
1006
1007 # flush lists from dirstate before comparing manifests
1007 # flush lists from dirstate before comparing manifests
1008 c, a = [], []
1008 c, a = [], []
1009
1009
1010 change = self.changelog.read(node1)
1010 change = self.changelog.read(node1)
1011 mf1 = mfmatches(change[0])
1011 mf1 = mfmatches(change[0])
1012
1012
1013 for fn in mf2:
1013 for fn in mf2:
1014 if mf1.has_key(fn):
1014 if mf1.has_key(fn):
1015 if mf1[fn] != mf2[fn]:
1015 if mf1[fn] != mf2[fn]:
1016 if mf2[fn] != "" or fcmp(fn, mf1):
1016 if mf2[fn] != "" or fcmp(fn, mf1):
1017 c.append(fn)
1017 c.append(fn)
1018 del mf1[fn]
1018 del mf1[fn]
1019 else:
1019 else:
1020 a.append(fn)
1020 a.append(fn)
1021
1021
1022 d = mf1.keys()
1022 d = mf1.keys()
1023
1023
1024 for l in c, a, d, u:
1024 for l in c, a, d, u:
1025 l.sort()
1025 l.sort()
1026
1026
1027 return (c, a, d, u)
1027 return (c, a, d, u)
1028
1028
1029 def add(self, list):
1029 def add(self, list):
1030 for f in list:
1030 for f in list:
1031 p = self.wjoin(f)
1031 p = self.wjoin(f)
1032 if not os.path.exists(p):
1032 if not os.path.exists(p):
1033 self.ui.warn("%s does not exist!\n" % f)
1033 self.ui.warn("%s does not exist!\n" % f)
1034 elif not os.path.isfile(p):
1034 elif not os.path.isfile(p):
1035 self.ui.warn("%s not added: only files supported currently\n" % f)
1035 self.ui.warn("%s not added: only files supported currently\n" % f)
1036 elif self.dirstate.state(f) in 'an':
            elif self.dirstate.state(f) in 'an':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not os.path.exists(p):
            self.ui.warn("%s does not exist!\n" % dest)
        elif not os.path.isfile(p):
            self.ui.warn("copy failed: %s is not a file\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        return self.changelog.heads()

    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph: a->b->c->d->e
    #                      \         /
    #                       aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
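    #
    # The returned dict maps each head node to a list of tag names, e.g.
    # {<node of e>: ["2.6.13"]} for the graph above.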
    def branchlookup(self, heads=None, branch=None):
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out

    def branches(self, nodes):
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

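    # For each (top, bottom) pair, walk first parents from top towards
    # bottom and collect the nodes 1, 2, 4, 8, ... steps below top. The
    # discovery code uses these samples to narrow down the boundary
    # between known and unknown history.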
    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

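    # newer returns, in changelog order, the given nodes plus every node
    # that descends from one of them. It scans forward from the lowest
    # given revision and marks a revision as soon as one of its parents
    # is marked.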
    def newer(self, nodes):
        m = {}
        nl = []
        pm = {}
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl

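    # findincoming figures out which changesets the remote has that we
    # don't. It returns a list of root nodes to pull (or None if there is
    # nothing new) and fills the optional 'base' dict with nodes known to
    # be common to both repositories.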
    def findincoming(self, remote, base=None, heads=None):
        m = self.changelog.nodemap
        search = []
        fetch = []
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status("searching for changes\n")

        if not heads:
            heads = remote.heads()

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return None

        rep = {}
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.append(n[1]) # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                              (reqcnt, " ".join(map(short, r))))
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        if b[0] not in m and b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.append(p)
                        base[i] = 1
                    else:
                        self.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                raise RepoError("already have changeset " + short(f[:4]))

        if base.keys() == [nullid]:
            self.ui.warn("warning: pulling from an unrelated repository!\n")

        self.ui.note("adding new changesets starting at " +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug("%d total queries\n" % reqcnt)

        return fetch

    def findoutgoing(self, remote, base=None, heads=None):
        if base == None:
            base = {}
            self.findincoming(remote, base, heads)

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)

        # this is the set of all roots we have to push
        return subset

    def pull(self, remote):
        lock = self.lock()

        # if we have an empty repo, fetch everything
        if self.changelog.tip() == nullid:
            self.ui.status("requesting all changes\n")
            fetch = [nullid]
        else:
            fetch = self.findincoming(remote)

        if not fetch:
            self.ui.status("no changes found\n")
            return 1

        cg = remote.changegroup(fetch)
        return self.addchangegroup(cg)

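    # push aborts (returning 1) if the remote has changesets we have not
    # pulled, or if the push would add new remote heads, unless force is
    # set. Otherwise it sends a changegroup of the outgoing changesets.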
    def push(self, remote, force=False):
        lock = remote.lock()

        base = {}
        heads = remote.heads()
        inc = self.findincoming(remote, base, heads)
        if not force and inc:
            self.ui.warn("abort: unsynced remote changes!\n")
            self.ui.status("(did you forget to sync? use push -f to force)\n")
            return 1

        update = self.findoutgoing(remote, base)
        if not update:
            self.ui.status("no changes found\n")
            return 1
        elif not force:
            if len(heads) < len(self.changelog.heads()):
                self.ui.warn("abort: push creates new remote branches!\n")
                self.ui.status("(did you forget to merge?" +
                               " use push -f to force)\n")
                return 1

        cg = self.changegroup(update)
        return remote.addchangegroup(cg)

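    # The changegroup stream produced below is the changelog group, the
    # manifest group, then for each changed file its name followed by its
    # revision group. The name is framed as a chunk whose 4-byte big-endian
    # length includes the length field itself, e.g. "foo" is sent as
    # struct.pack(">l", 7) + "foo"; a zero length ends the stream.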
    def changegroup(self, basenodes):
        class genread:
            def __init__(self, generator):
                self.g = generator
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    try:
                        self.buf += self.g.next()
                    except StopIteration:
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        def gengroup():
            nodes = self.newer(basenodes)

            # construct the link map
            linkmap = {}
            for n in nodes:
                linkmap[self.changelog.rev(n)] = n

            # construct a list of all changed files
            changed = {}
            for n in nodes:
                c = self.changelog.read(n)
                for f in c[3]:
                    changed[f] = 1
            changed = changed.keys()
            changed.sort()

            # the changegroup is changesets + manifests + all file revs
            revs = [ self.changelog.rev(n) for n in nodes ]

            for y in self.changelog.group(linkmap): yield y
            for y in self.manifest.group(linkmap): yield y
            for f in changed:
                yield struct.pack(">l", len(f) + 4) + f
                g = self.file(f).group(linkmap)
                for y in g:
                    yield y

            yield struct.pack(">l", 0)

        return genread(gengroup())

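    # addchangegroup reads a stream in the format produced by changegroup()
    # above and adds it to the repository in one transaction. It returns
    # None on success and 1 if the "changegroup" hook fails.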
    def addchangegroup(self, source):

        def getchunk():
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            return source.read(l - 4)

        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not source: return
        changesets = files = revisions = 0

        tr = self.transaction()

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        changesets = self.changelog.rev(cn) - self.changelog.rev(co)

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file changes\n")
        while 1:
            f = getchunk()
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        self.ui.status(("added %d changesets" +
                        " with %d changes to %d files\n")
                       % (changesets, revisions, files))

        tr.close()

        if not self.hook("changegroup"):
            return 1

        return

    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes()

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []
        mark = {}

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wfile(f).read()
                    t2 = self.file(f).revision(m2[f])
                    if cmp(t1, t2) == 0:
                        mark[f] = 1
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
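                        # a, b and c are 0/1 exec flags for the ancestor,
                        # working dir and remote versions; the expression
                        # below keeps b where b differs from a, otherwise
                        # it keeps c (e.g. a=0, b=1, c=0 gives mode 1).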
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                    else:
                        mark[f] = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                            mark[f] = 1
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = "d"
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                if n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local created %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = "k"
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "k": get[f] = n
            elif f not in ma:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug("local deleted %s, recreating\n" % f)
                    get[f] = n
                else:
                    self.ui.debug("local deleted %s\n" % f)

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            mode = 'n'
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to merge across branches" +
                               " or -C to lose changes)\n")
                return 1
            # we have to remember what files we needed to get/change
            # because any file that's different from either one of its
            # parents must be in the changeset
            mode = 'm'
            if moddirstate:
                self.dirstate.update(mark.keys(), "m")

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wfile(f, "w").write(t)
            except IOError:
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wfile(f, "w").write(t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                self.dirstate.update([f], mode)

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            m, o, flag = merge[f]
            self.merge3(f, m, o)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if mode == 'm':
                    # only update dirstate on branch merge, otherwise we
                    # could mark files with changes as unchanged
                    self.dirstate.update([f], mode)
                elif p2 == nullid:
                    # update dirstate from parent1's manifest
                    m1n = self.changelog.read(p1)[0]
                    m1 = self.manifest.read(m1n)
                    f_len = len(self.file(f).read(m1[f]))
                    self.dirstate.update([f], mode, st_size=f_len, st_mtime=0)
                else:
                    self.ui.warn("Second parent without branch merge!?\n"
                                 "Dirstate for file %s may be wrong.\n" % f)

        remove.sort()
        for f in remove:
            self.ui.note("removing %s\n" % f)
            try:
                os.unlink(f)
            except OSError, inst:
                self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
            # try removing directories that might now be empty
            try: os.removedirs(os.path.dirname(f))
            except: pass
        if moddirstate:
            if mode == 'n':
                self.dirstate.forget(remove)
            else:
                self.dirstate.update(remove, 'r')

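    # merge3 writes the base and other revisions of fn to temporary files
    # and runs the external merge command ($HGMERGE, the ui "merge" config
    # option, or "hgmerge") as: cmd <working file> <base> <other>.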
    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            f.write(fl.revision(node))
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: other %s ancestor %s\n" %
                      (fn, short(other), short(base)))

        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)

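    # verify walks the changelog, the manifest and every filelog, checking
    # that parents are known, entries unpack, and that changesets,
    # manifests and file revisions all cross-reference each other. It
    # returns 1 if any integrity errors were found.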
    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = 0

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            if n in seen:
                self.ui.warn("duplicate changeset at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    self.ui.warn("changeset %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
                errors += 1

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            if n in seen:
                self.ui.warn("duplicate manifest at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    self.ui.warn("manifest %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                self.ui.warn("aborted")
                sys.exit(0)
            except Exception, inst:
                self.ui.warn("unpacking manifest %s: %s\n"
                             % (short(n), inst))
                errors += 1

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1

        self.ui.status("crosschecking files in changesets and manifests\n")
        for f in filenodes:
            if f not in filelinkrevs:
                self.ui.warn("file %s in manifest but not in changesets\n" % f)
                errors += 1

        for f in filelinkrevs:
            if f not in filenodes:
                self.ui.warn("file %s in changeset but not in manifest\n" % f)
                errors += 1

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    self.ui.warn("%s: duplicate revision %d\n" % (f, i))
                    errors += 1

                if n not in filenodes[f]:
                    self.ui.warn("%s: %d:%s not in manifests\n"
                                 % (f, i, short(n)))
                    errors += 1
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    self.ui.warn("%s:%s points to unexpected changeset %d\n"
                                 % (f, short(n), fl.linkrev(n)))
                    errors += 1
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    self.ui.warn("unpacking file %s %s: %s\n"
                                 % (f, short(n), inst))
                    errors += 1

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 1 %s" %
                                 (f, short(n), short(p1)))
                    errors += 1
                if p2 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 2 %s" %
                                 (f, short(n), short(p2)))
                    errors += 1
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                self.ui.warn("node %s in manifests not in %s\n"
                             % (hex(node), f))
                errors += 1

        self.ui.status("%d files, %d changesets, %d total revisions\n" %
                       (files, changesets, revisions))

        if errors:
            self.ui.warn("%d integrity errors encountered!\n" % errors)
            return 1

class httprepository:
    def __init__(self, ui, path):
        # fix missing / after hostname
        s = urlparse.urlsplit(path)
        partial = s[2]
        if not partial: partial = "/"
        self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
        self.ui = ui
        no_list = [ "localhost", "127.0.0.1" ]
        host = ui.config("http_proxy", "host")
        if host is None:
            host = os.environ.get("http_proxy")
        if host and host.startswith('http://'):
            host = host[7:]
        user = ui.config("http_proxy", "user")
        passwd = ui.config("http_proxy", "passwd")
        no = ui.config("http_proxy", "no")
        if no is None:
            no = os.environ.get("no_proxy")
        if no:
            no_list = no_list + no.split(",")

        no_proxy = 0
        for h in no_list:
            if (path.startswith("http://" + h + "/") or
                path.startswith("http://" + h + ":") or
                path == "http://" + h):
                no_proxy = 1

        # Note: urllib2 takes proxy values from the environment and those will
        # take precedence
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if os.environ.has_key(env):
                    del os.environ[env]
            except OSError:
                pass

        proxy_handler = urllib2.BaseHandler()
        if host and not no_proxy:
            proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})

        authinfo = None
        if user and passwd:
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, host, user, passwd)
            authinfo = urllib2.ProxyBasicAuthHandler(passmgr)

        opener = urllib2.build_opener(proxy_handler, authinfo)
        urllib2.install_opener(opener)

    def dev(self):
        return -1

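    # Commands are issued as HTTP GET requests against self.url with the
    # command name and arguments passed as query parameters, e.g.
    # ?cmd=branches&nodes=<hex nodes>. The response content-type must be
    # application/mercurial-0.1 (or the older text/plain or
    # application/hg-changegroup types).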
    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = urllib.urlencode(q)
        cu = "%s?%s" % (self.url, qs)
        resp = urllib2.urlopen(cu)
        proto = resp.headers['content-type']

        # accept old "text/plain" and "application/hg-changegroup" for now
        if not proto.startswith('application/mercurial') and \
           not proto.startswith('text/plain') and \
           not proto.startswith('application/hg-changegroup'):
            raise RepoError("'%s' does not appear to be an hg repository"
                            % self.url)

        if proto.startswith('application/mercurial'):
            version = proto[22:]
            if float(version) > 0.1:
                raise RepoError("'%s' uses newer protocol %s" %
                                (self.url, version))

        return resp

    def heads(self):
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.do_cmd("branches", nodes=n).read()
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=n).read()
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

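    # The changegroup command streams the group zlib-compressed over HTTP;
    # zread decompresses it incrementally so addchangegroup can read it
    # like an ordinary file object.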
2045 def changegroup(self, nodes):
2045 def changegroup(self, nodes):
2046 n = " ".join(map(hex, nodes))
2046 n = " ".join(map(hex, nodes))
2047 f = self.do_cmd("changegroup", roots=n)
2047 f = self.do_cmd("changegroup", roots=n)
2048 bytes = 0
2048 bytes = 0
2049
2049
2050 class zread:
2050 class zread:
2051 def __init__(self, f):
2051 def __init__(self, f):
2052 self.zd = zlib.decompressobj()
2052 self.zd = zlib.decompressobj()
2053 self.f = f
2053 self.f = f
2054 self.buf = ""
2054 self.buf = ""
2055 def read(self, l):
2055 def read(self, l):
2056 while l > len(self.buf):
2056 while l > len(self.buf):
2057 r = self.f.read(4096)
2057 r = self.f.read(4096)
2058 if r:
2058 if r:
2059 self.buf += self.zd.decompress(r)
2059 self.buf += self.zd.decompress(r)
2060 else:
2060 else:
2061 self.buf += self.zd.flush()
2061 self.buf += self.zd.flush()
2062 break
2062 break
2063 d, self.buf = self.buf[:l], self.buf[l:]
2063 d, self.buf = self.buf[:l], self.buf[l:]
2064 return d
2064 return d
2065
2065
2066 return zread(f)
2066 return zread(f)
2067
2067
2068 class remotelock:
2068 class remotelock:
2069 def __init__(self, repo):
2069 def __init__(self, repo):
2070 self.repo = repo
2070 self.repo = repo
2071 def release(self):
2071 def release(self):
2072 self.repo.unlock()
2072 self.repo.unlock()
2073 self.repo = None
2073 self.repo = None
2074 def __del__(self):
2074 def __del__(self):
2075 if self.repo:
2075 if self.repo:
2076 self.release()
2076 self.release()
2077
2077
2078 class sshrepository:
2078 class sshrepository:
2079 def __init__(self, ui, path):
2079 def __init__(self, ui, path):
2080 self.url = path
2080 self.url = path
2081 self.ui = ui
2081 self.ui = ui
2082
2082
2083 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2083 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))', path)
2084 if not m:
2084 if not m:
2085 raise RepoError("couldn't parse destination %s" % path)
2085 raise RepoError("couldn't parse destination %s" % path)
2086
2086
2087 self.user = m.group(2)
2087 self.user = m.group(2)
2088 self.host = m.group(3)
2088 self.host = m.group(3)
2089 self.port = m.group(5)
2089 self.port = m.group(5)
2090 self.path = m.group(7)
2090 self.path = m.group(7)
2091
2091
2092 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2092 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
2093 args = self.port and ("%s -p %s") % (args, self.port) or args
2093 args = self.port and ("%s -p %s") % (args, self.port) or args
2094 path = self.path or ""
2094 path = self.path or ""
2095
2095
2096 if not path:
2096 if not path:
2097 raise RepoError("no remote repository path specified")
2097 raise RepoError("no remote repository path specified")
2098
2098
2099 cmd = "ssh %s 'hg -R %s serve --stdio'"
2099 cmd = "ssh %s 'hg -R %s serve --stdio'"
2100 cmd = cmd % (args, path)
2100 cmd = cmd % (args, path)
2101
2101
2102 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2102 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
2103
2103
    def readerr(self):
        while 1:
            r, w, x = select.select([self.pipee], [], [], 0)
            if not r: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status("remote: ", l)

    def __del__(self):
        try:
            self.pipeo.close()
            self.pipei.close()
            for l in self.pipee:
                self.ui.status("remote: ", l)
            self.pipee.close()
        except:
            pass

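    # readerr() polls the child's stderr with a zero-timeout select and
    # forwards any complete lines as "remote: ..." status output, so remote
    # diagnostics are surfaced without blocking the protocol conversation.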
    def dev(self):
        return -1

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

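    # A request is the command name on its own line followed by one
    # "name length" line plus the raw value for each keyword argument.
    # do_cmd("branches", nodes=n), for example, writes:
    #
    #     branches\n
    #     nodes <len(n)>\n
    #     <the bytes of n>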
    def call(self, cmd, **args):
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise RepoError("unexpected response '%s'" % l)
        return r.read(l)

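    # Replies are length-prefixed: the server sends the payload size in
    # decimal on one line, then exactly that many bytes; anything that is
    # not a decimal size line is reported as an unexpected response.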
    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise RepoError("unexpected response '%s'" % (d[:400] + "..."))

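    # heads, branches and between all decode plain-text replies back into
    # binary nodes: heads is one space-separated list of hex ids, branches
    # one space-separated tuple of hex ids per line, and between one
    # (possibly empty) space-separated list per requested pair.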
    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return self.pipei

    def addchangegroup(self, cg):
        d = self.call("addchangegroup")
        if d:
            raise RepoError("push refused: %s" % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        return self.pipei.read(l) != ""
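    # A push first asks the server to accept a changegroup (any non-empty
    # reply is a refusal), then streams the bundle in 4k chunks, and finally
    # reads one more length-prefixed reply, returning whether it was
    # non-empty.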

def repository(ui, path=None, create=0):
    if path:
        if path.startswith("http://"):
            return httprepository(ui, path)
        if path.startswith("hg://"):
            return httprepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
        if path.startswith("ssh://"):
            return sshrepository(ui, path)

    return localrepository(ui, path, create)
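# repository() picks the repository class from the path scheme: http:// and
# hg:// yield an httprepository, ssh:// an sshrepository, old-http:// a
# localrepository read over plain HTTP, and anything else a localrepository
# on the local filesystem.  A minimal sketch (paths are placeholders, `ui`
# an existing ui object):
#
#     local = repository(ui, "/home/user/src/project")
#     remote = repository(ui, "ssh://hg@example.com/repos/project")
#     rheads = remote.heads()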