Created a class in util called chunkbuffer that buffers reads from an...
Eric Hopper
r1199:78ceaf83 default
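The changeset replaces the inline genread helper (removed in the diff below) with util.chunkbuffer, a reusable file-like wrapper over a generator of string chunks. As a rough orientation, here is a minimal, modernized sketch of that idea written against current Python rather than the 2005-era code in the diff; the real util.chunkbuffer introduced by this revision may differ in naming and buffering details.

class chunkbuffer(object):
    """File-like read(n) over an iterator of byte-string chunks.

    Hypothetical sketch only; it mirrors the genread class this
    changeset removes from localrepo.changegroup().
    """
    def __init__(self, gen):
        self.gen = iter(gen)   # generator yielding byte strings
        self.buf = b""

    def read(self, l):
        # pull chunks from the generator until the request can be
        # satisfied, or the generator is exhausted
        while l > len(self.buf):
            chunk = next(self.gen, None)
            if chunk is None:
                break
            self.buf += chunk
        data, self.buf = self.buf[:l], self.buf[l:]
        return data

# example: addchangegroup() can call read(4) / read(l - 4) on the wrapped
# changegroup generator exactly as it does on a real file object
cb = chunkbuffer(iter([b"abcd", b"ef"]))
assert cb.read(4) == b"abcd"
assert cb.read(10) == b"ef"

Moving the buffer into util presumably lets other consumers of changegroup streams reuse the same read() semantics instead of each defining a local genread-style class.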
@@ -1,1404 +1,1390 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock transaction tempfile stat mdiff")
12 demandload(globals(), "re lock transaction tempfile stat mdiff")
13
13
14 class localrepository:
14 class localrepository:
15 def __init__(self, ui, path=None, create=0):
15 def __init__(self, ui, path=None, create=0):
16 if not path:
16 if not path:
17 p = os.getcwd()
17 p = os.getcwd()
18 while not os.path.isdir(os.path.join(p, ".hg")):
18 while not os.path.isdir(os.path.join(p, ".hg")):
19 oldp = p
19 oldp = p
20 p = os.path.dirname(p)
20 p = os.path.dirname(p)
21 if p == oldp: raise repo.RepoError("no repo found")
21 if p == oldp: raise repo.RepoError("no repo found")
22 path = p
22 path = p
23 self.path = os.path.join(path, ".hg")
23 self.path = os.path.join(path, ".hg")
24
24
25 if not create and not os.path.isdir(self.path):
25 if not create and not os.path.isdir(self.path):
26 raise repo.RepoError("repository %s not found" % self.path)
26 raise repo.RepoError("repository %s not found" % self.path)
27
27
28 self.root = os.path.abspath(path)
28 self.root = os.path.abspath(path)
29 self.ui = ui
29 self.ui = ui
30 self.opener = util.opener(self.path)
30 self.opener = util.opener(self.path)
31 self.wopener = util.opener(self.root)
31 self.wopener = util.opener(self.root)
32 self.manifest = manifest.manifest(self.opener)
32 self.manifest = manifest.manifest(self.opener)
33 self.changelog = changelog.changelog(self.opener)
33 self.changelog = changelog.changelog(self.opener)
34 self.tagscache = None
34 self.tagscache = None
35 self.nodetagscache = None
35 self.nodetagscache = None
36
36
37 if create:
37 if create:
38 os.mkdir(self.path)
38 os.mkdir(self.path)
39 os.mkdir(self.join("data"))
39 os.mkdir(self.join("data"))
40 f = self.opener("hgrc", "w")
40 f = self.opener("hgrc", "w")
41 f.write("[web]\n")
41 f.write("[web]\n")
42 f.write("contact = %s\n" % ui.shortuser(ui.username()))
42 f.write("contact = %s\n" % ui.shortuser(ui.username()))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.opener("hgrc"))
46 self.ui.readconfig(self.opener("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 s = self.ui.config("hooks", name)
50 s = self.ui.config("hooks", name)
51 if s:
51 if s:
52 self.ui.note("running hook %s: %s\n" % (name, s))
52 self.ui.note("running hook %s: %s\n" % (name, s))
53 old = {}
53 old = {}
54 for k, v in args.items():
54 for k, v in args.items():
55 k = k.upper()
55 k = k.upper()
56 old[k] = os.environ.get(k, None)
56 old[k] = os.environ.get(k, None)
57 os.environ[k] = v
57 os.environ[k] = v
58
58
59 r = os.system(s)
59 r = os.system(s)
60
60
61 for k, v in old.items():
61 for k, v in old.items():
62 if v != None:
62 if v != None:
63 os.environ[k] = v
63 os.environ[k] = v
64 else:
64 else:
65 del os.environ[k]
65 del os.environ[k]
66
66
67 if r:
67 if r:
68 self.ui.warn("abort: %s hook failed with status %d!\n" %
68 self.ui.warn("abort: %s hook failed with status %d!\n" %
69 (name, r))
69 (name, r))
70 return False
70 return False
71 return True
71 return True
72
72
73 def tags(self):
73 def tags(self):
74 '''return a mapping of tag to node'''
74 '''return a mapping of tag to node'''
75 if not self.tagscache:
75 if not self.tagscache:
76 self.tagscache = {}
76 self.tagscache = {}
77 def addtag(self, k, n):
77 def addtag(self, k, n):
78 try:
78 try:
79 bin_n = bin(n)
79 bin_n = bin(n)
80 except TypeError:
80 except TypeError:
81 bin_n = ''
81 bin_n = ''
82 self.tagscache[k.strip()] = bin_n
82 self.tagscache[k.strip()] = bin_n
83
83
84 try:
84 try:
85 # read each head of the tags file, ending with the tip
85 # read each head of the tags file, ending with the tip
86 # and add each tag found to the map, with "newer" ones
86 # and add each tag found to the map, with "newer" ones
87 # taking precedence
87 # taking precedence
88 fl = self.file(".hgtags")
88 fl = self.file(".hgtags")
89 h = fl.heads()
89 h = fl.heads()
90 h.reverse()
90 h.reverse()
91 for r in h:
91 for r in h:
92 for l in fl.read(r).splitlines():
92 for l in fl.read(r).splitlines():
93 if l:
93 if l:
94 n, k = l.split(" ", 1)
94 n, k = l.split(" ", 1)
95 addtag(self, k, n)
95 addtag(self, k, n)
96 except KeyError:
96 except KeyError:
97 pass
97 pass
98
98
99 try:
99 try:
100 f = self.opener("localtags")
100 f = self.opener("localtags")
101 for l in f:
101 for l in f:
102 n, k = l.split(" ", 1)
102 n, k = l.split(" ", 1)
103 addtag(self, k, n)
103 addtag(self, k, n)
104 except IOError:
104 except IOError:
105 pass
105 pass
106
106
107 self.tagscache['tip'] = self.changelog.tip()
107 self.tagscache['tip'] = self.changelog.tip()
108
108
109 return self.tagscache
109 return self.tagscache
110
110
111 def tagslist(self):
111 def tagslist(self):
112 '''return a list of tags ordered by revision'''
112 '''return a list of tags ordered by revision'''
113 l = []
113 l = []
114 for t, n in self.tags().items():
114 for t, n in self.tags().items():
115 try:
115 try:
116 r = self.changelog.rev(n)
116 r = self.changelog.rev(n)
117 except:
117 except:
118 r = -2 # sort to the beginning of the list if unknown
118 r = -2 # sort to the beginning of the list if unknown
119 l.append((r,t,n))
119 l.append((r,t,n))
120 l.sort()
120 l.sort()
121 return [(t,n) for r,t,n in l]
121 return [(t,n) for r,t,n in l]
122
122
123 def nodetags(self, node):
123 def nodetags(self, node):
124 '''return the tags associated with a node'''
124 '''return the tags associated with a node'''
125 if not self.nodetagscache:
125 if not self.nodetagscache:
126 self.nodetagscache = {}
126 self.nodetagscache = {}
127 for t,n in self.tags().items():
127 for t,n in self.tags().items():
128 self.nodetagscache.setdefault(n,[]).append(t)
128 self.nodetagscache.setdefault(n,[]).append(t)
129 return self.nodetagscache.get(node, [])
129 return self.nodetagscache.get(node, [])
130
130
131 def lookup(self, key):
131 def lookup(self, key):
132 try:
132 try:
133 return self.tags()[key]
133 return self.tags()[key]
134 except KeyError:
134 except KeyError:
135 try:
135 try:
136 return self.changelog.lookup(key)
136 return self.changelog.lookup(key)
137 except:
137 except:
138 raise repo.RepoError("unknown revision '%s'" % key)
138 raise repo.RepoError("unknown revision '%s'" % key)
139
139
140 def dev(self):
140 def dev(self):
141 return os.stat(self.path).st_dev
141 return os.stat(self.path).st_dev
142
142
143 def local(self):
143 def local(self):
144 return True
144 return True
145
145
146 def join(self, f):
146 def join(self, f):
147 return os.path.join(self.path, f)
147 return os.path.join(self.path, f)
148
148
149 def wjoin(self, f):
149 def wjoin(self, f):
150 return os.path.join(self.root, f)
150 return os.path.join(self.root, f)
151
151
152 def file(self, f):
152 def file(self, f):
153 if f[0] == '/': f = f[1:]
153 if f[0] == '/': f = f[1:]
154 return filelog.filelog(self.opener, f)
154 return filelog.filelog(self.opener, f)
155
155
156 def getcwd(self):
156 def getcwd(self):
157 return self.dirstate.getcwd()
157 return self.dirstate.getcwd()
158
158
159 def wfile(self, f, mode='r'):
159 def wfile(self, f, mode='r'):
160 return self.wopener(f, mode)
160 return self.wopener(f, mode)
161
161
162 def wread(self, filename):
162 def wread(self, filename):
163 return self.wopener(filename, 'r').read()
163 return self.wopener(filename, 'r').read()
164
164
165 def wwrite(self, filename, data, fd=None):
165 def wwrite(self, filename, data, fd=None):
166 if fd:
166 if fd:
167 return fd.write(data)
167 return fd.write(data)
168 return self.wopener(filename, 'w').write(data)
168 return self.wopener(filename, 'w').write(data)
169
169
170 def transaction(self):
170 def transaction(self):
171 # save dirstate for undo
171 # save dirstate for undo
172 try:
172 try:
173 ds = self.opener("dirstate").read()
173 ds = self.opener("dirstate").read()
174 except IOError:
174 except IOError:
175 ds = ""
175 ds = ""
176 self.opener("journal.dirstate", "w").write(ds)
176 self.opener("journal.dirstate", "w").write(ds)
177
177
178 def after():
178 def after():
179 util.rename(self.join("journal"), self.join("undo"))
179 util.rename(self.join("journal"), self.join("undo"))
180 util.rename(self.join("journal.dirstate"),
180 util.rename(self.join("journal.dirstate"),
181 self.join("undo.dirstate"))
181 self.join("undo.dirstate"))
182
182
183 return transaction.transaction(self.ui.warn, self.opener,
183 return transaction.transaction(self.ui.warn, self.opener,
184 self.join("journal"), after)
184 self.join("journal"), after)
185
185
186 def recover(self):
186 def recover(self):
187 lock = self.lock()
187 lock = self.lock()
188 if os.path.exists(self.join("journal")):
188 if os.path.exists(self.join("journal")):
189 self.ui.status("rolling back interrupted transaction\n")
189 self.ui.status("rolling back interrupted transaction\n")
190 return transaction.rollback(self.opener, self.join("journal"))
190 return transaction.rollback(self.opener, self.join("journal"))
191 else:
191 else:
192 self.ui.warn("no interrupted transaction available\n")
192 self.ui.warn("no interrupted transaction available\n")
193
193
194 def undo(self):
194 def undo(self):
195 lock = self.lock()
195 lock = self.lock()
196 if os.path.exists(self.join("undo")):
196 if os.path.exists(self.join("undo")):
197 self.ui.status("rolling back last transaction\n")
197 self.ui.status("rolling back last transaction\n")
198 transaction.rollback(self.opener, self.join("undo"))
198 transaction.rollback(self.opener, self.join("undo"))
199 self.dirstate = None
199 self.dirstate = None
200 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
200 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
201 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
201 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
202 else:
202 else:
203 self.ui.warn("no undo information available\n")
203 self.ui.warn("no undo information available\n")
204
204
205 def lock(self, wait=1):
205 def lock(self, wait=1):
206 try:
206 try:
207 return lock.lock(self.join("lock"), 0)
207 return lock.lock(self.join("lock"), 0)
208 except lock.LockHeld, inst:
208 except lock.LockHeld, inst:
209 if wait:
209 if wait:
210 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
210 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
211 return lock.lock(self.join("lock"), wait)
211 return lock.lock(self.join("lock"), wait)
212 raise inst
212 raise inst
213
213
214 def rawcommit(self, files, text, user, date, p1=None, p2=None):
214 def rawcommit(self, files, text, user, date, p1=None, p2=None):
215 orig_parent = self.dirstate.parents()[0] or nullid
215 orig_parent = self.dirstate.parents()[0] or nullid
216 p1 = p1 or self.dirstate.parents()[0] or nullid
216 p1 = p1 or self.dirstate.parents()[0] or nullid
217 p2 = p2 or self.dirstate.parents()[1] or nullid
217 p2 = p2 or self.dirstate.parents()[1] or nullid
218 c1 = self.changelog.read(p1)
218 c1 = self.changelog.read(p1)
219 c2 = self.changelog.read(p2)
219 c2 = self.changelog.read(p2)
220 m1 = self.manifest.read(c1[0])
220 m1 = self.manifest.read(c1[0])
221 mf1 = self.manifest.readflags(c1[0])
221 mf1 = self.manifest.readflags(c1[0])
222 m2 = self.manifest.read(c2[0])
222 m2 = self.manifest.read(c2[0])
223 changed = []
223 changed = []
224
224
225 if orig_parent == p1:
225 if orig_parent == p1:
226 update_dirstate = 1
226 update_dirstate = 1
227 else:
227 else:
228 update_dirstate = 0
228 update_dirstate = 0
229
229
230 tr = self.transaction()
230 tr = self.transaction()
231 mm = m1.copy()
231 mm = m1.copy()
232 mfm = mf1.copy()
232 mfm = mf1.copy()
233 linkrev = self.changelog.count()
233 linkrev = self.changelog.count()
234 for f in files:
234 for f in files:
235 try:
235 try:
236 t = self.wread(f)
236 t = self.wread(f)
237 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
237 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
238 r = self.file(f)
238 r = self.file(f)
239 mfm[f] = tm
239 mfm[f] = tm
240
240
241 fp1 = m1.get(f, nullid)
241 fp1 = m1.get(f, nullid)
242 fp2 = m2.get(f, nullid)
242 fp2 = m2.get(f, nullid)
243
243
244 # is the same revision on two branches of a merge?
244 # is the same revision on two branches of a merge?
245 if fp2 == fp1:
245 if fp2 == fp1:
246 fp2 = nullid
246 fp2 = nullid
247
247
248 if fp2 != nullid:
248 if fp2 != nullid:
249 # is one parent an ancestor of the other?
249 # is one parent an ancestor of the other?
250 fpa = r.ancestor(fp1, fp2)
250 fpa = r.ancestor(fp1, fp2)
251 if fpa == fp1:
251 if fpa == fp1:
252 fp1, fp2 = fp2, nullid
252 fp1, fp2 = fp2, nullid
253 elif fpa == fp2:
253 elif fpa == fp2:
254 fp2 = nullid
254 fp2 = nullid
255
255
256 # is the file unmodified from the parent?
256 # is the file unmodified from the parent?
257 if t == r.read(fp1):
257 if t == r.read(fp1):
258 # record the proper existing parent in manifest
258 # record the proper existing parent in manifest
259 # no need to add a revision
259 # no need to add a revision
260 mm[f] = fp1
260 mm[f] = fp1
261 continue
261 continue
262
262
263 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
263 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
264 changed.append(f)
264 changed.append(f)
265 if update_dirstate:
265 if update_dirstate:
266 self.dirstate.update([f], "n")
266 self.dirstate.update([f], "n")
267 except IOError:
267 except IOError:
268 try:
268 try:
269 del mm[f]
269 del mm[f]
270 del mfm[f]
270 del mfm[f]
271 if update_dirstate:
271 if update_dirstate:
272 self.dirstate.forget([f])
272 self.dirstate.forget([f])
273 except:
273 except:
274 # deleted from p2?
274 # deleted from p2?
275 pass
275 pass
276
276
277 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
277 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
278 user = user or self.ui.username()
278 user = user or self.ui.username()
279 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
279 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
280 tr.close()
280 tr.close()
281 if update_dirstate:
281 if update_dirstate:
282 self.dirstate.setparents(n, nullid)
282 self.dirstate.setparents(n, nullid)
283
283
284 def commit(self, files = None, text = "", user = None, date = None,
284 def commit(self, files = None, text = "", user = None, date = None,
285 match = util.always, force=False):
285 match = util.always, force=False):
286 commit = []
286 commit = []
287 remove = []
287 remove = []
288 changed = []
288 changed = []
289
289
290 if files:
290 if files:
291 for f in files:
291 for f in files:
292 s = self.dirstate.state(f)
292 s = self.dirstate.state(f)
293 if s in 'nmai':
293 if s in 'nmai':
294 commit.append(f)
294 commit.append(f)
295 elif s == 'r':
295 elif s == 'r':
296 remove.append(f)
296 remove.append(f)
297 else:
297 else:
298 self.ui.warn("%s not tracked!\n" % f)
298 self.ui.warn("%s not tracked!\n" % f)
299 else:
299 else:
300 (c, a, d, u) = self.changes(match=match)
300 (c, a, d, u) = self.changes(match=match)
301 commit = c + a
301 commit = c + a
302 remove = d
302 remove = d
303
303
304 p1, p2 = self.dirstate.parents()
304 p1, p2 = self.dirstate.parents()
305 c1 = self.changelog.read(p1)
305 c1 = self.changelog.read(p1)
306 c2 = self.changelog.read(p2)
306 c2 = self.changelog.read(p2)
307 m1 = self.manifest.read(c1[0])
307 m1 = self.manifest.read(c1[0])
308 mf1 = self.manifest.readflags(c1[0])
308 mf1 = self.manifest.readflags(c1[0])
309 m2 = self.manifest.read(c2[0])
309 m2 = self.manifest.read(c2[0])
310
310
311 if not commit and not remove and not force and p2 == nullid:
311 if not commit and not remove and not force and p2 == nullid:
312 self.ui.status("nothing changed\n")
312 self.ui.status("nothing changed\n")
313 return None
313 return None
314
314
315 if not self.hook("precommit"):
315 if not self.hook("precommit"):
316 return None
316 return None
317
317
318 lock = self.lock()
318 lock = self.lock()
319 tr = self.transaction()
319 tr = self.transaction()
320
320
321 # check in files
321 # check in files
322 new = {}
322 new = {}
323 linkrev = self.changelog.count()
323 linkrev = self.changelog.count()
324 commit.sort()
324 commit.sort()
325 for f in commit:
325 for f in commit:
326 self.ui.note(f + "\n")
326 self.ui.note(f + "\n")
327 try:
327 try:
328 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
328 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
329 t = self.wread(f)
329 t = self.wread(f)
330 except IOError:
330 except IOError:
331 self.ui.warn("trouble committing %s!\n" % f)
331 self.ui.warn("trouble committing %s!\n" % f)
332 raise
332 raise
333
333
334 r = self.file(f)
334 r = self.file(f)
335
335
336 meta = {}
336 meta = {}
337 cp = self.dirstate.copied(f)
337 cp = self.dirstate.copied(f)
338 if cp:
338 if cp:
339 meta["copy"] = cp
339 meta["copy"] = cp
340 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
340 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
341 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
341 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
342 fp1, fp2 = nullid, nullid
342 fp1, fp2 = nullid, nullid
343 else:
343 else:
344 fp1 = m1.get(f, nullid)
344 fp1 = m1.get(f, nullid)
345 fp2 = m2.get(f, nullid)
345 fp2 = m2.get(f, nullid)
346
346
347 # is the same revision on two branches of a merge?
347 # is the same revision on two branches of a merge?
348 if fp2 == fp1:
348 if fp2 == fp1:
349 fp2 = nullid
349 fp2 = nullid
350
350
351 if fp2 != nullid:
351 if fp2 != nullid:
352 # is one parent an ancestor of the other?
352 # is one parent an ancestor of the other?
353 fpa = r.ancestor(fp1, fp2)
353 fpa = r.ancestor(fp1, fp2)
354 if fpa == fp1:
354 if fpa == fp1:
355 fp1, fp2 = fp2, nullid
355 fp1, fp2 = fp2, nullid
356 elif fpa == fp2:
356 elif fpa == fp2:
357 fp2 = nullid
357 fp2 = nullid
358
358
359 # is the file unmodified from the parent?
359 # is the file unmodified from the parent?
360 if not meta and t == r.read(fp1):
360 if not meta and t == r.read(fp1):
361 # record the proper existing parent in manifest
361 # record the proper existing parent in manifest
362 # no need to add a revision
362 # no need to add a revision
363 new[f] = fp1
363 new[f] = fp1
364 continue
364 continue
365
365
366 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
366 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
367 # remember what we've added so that we can later calculate
367 # remember what we've added so that we can later calculate
368 # the files to pull from a set of changesets
368 # the files to pull from a set of changesets
369 changed.append(f)
369 changed.append(f)
370
370
371 # update manifest
371 # update manifest
372 m1.update(new)
372 m1.update(new)
373 for f in remove:
373 for f in remove:
374 if f in m1:
374 if f in m1:
375 del m1[f]
375 del m1[f]
376 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
376 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
377 (new, remove))
377 (new, remove))
378
378
379 # add changeset
379 # add changeset
380 new = new.keys()
380 new = new.keys()
381 new.sort()
381 new.sort()
382
382
383 if not text:
383 if not text:
384 edittext = ""
384 edittext = ""
385 if p2 != nullid:
385 if p2 != nullid:
386 edittext += "HG: branch merge\n"
386 edittext += "HG: branch merge\n"
387 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
387 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
388 edittext += "".join(["HG: changed %s\n" % f for f in changed])
388 edittext += "".join(["HG: changed %s\n" % f for f in changed])
389 edittext += "".join(["HG: removed %s\n" % f for f in remove])
389 edittext += "".join(["HG: removed %s\n" % f for f in remove])
390 if not changed and not remove:
390 if not changed and not remove:
391 edittext += "HG: no files changed\n"
391 edittext += "HG: no files changed\n"
392 edittext = self.ui.edit(edittext)
392 edittext = self.ui.edit(edittext)
393 if not edittext.rstrip():
393 if not edittext.rstrip():
394 return None
394 return None
395 text = edittext
395 text = edittext
396
396
397 user = user or self.ui.username()
397 user = user or self.ui.username()
398 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
398 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
399 tr.close()
399 tr.close()
400
400
401 self.dirstate.setparents(n)
401 self.dirstate.setparents(n)
402 self.dirstate.update(new, "n")
402 self.dirstate.update(new, "n")
403 self.dirstate.forget(remove)
403 self.dirstate.forget(remove)
404
404
405 if not self.hook("commit", node=hex(n)):
405 if not self.hook("commit", node=hex(n)):
406 return None
406 return None
407 return n
407 return n
408
408
409 def walk(self, node=None, files=[], match=util.always):
409 def walk(self, node=None, files=[], match=util.always):
410 if node:
410 if node:
411 for fn in self.manifest.read(self.changelog.read(node)[0]):
411 for fn in self.manifest.read(self.changelog.read(node)[0]):
412 if match(fn): yield 'm', fn
412 if match(fn): yield 'm', fn
413 else:
413 else:
414 for src, fn in self.dirstate.walk(files, match):
414 for src, fn in self.dirstate.walk(files, match):
415 yield src, fn
415 yield src, fn
416
416
417 def changes(self, node1 = None, node2 = None, files = [],
417 def changes(self, node1 = None, node2 = None, files = [],
418 match = util.always):
418 match = util.always):
419 mf2, u = None, []
419 mf2, u = None, []
420
420
421 def fcmp(fn, mf):
421 def fcmp(fn, mf):
422 t1 = self.wread(fn)
422 t1 = self.wread(fn)
423 t2 = self.file(fn).read(mf.get(fn, nullid))
423 t2 = self.file(fn).read(mf.get(fn, nullid))
424 return cmp(t1, t2)
424 return cmp(t1, t2)
425
425
426 def mfmatches(node):
426 def mfmatches(node):
427 mf = dict(self.manifest.read(node))
427 mf = dict(self.manifest.read(node))
428 for fn in mf.keys():
428 for fn in mf.keys():
429 if not match(fn):
429 if not match(fn):
430 del mf[fn]
430 del mf[fn]
431 return mf
431 return mf
432
432
433 # are we comparing the working directory?
433 # are we comparing the working directory?
434 if not node2:
434 if not node2:
435 l, c, a, d, u = self.dirstate.changes(files, match)
435 l, c, a, d, u = self.dirstate.changes(files, match)
436
436
437 # are we comparing working dir against its parent?
437 # are we comparing working dir against its parent?
438 if not node1:
438 if not node1:
439 if l:
439 if l:
440 # do a full compare of any files that might have changed
440 # do a full compare of any files that might have changed
441 change = self.changelog.read(self.dirstate.parents()[0])
441 change = self.changelog.read(self.dirstate.parents()[0])
442 mf2 = mfmatches(change[0])
442 mf2 = mfmatches(change[0])
443 for f in l:
443 for f in l:
444 if fcmp(f, mf2):
444 if fcmp(f, mf2):
445 c.append(f)
445 c.append(f)
446
446
447 for l in c, a, d, u:
447 for l in c, a, d, u:
448 l.sort()
448 l.sort()
449
449
450 return (c, a, d, u)
450 return (c, a, d, u)
451
451
452 # are we comparing working dir against non-tip?
452 # are we comparing working dir against non-tip?
453 # generate a pseudo-manifest for the working dir
453 # generate a pseudo-manifest for the working dir
454 if not node2:
454 if not node2:
455 if not mf2:
455 if not mf2:
456 change = self.changelog.read(self.dirstate.parents()[0])
456 change = self.changelog.read(self.dirstate.parents()[0])
457 mf2 = mfmatches(change[0])
457 mf2 = mfmatches(change[0])
458 for f in a + c + l:
458 for f in a + c + l:
459 mf2[f] = ""
459 mf2[f] = ""
460 for f in d:
460 for f in d:
461 if f in mf2: del mf2[f]
461 if f in mf2: del mf2[f]
462 else:
462 else:
463 change = self.changelog.read(node2)
463 change = self.changelog.read(node2)
464 mf2 = mfmatches(change[0])
464 mf2 = mfmatches(change[0])
465
465
466 # flush lists from dirstate before comparing manifests
466 # flush lists from dirstate before comparing manifests
467 c, a = [], []
467 c, a = [], []
468
468
469 change = self.changelog.read(node1)
469 change = self.changelog.read(node1)
470 mf1 = mfmatches(change[0])
470 mf1 = mfmatches(change[0])
471
471
472 for fn in mf2:
472 for fn in mf2:
473 if mf1.has_key(fn):
473 if mf1.has_key(fn):
474 if mf1[fn] != mf2[fn]:
474 if mf1[fn] != mf2[fn]:
475 if mf2[fn] != "" or fcmp(fn, mf1):
475 if mf2[fn] != "" or fcmp(fn, mf1):
476 c.append(fn)
476 c.append(fn)
477 del mf1[fn]
477 del mf1[fn]
478 else:
478 else:
479 a.append(fn)
479 a.append(fn)
480
480
481 d = mf1.keys()
481 d = mf1.keys()
482
482
483 for l in c, a, d, u:
483 for l in c, a, d, u:
484 l.sort()
484 l.sort()
485
485
486 return (c, a, d, u)
486 return (c, a, d, u)
487
487
488 def add(self, list):
488 def add(self, list):
489 for f in list:
489 for f in list:
490 p = self.wjoin(f)
490 p = self.wjoin(f)
491 if not os.path.exists(p):
491 if not os.path.exists(p):
492 self.ui.warn("%s does not exist!\n" % f)
492 self.ui.warn("%s does not exist!\n" % f)
493 elif not os.path.isfile(p):
493 elif not os.path.isfile(p):
494 self.ui.warn("%s not added: only files supported currently\n" % f)
494 self.ui.warn("%s not added: only files supported currently\n" % f)
495 elif self.dirstate.state(f) in 'an':
495 elif self.dirstate.state(f) in 'an':
496 self.ui.warn("%s already tracked!\n" % f)
496 self.ui.warn("%s already tracked!\n" % f)
497 else:
497 else:
498 self.dirstate.update([f], "a")
498 self.dirstate.update([f], "a")
499
499
500 def forget(self, list):
500 def forget(self, list):
501 for f in list:
501 for f in list:
502 if self.dirstate.state(f) not in 'ai':
502 if self.dirstate.state(f) not in 'ai':
503 self.ui.warn("%s not added!\n" % f)
503 self.ui.warn("%s not added!\n" % f)
504 else:
504 else:
505 self.dirstate.forget([f])
505 self.dirstate.forget([f])
506
506
507 def remove(self, list):
507 def remove(self, list):
508 for f in list:
508 for f in list:
509 p = self.wjoin(f)
509 p = self.wjoin(f)
510 if os.path.exists(p):
510 if os.path.exists(p):
511 self.ui.warn("%s still exists!\n" % f)
511 self.ui.warn("%s still exists!\n" % f)
512 elif self.dirstate.state(f) == 'a':
512 elif self.dirstate.state(f) == 'a':
513 self.ui.warn("%s never committed!\n" % f)
513 self.ui.warn("%s never committed!\n" % f)
514 self.dirstate.forget([f])
514 self.dirstate.forget([f])
515 elif f not in self.dirstate:
515 elif f not in self.dirstate:
516 self.ui.warn("%s not tracked!\n" % f)
516 self.ui.warn("%s not tracked!\n" % f)
517 else:
517 else:
518 self.dirstate.update([f], "r")
518 self.dirstate.update([f], "r")
519
519
520 def copy(self, source, dest):
520 def copy(self, source, dest):
521 p = self.wjoin(dest)
521 p = self.wjoin(dest)
522 if not os.path.exists(p):
522 if not os.path.exists(p):
523 self.ui.warn("%s does not exist!\n" % dest)
523 self.ui.warn("%s does not exist!\n" % dest)
524 elif not os.path.isfile(p):
524 elif not os.path.isfile(p):
525 self.ui.warn("copy failed: %s is not a file\n" % dest)
525 self.ui.warn("copy failed: %s is not a file\n" % dest)
526 else:
526 else:
527 if self.dirstate.state(dest) == '?':
527 if self.dirstate.state(dest) == '?':
528 self.dirstate.update([dest], "a")
528 self.dirstate.update([dest], "a")
529 self.dirstate.copy(source, dest)
529 self.dirstate.copy(source, dest)
530
530
531 def heads(self):
531 def heads(self):
532 return self.changelog.heads()
532 return self.changelog.heads()
533
533
534 # branchlookup returns a dict giving a list of branches for
534 # branchlookup returns a dict giving a list of branches for
535 # each head. A branch is defined as the tag of a node or
535 # each head. A branch is defined as the tag of a node or
536 # the branch of the node's parents. If a node has multiple
536 # the branch of the node's parents. If a node has multiple
537 # branch tags, tags are eliminated if they are visible from other
537 # branch tags, tags are eliminated if they are visible from other
538 # branch tags.
538 # branch tags.
539 #
539 #
540 # So, for this graph: a->b->c->d->e
540 # So, for this graph: a->b->c->d->e
541 # \ /
541 # \ /
542 # aa -----/
542 # aa -----/
543 # a has tag 2.6.12
543 # a has tag 2.6.12
544 # d has tag 2.6.13
544 # d has tag 2.6.13
545 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
545 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
546 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
546 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
547 # from the list.
547 # from the list.
548 #
548 #
549 # It is possible that more than one head will have the same branch tag.
549 # It is possible that more than one head will have the same branch tag.
550 # callers need to check the result for multiple heads under the same
550 # callers need to check the result for multiple heads under the same
551 # branch tag if that is a problem for them (ie checkout of a specific
551 # branch tag if that is a problem for them (ie checkout of a specific
552 # branch).
552 # branch).
553 #
553 #
554 # passing in a specific branch will limit the depth of the search
554 # passing in a specific branch will limit the depth of the search
555 # through the parents. It won't limit the branches returned in the
555 # through the parents. It won't limit the branches returned in the
556 # result though.
556 # result though.
557 def branchlookup(self, heads=None, branch=None):
557 def branchlookup(self, heads=None, branch=None):
558 if not heads:
558 if not heads:
559 heads = self.heads()
559 heads = self.heads()
560 headt = [ h for h in heads ]
560 headt = [ h for h in heads ]
561 chlog = self.changelog
561 chlog = self.changelog
562 branches = {}
562 branches = {}
563 merges = []
563 merges = []
564 seenmerge = {}
564 seenmerge = {}
565
565
566 # traverse the tree once for each head, recording in the branches
566 # traverse the tree once for each head, recording in the branches
567 # dict which tags are visible from this head. The branches
567 # dict which tags are visible from this head. The branches
568 # dict also records which tags are visible from each tag
568 # dict also records which tags are visible from each tag
569 # while we traverse.
569 # while we traverse.
570 while headt or merges:
570 while headt or merges:
571 if merges:
571 if merges:
572 n, found = merges.pop()
572 n, found = merges.pop()
573 visit = [n]
573 visit = [n]
574 else:
574 else:
575 h = headt.pop()
575 h = headt.pop()
576 visit = [h]
576 visit = [h]
577 found = [h]
577 found = [h]
578 seen = {}
578 seen = {}
579 while visit:
579 while visit:
580 n = visit.pop()
580 n = visit.pop()
581 if n in seen:
581 if n in seen:
582 continue
582 continue
583 pp = chlog.parents(n)
583 pp = chlog.parents(n)
584 tags = self.nodetags(n)
584 tags = self.nodetags(n)
585 if tags:
585 if tags:
586 for x in tags:
586 for x in tags:
587 if x == 'tip':
587 if x == 'tip':
588 continue
588 continue
589 for f in found:
589 for f in found:
590 branches.setdefault(f, {})[n] = 1
590 branches.setdefault(f, {})[n] = 1
591 branches.setdefault(n, {})[n] = 1
591 branches.setdefault(n, {})[n] = 1
592 break
592 break
593 if n not in found:
593 if n not in found:
594 found.append(n)
594 found.append(n)
595 if branch in tags:
595 if branch in tags:
596 continue
596 continue
597 seen[n] = 1
597 seen[n] = 1
598 if pp[1] != nullid and n not in seenmerge:
598 if pp[1] != nullid and n not in seenmerge:
599 merges.append((pp[1], [x for x in found]))
599 merges.append((pp[1], [x for x in found]))
600 seenmerge[n] = 1
600 seenmerge[n] = 1
601 if pp[0] != nullid:
601 if pp[0] != nullid:
602 visit.append(pp[0])
602 visit.append(pp[0])
603 # traverse the branches dict, eliminating branch tags from each
603 # traverse the branches dict, eliminating branch tags from each
604 # head that are visible from another branch tag for that head.
604 # head that are visible from another branch tag for that head.
605 out = {}
605 out = {}
606 viscache = {}
606 viscache = {}
607 for h in heads:
607 for h in heads:
608 def visible(node):
608 def visible(node):
609 if node in viscache:
609 if node in viscache:
610 return viscache[node]
610 return viscache[node]
611 ret = {}
611 ret = {}
612 visit = [node]
612 visit = [node]
613 while visit:
613 while visit:
614 x = visit.pop()
614 x = visit.pop()
615 if x in viscache:
615 if x in viscache:
616 ret.update(viscache[x])
616 ret.update(viscache[x])
617 elif x not in ret:
617 elif x not in ret:
618 ret[x] = 1
618 ret[x] = 1
619 if x in branches:
619 if x in branches:
620 visit[len(visit):] = branches[x].keys()
620 visit[len(visit):] = branches[x].keys()
621 viscache[node] = ret
621 viscache[node] = ret
622 return ret
622 return ret
623 if h not in branches:
623 if h not in branches:
624 continue
624 continue
625 # O(n^2), but somewhat limited. This only searches the
625 # O(n^2), but somewhat limited. This only searches the
626 # tags visible from a specific head, not all the tags in the
626 # tags visible from a specific head, not all the tags in the
627 # whole repo.
627 # whole repo.
628 for b in branches[h]:
628 for b in branches[h]:
629 vis = False
629 vis = False
630 for bb in branches[h].keys():
630 for bb in branches[h].keys():
631 if b != bb:
631 if b != bb:
632 if b in visible(bb):
632 if b in visible(bb):
633 vis = True
633 vis = True
634 break
634 break
635 if not vis:
635 if not vis:
636 l = out.setdefault(h, [])
636 l = out.setdefault(h, [])
637 l[len(l):] = self.nodetags(b)
637 l[len(l):] = self.nodetags(b)
638 return out
638 return out
639
639
640 def branches(self, nodes):
640 def branches(self, nodes):
641 if not nodes: nodes = [self.changelog.tip()]
641 if not nodes: nodes = [self.changelog.tip()]
642 b = []
642 b = []
643 for n in nodes:
643 for n in nodes:
644 t = n
644 t = n
645 while n:
645 while n:
646 p = self.changelog.parents(n)
646 p = self.changelog.parents(n)
647 if p[1] != nullid or p[0] == nullid:
647 if p[1] != nullid or p[0] == nullid:
648 b.append((t, n, p[0], p[1]))
648 b.append((t, n, p[0], p[1]))
649 break
649 break
650 n = p[0]
650 n = p[0]
651 return b
651 return b
652
652
653 def between(self, pairs):
653 def between(self, pairs):
654 r = []
654 r = []
655
655
656 for top, bottom in pairs:
656 for top, bottom in pairs:
657 n, l, i = top, [], 0
657 n, l, i = top, [], 0
658 f = 1
658 f = 1
659
659
660 while n != bottom:
660 while n != bottom:
661 p = self.changelog.parents(n)[0]
661 p = self.changelog.parents(n)[0]
662 if i == f:
662 if i == f:
663 l.append(n)
663 l.append(n)
664 f = f * 2
664 f = f * 2
665 n = p
665 n = p
666 i += 1
666 i += 1
667
667
668 r.append(l)
668 r.append(l)
669
669
670 return r
670 return r
671
671
672 def newer(self, nodes):
672 def newer(self, nodes):
673 m = {}
673 m = {}
674 nl = []
674 nl = []
675 pm = {}
675 pm = {}
676 cl = self.changelog
676 cl = self.changelog
677 t = l = cl.count()
677 t = l = cl.count()
678
678
679 # find the lowest numbered node
679 # find the lowest numbered node
680 for n in nodes:
680 for n in nodes:
681 l = min(l, cl.rev(n))
681 l = min(l, cl.rev(n))
682 m[n] = 1
682 m[n] = 1
683
683
684 for i in xrange(l, t):
684 for i in xrange(l, t):
685 n = cl.node(i)
685 n = cl.node(i)
686 if n in m: # explicitly listed
686 if n in m: # explicitly listed
687 pm[n] = 1
687 pm[n] = 1
688 nl.append(n)
688 nl.append(n)
689 continue
689 continue
690 for p in cl.parents(n):
690 for p in cl.parents(n):
691 if p in pm: # parent listed
691 if p in pm: # parent listed
692 pm[n] = 1
692 pm[n] = 1
693 nl.append(n)
693 nl.append(n)
694 break
694 break
695
695
696 return nl
696 return nl
697
697
698 def findincoming(self, remote, base=None, heads=None):
698 def findincoming(self, remote, base=None, heads=None):
699 m = self.changelog.nodemap
699 m = self.changelog.nodemap
700 search = []
700 search = []
701 fetch = {}
701 fetch = {}
702 seen = {}
702 seen = {}
703 seenbranch = {}
703 seenbranch = {}
704 if base == None:
704 if base == None:
705 base = {}
705 base = {}
706
706
707 # assume we're closer to the tip than the root
707 # assume we're closer to the tip than the root
708 # and start by examining the heads
708 # and start by examining the heads
709 self.ui.status("searching for changes\n")
709 self.ui.status("searching for changes\n")
710
710
711 if not heads:
711 if not heads:
712 heads = remote.heads()
712 heads = remote.heads()
713
713
714 unknown = []
714 unknown = []
715 for h in heads:
715 for h in heads:
716 if h not in m:
716 if h not in m:
717 unknown.append(h)
717 unknown.append(h)
718 else:
718 else:
719 base[h] = 1
719 base[h] = 1
720
720
721 if not unknown:
721 if not unknown:
722 return None
722 return None
723
723
724 rep = {}
724 rep = {}
725 reqcnt = 0
725 reqcnt = 0
726
726
727 # search through remote branches
727 # search through remote branches
728 # a 'branch' here is a linear segment of history, with four parts:
728 # a 'branch' here is a linear segment of history, with four parts:
729 # head, root, first parent, second parent
729 # head, root, first parent, second parent
730 # (a branch always has two parents (or none) by definition)
730 # (a branch always has two parents (or none) by definition)
731 unknown = remote.branches(unknown)
731 unknown = remote.branches(unknown)
732 while unknown:
732 while unknown:
733 r = []
733 r = []
734 while unknown:
734 while unknown:
735 n = unknown.pop(0)
735 n = unknown.pop(0)
736 if n[0] in seen:
736 if n[0] in seen:
737 continue
737 continue
738
738
739 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
739 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
740 if n[0] == nullid:
740 if n[0] == nullid:
741 break
741 break
742 if n in seenbranch:
742 if n in seenbranch:
743 self.ui.debug("branch already found\n")
743 self.ui.debug("branch already found\n")
744 continue
744 continue
745 if n[1] and n[1] in m: # do we know the base?
745 if n[1] and n[1] in m: # do we know the base?
746 self.ui.debug("found incomplete branch %s:%s\n"
746 self.ui.debug("found incomplete branch %s:%s\n"
747 % (short(n[0]), short(n[1])))
747 % (short(n[0]), short(n[1])))
748 search.append(n) # schedule branch range for scanning
748 search.append(n) # schedule branch range for scanning
749 seenbranch[n] = 1
749 seenbranch[n] = 1
750 else:
750 else:
751 if n[1] not in seen and n[1] not in fetch:
751 if n[1] not in seen and n[1] not in fetch:
752 if n[2] in m and n[3] in m:
752 if n[2] in m and n[3] in m:
753 self.ui.debug("found new changeset %s\n" %
753 self.ui.debug("found new changeset %s\n" %
754 short(n[1]))
754 short(n[1]))
755 fetch[n[1]] = 1 # earliest unknown
755 fetch[n[1]] = 1 # earliest unknown
756 base[n[2]] = 1 # latest known
756 base[n[2]] = 1 # latest known
757 continue
757 continue
758
758
759 for a in n[2:4]:
759 for a in n[2:4]:
760 if a not in rep:
760 if a not in rep:
761 r.append(a)
761 r.append(a)
762 rep[a] = 1
762 rep[a] = 1
763
763
764 seen[n[0]] = 1
764 seen[n[0]] = 1
765
765
766 if r:
766 if r:
767 reqcnt += 1
767 reqcnt += 1
768 self.ui.debug("request %d: %s\n" %
768 self.ui.debug("request %d: %s\n" %
769 (reqcnt, " ".join(map(short, r))))
769 (reqcnt, " ".join(map(short, r))))
770 for p in range(0, len(r), 10):
770 for p in range(0, len(r), 10):
771 for b in remote.branches(r[p:p+10]):
771 for b in remote.branches(r[p:p+10]):
772 self.ui.debug("received %s:%s\n" %
772 self.ui.debug("received %s:%s\n" %
773 (short(b[0]), short(b[1])))
773 (short(b[0]), short(b[1])))
774 if b[0] in m:
774 if b[0] in m:
775 self.ui.debug("found base node %s\n" % short(b[0]))
775 self.ui.debug("found base node %s\n" % short(b[0]))
776 base[b[0]] = 1
776 base[b[0]] = 1
777 elif b[0] not in seen:
777 elif b[0] not in seen:
778 unknown.append(b)
778 unknown.append(b)
779
779
780 # do binary search on the branches we found
780 # do binary search on the branches we found
781 while search:
781 while search:
782 n = search.pop(0)
782 n = search.pop(0)
783 reqcnt += 1
783 reqcnt += 1
784 l = remote.between([(n[0], n[1])])[0]
784 l = remote.between([(n[0], n[1])])[0]
785 l.append(n[1])
785 l.append(n[1])
786 p = n[0]
786 p = n[0]
787 f = 1
787 f = 1
788 for i in l:
788 for i in l:
789 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
789 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
790 if i in m:
790 if i in m:
791 if f <= 2:
791 if f <= 2:
792 self.ui.debug("found new branch changeset %s\n" %
792 self.ui.debug("found new branch changeset %s\n" %
793 short(p))
793 short(p))
794 fetch[p] = 1
794 fetch[p] = 1
795 base[i] = 1
795 base[i] = 1
796 else:
796 else:
797 self.ui.debug("narrowed branch search to %s:%s\n"
797 self.ui.debug("narrowed branch search to %s:%s\n"
798 % (short(p), short(i)))
798 % (short(p), short(i)))
799 search.append((p, i))
799 search.append((p, i))
800 break
800 break
801 p, f = i, f * 2
801 p, f = i, f * 2
802
802
803 # sanity check our fetch list
803 # sanity check our fetch list
804 for f in fetch.keys():
804 for f in fetch.keys():
805 if f in m:
805 if f in m:
806 raise repo.RepoError("already have changeset " + short(f[:4]))
806 raise repo.RepoError("already have changeset " + short(f[:4]))
807
807
808 if base.keys() == [nullid]:
808 if base.keys() == [nullid]:
809 self.ui.warn("warning: pulling from an unrelated repository!\n")
809 self.ui.warn("warning: pulling from an unrelated repository!\n")
810
810
811 self.ui.note("found new changesets starting at " +
811 self.ui.note("found new changesets starting at " +
812 " ".join([short(f) for f in fetch]) + "\n")
812 " ".join([short(f) for f in fetch]) + "\n")
813
813
814 self.ui.debug("%d total queries\n" % reqcnt)
814 self.ui.debug("%d total queries\n" % reqcnt)
815
815
816 return fetch.keys()
816 return fetch.keys()
817
817
818 def findoutgoing(self, remote, base=None, heads=None):
818 def findoutgoing(self, remote, base=None, heads=None):
819 if base == None:
819 if base == None:
820 base = {}
820 base = {}
821 self.findincoming(remote, base, heads)
821 self.findincoming(remote, base, heads)
822
822
823 self.ui.debug("common changesets up to "
823 self.ui.debug("common changesets up to "
824 + " ".join(map(short, base.keys())) + "\n")
824 + " ".join(map(short, base.keys())) + "\n")
825
825
826 remain = dict.fromkeys(self.changelog.nodemap)
826 remain = dict.fromkeys(self.changelog.nodemap)
827
827
828 # prune everything remote has from the tree
828 # prune everything remote has from the tree
829 del remain[nullid]
829 del remain[nullid]
830 remove = base.keys()
830 remove = base.keys()
831 while remove:
831 while remove:
832 n = remove.pop(0)
832 n = remove.pop(0)
833 if n in remain:
833 if n in remain:
834 del remain[n]
834 del remain[n]
835 for p in self.changelog.parents(n):
835 for p in self.changelog.parents(n):
836 remove.append(p)
836 remove.append(p)
837
837
838 # find every node whose parents have been pruned
838 # find every node whose parents have been pruned
839 subset = []
839 subset = []
840 for n in remain:
840 for n in remain:
841 p1, p2 = self.changelog.parents(n)
841 p1, p2 = self.changelog.parents(n)
842 if p1 not in remain and p2 not in remain:
842 if p1 not in remain and p2 not in remain:
843 subset.append(n)
843 subset.append(n)
844
844
845 # this is the set of all roots we have to push
845 # this is the set of all roots we have to push
846 return subset
846 return subset
847
847
848 def pull(self, remote):
848 def pull(self, remote):
849 lock = self.lock()
849 lock = self.lock()
850
850
851 # if we have an empty repo, fetch everything
851 # if we have an empty repo, fetch everything
852 if self.changelog.tip() == nullid:
852 if self.changelog.tip() == nullid:
853 self.ui.status("requesting all changes\n")
853 self.ui.status("requesting all changes\n")
854 fetch = [nullid]
854 fetch = [nullid]
855 else:
855 else:
856 fetch = self.findincoming(remote)
856 fetch = self.findincoming(remote)
857
857
858 if not fetch:
858 if not fetch:
859 self.ui.status("no changes found\n")
859 self.ui.status("no changes found\n")
860 return 1
860 return 1
861
861
862 cg = remote.changegroup(fetch)
862 cg = remote.changegroup(fetch)
863 return self.addchangegroup(cg)
863 return self.addchangegroup(cg)
864
864
865 def push(self, remote, force=False):
865 def push(self, remote, force=False):
866 lock = remote.lock()
866 lock = remote.lock()
867
867
868 base = {}
868 base = {}
869 heads = remote.heads()
869 heads = remote.heads()
870 inc = self.findincoming(remote, base, heads)
870 inc = self.findincoming(remote, base, heads)
871 if not force and inc:
871 if not force and inc:
872 self.ui.warn("abort: unsynced remote changes!\n")
872 self.ui.warn("abort: unsynced remote changes!\n")
873 self.ui.status("(did you forget to sync? use push -f to force)\n")
873 self.ui.status("(did you forget to sync? use push -f to force)\n")
874 return 1
874 return 1
875
875
876 update = self.findoutgoing(remote, base)
876 update = self.findoutgoing(remote, base)
877 if not update:
877 if not update:
878 self.ui.status("no changes found\n")
878 self.ui.status("no changes found\n")
879 return 1
879 return 1
880 elif not force:
880 elif not force:
881 if len(heads) < len(self.changelog.heads()):
881 if len(heads) < len(self.changelog.heads()):
882 self.ui.warn("abort: push creates new remote branches!\n")
882 self.ui.warn("abort: push creates new remote branches!\n")
883 self.ui.status("(did you forget to merge?" +
883 self.ui.status("(did you forget to merge?" +
884 " use push -f to force)\n")
884 " use push -f to force)\n")
885 return 1
885 return 1
886
886
887 cg = self.changegroup(update)
887 cg = self.changegroup(update)
888 return remote.addchangegroup(cg)
888 return remote.addchangegroup(cg)
889
889
890 def changegroup(self, basenodes):
890 def changegroup(self, basenodes):
891 class genread:
891 genread = util.chunkbuffer
892 def __init__(self, generator):
893 self.g = generator
894 self.buf = ""
895 def fillbuf(self):
896 self.buf += "".join(self.g)
897
898 def read(self, l):
899 while l > len(self.buf):
900 try:
901 self.buf += self.g.next()
902 except StopIteration:
903 break
904 d, self.buf = self.buf[:l], self.buf[l:]
905 return d
906
892
907 def gengroup():
893 def gengroup():
908 nodes = self.newer(basenodes)
894 nodes = self.newer(basenodes)
909
895
910 # construct the link map
896 # construct the link map
911 linkmap = {}
897 linkmap = {}
912 for n in nodes:
898 for n in nodes:
913 linkmap[self.changelog.rev(n)] = n
899 linkmap[self.changelog.rev(n)] = n
914
900
915 # construct a list of all changed files
901 # construct a list of all changed files
916 changed = {}
902 changed = {}
917 for n in nodes:
903 for n in nodes:
918 c = self.changelog.read(n)
904 c = self.changelog.read(n)
919 for f in c[3]:
905 for f in c[3]:
920 changed[f] = 1
906 changed[f] = 1
921 changed = changed.keys()
907 changed = changed.keys()
922 changed.sort()
908 changed.sort()
923
909
924 # the changegroup is changesets + manifests + all file revs
910 # the changegroup is changesets + manifests + all file revs
925 revs = [ self.changelog.rev(n) for n in nodes ]
911 revs = [ self.changelog.rev(n) for n in nodes ]
926
912
927 for y in self.changelog.group(linkmap): yield y
913 for y in self.changelog.group(linkmap): yield y
928 for y in self.manifest.group(linkmap): yield y
914 for y in self.manifest.group(linkmap): yield y
929 for f in changed:
915 for f in changed:
930 yield struct.pack(">l", len(f) + 4) + f
916 yield struct.pack(">l", len(f) + 4) + f
931 g = self.file(f).group(linkmap)
917 g = self.file(f).group(linkmap)
932 for y in g:
918 for y in g:
933 yield y
919 yield y
934
920
935 yield struct.pack(">l", 0)
921 yield struct.pack(">l", 0)
936
922
937 return genread(gengroup())
923 return genread(gengroup())
938
924
939 def addchangegroup(self, source):
925 def addchangegroup(self, source):
940
926
941 def getchunk():
927 def getchunk():
942 d = source.read(4)
928 d = source.read(4)
943 if not d: return ""
929 if not d: return ""
944 l = struct.unpack(">l", d)[0]
930 l = struct.unpack(">l", d)[0]
945 if l <= 4: return ""
931 if l <= 4: return ""
946 return source.read(l - 4)
932 return source.read(l - 4)
947
933
948 def getgroup():
934 def getgroup():
949 while 1:
935 while 1:
950 c = getchunk()
936 c = getchunk()
951 if not c: break
937 if not c: break
952 yield c
938 yield c
953
939
954 def csmap(x):
940 def csmap(x):
955 self.ui.debug("add changeset %s\n" % short(x))
941 self.ui.debug("add changeset %s\n" % short(x))
956 return self.changelog.count()
942 return self.changelog.count()
957
943
958 def revmap(x):
944 def revmap(x):
959 return self.changelog.rev(x)
945 return self.changelog.rev(x)
960
946
961 if not source: return
947 if not source: return
962 changesets = files = revisions = 0
948 changesets = files = revisions = 0
963
949
964 tr = self.transaction()
950 tr = self.transaction()
965
951
966 oldheads = len(self.changelog.heads())
952 oldheads = len(self.changelog.heads())
967
953
968 # pull off the changeset group
954 # pull off the changeset group
969 self.ui.status("adding changesets\n")
955 self.ui.status("adding changesets\n")
970 co = self.changelog.tip()
956 co = self.changelog.tip()
971 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
957 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
972 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
958 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
973
959
974 # pull off the manifest group
960 # pull off the manifest group
975 self.ui.status("adding manifests\n")
961 self.ui.status("adding manifests\n")
976 mm = self.manifest.tip()
962 mm = self.manifest.tip()
977 mo = self.manifest.addgroup(getgroup(), revmap, tr)
963 mo = self.manifest.addgroup(getgroup(), revmap, tr)
978
964
979 # process the files
965 # process the files
980 self.ui.status("adding file changes\n")
966 self.ui.status("adding file changes\n")
981 while 1:
967 while 1:
982 f = getchunk()
968 f = getchunk()
983 if not f: break
969 if not f: break
984 self.ui.debug("adding %s revisions\n" % f)
970 self.ui.debug("adding %s revisions\n" % f)
985 fl = self.file(f)
971 fl = self.file(f)
986 o = fl.count()
972 o = fl.count()
987 n = fl.addgroup(getgroup(), revmap, tr)
973 n = fl.addgroup(getgroup(), revmap, tr)
988 revisions += fl.count() - o
974 revisions += fl.count() - o
989 files += 1
975 files += 1
990
976
991 newheads = len(self.changelog.heads())
977 newheads = len(self.changelog.heads())
992 heads = ""
978 heads = ""
993 if oldheads and newheads > oldheads:
979 if oldheads and newheads > oldheads:
994 heads = " (+%d heads)" % (newheads - oldheads)
980 heads = " (+%d heads)" % (newheads - oldheads)
995
981
996 self.ui.status(("added %d changesets" +
982 self.ui.status(("added %d changesets" +
997 " with %d changes to %d files%s\n")
983 " with %d changes to %d files%s\n")
998 % (changesets, revisions, files, heads))
984 % (changesets, revisions, files, heads))
999
985
1000 tr.close()
986 tr.close()
1001
987
1002 if not self.hook("changegroup"):
988 if not self.hook("changegroup"):
1003 return 1
989 return 1
1004
990
1005 return
991 return
1006
992
1007 def update(self, node, allow=False, force=False, choose=None,
993 def update(self, node, allow=False, force=False, choose=None,
1008 moddirstate=True):
994 moddirstate=True):
1009 pl = self.dirstate.parents()
995 pl = self.dirstate.parents()
1010 if not force and pl[1] != nullid:
996 if not force and pl[1] != nullid:
1011 self.ui.warn("aborting: outstanding uncommitted merges\n")
997 self.ui.warn("aborting: outstanding uncommitted merges\n")
1012 return 1
998 return 1
1013
999
1014 p1, p2 = pl[0], node
1000 p1, p2 = pl[0], node
1015 pa = self.changelog.ancestor(p1, p2)
1001 pa = self.changelog.ancestor(p1, p2)
1016 m1n = self.changelog.read(p1)[0]
1002 m1n = self.changelog.read(p1)[0]
1017 m2n = self.changelog.read(p2)[0]
1003 m2n = self.changelog.read(p2)[0]
1018 man = self.manifest.ancestor(m1n, m2n)
1004 man = self.manifest.ancestor(m1n, m2n)
1019 m1 = self.manifest.read(m1n)
1005 m1 = self.manifest.read(m1n)
1020 mf1 = self.manifest.readflags(m1n)
1006 mf1 = self.manifest.readflags(m1n)
1021 m2 = self.manifest.read(m2n)
1007 m2 = self.manifest.read(m2n)
1022 mf2 = self.manifest.readflags(m2n)
1008 mf2 = self.manifest.readflags(m2n)
1023 ma = self.manifest.read(man)
1009 ma = self.manifest.read(man)
1024 mfa = self.manifest.readflags(man)
1010 mfa = self.manifest.readflags(man)
1025
1011
1026 (c, a, d, u) = self.changes()
1012 (c, a, d, u) = self.changes()
1027
1013
1028 # is this a jump, or a merge? i.e. is there a linear path
1014 # is this a jump, or a merge? i.e. is there a linear path
1029 # from p1 to p2?
1015 # from p1 to p2?
1030 linear_path = (pa == p1 or pa == p2)
1016 linear_path = (pa == p1 or pa == p2)
1031
1017
1032 # resolve the manifest to determine which files
1018 # resolve the manifest to determine which files
1033 # we care about merging
1019 # we care about merging
1034 self.ui.note("resolving manifests\n")
1020 self.ui.note("resolving manifests\n")
1035 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1021 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1036 (force, allow, moddirstate, linear_path))
1022 (force, allow, moddirstate, linear_path))
1037 self.ui.debug(" ancestor %s local %s remote %s\n" %
1023 self.ui.debug(" ancestor %s local %s remote %s\n" %
1038 (short(man), short(m1n), short(m2n)))
1024 (short(man), short(m1n), short(m2n)))
1039
1025
1040 merge = {}
1026 merge = {}
1041 get = {}
1027 get = {}
1042 remove = []
1028 remove = []
1043
1029
1044 # construct a working dir manifest
1030 # construct a working dir manifest
1045 mw = m1.copy()
1031 mw = m1.copy()
1046 mfw = mf1.copy()
1032 mfw = mf1.copy()
1047 umap = dict.fromkeys(u)
1033 umap = dict.fromkeys(u)
1048
1034
1049 for f in a + c + u:
1035 for f in a + c + u:
1050 mw[f] = ""
1036 mw[f] = ""
1051 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1037 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1052
1038
1053 for f in d:
1039 for f in d:
1054 if f in mw: del mw[f]
1040 if f in mw: del mw[f]
1055
1041
1056 # If we're jumping between revisions (as opposed to merging),
1042 # If we're jumping between revisions (as opposed to merging),
1057 # and if neither the working directory nor the target rev has
1043 # and if neither the working directory nor the target rev has
1058 # the file, then we need to remove it from the dirstate, to
1044 # the file, then we need to remove it from the dirstate, to
1059 # prevent the dirstate from listing the file when it is no
1045 # prevent the dirstate from listing the file when it is no
1060 # longer in the manifest.
1046 # longer in the manifest.
1061 if moddirstate and linear_path and f not in m2:
1047 if moddirstate and linear_path and f not in m2:
1062 self.dirstate.forget((f,))
1048 self.dirstate.forget((f,))
1063
1049
1064 # Compare manifests
1050 # Compare manifests
1065 for f, n in mw.iteritems():
1051 for f, n in mw.iteritems():
1066 if choose and not choose(f): continue
1052 if choose and not choose(f): continue
1067 if f in m2:
1053 if f in m2:
1068 s = 0
1054 s = 0
1069
1055
1070 # is the wfile new since m1, and match m2?
1056 # is the wfile new since m1, and match m2?
1071 if f not in m1:
1057 if f not in m1:
1072 t1 = self.wread(f)
1058 t1 = self.wread(f)
1073 t2 = self.file(f).read(m2[f])
1059 t2 = self.file(f).read(m2[f])
1074 if cmp(t1, t2) == 0:
1060 if cmp(t1, t2) == 0:
1075 n = m2[f]
1061 n = m2[f]
1076 del t1, t2
1062 del t1, t2
1077
1063
1078 # are files different?
1064 # are files different?
1079 if n != m2[f]:
1065 if n != m2[f]:
1080 a = ma.get(f, nullid)
1066 a = ma.get(f, nullid)
1081 # are both different from the ancestor?
1067 # are both different from the ancestor?
1082 if n != a and m2[f] != a:
1068 if n != a and m2[f] != a:
1083 self.ui.debug(" %s versions differ, resolve\n" % f)
1069 self.ui.debug(" %s versions differ, resolve\n" % f)
1084 # merge executable bits
1070 # merge executable bits
1085 # "if we changed or they changed, change in merge"
1071 # "if we changed or they changed, change in merge"
1086 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1072 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1087 mode = ((a^b) | (a^c)) ^ a
1073 mode = ((a^b) | (a^c)) ^ a
1088 merge[f] = (m1.get(f, nullid), m2[f], mode)
1074 merge[f] = (m1.get(f, nullid), m2[f], mode)
1089 s = 1
1075 s = 1
1090 # are we clobbering?
1076 # are we clobbering?
1091 # is remote's version newer?
1077 # is remote's version newer?
1092 # or are we going back in time?
1078 # or are we going back in time?
1093 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1079 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1094 self.ui.debug(" remote %s is newer, get\n" % f)
1080 self.ui.debug(" remote %s is newer, get\n" % f)
1095 get[f] = m2[f]
1081 get[f] = m2[f]
1096 s = 1
1082 s = 1
1097 elif f in umap:
1083 elif f in umap:
1098 # this unknown file is the same as the checkout
1084 # this unknown file is the same as the checkout
1099 get[f] = m2[f]
1085 get[f] = m2[f]
1100
1086
1101 if not s and mfw[f] != mf2[f]:
1087 if not s and mfw[f] != mf2[f]:
1102 if force:
1088 if force:
1103 self.ui.debug(" updating permissions for %s\n" % f)
1089 self.ui.debug(" updating permissions for %s\n" % f)
1104 util.set_exec(self.wjoin(f), mf2[f])
1090 util.set_exec(self.wjoin(f), mf2[f])
1105 else:
1091 else:
1106 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1092 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1107 mode = ((a^b) | (a^c)) ^ a
1093 mode = ((a^b) | (a^c)) ^ a
1108 if mode != b:
1094 if mode != b:
1109 self.ui.debug(" updating permissions for %s\n" % f)
1095 self.ui.debug(" updating permissions for %s\n" % f)
1110 util.set_exec(self.wjoin(f), mode)
1096 util.set_exec(self.wjoin(f), mode)
1111 del m2[f]
1097 del m2[f]
1112 elif f in ma:
1098 elif f in ma:
1113 if n != ma[f]:
1099 if n != ma[f]:
1114 r = "d"
1100 r = "d"
1115 if not force and (linear_path or allow):
1101 if not force and (linear_path or allow):
1116 r = self.ui.prompt(
1102 r = self.ui.prompt(
1117 (" local changed %s which remote deleted\n" % f) +
1103 (" local changed %s which remote deleted\n" % f) +
1118 "(k)eep or (d)elete?", "[kd]", "k")
1104 "(k)eep or (d)elete?", "[kd]", "k")
1119 if r == "d":
1105 if r == "d":
1120 remove.append(f)
1106 remove.append(f)
1121 else:
1107 else:
1122 self.ui.debug("other deleted %s\n" % f)
1108 self.ui.debug("other deleted %s\n" % f)
1123 remove.append(f) # other deleted it
1109 remove.append(f) # other deleted it
1124 else:
1110 else:
1125 if n == m1.get(f, nullid): # same as parent
1111 if n == m1.get(f, nullid): # same as parent
1126 if p2 == pa: # going backwards?
1112 if p2 == pa: # going backwards?
1127 self.ui.debug("remote deleted %s\n" % f)
1113 self.ui.debug("remote deleted %s\n" % f)
1128 remove.append(f)
1114 remove.append(f)
1129 else:
1115 else:
1130 self.ui.debug("local created %s, keeping\n" % f)
1116 self.ui.debug("local created %s, keeping\n" % f)
1131 else:
1117 else:
1132 self.ui.debug("working dir created %s, keeping\n" % f)
1118 self.ui.debug("working dir created %s, keeping\n" % f)
1133
1119
1134 for f, n in m2.iteritems():
1120 for f, n in m2.iteritems():
1135 if choose and not choose(f): continue
1121 if choose and not choose(f): continue
1136 if f[0] == "/": continue
1122 if f[0] == "/": continue
1137 if f in ma and n != ma[f]:
1123 if f in ma and n != ma[f]:
1138 r = "k"
1124 r = "k"
1139 if not force and (linear_path or allow):
1125 if not force and (linear_path or allow):
1140 r = self.ui.prompt(
1126 r = self.ui.prompt(
1141 ("remote changed %s which local deleted\n" % f) +
1127 ("remote changed %s which local deleted\n" % f) +
1142 "(k)eep or (d)elete?", "[kd]", "k")
1128 "(k)eep or (d)elete?", "[kd]", "k")
1143 if r == "k": get[f] = n
1129 if r == "k": get[f] = n
1144 elif f not in ma:
1130 elif f not in ma:
1145 self.ui.debug("remote created %s\n" % f)
1131 self.ui.debug("remote created %s\n" % f)
1146 get[f] = n
1132 get[f] = n
1147 else:
1133 else:
1148 if force or p2 == pa: # going backwards?
1134 if force or p2 == pa: # going backwards?
1149 self.ui.debug("local deleted %s, recreating\n" % f)
1135 self.ui.debug("local deleted %s, recreating\n" % f)
1150 get[f] = n
1136 get[f] = n
1151 else:
1137 else:
1152 self.ui.debug("local deleted %s\n" % f)
1138 self.ui.debug("local deleted %s\n" % f)
1153
1139
1154 del mw, m1, m2, ma
1140 del mw, m1, m2, ma
1155
1141
1156 if force:
1142 if force:
1157 for f in merge:
1143 for f in merge:
1158 get[f] = merge[f][1]
1144 get[f] = merge[f][1]
1159 merge = {}
1145 merge = {}
1160
1146
1161 if linear_path or force:
1147 if linear_path or force:
1162 # we don't need to do any magic, just jump to the new rev
1148 # we don't need to do any magic, just jump to the new rev
1163 branch_merge = False
1149 branch_merge = False
1164 p1, p2 = p2, nullid
1150 p1, p2 = p2, nullid
1165 else:
1151 else:
1166 if not allow:
1152 if not allow:
1167 self.ui.status("this update spans a branch" +
1153 self.ui.status("this update spans a branch" +
1168 " affecting the following files:\n")
1154 " affecting the following files:\n")
1169 fl = merge.keys() + get.keys()
1155 fl = merge.keys() + get.keys()
1170 fl.sort()
1156 fl.sort()
1171 for f in fl:
1157 for f in fl:
1172 cf = ""
1158 cf = ""
1173 if f in merge: cf = " (resolve)"
1159 if f in merge: cf = " (resolve)"
1174 self.ui.status(" %s%s\n" % (f, cf))
1160 self.ui.status(" %s%s\n" % (f, cf))
1175 self.ui.warn("aborting update spanning branches!\n")
1161 self.ui.warn("aborting update spanning branches!\n")
1176 self.ui.status("(use update -m to merge across branches" +
1162 self.ui.status("(use update -m to merge across branches" +
1177 " or -C to lose changes)\n")
1163 " or -C to lose changes)\n")
1178 return 1
1164 return 1
1179 branch_merge = True
1165 branch_merge = True
1180
1166
1181 if moddirstate:
1167 if moddirstate:
1182 self.dirstate.setparents(p1, p2)
1168 self.dirstate.setparents(p1, p2)
1183
1169
1184 # get the files we don't need to change
1170 # get the files we don't need to change
1185 files = get.keys()
1171 files = get.keys()
1186 files.sort()
1172 files.sort()
1187 for f in files:
1173 for f in files:
1188 if f[0] == "/": continue
1174 if f[0] == "/": continue
1189 self.ui.note("getting %s\n" % f)
1175 self.ui.note("getting %s\n" % f)
1190 t = self.file(f).read(get[f])
1176 t = self.file(f).read(get[f])
1191 try:
1177 try:
1192 self.wwrite(f, t)
1178 self.wwrite(f, t)
1193 except IOError:
1179 except IOError:
1194 os.makedirs(os.path.dirname(self.wjoin(f)))
1180 os.makedirs(os.path.dirname(self.wjoin(f)))
1195 self.wwrite(f, t)
1181 self.wwrite(f, t)
1196 util.set_exec(self.wjoin(f), mf2[f])
1182 util.set_exec(self.wjoin(f), mf2[f])
1197 if moddirstate:
1183 if moddirstate:
1198 if branch_merge:
1184 if branch_merge:
1199 self.dirstate.update([f], 'n', st_mtime=-1)
1185 self.dirstate.update([f], 'n', st_mtime=-1)
1200 else:
1186 else:
1201 self.dirstate.update([f], 'n')
1187 self.dirstate.update([f], 'n')
1202
1188
1203 # merge the tricky bits
1189 # merge the tricky bits
1204 files = merge.keys()
1190 files = merge.keys()
1205 files.sort()
1191 files.sort()
1206 for f in files:
1192 for f in files:
1207 self.ui.status("merging %s\n" % f)
1193 self.ui.status("merging %s\n" % f)
1208 my, other, flag = merge[f]
1194 my, other, flag = merge[f]
1209 self.merge3(f, my, other)
1195 self.merge3(f, my, other)
1210 util.set_exec(self.wjoin(f), flag)
1196 util.set_exec(self.wjoin(f), flag)
1211 if moddirstate:
1197 if moddirstate:
1212 if branch_merge:
1198 if branch_merge:
1213 # We've done a branch merge, mark this file as merged
1199 # We've done a branch merge, mark this file as merged
1214 # so that we properly record the merger later
1200 # so that we properly record the merger later
1215 self.dirstate.update([f], 'm')
1201 self.dirstate.update([f], 'm')
1216 else:
1202 else:
1217 # We've update-merged a locally modified file, so
1203 # We've update-merged a locally modified file, so
1218 # we set the dirstate to emulate a normal checkout
1204 # we set the dirstate to emulate a normal checkout
1219 # of that file some time in the past. Thus our
1205 # of that file some time in the past. Thus our
1220 # merge will appear as a normal local file
1206 # merge will appear as a normal local file
1221 # modification.
1207 # modification.
1222 f_len = len(self.file(f).read(other))
1208 f_len = len(self.file(f).read(other))
1223 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1209 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1224
1210
1225 remove.sort()
1211 remove.sort()
1226 for f in remove:
1212 for f in remove:
1227 self.ui.note("removing %s\n" % f)
1213 self.ui.note("removing %s\n" % f)
1228 try:
1214 try:
1229 os.unlink(self.wjoin(f))
1215 os.unlink(self.wjoin(f))
1230 except OSError, inst:
1216 except OSError, inst:
1231 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1217 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1232 # try removing directories that might now be empty
1218 # try removing directories that might now be empty
1233 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1219 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1234 except: pass
1220 except: pass
1235 if moddirstate:
1221 if moddirstate:
1236 if branch_merge:
1222 if branch_merge:
1237 self.dirstate.update(remove, 'r')
1223 self.dirstate.update(remove, 'r')
1238 else:
1224 else:
1239 self.dirstate.forget(remove)
1225 self.dirstate.forget(remove)
1240
1226
1241 def merge3(self, fn, my, other):
1227 def merge3(self, fn, my, other):
1242 """perform a 3-way merge in the working directory"""
1228 """perform a 3-way merge in the working directory"""
1243
1229
1244 def temp(prefix, node):
1230 def temp(prefix, node):
1245 pre = "%s~%s." % (os.path.basename(fn), prefix)
1231 pre = "%s~%s." % (os.path.basename(fn), prefix)
1246 (fd, name) = tempfile.mkstemp("", pre)
1232 (fd, name) = tempfile.mkstemp("", pre)
1247 f = os.fdopen(fd, "wb")
1233 f = os.fdopen(fd, "wb")
1248 self.wwrite(fn, fl.read(node), f)
1234 self.wwrite(fn, fl.read(node), f)
1249 f.close()
1235 f.close()
1250 return name
1236 return name
1251
1237
1252 fl = self.file(fn)
1238 fl = self.file(fn)
1253 base = fl.ancestor(my, other)
1239 base = fl.ancestor(my, other)
1254 a = self.wjoin(fn)
1240 a = self.wjoin(fn)
1255 b = temp("base", base)
1241 b = temp("base", base)
1256 c = temp("other", other)
1242 c = temp("other", other)
1257
1243
1258 self.ui.note("resolving %s\n" % fn)
1244 self.ui.note("resolving %s\n" % fn)
1259 self.ui.debug("file %s: other %s ancestor %s\n" %
1245 self.ui.debug("file %s: other %s ancestor %s\n" %
1260 (fn, short(other), short(base)))
1246 (fn, short(other), short(base)))
1261
1247
1262 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1248 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1263 or "hgmerge")
1249 or "hgmerge")
1264 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1250 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1265 if r:
1251 if r:
1266 self.ui.warn("merging %s failed!\n" % fn)
1252 self.ui.warn("merging %s failed!\n" % fn)
1267
1253
1268 os.unlink(b)
1254 os.unlink(b)
1269 os.unlink(c)
1255 os.unlink(c)
1270
1256
1271 def verify(self):
1257 def verify(self):
1272 filelinkrevs = {}
1258 filelinkrevs = {}
1273 filenodes = {}
1259 filenodes = {}
1274 changesets = revisions = files = 0
1260 changesets = revisions = files = 0
1275 errors = 0
1261 errors = 0
1276
1262
1277 seen = {}
1263 seen = {}
1278 self.ui.status("checking changesets\n")
1264 self.ui.status("checking changesets\n")
1279 for i in range(self.changelog.count()):
1265 for i in range(self.changelog.count()):
1280 changesets += 1
1266 changesets += 1
1281 n = self.changelog.node(i)
1267 n = self.changelog.node(i)
1282 if n in seen:
1268 if n in seen:
1283 self.ui.warn("duplicate changeset at revision %d\n" % i)
1269 self.ui.warn("duplicate changeset at revision %d\n" % i)
1284 errors += 1
1270 errors += 1
1285 seen[n] = 1
1271 seen[n] = 1
1286
1272
1287 for p in self.changelog.parents(n):
1273 for p in self.changelog.parents(n):
1288 if p not in self.changelog.nodemap:
1274 if p not in self.changelog.nodemap:
1289 self.ui.warn("changeset %s has unknown parent %s\n" %
1275 self.ui.warn("changeset %s has unknown parent %s\n" %
1290 (short(n), short(p)))
1276 (short(n), short(p)))
1291 errors += 1
1277 errors += 1
1292 try:
1278 try:
1293 changes = self.changelog.read(n)
1279 changes = self.changelog.read(n)
1294 except Exception, inst:
1280 except Exception, inst:
1295 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1281 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1296 errors += 1
1282 errors += 1
1297
1283
1298 for f in changes[3]:
1284 for f in changes[3]:
1299 filelinkrevs.setdefault(f, []).append(i)
1285 filelinkrevs.setdefault(f, []).append(i)
1300
1286
1301 seen = {}
1287 seen = {}
1302 self.ui.status("checking manifests\n")
1288 self.ui.status("checking manifests\n")
1303 for i in range(self.manifest.count()):
1289 for i in range(self.manifest.count()):
1304 n = self.manifest.node(i)
1290 n = self.manifest.node(i)
1305 if n in seen:
1291 if n in seen:
1306 self.ui.warn("duplicate manifest at revision %d\n" % i)
1292 self.ui.warn("duplicate manifest at revision %d\n" % i)
1307 errors += 1
1293 errors += 1
1308 seen[n] = 1
1294 seen[n] = 1
1309
1295
1310 for p in self.manifest.parents(n):
1296 for p in self.manifest.parents(n):
1311 if p not in self.manifest.nodemap:
1297 if p not in self.manifest.nodemap:
1312 self.ui.warn("manifest %s has unknown parent %s\n" %
1298 self.ui.warn("manifest %s has unknown parent %s\n" %
1313 (short(n), short(p)))
1299 (short(n), short(p)))
1314 errors += 1
1300 errors += 1
1315
1301
1316 try:
1302 try:
1317 delta = mdiff.patchtext(self.manifest.delta(n))
1303 delta = mdiff.patchtext(self.manifest.delta(n))
1318 except KeyboardInterrupt:
1304 except KeyboardInterrupt:
1319 self.ui.warn("interrupted")
1305 self.ui.warn("interrupted")
1320 raise
1306 raise
1321 except Exception, inst:
1307 except Exception, inst:
1322 self.ui.warn("unpacking manifest %s: %s\n"
1308 self.ui.warn("unpacking manifest %s: %s\n"
1323 % (short(n), inst))
1309 % (short(n), inst))
1324 errors += 1
1310 errors += 1
1325
1311
1326 ff = [ l.split('\0') for l in delta.splitlines() ]
1312 ff = [ l.split('\0') for l in delta.splitlines() ]
1327 for f, fn in ff:
1313 for f, fn in ff:
1328 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1314 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1329
1315
1330 self.ui.status("crosschecking files in changesets and manifests\n")
1316 self.ui.status("crosschecking files in changesets and manifests\n")
1331 for f in filenodes:
1317 for f in filenodes:
1332 if f not in filelinkrevs:
1318 if f not in filelinkrevs:
1333 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1319 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1334 errors += 1
1320 errors += 1
1335
1321
1336 for f in filelinkrevs:
1322 for f in filelinkrevs:
1337 if f not in filenodes:
1323 if f not in filenodes:
1338 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1324 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1339 errors += 1
1325 errors += 1
1340
1326
1341 self.ui.status("checking files\n")
1327 self.ui.status("checking files\n")
1342 ff = filenodes.keys()
1328 ff = filenodes.keys()
1343 ff.sort()
1329 ff.sort()
1344 for f in ff:
1330 for f in ff:
1345 if f == "/dev/null": continue
1331 if f == "/dev/null": continue
1346 files += 1
1332 files += 1
1347 fl = self.file(f)
1333 fl = self.file(f)
1348 nodes = { nullid: 1 }
1334 nodes = { nullid: 1 }
1349 seen = {}
1335 seen = {}
1350 for i in range(fl.count()):
1336 for i in range(fl.count()):
1351 revisions += 1
1337 revisions += 1
1352 n = fl.node(i)
1338 n = fl.node(i)
1353
1339
1354 if n in seen:
1340 if n in seen:
1355 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1341 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1356 errors += 1
1342 errors += 1
1357
1343
1358 if n not in filenodes[f]:
1344 if n not in filenodes[f]:
1359 self.ui.warn("%s: %d:%s not in manifests\n"
1345 self.ui.warn("%s: %d:%s not in manifests\n"
1360 % (f, i, short(n)))
1346 % (f, i, short(n)))
1361 errors += 1
1347 errors += 1
1362 else:
1348 else:
1363 del filenodes[f][n]
1349 del filenodes[f][n]
1364
1350
1365 flr = fl.linkrev(n)
1351 flr = fl.linkrev(n)
1366 if flr not in filelinkrevs[f]:
1352 if flr not in filelinkrevs[f]:
1367 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1353 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1368 % (f, short(n), fl.linkrev(n)))
1354 % (f, short(n), fl.linkrev(n)))
1369 errors += 1
1355 errors += 1
1370 else:
1356 else:
1371 filelinkrevs[f].remove(flr)
1357 filelinkrevs[f].remove(flr)
1372
1358
1373 # verify contents
1359 # verify contents
1374 try:
1360 try:
1375 t = fl.read(n)
1361 t = fl.read(n)
1376 except Exception, inst:
1362 except Exception, inst:
1377 self.ui.warn("unpacking file %s %s: %s\n"
1363 self.ui.warn("unpacking file %s %s: %s\n"
1378 % (f, short(n), inst))
1364 % (f, short(n), inst))
1379 errors += 1
1365 errors += 1
1380
1366
1381 # verify parents
1367 # verify parents
1382 (p1, p2) = fl.parents(n)
1368 (p1, p2) = fl.parents(n)
1383 if p1 not in nodes:
1369 if p1 not in nodes:
1384 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1370 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1385 (f, short(n), short(p1)))
1371 (f, short(n), short(p1)))
1386 errors += 1
1372 errors += 1
1387 if p2 not in nodes:
1373 if p2 not in nodes:
1388 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1374 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1389 (f, short(n), short(p2)))
1375 (f, short(n), short(p2)))
1390 errors += 1
1376 errors += 1
1391 nodes[n] = 1
1377 nodes[n] = 1
1392
1378
1393 # cross-check
1379 # cross-check
1394 for node in filenodes[f]:
1380 for node in filenodes[f]:
1395 self.ui.warn("node %s in manifests not in %s\n"
1381 self.ui.warn("node %s in manifests not in %s\n"
1396 % (hex(node), f))
1382 % (hex(node), f))
1397 errors += 1
1383 errors += 1
1398
1384
1399 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1385 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1400 (files, changesets, revisions))
1386 (files, changesets, revisions))
1401
1387
1402 if errors:
1388 if errors:
1403 self.ui.warn("%d integrity errors encountered!\n" % errors)
1389 self.ui.warn("%d integrity errors encountered!\n" % errors)
1404 return 1
1390 return 1
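
The exec-bit merging in the update code above relies on the bitwise rule mode = ((a^b) | (a^c)) ^ a, where a, b and c are the ancestor, working-directory and remote flags. A minimal sketch of why "if we changed or they changed, change in merge" falls out of that expression; the helper name here is made up for illustration:

    def merge_exec_bit(a, b, c):
        # flip the ancestor's bit whenever either side differs from it
        return ((a ^ b) | (a ^ c)) ^ a

    # if the local side changed the bit, its value wins; otherwise the
    # remote value is taken (which equals the ancestor value if unchanged)
    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                expected = c
                if b != a:
                    expected = b
                assert merge_exec_bit(a, b, c) == expected
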
@@ -1,354 +1,422 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
13 import os, errno
14 from demandload import *
14 from demandload import *
15 demandload(globals(), "re")
15 demandload(globals(), "re cStringIO")
16
16
17 def binary(s):
17 def binary(s):
18 """return true if a string is binary data using diff's heuristic"""
18 """return true if a string is binary data using diff's heuristic"""
19 if s and '\0' in s[:4096]:
19 if s and '\0' in s[:4096]:
20 return True
20 return True
21 return False
21 return False
22
22
23 def unique(g):
23 def unique(g):
24 """return the unique elements of iterable g"""
24 """return the unique elements of iterable g"""
25 seen = {}
25 seen = {}
26 for f in g:
26 for f in g:
27 if f not in seen:
27 if f not in seen:
28 seen[f] = 1
28 seen[f] = 1
29 yield f
29 yield f
30
30
31 class Abort(Exception):
31 class Abort(Exception):
32 """Raised if a command needs to print an error and exit."""
32 """Raised if a command needs to print an error and exit."""
33
33
34 def always(fn): return True
34 def always(fn): return True
35 def never(fn): return False
35 def never(fn): return False
36
36
37 def globre(pat, head='^', tail='$'):
37 def globre(pat, head='^', tail='$'):
38 "convert a glob pattern into a regexp"
38 "convert a glob pattern into a regexp"
39 i, n = 0, len(pat)
39 i, n = 0, len(pat)
40 res = ''
40 res = ''
41 group = False
41 group = False
42 def peek(): return i < n and pat[i]
42 def peek(): return i < n and pat[i]
43 while i < n:
43 while i < n:
44 c = pat[i]
44 c = pat[i]
45 i = i+1
45 i = i+1
46 if c == '*':
46 if c == '*':
47 if peek() == '*':
47 if peek() == '*':
48 i += 1
48 i += 1
49 res += '.*'
49 res += '.*'
50 else:
50 else:
51 res += '[^/]*'
51 res += '[^/]*'
52 elif c == '?':
52 elif c == '?':
53 res += '.'
53 res += '.'
54 elif c == '[':
54 elif c == '[':
55 j = i
55 j = i
56 if j < n and pat[j] in '!]':
56 if j < n and pat[j] in '!]':
57 j += 1
57 j += 1
58 while j < n and pat[j] != ']':
58 while j < n and pat[j] != ']':
59 j += 1
59 j += 1
60 if j >= n:
60 if j >= n:
61 res += '\\['
61 res += '\\['
62 else:
62 else:
63 stuff = pat[i:j].replace('\\','\\\\')
63 stuff = pat[i:j].replace('\\','\\\\')
64 i = j + 1
64 i = j + 1
65 if stuff[0] == '!':
65 if stuff[0] == '!':
66 stuff = '^' + stuff[1:]
66 stuff = '^' + stuff[1:]
67 elif stuff[0] == '^':
67 elif stuff[0] == '^':
68 stuff = '\\' + stuff
68 stuff = '\\' + stuff
69 res = '%s[%s]' % (res, stuff)
69 res = '%s[%s]' % (res, stuff)
70 elif c == '{':
70 elif c == '{':
71 group = True
71 group = True
72 res += '(?:'
72 res += '(?:'
73 elif c == '}' and group:
73 elif c == '}' and group:
74 res += ')'
74 res += ')'
75 group = False
75 group = False
76 elif c == ',' and group:
76 elif c == ',' and group:
77 res += '|'
77 res += '|'
78 else:
78 else:
79 res += re.escape(c)
79 res += re.escape(c)
80 return head + res + tail
80 return head + res + tail
81
81
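
For reference, the patterns produced by globre behave like shell globs where '*' stays within one path component, '**' may cross directory separators, and '{a,b}' becomes an alternation. A quick illustrative check (interpreter session, assuming re is imported):

    >>> bool(re.compile(globre('**/*.c')).match('src/main.c'))
    True
    >>> bool(re.compile(globre('*.c')).match('src/main.c'))
    False
    >>> bool(re.compile(globre('foo/{a,b}')).match('foo/a'))
    True
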
82 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
82 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
83
83
84 def pathto(n1, n2):
84 def pathto(n1, n2):
85 '''return the relative path from one place to another.
85 '''return the relative path from one place to another.
86 this returns a path in the form used by the local filesystem, not hg.'''
86 this returns a path in the form used by the local filesystem, not hg.'''
87 if not n1: return localpath(n2)
87 if not n1: return localpath(n2)
88 a, b = n1.split('/'), n2.split('/')
88 a, b = n1.split('/'), n2.split('/')
89 a.reverse(), b.reverse()
89 a.reverse(), b.reverse()
90 while a and b and a[-1] == b[-1]:
90 while a and b and a[-1] == b[-1]:
91 a.pop(), b.pop()
91 a.pop(), b.pop()
92 b.reverse()
92 b.reverse()
93 return os.sep.join((['..'] * len(a)) + b)
93 return os.sep.join((['..'] * len(a)) + b)
94
94
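
A quick illustration of pathto on a POSIX-style filesystem (os.sep == '/'):

    >>> pathto('a/b', 'a/c/d')
    '../c/d'
    >>> pathto('', 'x/y')    # an empty n1 just localizes n2
    'x/y'
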
95 def canonpath(root, cwd, myname):
95 def canonpath(root, cwd, myname):
96 """return the canonical path of myname, given cwd and root"""
96 """return the canonical path of myname, given cwd and root"""
97 rootsep = root + os.sep
97 rootsep = root + os.sep
98 name = myname
98 name = myname
99 if not name.startswith(os.sep):
99 if not name.startswith(os.sep):
100 name = os.path.join(root, cwd, name)
100 name = os.path.join(root, cwd, name)
101 name = os.path.normpath(name)
101 name = os.path.normpath(name)
102 if name.startswith(rootsep):
102 if name.startswith(rootsep):
103 return pconvert(name[len(rootsep):])
103 return pconvert(name[len(rootsep):])
104 elif name == root:
104 elif name == root:
105 return ''
105 return ''
106 else:
106 else:
107 raise Abort('%s not under root' % myname)
107 raise Abort('%s not under root' % myname)
108
108
109 def matcher(canonroot, cwd, names, inc, exc, head=''):
109 def matcher(canonroot, cwd, names, inc, exc, head=''):
110 """build a function to match a set of file patterns
110 """build a function to match a set of file patterns
111
111
112 arguments:
112 arguments:
113 canonroot - the canonical root of the tree you're matching against
113 canonroot - the canonical root of the tree you're matching against
114 cwd - the current working directory, if relevant
114 cwd - the current working directory, if relevant
115 names - patterns to find
115 names - patterns to find
116 inc - patterns to include
116 inc - patterns to include
117 exc - patterns to exclude
117 exc - patterns to exclude
118 head - a regex to prepend to patterns to control whether a match is rooted
118 head - a regex to prepend to patterns to control whether a match is rooted
119
119
120 a pattern is one of:
120 a pattern is one of:
121 're:<regex>'
121 're:<regex>'
122 'glob:<shellglob>'
122 'glob:<shellglob>'
123 'path:<explicit path>'
123 'path:<explicit path>'
124 'relpath:<relative path>'
124 'relpath:<relative path>'
125 '<relative path>'
125 '<relative path>'
126
126
127 returns:
127 returns:
128 a 3-tuple containing
128 a 3-tuple containing
129 - list of explicit non-pattern names passed in
129 - list of explicit non-pattern names passed in
130 - a bool match(filename) function
130 - a bool match(filename) function
131 - a bool indicating if any patterns were passed in
131 - a bool indicating if any patterns were passed in
132
132
133 todo:
133 todo:
134 make head regex a rooted bool
134 make head regex a rooted bool
135 """
135 """
136
136
137 def patkind(name):
137 def patkind(name):
138 for prefix in 're:', 'glob:', 'path:', 'relpath:':
138 for prefix in 're:', 'glob:', 'path:', 'relpath:':
139 if name.startswith(prefix): return name.split(':', 1)
139 if name.startswith(prefix): return name.split(':', 1)
140 for c in name:
140 for c in name:
141 if c in _globchars: return 'glob', name
141 if c in _globchars: return 'glob', name
142 return 'relpath', name
142 return 'relpath', name
143
143
144 def regex(kind, name, tail):
144 def regex(kind, name, tail):
145 '''convert a pattern into a regular expression'''
145 '''convert a pattern into a regular expression'''
146 if kind == 're':
146 if kind == 're':
147 return name
147 return name
148 elif kind == 'path':
148 elif kind == 'path':
149 return '^' + re.escape(name) + '(?:/|$)'
149 return '^' + re.escape(name) + '(?:/|$)'
150 elif kind == 'relpath':
150 elif kind == 'relpath':
151 return head + re.escape(name) + tail
151 return head + re.escape(name) + tail
152 return head + globre(name, '', tail)
152 return head + globre(name, '', tail)
153
153
154 def matchfn(pats, tail):
154 def matchfn(pats, tail):
155 """build a matching function from a set of patterns"""
155 """build a matching function from a set of patterns"""
156 if pats:
156 if pats:
157 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
157 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
158 return re.compile(pat).match
158 return re.compile(pat).match
159
159
160 def globprefix(pat):
160 def globprefix(pat):
161 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
161 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
162 root = []
162 root = []
163 for p in pat.split(os.sep):
163 for p in pat.split(os.sep):
164 if patkind(p)[0] == 'glob': break
164 if patkind(p)[0] == 'glob': break
165 root.append(p)
165 root.append(p)
166 return '/'.join(root)
166 return '/'.join(root)
167
167
168 pats = []
168 pats = []
169 files = []
169 files = []
170 roots = []
170 roots = []
171 for kind, name in map(patkind, names):
171 for kind, name in map(patkind, names):
172 if kind in ('glob', 'relpath'):
172 if kind in ('glob', 'relpath'):
173 name = canonpath(canonroot, cwd, name)
173 name = canonpath(canonroot, cwd, name)
174 if name == '':
174 if name == '':
175 kind, name = 'glob', '**'
175 kind, name = 'glob', '**'
176 if kind in ('glob', 'path', 're'):
176 if kind in ('glob', 'path', 're'):
177 pats.append((kind, name))
177 pats.append((kind, name))
178 if kind == 'glob':
178 if kind == 'glob':
179 root = globprefix(name)
179 root = globprefix(name)
180 if root: roots.append(root)
180 if root: roots.append(root)
181 elif kind == 'relpath':
181 elif kind == 'relpath':
182 files.append((kind, name))
182 files.append((kind, name))
183 roots.append(name)
183 roots.append(name)
184
184
185 patmatch = matchfn(pats, '$') or always
185 patmatch = matchfn(pats, '$') or always
186 filematch = matchfn(files, '(?:/|$)') or always
186 filematch = matchfn(files, '(?:/|$)') or always
187 incmatch = always
187 incmatch = always
188 if inc:
188 if inc:
189 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
189 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
190 excmatch = lambda fn: False
190 excmatch = lambda fn: False
191 if exc:
191 if exc:
192 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
192 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
193
193
194 return (roots,
194 return (roots,
195 lambda fn: (incmatch(fn) and not excmatch(fn) and
195 lambda fn: (incmatch(fn) and not excmatch(fn) and
196 (fn.endswith('/') or
196 (fn.endswith('/') or
197 (not pats and not files) or
197 (not pats and not files) or
198 (pats and patmatch(fn)) or
198 (pats and patmatch(fn)) or
199 (files and filematch(fn)))),
199 (files and filematch(fn)))),
200 (inc or exc or (pats and pats != [('glob', '**')])) and True)
200 (inc or exc or (pats and pats != [('glob', '**')])) and True)
201
201
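
A minimal sketch of how matcher is typically consumed; the repository root, patterns and file names below are made up for illustration, and cwd is given relative to the canonical root:

    roots, match, anypats = matcher('/repo', '', ['glob:*.py', 'path:docs'],
                                    inc=[], exc=['re:.*~$'])
    match('setup.py')          # truthy: matches the glob pattern
    match('docs/index.txt')    # truthy: under the explicit 'path:' prefix
    match('setup.py~')         # falsy: rejected by the 're:' exclude
    # anypats is True here because patterns and excludes were supplied

Callers such as the command layer use roots to limit directory walks, match to filter individual files, and anypats to tell whether the user gave patterns rather than exact file names.
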
202 def system(cmd, errprefix=None):
202 def system(cmd, errprefix=None):
203 """execute a shell command that must succeed"""
203 """execute a shell command that must succeed"""
204 rc = os.system(cmd)
204 rc = os.system(cmd)
205 if rc:
205 if rc:
206 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
206 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
207 explain_exit(rc)[0])
207 explain_exit(rc)[0])
208 if errprefix:
208 if errprefix:
209 errmsg = "%s: %s" % (errprefix, errmsg)
209 errmsg = "%s: %s" % (errprefix, errmsg)
210 raise Abort(errmsg)
210 raise Abort(errmsg)
211
211
212 def rename(src, dst):
212 def rename(src, dst):
213 """forcibly rename a file"""
213 """forcibly rename a file"""
214 try:
214 try:
215 os.rename(src, dst)
215 os.rename(src, dst)
216 except:
216 except:
217 os.unlink(dst)
217 os.unlink(dst)
218 os.rename(src, dst)
218 os.rename(src, dst)
219
219
220 def copytree(src, dst, copyfile):
220 def copytree(src, dst, copyfile):
221 """Copy a directory tree, files are copied using 'copyfile'."""
221 """Copy a directory tree, files are copied using 'copyfile'."""
222 names = os.listdir(src)
222 names = os.listdir(src)
223 os.mkdir(dst)
223 os.mkdir(dst)
224
224
225 for name in names:
225 for name in names:
226 srcname = os.path.join(src, name)
226 srcname = os.path.join(src, name)
227 dstname = os.path.join(dst, name)
227 dstname = os.path.join(dst, name)
228 if os.path.isdir(srcname):
228 if os.path.isdir(srcname):
229 copytree(srcname, dstname, copyfile)
229 copytree(srcname, dstname, copyfile)
230 elif os.path.isfile(srcname):
230 elif os.path.isfile(srcname):
231 copyfile(srcname, dstname)
231 copyfile(srcname, dstname)
232 else:
232 else:
233 pass
233 pass
234
234
235 def opener(base):
235 def opener(base):
236 """
236 """
237 return a function that opens files relative to base
237 return a function that opens files relative to base
238
238
239 this function is used to hide the details of COW semantics and
239 this function is used to hide the details of COW semantics and
240 remote file access from higher level code.
240 remote file access from higher level code.
241 """
241 """
242 p = base
242 p = base
243 def o(path, mode="r"):
243 def o(path, mode="r"):
244 f = os.path.join(p, path)
244 f = os.path.join(p, path)
245
245
246 mode += "b" # for that other OS
246 mode += "b" # for that other OS
247
247
248 if mode[0] != "r":
248 if mode[0] != "r":
249 try:
249 try:
250 s = os.stat(f)
250 s = os.stat(f)
251 except OSError:
251 except OSError:
252 d = os.path.dirname(f)
252 d = os.path.dirname(f)
253 if not os.path.isdir(d):
253 if not os.path.isdir(d):
254 os.makedirs(d)
254 os.makedirs(d)
255 else:
255 else:
256 if s.st_nlink > 1:
256 if s.st_nlink > 1:
257 file(f + ".tmp", "wb").write(file(f, "rb").read())
257 file(f + ".tmp", "wb").write(file(f, "rb").read())
258 rename(f+".tmp", f)
258 rename(f+".tmp", f)
259
259
260 return file(f, mode)
260 return file(f, mode)
261
261
262 return o
262 return o
263
263
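
A small usage sketch for opener; the repository paths here are hypothetical:

    opn = opener('/repo/.hg')
    f = opn('data/foo.i', 'a')     # 'b' is appended, parent dirs are created
    f.write('\0' * 16)
    f.close()
    text = opn('00changelog.i').read()   # plain binary read

When a file opened for writing has a link count above one, the function first copies it and renames the copy into place, which is what gives hardlinked clones their copy-on-write behaviour.
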
264 def _makelock_file(info, pathname):
264 def _makelock_file(info, pathname):
265 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
265 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
266 os.write(ld, info)
266 os.write(ld, info)
267 os.close(ld)
267 os.close(ld)
268
268
269 def _readlock_file(pathname):
269 def _readlock_file(pathname):
270 return file(pathname).read()
270 return file(pathname).read()
271
271
272 # Platform specific variants
272 # Platform specific variants
273 if os.name == 'nt':
273 if os.name == 'nt':
274 nulldev = 'NUL:'
274 nulldev = 'NUL:'
275
275
276 def is_exec(f, last):
276 def is_exec(f, last):
277 return last
277 return last
278
278
279 def set_exec(f, mode):
279 def set_exec(f, mode):
280 pass
280 pass
281
281
282 def pconvert(path):
282 def pconvert(path):
283 return path.replace("\\", "/")
283 return path.replace("\\", "/")
284
284
285 def localpath(path):
285 def localpath(path):
286 return path.replace('/', '\\')
286 return path.replace('/', '\\')
287
287
288 def normpath(path):
288 def normpath(path):
289 return pconvert(os.path.normpath(path))
289 return pconvert(os.path.normpath(path))
290
290
291 makelock = _makelock_file
291 makelock = _makelock_file
292 readlock = _readlock_file
292 readlock = _readlock_file
293
293
294 def explain_exit(code):
294 def explain_exit(code):
295 return "exited with status %d" % code, code
295 return "exited with status %d" % code, code
296
296
297 else:
297 else:
298 nulldev = '/dev/null'
298 nulldev = '/dev/null'
299
299
300 def is_exec(f, last):
300 def is_exec(f, last):
301 """check whether a file is executable"""
301 """check whether a file is executable"""
302 return (os.stat(f).st_mode & 0100 != 0)
302 return (os.stat(f).st_mode & 0100 != 0)
303
303
304 def set_exec(f, mode):
304 def set_exec(f, mode):
305 s = os.stat(f).st_mode
305 s = os.stat(f).st_mode
306 if (s & 0100 != 0) == mode:
306 if (s & 0100 != 0) == mode:
307 return
307 return
308 if mode:
308 if mode:
309 # Turn on +x for every +r bit when making a file executable
309 # Turn on +x for every +r bit when making a file executable
310 # and obey umask.
310 # and obey umask.
311 umask = os.umask(0)
311 umask = os.umask(0)
312 os.umask(umask)
312 os.umask(umask)
313 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
313 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
314 else:
314 else:
315 os.chmod(f, s & 0666)
315 os.chmod(f, s & 0666)
316
316
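
Worked example for the chmod expression above; & binds tighter than |, so it reads s | (((s & 0444) >> 2) & ~umask). With a typical mode and umask:

    s, umask = 0644, 022
    (s & 0444) >> 2                   # 0111: one +x bit for every +r bit
    ((s & 0444) >> 2) & ~umask        # 0111: this umask masks none of them
    s | ((s & 0444) >> 2) & ~umask    # 0755: the mode that gets applied
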
317 def pconvert(path):
317 def pconvert(path):
318 return path
318 return path
319
319
320 def localpath(path):
320 def localpath(path):
321 return path
321 return path
322
322
323 normpath = os.path.normpath
323 normpath = os.path.normpath
324
324
325 def makelock(info, pathname):
325 def makelock(info, pathname):
326 try:
326 try:
327 os.symlink(info, pathname)
327 os.symlink(info, pathname)
328 except OSError, why:
328 except OSError, why:
329 if why.errno == errno.EEXIST:
329 if why.errno == errno.EEXIST:
330 raise
330 raise
331 else:
331 else:
332 _makelock_file(info, pathname)
332 _makelock_file(info, pathname)
333
333
334 def readlock(pathname):
334 def readlock(pathname):
335 try:
335 try:
336 return os.readlink(pathname)
336 return os.readlink(pathname)
337 except OSError, why:
337 except OSError, why:
338 if why.errno == errno.EINVAL:
338 if why.errno == errno.EINVAL:
339 return _readlock_file(pathname)
339 return _readlock_file(pathname)
340 else:
340 else:
341 raise
341 raise
342
342
343 def explain_exit(code):
343 def explain_exit(code):
344 """return a 2-tuple (desc, code) describing a process's status"""
344 """return a 2-tuple (desc, code) describing a process's status"""
345 if os.WIFEXITED(code):
345 if os.WIFEXITED(code):
346 val = os.WEXITSTATUS(code)
346 val = os.WEXITSTATUS(code)
347 return "exited with status %d" % val, val
347 return "exited with status %d" % val, val
348 elif os.WIFSIGNALED(code):
348 elif os.WIFSIGNALED(code):
349 val = os.WTERMSIG(code)
349 val = os.WTERMSIG(code)
350 return "killed by signal %d" % val, val
350 return "killed by signal %d" % val, val
351 elif os.WIFSTOPPED(code):
351 elif os.WIFSTOPPED(code):
352 val = os.WSTOPSIG(code)
352 val = os.WSTOPSIG(code)
353 return "stopped by signal %d" % val, val
353 return "stopped by signal %d" % val, val
354 raise ValueError("invalid exit code")
354 raise ValueError("invalid exit code")
355
356 class chunkbuffer(object):
357 """Allow arbitrary sized chunks of data to be efficiently read from an
358 iterator over chunks of arbitrary size."""
359 def __init__(self, in_iter, targetsize = 2**16):
360 """in_iter is the iterator that's iterating over the input chunks.
361 targetsize is how big a buffer to try to maintain."""
362 self.in_iter = iter(in_iter)
363 self.buf = ''
364 targetsize = int(targetsize)
365 if targetsize <= 0:
366 raise ValueError("targetsize must be greater than 0, was %d" % targetsize)
367 self.targetsize = targetsize
368 self.iterempty = False
369 def fillbuf(self):
370 """x.fillbuf()
371
372 Ignore the target size, and just read every chunk from the iterator
373 until it's empty."""
374 if not self.iterempty:
375 collector = cStringIO.StringIO()
376 collector.write(self.buf)
377 for ch in self.in_iter:
378 collector.write(ch)
379 self.buf = collector.getvalue()
380 collector.close()
381 collector = None
382 self.iterempty = True
383
384 def read(self, l):
385 """x.read(l) -> str
386 Read l bytes of data from the iterator of chunks of data. Returns less
387 than l bytes if the iterator runs dry."""
388 if l > len(self.buf) and not self.iterempty:
389 # Clamp to a multiple of self.targetsize
390 targetsize = self.targetsize * ((l // self.targetsize) + 1)
391 collector = cStringIO.StringIO()
392 collector.write(self.buf)
393 collected = len(self.buf)
394 for chunk in self.in_iter:
395 collector.write(chunk)
396 collected += len(chunk)
397 if collected >= targetsize:
398 break
399 if collected < targetsize:
400 self.iterempty = True
401 self.buf = collector.getvalue()
402 collector.close()
403 collector = None
404 s = self.buf[:l]
405 self.buf = buffer(self.buf, l)
406 return s
407 def __repr__(self):
408 return "<%s.%s targetsize = %u buffered = %u bytes>" % \
409 (self.__class__.__module__, self.__class__.__name__,
410 self.targetsize, len(self.buf))
411
412 def filechunkiter(f, size = 65536):
413 """filechunkiter(file[, size]) -> generator
414
415 Create a generator that produces all the data in the file, size (default
416 65536) bytes at a time. Chunks may be less than size bytes if the
417 chunk is the last chunk in the file, or the file is a socket or some
418 other type of file that sometimes reads less data than is requested."""
419 s = f.read(size)
420 while len(s) > 0:
421 yield s
422 s = f.read(size)
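
A short sketch of how the two new helpers compose: filechunkiter slices a file into fixed-size chunks, and chunkbuffer lets the consumer read back arbitrary amounts regardless of how those chunks were split. The file name is illustrative:

    f = open('changegroup.bin', 'rb')
    buf = chunkbuffer(filechunkiter(f, size=4096))
    header = buf.read(6)        # may span several 4096-byte chunks
    while True:
        piece = buf.read(65536)
        if not piece:
            break
        # consume(piece) ...
    f.close()

read() returns fewer bytes than requested only once the underlying iterator is exhausted, so an empty string is a reliable end-of-stream marker.
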