localrepo: minor opener usage restructuring
Author: mpm@selenic.com
Changeset: r1102:c81d264c (branch: default)
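The restructuring is small: hg.repository() no longer passes the util.opener factory through to localrepository; the class now builds its own openers for the .hg directory and the working directory. A minimal sketch of the pattern, using a hypothetical stand-in for util.opener (the real implementation lives in Mercurial's util module and is not part of this diff):

import os

def opener(base):
    """Hypothetical stand-in for util.opener (not shown in this diff).

    Its uses below -- self.opener("hgrc"), self.opener("journal.dirstate", "w"),
    self.wopener(f, mode) -- suggest a factory returning an open()-like
    callable that resolves relative paths against `base`.
    """
    def o(path, mode="r"):
        return open(os.path.join(base, path), mode)
    return o

# Before this change, hg.repository() passed the factory in:
#     localrepo.localrepository(ui, util.opener, path, create)
# After it, localrepository constructs its own openers from util.opener:
#     self.opener  = util.opener(self.path)   # files under .hg
#     self.wopener = util.opener(self.root)   # files in the working directory

With the openers created inside __init__, callers of repository() no longer need to know about openers at all, which is why hg.py can also drop its "import util".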
@@ -1,29 +1,28 @@
 # hg.py - repository classes for mercurial
 #
 # Copyright 2005 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import util
 from node import *
 from repo import *
 from demandload import *
 demandload(globals(), "localrepo httprepo sshrepo statichttprepo")
 
 def repository(ui, path=None, create=0):
     if path:
         if path.startswith("http://"):
             return httprepo.httprepository(ui, path)
         if path.startswith("https://"):
             return httprepo.httpsrepository(ui, path)
         if path.startswith("hg://"):
             return httprepo.httprepository(
                 ui, path.replace("hg://", "http://"))
         if path.startswith("old-http://"):
             return statichttprepo.statichttprepository(
                 ui, path.replace("old-http://", "http://"))
         if path.startswith("ssh://"):
             return sshrepo.sshrepository(ui, path)
 
-    return localrepo.localrepository(ui, util.opener, path, create)
+    return localrepo.localrepository(ui, path, create)
@@ -1,1400 +1,1400 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock transaction tempfile stat mdiff")
12 demandload(globals(), "re lock transaction tempfile stat mdiff")
13
13
14 class localrepository:
14 class localrepository:
15 def __init__(self, ui, opener, path=None, create=0):
15 def __init__(self, ui, path=None, create=0):
16 if not path:
16 if not path:
17 p = os.getcwd()
17 p = os.getcwd()
18 while not os.path.isdir(os.path.join(p, ".hg")):
18 while not os.path.isdir(os.path.join(p, ".hg")):
19 oldp = p
19 oldp = p
20 p = os.path.dirname(p)
20 p = os.path.dirname(p)
21 if p == oldp: raise repo.RepoError("no repo found")
21 if p == oldp: raise repo.RepoError("no repo found")
22 path = p
22 path = p
23 self.path = os.path.join(path, ".hg")
23 self.path = os.path.join(path, ".hg")
24
24
25 if not create and not os.path.isdir(self.path):
25 if not create and not os.path.isdir(self.path):
26 raise repo.RepoError("repository %s not found" % self.path)
26 raise repo.RepoError("repository %s not found" % self.path)
27
27
28 self.root = os.path.abspath(path)
28 self.root = os.path.abspath(path)
29 self.ui = ui
29 self.ui = ui
30
30
31 if create:
31 if create:
32 os.mkdir(self.path)
32 os.mkdir(self.path)
33 os.mkdir(self.join("data"))
33 os.mkdir(self.join("data"))
34
34
35 self.opener = opener(self.path)
35 self.opener = util.opener(self.path)
36 self.wopener = opener(self.root)
36 self.wopener = util.opener(self.root)
37 self.manifest = manifest.manifest(self.opener)
37 self.manifest = manifest.manifest(self.opener)
38 self.changelog = changelog.changelog(self.opener)
38 self.changelog = changelog.changelog(self.opener)
39 self.tagscache = None
39 self.tagscache = None
40 self.nodetagscache = None
40 self.nodetagscache = None
41
41
42 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
42 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
43 try:
43 try:
44 self.ui.readconfig(self.opener("hgrc"))
44 self.ui.readconfig(self.opener("hgrc"))
45 except IOError: pass
45 except IOError: pass
46
46
47 def hook(self, name, **args):
47 def hook(self, name, **args):
48 s = self.ui.config("hooks", name)
48 s = self.ui.config("hooks", name)
49 if s:
49 if s:
50 self.ui.note("running hook %s: %s\n" % (name, s))
50 self.ui.note("running hook %s: %s\n" % (name, s))
51 old = {}
51 old = {}
52 for k, v in args.items():
52 for k, v in args.items():
53 k = k.upper()
53 k = k.upper()
54 old[k] = os.environ.get(k, None)
54 old[k] = os.environ.get(k, None)
55 os.environ[k] = v
55 os.environ[k] = v
56
56
57 r = os.system(s)
57 r = os.system(s)
58
58
59 for k, v in old.items():
59 for k, v in old.items():
60 if v != None:
60 if v != None:
61 os.environ[k] = v
61 os.environ[k] = v
62 else:
62 else:
63 del os.environ[k]
63 del os.environ[k]
64
64
65 if r:
65 if r:
66 self.ui.warn("abort: %s hook failed with status %d!\n" %
66 self.ui.warn("abort: %s hook failed with status %d!\n" %
67 (name, r))
67 (name, r))
68 return False
68 return False
69 return True
69 return True
70
70
71 def tags(self):
71 def tags(self):
72 '''return a mapping of tag to node'''
72 '''return a mapping of tag to node'''
73 if not self.tagscache:
73 if not self.tagscache:
74 self.tagscache = {}
74 self.tagscache = {}
75 def addtag(self, k, n):
75 def addtag(self, k, n):
76 try:
76 try:
77 bin_n = bin(n)
77 bin_n = bin(n)
78 except TypeError:
78 except TypeError:
79 bin_n = ''
79 bin_n = ''
80 self.tagscache[k.strip()] = bin_n
80 self.tagscache[k.strip()] = bin_n
81
81
82 try:
82 try:
83 # read each head of the tags file, ending with the tip
83 # read each head of the tags file, ending with the tip
84 # and add each tag found to the map, with "newer" ones
84 # and add each tag found to the map, with "newer" ones
85 # taking precedence
85 # taking precedence
86 fl = self.file(".hgtags")
86 fl = self.file(".hgtags")
87 h = fl.heads()
87 h = fl.heads()
88 h.reverse()
88 h.reverse()
89 for r in h:
89 for r in h:
90 for l in fl.read(r).splitlines():
90 for l in fl.read(r).splitlines():
91 if l:
91 if l:
92 n, k = l.split(" ", 1)
92 n, k = l.split(" ", 1)
93 addtag(self, k, n)
93 addtag(self, k, n)
94 except KeyError:
94 except KeyError:
95 pass
95 pass
96
96
97 try:
97 try:
98 f = self.opener("localtags")
98 f = self.opener("localtags")
99 for l in f:
99 for l in f:
100 n, k = l.split(" ", 1)
100 n, k = l.split(" ", 1)
101 addtag(self, k, n)
101 addtag(self, k, n)
102 except IOError:
102 except IOError:
103 pass
103 pass
104
104
105 self.tagscache['tip'] = self.changelog.tip()
105 self.tagscache['tip'] = self.changelog.tip()
106
106
107 return self.tagscache
107 return self.tagscache
108
108
109 def tagslist(self):
109 def tagslist(self):
110 '''return a list of tags ordered by revision'''
110 '''return a list of tags ordered by revision'''
111 l = []
111 l = []
112 for t, n in self.tags().items():
112 for t, n in self.tags().items():
113 try:
113 try:
114 r = self.changelog.rev(n)
114 r = self.changelog.rev(n)
115 except:
115 except:
116 r = -2 # sort to the beginning of the list if unknown
116 r = -2 # sort to the beginning of the list if unknown
117 l.append((r,t,n))
117 l.append((r,t,n))
118 l.sort()
118 l.sort()
119 return [(t,n) for r,t,n in l]
119 return [(t,n) for r,t,n in l]
120
120
121 def nodetags(self, node):
121 def nodetags(self, node):
122 '''return the tags associated with a node'''
122 '''return the tags associated with a node'''
123 if not self.nodetagscache:
123 if not self.nodetagscache:
124 self.nodetagscache = {}
124 self.nodetagscache = {}
125 for t,n in self.tags().items():
125 for t,n in self.tags().items():
126 self.nodetagscache.setdefault(n,[]).append(t)
126 self.nodetagscache.setdefault(n,[]).append(t)
127 return self.nodetagscache.get(node, [])
127 return self.nodetagscache.get(node, [])
128
128
129 def lookup(self, key):
129 def lookup(self, key):
130 try:
130 try:
131 return self.tags()[key]
131 return self.tags()[key]
132 except KeyError:
132 except KeyError:
133 try:
133 try:
134 return self.changelog.lookup(key)
134 return self.changelog.lookup(key)
135 except:
135 except:
136 raise repo.RepoError("unknown revision '%s'" % key)
136 raise repo.RepoError("unknown revision '%s'" % key)
137
137
138 def dev(self):
138 def dev(self):
139 return os.stat(self.path).st_dev
139 return os.stat(self.path).st_dev
140
140
141 def local(self):
141 def local(self):
142 return True
142 return True
143
143
144 def join(self, f):
144 def join(self, f):
145 return os.path.join(self.path, f)
145 return os.path.join(self.path, f)
146
146
147 def wjoin(self, f):
147 def wjoin(self, f):
148 return os.path.join(self.root, f)
148 return os.path.join(self.root, f)
149
149
150 def file(self, f):
150 def file(self, f):
151 if f[0] == '/': f = f[1:]
151 if f[0] == '/': f = f[1:]
152 return filelog.filelog(self.opener, f)
152 return filelog.filelog(self.opener, f)
153
153
154 def getcwd(self):
154 def getcwd(self):
155 return self.dirstate.getcwd()
155 return self.dirstate.getcwd()
156
156
157 def wfile(self, f, mode='r'):
157 def wfile(self, f, mode='r'):
158 return self.wopener(f, mode)
158 return self.wopener(f, mode)
159
159
160 def wread(self, filename):
160 def wread(self, filename):
161 return self.wopener(filename, 'r').read()
161 return self.wopener(filename, 'r').read()
162
162
163 def wwrite(self, filename, data, fd=None):
163 def wwrite(self, filename, data, fd=None):
164 if fd:
164 if fd:
165 return fd.write(data)
165 return fd.write(data)
166 return self.wopener(filename, 'w').write(data)
166 return self.wopener(filename, 'w').write(data)
167
167
168 def transaction(self):
168 def transaction(self):
169 # save dirstate for undo
169 # save dirstate for undo
170 try:
170 try:
171 ds = self.opener("dirstate").read()
171 ds = self.opener("dirstate").read()
172 except IOError:
172 except IOError:
173 ds = ""
173 ds = ""
174 self.opener("journal.dirstate", "w").write(ds)
174 self.opener("journal.dirstate", "w").write(ds)
175
175
176 def after():
176 def after():
177 util.rename(self.join("journal"), self.join("undo"))
177 util.rename(self.join("journal"), self.join("undo"))
178 util.rename(self.join("journal.dirstate"),
178 util.rename(self.join("journal.dirstate"),
179 self.join("undo.dirstate"))
179 self.join("undo.dirstate"))
180
180
181 return transaction.transaction(self.ui.warn, self.opener,
181 return transaction.transaction(self.ui.warn, self.opener,
182 self.join("journal"), after)
182 self.join("journal"), after)
183
183
184 def recover(self):
184 def recover(self):
185 lock = self.lock()
185 lock = self.lock()
186 if os.path.exists(self.join("journal")):
186 if os.path.exists(self.join("journal")):
187 self.ui.status("rolling back interrupted transaction\n")
187 self.ui.status("rolling back interrupted transaction\n")
188 return transaction.rollback(self.opener, self.join("journal"))
188 return transaction.rollback(self.opener, self.join("journal"))
189 else:
189 else:
190 self.ui.warn("no interrupted transaction available\n")
190 self.ui.warn("no interrupted transaction available\n")
191
191
192 def undo(self):
192 def undo(self):
193 lock = self.lock()
193 lock = self.lock()
194 if os.path.exists(self.join("undo")):
194 if os.path.exists(self.join("undo")):
195 self.ui.status("rolling back last transaction\n")
195 self.ui.status("rolling back last transaction\n")
196 transaction.rollback(self.opener, self.join("undo"))
196 transaction.rollback(self.opener, self.join("undo"))
197 self.dirstate = None
197 self.dirstate = None
198 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
198 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
199 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
199 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
200 else:
200 else:
201 self.ui.warn("no undo information available\n")
201 self.ui.warn("no undo information available\n")
202
202
203 def lock(self, wait=1):
203 def lock(self, wait=1):
204 try:
204 try:
205 return lock.lock(self.join("lock"), 0)
205 return lock.lock(self.join("lock"), 0)
206 except lock.LockHeld, inst:
206 except lock.LockHeld, inst:
207 if wait:
207 if wait:
208 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
208 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
209 return lock.lock(self.join("lock"), wait)
209 return lock.lock(self.join("lock"), wait)
210 raise inst
210 raise inst
211
211
212 def rawcommit(self, files, text, user, date, p1=None, p2=None):
212 def rawcommit(self, files, text, user, date, p1=None, p2=None):
213 orig_parent = self.dirstate.parents()[0] or nullid
213 orig_parent = self.dirstate.parents()[0] or nullid
214 p1 = p1 or self.dirstate.parents()[0] or nullid
214 p1 = p1 or self.dirstate.parents()[0] or nullid
215 p2 = p2 or self.dirstate.parents()[1] or nullid
215 p2 = p2 or self.dirstate.parents()[1] or nullid
216 c1 = self.changelog.read(p1)
216 c1 = self.changelog.read(p1)
217 c2 = self.changelog.read(p2)
217 c2 = self.changelog.read(p2)
218 m1 = self.manifest.read(c1[0])
218 m1 = self.manifest.read(c1[0])
219 mf1 = self.manifest.readflags(c1[0])
219 mf1 = self.manifest.readflags(c1[0])
220 m2 = self.manifest.read(c2[0])
220 m2 = self.manifest.read(c2[0])
221 changed = []
221 changed = []
222
222
223 if orig_parent == p1:
223 if orig_parent == p1:
224 update_dirstate = 1
224 update_dirstate = 1
225 else:
225 else:
226 update_dirstate = 0
226 update_dirstate = 0
227
227
228 tr = self.transaction()
228 tr = self.transaction()
229 mm = m1.copy()
229 mm = m1.copy()
230 mfm = mf1.copy()
230 mfm = mf1.copy()
231 linkrev = self.changelog.count()
231 linkrev = self.changelog.count()
232 for f in files:
232 for f in files:
233 try:
233 try:
234 t = self.wread(f)
234 t = self.wread(f)
235 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
235 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
236 r = self.file(f)
236 r = self.file(f)
237 mfm[f] = tm
237 mfm[f] = tm
238
238
239 fp1 = m1.get(f, nullid)
239 fp1 = m1.get(f, nullid)
240 fp2 = m2.get(f, nullid)
240 fp2 = m2.get(f, nullid)
241
241
242 # is the same revision on two branches of a merge?
242 # is the same revision on two branches of a merge?
243 if fp2 == fp1:
243 if fp2 == fp1:
244 fp2 = nullid
244 fp2 = nullid
245
245
246 if fp2 != nullid:
246 if fp2 != nullid:
247 # is one parent an ancestor of the other?
247 # is one parent an ancestor of the other?
248 fpa = r.ancestor(fp1, fp2)
248 fpa = r.ancestor(fp1, fp2)
249 if fpa == fp1:
249 if fpa == fp1:
250 fp1, fp2 = fp2, nullid
250 fp1, fp2 = fp2, nullid
251 elif fpa == fp2:
251 elif fpa == fp2:
252 fp2 = nullid
252 fp2 = nullid
253
253
254 # is the file unmodified from the parent?
254 # is the file unmodified from the parent?
255 if t == r.read(fp1):
255 if t == r.read(fp1):
256 # record the proper existing parent in manifest
256 # record the proper existing parent in manifest
257 # no need to add a revision
257 # no need to add a revision
258 mm[f] = fp1
258 mm[f] = fp1
259 continue
259 continue
260
260
261 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
261 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
262 changed.append(f)
262 changed.append(f)
263 if update_dirstate:
263 if update_dirstate:
264 self.dirstate.update([f], "n")
264 self.dirstate.update([f], "n")
265 except IOError:
265 except IOError:
266 try:
266 try:
267 del mm[f]
267 del mm[f]
268 del mfm[f]
268 del mfm[f]
269 if update_dirstate:
269 if update_dirstate:
270 self.dirstate.forget([f])
270 self.dirstate.forget([f])
271 except:
271 except:
272 # deleted from p2?
272 # deleted from p2?
273 pass
273 pass
274
274
275 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
275 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
276 user = user or self.ui.username()
276 user = user or self.ui.username()
277 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
277 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
278 tr.close()
278 tr.close()
279 if update_dirstate:
279 if update_dirstate:
280 self.dirstate.setparents(n, nullid)
280 self.dirstate.setparents(n, nullid)
281
281
282 def commit(self, files = None, text = "", user = None, date = None,
282 def commit(self, files = None, text = "", user = None, date = None,
283 match = util.always, force=False):
283 match = util.always, force=False):
284 commit = []
284 commit = []
285 remove = []
285 remove = []
286 changed = []
286 changed = []
287
287
288 if files:
288 if files:
289 for f in files:
289 for f in files:
290 s = self.dirstate.state(f)
290 s = self.dirstate.state(f)
291 if s in 'nmai':
291 if s in 'nmai':
292 commit.append(f)
292 commit.append(f)
293 elif s == 'r':
293 elif s == 'r':
294 remove.append(f)
294 remove.append(f)
295 else:
295 else:
296 self.ui.warn("%s not tracked!\n" % f)
296 self.ui.warn("%s not tracked!\n" % f)
297 else:
297 else:
298 (c, a, d, u) = self.changes(match=match)
298 (c, a, d, u) = self.changes(match=match)
299 commit = c + a
299 commit = c + a
300 remove = d
300 remove = d
301
301
302 p1, p2 = self.dirstate.parents()
302 p1, p2 = self.dirstate.parents()
303 c1 = self.changelog.read(p1)
303 c1 = self.changelog.read(p1)
304 c2 = self.changelog.read(p2)
304 c2 = self.changelog.read(p2)
305 m1 = self.manifest.read(c1[0])
305 m1 = self.manifest.read(c1[0])
306 mf1 = self.manifest.readflags(c1[0])
306 mf1 = self.manifest.readflags(c1[0])
307 m2 = self.manifest.read(c2[0])
307 m2 = self.manifest.read(c2[0])
308
308
309 if not commit and not remove and not force and p2 == nullid:
309 if not commit and not remove and not force and p2 == nullid:
310 self.ui.status("nothing changed\n")
310 self.ui.status("nothing changed\n")
311 return None
311 return None
312
312
313 if not self.hook("precommit"):
313 if not self.hook("precommit"):
314 return None
314 return None
315
315
316 lock = self.lock()
316 lock = self.lock()
317 tr = self.transaction()
317 tr = self.transaction()
318
318
319 # check in files
319 # check in files
320 new = {}
320 new = {}
321 linkrev = self.changelog.count()
321 linkrev = self.changelog.count()
322 commit.sort()
322 commit.sort()
323 for f in commit:
323 for f in commit:
324 self.ui.note(f + "\n")
324 self.ui.note(f + "\n")
325 try:
325 try:
326 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
326 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
327 t = self.wread(f)
327 t = self.wread(f)
328 except IOError:
328 except IOError:
329 self.ui.warn("trouble committing %s!\n" % f)
329 self.ui.warn("trouble committing %s!\n" % f)
330 raise
330 raise
331
331
332 meta = {}
332 meta = {}
333 cp = self.dirstate.copied(f)
333 cp = self.dirstate.copied(f)
334 if cp:
334 if cp:
335 meta["copy"] = cp
335 meta["copy"] = cp
336 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
336 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
337 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
337 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
338
338
339 r = self.file(f)
339 r = self.file(f)
340 fp1 = m1.get(f, nullid)
340 fp1 = m1.get(f, nullid)
341 fp2 = m2.get(f, nullid)
341 fp2 = m2.get(f, nullid)
342
342
343 # is the same revision on two branches of a merge?
343 # is the same revision on two branches of a merge?
344 if fp2 == fp1:
344 if fp2 == fp1:
345 fp2 = nullid
345 fp2 = nullid
346
346
347 if fp2 != nullid:
347 if fp2 != nullid:
348 # is one parent an ancestor of the other?
348 # is one parent an ancestor of the other?
349 fpa = r.ancestor(fp1, fp2)
349 fpa = r.ancestor(fp1, fp2)
350 if fpa == fp1:
350 if fpa == fp1:
351 fp1, fp2 = fp2, nullid
351 fp1, fp2 = fp2, nullid
352 elif fpa == fp2:
352 elif fpa == fp2:
353 fp2 = nullid
353 fp2 = nullid
354
354
355 # is the file unmodified from the parent?
355 # is the file unmodified from the parent?
356 if not meta and t == r.read(fp1):
356 if not meta and t == r.read(fp1):
357 # record the proper existing parent in manifest
357 # record the proper existing parent in manifest
358 # no need to add a revision
358 # no need to add a revision
359 new[f] = fp1
359 new[f] = fp1
360 continue
360 continue
361
361
362 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
362 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
363 # remember what we've added so that we can later calculate
363 # remember what we've added so that we can later calculate
364 # the files to pull from a set of changesets
364 # the files to pull from a set of changesets
365 changed.append(f)
365 changed.append(f)
366
366
367 # update manifest
367 # update manifest
368 m1.update(new)
368 m1.update(new)
369 for f in remove:
369 for f in remove:
370 if f in m1:
370 if f in m1:
371 del m1[f]
371 del m1[f]
372 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
372 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
373 (new, remove))
373 (new, remove))
374
374
375 # add changeset
375 # add changeset
376 new = new.keys()
376 new = new.keys()
377 new.sort()
377 new.sort()
378
378
379 if not text:
379 if not text:
380 edittext = ""
380 edittext = ""
381 if p2 != nullid:
381 if p2 != nullid:
382 edittext += "HG: branch merge\n"
382 edittext += "HG: branch merge\n"
383 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
383 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
384 edittext += "".join(["HG: changed %s\n" % f for f in changed])
384 edittext += "".join(["HG: changed %s\n" % f for f in changed])
385 edittext += "".join(["HG: removed %s\n" % f for f in remove])
385 edittext += "".join(["HG: removed %s\n" % f for f in remove])
386 if not changed and not remove:
386 if not changed and not remove:
387 edittext += "HG: no files changed\n"
387 edittext += "HG: no files changed\n"
388 edittext = self.ui.edit(edittext)
388 edittext = self.ui.edit(edittext)
389 if not edittext.rstrip():
389 if not edittext.rstrip():
390 return None
390 return None
391 text = edittext
391 text = edittext
392
392
393 user = user or self.ui.username()
393 user = user or self.ui.username()
394 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
394 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
395 tr.close()
395 tr.close()
396
396
397 self.dirstate.setparents(n)
397 self.dirstate.setparents(n)
398 self.dirstate.update(new, "n")
398 self.dirstate.update(new, "n")
399 self.dirstate.forget(remove)
399 self.dirstate.forget(remove)
400
400
401 if not self.hook("commit", node=hex(n)):
401 if not self.hook("commit", node=hex(n)):
402 return None
402 return None
403 return n
403 return n
404
404
405 def walk(self, node=None, files=[], match=util.always):
405 def walk(self, node=None, files=[], match=util.always):
406 if node:
406 if node:
407 for fn in self.manifest.read(self.changelog.read(node)[0]):
407 for fn in self.manifest.read(self.changelog.read(node)[0]):
408 if match(fn): yield 'm', fn
408 if match(fn): yield 'm', fn
409 else:
409 else:
410 for src, fn in self.dirstate.walk(files, match):
410 for src, fn in self.dirstate.walk(files, match):
411 yield src, fn
411 yield src, fn
412
412
413 def changes(self, node1 = None, node2 = None, files = [],
413 def changes(self, node1 = None, node2 = None, files = [],
414 match = util.always):
414 match = util.always):
415 mf2, u = None, []
415 mf2, u = None, []
416
416
417 def fcmp(fn, mf):
417 def fcmp(fn, mf):
418 t1 = self.wread(fn)
418 t1 = self.wread(fn)
419 t2 = self.file(fn).read(mf.get(fn, nullid))
419 t2 = self.file(fn).read(mf.get(fn, nullid))
420 return cmp(t1, t2)
420 return cmp(t1, t2)
421
421
422 def mfmatches(node):
422 def mfmatches(node):
423 mf = dict(self.manifest.read(node))
423 mf = dict(self.manifest.read(node))
424 for fn in mf.keys():
424 for fn in mf.keys():
425 if not match(fn):
425 if not match(fn):
426 del mf[fn]
426 del mf[fn]
427 return mf
427 return mf
428
428
429 # are we comparing the working directory?
429 # are we comparing the working directory?
430 if not node2:
430 if not node2:
431 l, c, a, d, u = self.dirstate.changes(files, match)
431 l, c, a, d, u = self.dirstate.changes(files, match)
432
432
433 # are we comparing working dir against its parent?
433 # are we comparing working dir against its parent?
434 if not node1:
434 if not node1:
435 if l:
435 if l:
436 # do a full compare of any files that might have changed
436 # do a full compare of any files that might have changed
437 change = self.changelog.read(self.dirstate.parents()[0])
437 change = self.changelog.read(self.dirstate.parents()[0])
438 mf2 = mfmatches(change[0])
438 mf2 = mfmatches(change[0])
439 for f in l:
439 for f in l:
440 if fcmp(f, mf2):
440 if fcmp(f, mf2):
441 c.append(f)
441 c.append(f)
442
442
443 for l in c, a, d, u:
443 for l in c, a, d, u:
444 l.sort()
444 l.sort()
445
445
446 return (c, a, d, u)
446 return (c, a, d, u)
447
447
448 # are we comparing working dir against non-tip?
448 # are we comparing working dir against non-tip?
449 # generate a pseudo-manifest for the working dir
449 # generate a pseudo-manifest for the working dir
450 if not node2:
450 if not node2:
451 if not mf2:
451 if not mf2:
452 change = self.changelog.read(self.dirstate.parents()[0])
452 change = self.changelog.read(self.dirstate.parents()[0])
453 mf2 = mfmatches(change[0])
453 mf2 = mfmatches(change[0])
454 for f in a + c + l:
454 for f in a + c + l:
455 mf2[f] = ""
455 mf2[f] = ""
456 for f in d:
456 for f in d:
457 if f in mf2: del mf2[f]
457 if f in mf2: del mf2[f]
458 else:
458 else:
459 change = self.changelog.read(node2)
459 change = self.changelog.read(node2)
460 mf2 = mfmatches(change[0])
460 mf2 = mfmatches(change[0])
461
461
462 # flush lists from dirstate before comparing manifests
462 # flush lists from dirstate before comparing manifests
463 c, a = [], []
463 c, a = [], []
464
464
465 change = self.changelog.read(node1)
465 change = self.changelog.read(node1)
466 mf1 = mfmatches(change[0])
466 mf1 = mfmatches(change[0])
467
467
468 for fn in mf2:
468 for fn in mf2:
469 if mf1.has_key(fn):
469 if mf1.has_key(fn):
470 if mf1[fn] != mf2[fn]:
470 if mf1[fn] != mf2[fn]:
471 if mf2[fn] != "" or fcmp(fn, mf1):
471 if mf2[fn] != "" or fcmp(fn, mf1):
472 c.append(fn)
472 c.append(fn)
473 del mf1[fn]
473 del mf1[fn]
474 else:
474 else:
475 a.append(fn)
475 a.append(fn)
476
476
477 d = mf1.keys()
477 d = mf1.keys()
478
478
479 for l in c, a, d, u:
479 for l in c, a, d, u:
480 l.sort()
480 l.sort()
481
481
482 return (c, a, d, u)
482 return (c, a, d, u)
483
483
484 def add(self, list):
484 def add(self, list):
485 for f in list:
485 for f in list:
486 p = self.wjoin(f)
486 p = self.wjoin(f)
487 if not os.path.exists(p):
487 if not os.path.exists(p):
488 self.ui.warn("%s does not exist!\n" % f)
488 self.ui.warn("%s does not exist!\n" % f)
489 elif not os.path.isfile(p):
489 elif not os.path.isfile(p):
490 self.ui.warn("%s not added: only files supported currently\n" % f)
490 self.ui.warn("%s not added: only files supported currently\n" % f)
491 elif self.dirstate.state(f) in 'an':
491 elif self.dirstate.state(f) in 'an':
492 self.ui.warn("%s already tracked!\n" % f)
492 self.ui.warn("%s already tracked!\n" % f)
493 else:
493 else:
494 self.dirstate.update([f], "a")
494 self.dirstate.update([f], "a")
495
495
496 def forget(self, list):
496 def forget(self, list):
497 for f in list:
497 for f in list:
498 if self.dirstate.state(f) not in 'ai':
498 if self.dirstate.state(f) not in 'ai':
499 self.ui.warn("%s not added!\n" % f)
499 self.ui.warn("%s not added!\n" % f)
500 else:
500 else:
501 self.dirstate.forget([f])
501 self.dirstate.forget([f])
502
502
503 def remove(self, list):
503 def remove(self, list):
504 for f in list:
504 for f in list:
505 p = self.wjoin(f)
505 p = self.wjoin(f)
506 if os.path.exists(p):
506 if os.path.exists(p):
507 self.ui.warn("%s still exists!\n" % f)
507 self.ui.warn("%s still exists!\n" % f)
508 elif self.dirstate.state(f) == 'a':
508 elif self.dirstate.state(f) == 'a':
509 self.ui.warn("%s never committed!\n" % f)
509 self.ui.warn("%s never committed!\n" % f)
510 self.dirstate.forget([f])
510 self.dirstate.forget([f])
511 elif f not in self.dirstate:
511 elif f not in self.dirstate:
512 self.ui.warn("%s not tracked!\n" % f)
512 self.ui.warn("%s not tracked!\n" % f)
513 else:
513 else:
514 self.dirstate.update([f], "r")
514 self.dirstate.update([f], "r")
515
515
516 def copy(self, source, dest):
516 def copy(self, source, dest):
517 p = self.wjoin(dest)
517 p = self.wjoin(dest)
518 if not os.path.exists(p):
518 if not os.path.exists(p):
519 self.ui.warn("%s does not exist!\n" % dest)
519 self.ui.warn("%s does not exist!\n" % dest)
520 elif not os.path.isfile(p):
520 elif not os.path.isfile(p):
521 self.ui.warn("copy failed: %s is not a file\n" % dest)
521 self.ui.warn("copy failed: %s is not a file\n" % dest)
522 else:
522 else:
523 if self.dirstate.state(dest) == '?':
523 if self.dirstate.state(dest) == '?':
524 self.dirstate.update([dest], "a")
524 self.dirstate.update([dest], "a")
525 self.dirstate.copy(source, dest)
525 self.dirstate.copy(source, dest)
526
526
527 def heads(self):
527 def heads(self):
528 return self.changelog.heads()
528 return self.changelog.heads()
529
529
530 # branchlookup returns a dict giving a list of branches for
530 # branchlookup returns a dict giving a list of branches for
531 # each head. A branch is defined as the tag of a node or
531 # each head. A branch is defined as the tag of a node or
532 # the branch of the node's parents. If a node has multiple
532 # the branch of the node's parents. If a node has multiple
533 # branch tags, tags are eliminated if they are visible from other
533 # branch tags, tags are eliminated if they are visible from other
534 # branch tags.
534 # branch tags.
535 #
535 #
536 # So, for this graph: a->b->c->d->e
536 # So, for this graph: a->b->c->d->e
537 # \ /
537 # \ /
538 # aa -----/
538 # aa -----/
539 # a has tag 2.6.12
539 # a has tag 2.6.12
540 # d has tag 2.6.13
540 # d has tag 2.6.13
541 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
541 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
542 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
542 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
543 # from the list.
543 # from the list.
544 #
544 #
545 # It is possible that more than one head will have the same branch tag.
545 # It is possible that more than one head will have the same branch tag.
546 # callers need to check the result for multiple heads under the same
546 # callers need to check the result for multiple heads under the same
547 # branch tag if that is a problem for them (ie checkout of a specific
547 # branch tag if that is a problem for them (ie checkout of a specific
548 # branch).
548 # branch).
549 #
549 #
550 # passing in a specific branch will limit the depth of the search
550 # passing in a specific branch will limit the depth of the search
551 # through the parents. It won't limit the branches returned in the
551 # through the parents. It won't limit the branches returned in the
552 # result though.
552 # result though.
553 def branchlookup(self, heads=None, branch=None):
553 def branchlookup(self, heads=None, branch=None):
554 if not heads:
554 if not heads:
555 heads = self.heads()
555 heads = self.heads()
556 headt = [ h for h in heads ]
556 headt = [ h for h in heads ]
557 chlog = self.changelog
557 chlog = self.changelog
558 branches = {}
558 branches = {}
559 merges = []
559 merges = []
560 seenmerge = {}
560 seenmerge = {}
561
561
562 # traverse the tree once for each head, recording in the branches
562 # traverse the tree once for each head, recording in the branches
563 # dict which tags are visible from this head. The branches
563 # dict which tags are visible from this head. The branches
564 # dict also records which tags are visible from each tag
564 # dict also records which tags are visible from each tag
565 # while we traverse.
565 # while we traverse.
566 while headt or merges:
566 while headt or merges:
567 if merges:
567 if merges:
568 n, found = merges.pop()
568 n, found = merges.pop()
569 visit = [n]
569 visit = [n]
570 else:
570 else:
571 h = headt.pop()
571 h = headt.pop()
572 visit = [h]
572 visit = [h]
573 found = [h]
573 found = [h]
574 seen = {}
574 seen = {}
575 while visit:
575 while visit:
576 n = visit.pop()
576 n = visit.pop()
577 if n in seen:
577 if n in seen:
578 continue
578 continue
579 pp = chlog.parents(n)
579 pp = chlog.parents(n)
580 tags = self.nodetags(n)
580 tags = self.nodetags(n)
581 if tags:
581 if tags:
582 for x in tags:
582 for x in tags:
583 if x == 'tip':
583 if x == 'tip':
584 continue
584 continue
585 for f in found:
585 for f in found:
586 branches.setdefault(f, {})[n] = 1
586 branches.setdefault(f, {})[n] = 1
587 branches.setdefault(n, {})[n] = 1
587 branches.setdefault(n, {})[n] = 1
588 break
588 break
589 if n not in found:
589 if n not in found:
590 found.append(n)
590 found.append(n)
591 if branch in tags:
591 if branch in tags:
592 continue
592 continue
593 seen[n] = 1
593 seen[n] = 1
594 if pp[1] != nullid and n not in seenmerge:
594 if pp[1] != nullid and n not in seenmerge:
595 merges.append((pp[1], [x for x in found]))
595 merges.append((pp[1], [x for x in found]))
596 seenmerge[n] = 1
596 seenmerge[n] = 1
597 if pp[0] != nullid:
597 if pp[0] != nullid:
598 visit.append(pp[0])
598 visit.append(pp[0])
599 # traverse the branches dict, eliminating branch tags from each
599 # traverse the branches dict, eliminating branch tags from each
600 # head that are visible from another branch tag for that head.
600 # head that are visible from another branch tag for that head.
601 out = {}
601 out = {}
602 viscache = {}
602 viscache = {}
603 for h in heads:
603 for h in heads:
604 def visible(node):
604 def visible(node):
605 if node in viscache:
605 if node in viscache:
606 return viscache[node]
606 return viscache[node]
607 ret = {}
607 ret = {}
608 visit = [node]
608 visit = [node]
609 while visit:
609 while visit:
610 x = visit.pop()
610 x = visit.pop()
611 if x in viscache:
611 if x in viscache:
612 ret.update(viscache[x])
612 ret.update(viscache[x])
613 elif x not in ret:
613 elif x not in ret:
614 ret[x] = 1
614 ret[x] = 1
615 if x in branches:
615 if x in branches:
616 visit[len(visit):] = branches[x].keys()
616 visit[len(visit):] = branches[x].keys()
617 viscache[node] = ret
617 viscache[node] = ret
618 return ret
618 return ret
619 if h not in branches:
619 if h not in branches:
620 continue
620 continue
621 # O(n^2), but somewhat limited. This only searches the
621 # O(n^2), but somewhat limited. This only searches the
622 # tags visible from a specific head, not all the tags in the
622 # tags visible from a specific head, not all the tags in the
623 # whole repo.
623 # whole repo.
624 for b in branches[h]:
624 for b in branches[h]:
625 vis = False
625 vis = False
626 for bb in branches[h].keys():
626 for bb in branches[h].keys():
627 if b != bb:
627 if b != bb:
628 if b in visible(bb):
628 if b in visible(bb):
629 vis = True
629 vis = True
630 break
630 break
631 if not vis:
631 if not vis:
632 l = out.setdefault(h, [])
632 l = out.setdefault(h, [])
633 l[len(l):] = self.nodetags(b)
633 l[len(l):] = self.nodetags(b)
634 return out
634 return out
635
635
636 def branches(self, nodes):
636 def branches(self, nodes):
637 if not nodes: nodes = [self.changelog.tip()]
637 if not nodes: nodes = [self.changelog.tip()]
638 b = []
638 b = []
639 for n in nodes:
639 for n in nodes:
640 t = n
640 t = n
641 while n:
641 while n:
642 p = self.changelog.parents(n)
642 p = self.changelog.parents(n)
643 if p[1] != nullid or p[0] == nullid:
643 if p[1] != nullid or p[0] == nullid:
644 b.append((t, n, p[0], p[1]))
644 b.append((t, n, p[0], p[1]))
645 break
645 break
646 n = p[0]
646 n = p[0]
647 return b
647 return b
648
648
649 def between(self, pairs):
649 def between(self, pairs):
650 r = []
650 r = []
651
651
652 for top, bottom in pairs:
652 for top, bottom in pairs:
653 n, l, i = top, [], 0
653 n, l, i = top, [], 0
654 f = 1
654 f = 1
655
655
656 while n != bottom:
656 while n != bottom:
657 p = self.changelog.parents(n)[0]
657 p = self.changelog.parents(n)[0]
658 if i == f:
658 if i == f:
659 l.append(n)
659 l.append(n)
660 f = f * 2
660 f = f * 2
661 n = p
661 n = p
662 i += 1
662 i += 1
663
663
664 r.append(l)
664 r.append(l)
665
665
666 return r
666 return r
667
667
668 def newer(self, nodes):
668 def newer(self, nodes):
669 m = {}
669 m = {}
670 nl = []
670 nl = []
671 pm = {}
671 pm = {}
672 cl = self.changelog
672 cl = self.changelog
673 t = l = cl.count()
673 t = l = cl.count()
674
674
675 # find the lowest numbered node
675 # find the lowest numbered node
676 for n in nodes:
676 for n in nodes:
677 l = min(l, cl.rev(n))
677 l = min(l, cl.rev(n))
678 m[n] = 1
678 m[n] = 1
679
679
680 for i in xrange(l, t):
680 for i in xrange(l, t):
681 n = cl.node(i)
681 n = cl.node(i)
682 if n in m: # explicitly listed
682 if n in m: # explicitly listed
683 pm[n] = 1
683 pm[n] = 1
684 nl.append(n)
684 nl.append(n)
685 continue
685 continue
686 for p in cl.parents(n):
686 for p in cl.parents(n):
687 if p in pm: # parent listed
687 if p in pm: # parent listed
688 pm[n] = 1
688 pm[n] = 1
689 nl.append(n)
689 nl.append(n)
690 break
690 break
691
691
692 return nl
692 return nl
693
693
694 def findincoming(self, remote, base=None, heads=None):
694 def findincoming(self, remote, base=None, heads=None):
695 m = self.changelog.nodemap
695 m = self.changelog.nodemap
696 search = []
696 search = []
697 fetch = {}
697 fetch = {}
698 seen = {}
698 seen = {}
699 seenbranch = {}
699 seenbranch = {}
700 if base == None:
700 if base == None:
701 base = {}
701 base = {}
702
702
703 # assume we're closer to the tip than the root
703 # assume we're closer to the tip than the root
704 # and start by examining the heads
704 # and start by examining the heads
705 self.ui.status("searching for changes\n")
705 self.ui.status("searching for changes\n")
706
706
707 if not heads:
707 if not heads:
708 heads = remote.heads()
708 heads = remote.heads()
709
709
710 unknown = []
710 unknown = []
711 for h in heads:
711 for h in heads:
712 if h not in m:
712 if h not in m:
713 unknown.append(h)
713 unknown.append(h)
714 else:
714 else:
715 base[h] = 1
715 base[h] = 1
716
716
717 if not unknown:
717 if not unknown:
718 return None
718 return None
719
719
720 rep = {}
720 rep = {}
721 reqcnt = 0
721 reqcnt = 0
722
722
723 # search through remote branches
723 # search through remote branches
724 # a 'branch' here is a linear segment of history, with four parts:
724 # a 'branch' here is a linear segment of history, with four parts:
725 # head, root, first parent, second parent
725 # head, root, first parent, second parent
726 # (a branch always has two parents (or none) by definition)
726 # (a branch always has two parents (or none) by definition)
727 unknown = remote.branches(unknown)
727 unknown = remote.branches(unknown)
728 while unknown:
728 while unknown:
729 r = []
729 r = []
730 while unknown:
730 while unknown:
731 n = unknown.pop(0)
731 n = unknown.pop(0)
732 if n[0] in seen:
732 if n[0] in seen:
733 continue
733 continue
734
734
735 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
735 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
736 if n[0] == nullid:
736 if n[0] == nullid:
737 break
737 break
738 if n in seenbranch:
738 if n in seenbranch:
739 self.ui.debug("branch already found\n")
739 self.ui.debug("branch already found\n")
740 continue
740 continue
741 if n[1] and n[1] in m: # do we know the base?
741 if n[1] and n[1] in m: # do we know the base?
742 self.ui.debug("found incomplete branch %s:%s\n"
742 self.ui.debug("found incomplete branch %s:%s\n"
743 % (short(n[0]), short(n[1])))
743 % (short(n[0]), short(n[1])))
744 search.append(n) # schedule branch range for scanning
744 search.append(n) # schedule branch range for scanning
745 seenbranch[n] = 1
745 seenbranch[n] = 1
746 else:
746 else:
747 if n[1] not in seen and n[1] not in fetch:
747 if n[1] not in seen and n[1] not in fetch:
748 if n[2] in m and n[3] in m:
748 if n[2] in m and n[3] in m:
749 self.ui.debug("found new changeset %s\n" %
749 self.ui.debug("found new changeset %s\n" %
750 short(n[1]))
750 short(n[1]))
751 fetch[n[1]] = 1 # earliest unknown
751 fetch[n[1]] = 1 # earliest unknown
752 base[n[2]] = 1 # latest known
752 base[n[2]] = 1 # latest known
753 continue
753 continue
754
754
755 for a in n[2:4]:
755 for a in n[2:4]:
756 if a not in rep:
756 if a not in rep:
757 r.append(a)
757 r.append(a)
758 rep[a] = 1
758 rep[a] = 1
759
759
760 seen[n[0]] = 1
760 seen[n[0]] = 1
761
761
762 if r:
762 if r:
763 reqcnt += 1
763 reqcnt += 1
764 self.ui.debug("request %d: %s\n" %
764 self.ui.debug("request %d: %s\n" %
765 (reqcnt, " ".join(map(short, r))))
765 (reqcnt, " ".join(map(short, r))))
766 for p in range(0, len(r), 10):
766 for p in range(0, len(r), 10):
767 for b in remote.branches(r[p:p+10]):
767 for b in remote.branches(r[p:p+10]):
768 self.ui.debug("received %s:%s\n" %
768 self.ui.debug("received %s:%s\n" %
769 (short(b[0]), short(b[1])))
769 (short(b[0]), short(b[1])))
770 if b[0] in m:
770 if b[0] in m:
771 self.ui.debug("found base node %s\n" % short(b[0]))
771 self.ui.debug("found base node %s\n" % short(b[0]))
772 base[b[0]] = 1
772 base[b[0]] = 1
773 elif b[0] not in seen:
773 elif b[0] not in seen:
774 unknown.append(b)
774 unknown.append(b)
775
775
776 # do binary search on the branches we found
776 # do binary search on the branches we found
777 while search:
777 while search:
778 n = search.pop(0)
778 n = search.pop(0)
779 reqcnt += 1
779 reqcnt += 1
780 l = remote.between([(n[0], n[1])])[0]
780 l = remote.between([(n[0], n[1])])[0]
781 l.append(n[1])
781 l.append(n[1])
782 p = n[0]
782 p = n[0]
783 f = 1
783 f = 1
784 for i in l:
784 for i in l:
785 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
785 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
786 if i in m:
786 if i in m:
787 if f <= 2:
787 if f <= 2:
788 self.ui.debug("found new branch changeset %s\n" %
788 self.ui.debug("found new branch changeset %s\n" %
789 short(p))
789 short(p))
790 fetch[p] = 1
790 fetch[p] = 1
791 base[i] = 1
791 base[i] = 1
792 else:
792 else:
793 self.ui.debug("narrowed branch search to %s:%s\n"
793 self.ui.debug("narrowed branch search to %s:%s\n"
794 % (short(p), short(i)))
794 % (short(p), short(i)))
795 search.append((p, i))
795 search.append((p, i))
796 break
796 break
797 p, f = i, f * 2
797 p, f = i, f * 2
798
798
799 # sanity check our fetch list
799 # sanity check our fetch list
800 for f in fetch.keys():
800 for f in fetch.keys():
801 if f in m:
801 if f in m:
802 raise repo.RepoError("already have changeset " + short(f[:4]))
802 raise repo.RepoError("already have changeset " + short(f[:4]))
803
803
804 if base.keys() == [nullid]:
804 if base.keys() == [nullid]:
805 self.ui.warn("warning: pulling from an unrelated repository!\n")
805 self.ui.warn("warning: pulling from an unrelated repository!\n")
806
806
807 self.ui.note("found new changesets starting at " +
807 self.ui.note("found new changesets starting at " +
808 " ".join([short(f) for f in fetch]) + "\n")
808 " ".join([short(f) for f in fetch]) + "\n")
809
809
810 self.ui.debug("%d total queries\n" % reqcnt)
810 self.ui.debug("%d total queries\n" % reqcnt)
811
811
812 return fetch.keys()
812 return fetch.keys()
813
813
814 def findoutgoing(self, remote, base=None, heads=None):
814 def findoutgoing(self, remote, base=None, heads=None):
815 if base == None:
815 if base == None:
816 base = {}
816 base = {}
817 self.findincoming(remote, base, heads)
817 self.findincoming(remote, base, heads)
818
818
819 self.ui.debug("common changesets up to "
819 self.ui.debug("common changesets up to "
820 + " ".join(map(short, base.keys())) + "\n")
820 + " ".join(map(short, base.keys())) + "\n")
821
821
822 remain = dict.fromkeys(self.changelog.nodemap)
822 remain = dict.fromkeys(self.changelog.nodemap)
823
823
824 # prune everything remote has from the tree
824 # prune everything remote has from the tree
825 del remain[nullid]
825 del remain[nullid]
826 remove = base.keys()
826 remove = base.keys()
827 while remove:
827 while remove:
828 n = remove.pop(0)
828 n = remove.pop(0)
829 if n in remain:
829 if n in remain:
830 del remain[n]
830 del remain[n]
831 for p in self.changelog.parents(n):
831 for p in self.changelog.parents(n):
832 remove.append(p)
832 remove.append(p)
833
833
834 # find every node whose parents have been pruned
834 # find every node whose parents have been pruned
835 subset = []
835 subset = []
836 for n in remain:
836 for n in remain:
837 p1, p2 = self.changelog.parents(n)
837 p1, p2 = self.changelog.parents(n)
838 if p1 not in remain and p2 not in remain:
838 if p1 not in remain and p2 not in remain:
839 subset.append(n)
839 subset.append(n)
840
840
841 # this is the set of all roots we have to push
841 # this is the set of all roots we have to push
842 return subset
842 return subset
843
843
844 def pull(self, remote):
844 def pull(self, remote):
845 lock = self.lock()
845 lock = self.lock()
846
846
847 # if we have an empty repo, fetch everything
847 # if we have an empty repo, fetch everything
848 if self.changelog.tip() == nullid:
848 if self.changelog.tip() == nullid:
849 self.ui.status("requesting all changes\n")
849 self.ui.status("requesting all changes\n")
850 fetch = [nullid]
850 fetch = [nullid]
851 else:
851 else:
852 fetch = self.findincoming(remote)
852 fetch = self.findincoming(remote)
853
853
854 if not fetch:
854 if not fetch:
855 self.ui.status("no changes found\n")
855 self.ui.status("no changes found\n")
856 return 1
856 return 1
857
857
858 cg = remote.changegroup(fetch)
858 cg = remote.changegroup(fetch)
859 return self.addchangegroup(cg)
859 return self.addchangegroup(cg)
860
860
861 def push(self, remote, force=False):
861 def push(self, remote, force=False):
862 lock = remote.lock()
862 lock = remote.lock()
863
863
864 base = {}
864 base = {}
865 heads = remote.heads()
865 heads = remote.heads()
866 inc = self.findincoming(remote, base, heads)
866 inc = self.findincoming(remote, base, heads)
867 if not force and inc:
867 if not force and inc:
868 self.ui.warn("abort: unsynced remote changes!\n")
868 self.ui.warn("abort: unsynced remote changes!\n")
869 self.ui.status("(did you forget to sync? use push -f to force)\n")
869 self.ui.status("(did you forget to sync? use push -f to force)\n")
870 return 1
870 return 1
871
871
872 update = self.findoutgoing(remote, base)
872 update = self.findoutgoing(remote, base)
873 if not update:
873 if not update:
874 self.ui.status("no changes found\n")
874 self.ui.status("no changes found\n")
875 return 1
875 return 1
876 elif not force:
876 elif not force:
877 if len(heads) < len(self.changelog.heads()):
877 if len(heads) < len(self.changelog.heads()):
878 self.ui.warn("abort: push creates new remote branches!\n")
878 self.ui.warn("abort: push creates new remote branches!\n")
879 self.ui.status("(did you forget to merge?" +
879 self.ui.status("(did you forget to merge?" +
880 " use push -f to force)\n")
880 " use push -f to force)\n")
881 return 1
881 return 1
882
882
883 cg = self.changegroup(update)
883 cg = self.changegroup(update)
884 return remote.addchangegroup(cg)
884 return remote.addchangegroup(cg)
885
885
886 def changegroup(self, basenodes):
886 def changegroup(self, basenodes):
887 class genread:
887 class genread:
888 def __init__(self, generator):
888 def __init__(self, generator):
889 self.g = generator
889 self.g = generator
890 self.buf = ""
890 self.buf = ""
891 def fillbuf(self):
891 def fillbuf(self):
892 self.buf += "".join(self.g)
892 self.buf += "".join(self.g)
893
893
894 def read(self, l):
894 def read(self, l):
895 while l > len(self.buf):
895 while l > len(self.buf):
896 try:
896 try:
897 self.buf += self.g.next()
897 self.buf += self.g.next()
898 except StopIteration:
898 except StopIteration:
899 break
899 break
900 d, self.buf = self.buf[:l], self.buf[l:]
900 d, self.buf = self.buf[:l], self.buf[l:]
901 return d
901 return d
902
902
903 def gengroup():
903 def gengroup():
904 nodes = self.newer(basenodes)
904 nodes = self.newer(basenodes)
905
905
906 # construct the link map
906 # construct the link map
907 linkmap = {}
907 linkmap = {}
908 for n in nodes:
908 for n in nodes:
909 linkmap[self.changelog.rev(n)] = n
909 linkmap[self.changelog.rev(n)] = n
910
910
911 # construct a list of all changed files
911 # construct a list of all changed files
912 changed = {}
912 changed = {}
913 for n in nodes:
913 for n in nodes:
914 c = self.changelog.read(n)
914 c = self.changelog.read(n)
915 for f in c[3]:
915 for f in c[3]:
916 changed[f] = 1
916 changed[f] = 1
917 changed = changed.keys()
917 changed = changed.keys()
918 changed.sort()
918 changed.sort()
919
919
920 # the changegroup is changesets + manifests + all file revs
920 # the changegroup is changesets + manifests + all file revs
921 revs = [ self.changelog.rev(n) for n in nodes ]
921 revs = [ self.changelog.rev(n) for n in nodes ]
922
922
923 for y in self.changelog.group(linkmap): yield y
923 for y in self.changelog.group(linkmap): yield y
924 for y in self.manifest.group(linkmap): yield y
924 for y in self.manifest.group(linkmap): yield y
925 for f in changed:
925 for f in changed:
926 yield struct.pack(">l", len(f) + 4) + f
926 yield struct.pack(">l", len(f) + 4) + f
927 g = self.file(f).group(linkmap)
927 g = self.file(f).group(linkmap)
928 for y in g:
928 for y in g:
929 yield y
929 yield y
930
930
931 yield struct.pack(">l", 0)
931 yield struct.pack(">l", 0)
932
932
933 return genread(gengroup())
933 return genread(gengroup())
934
934
935 def addchangegroup(self, source):
935 def addchangegroup(self, source):
936
936
937 def getchunk():
937 def getchunk():
938 d = source.read(4)
938 d = source.read(4)
939 if not d: return ""
939 if not d: return ""
940 l = struct.unpack(">l", d)[0]
940 l = struct.unpack(">l", d)[0]
941 if l <= 4: return ""
941 if l <= 4: return ""
942 return source.read(l - 4)
942 return source.read(l - 4)
943
943
944 def getgroup():
944 def getgroup():
945 while 1:
945 while 1:
946 c = getchunk()
946 c = getchunk()
947 if not c: break
947 if not c: break
948 yield c
948 yield c
949
949
950 def csmap(x):
950 def csmap(x):
951 self.ui.debug("add changeset %s\n" % short(x))
951 self.ui.debug("add changeset %s\n" % short(x))
952 return self.changelog.count()
952 return self.changelog.count()
953
953
954 def revmap(x):
954 def revmap(x):
955 return self.changelog.rev(x)
955 return self.changelog.rev(x)
956
956
957 if not source: return
957 if not source: return
958 changesets = files = revisions = 0
958 changesets = files = revisions = 0
959
959
960 tr = self.transaction()
960 tr = self.transaction()
961
961
962 oldheads = len(self.changelog.heads())
962 oldheads = len(self.changelog.heads())
963
963
964 # pull off the changeset group
964 # pull off the changeset group
965 self.ui.status("adding changesets\n")
965 self.ui.status("adding changesets\n")
966 co = self.changelog.tip()
966 co = self.changelog.tip()
967 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
967 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
968 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
968 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
969
969
970 # pull off the manifest group
970 # pull off the manifest group
971 self.ui.status("adding manifests\n")
971 self.ui.status("adding manifests\n")
972 mm = self.manifest.tip()
972 mm = self.manifest.tip()
973 mo = self.manifest.addgroup(getgroup(), revmap, tr)
973 mo = self.manifest.addgroup(getgroup(), revmap, tr)
974
974
975 # process the files
975 # process the files
976 self.ui.status("adding file changes\n")
976 self.ui.status("adding file changes\n")
977 while 1:
977 while 1:
978 f = getchunk()
978 f = getchunk()
979 if not f: break
979 if not f: break
980 self.ui.debug("adding %s revisions\n" % f)
980 self.ui.debug("adding %s revisions\n" % f)
981 fl = self.file(f)
981 fl = self.file(f)
982 o = fl.count()
982 o = fl.count()
983 n = fl.addgroup(getgroup(), revmap, tr)
983 n = fl.addgroup(getgroup(), revmap, tr)
984 revisions += fl.count() - o
984 revisions += fl.count() - o
985 files += 1
985 files += 1
986
986
987 newheads = len(self.changelog.heads())
987 newheads = len(self.changelog.heads())
988 heads = ""
988 heads = ""
989 if oldheads and newheads > oldheads:
989 if oldheads and newheads > oldheads:
990 heads = " (+%d heads)" % (newheads - oldheads)
990 heads = " (+%d heads)" % (newheads - oldheads)
991
991
992 self.ui.status(("added %d changesets" +
992 self.ui.status(("added %d changesets" +
993 " with %d changes to %d files%s\n")
993 " with %d changes to %d files%s\n")
994 % (changesets, revisions, files, heads))
994 % (changesets, revisions, files, heads))
995
995
996 tr.close()
996 tr.close()
997
997
998 if not self.hook("changegroup"):
998 if not self.hook("changegroup"):
999 return 1
999 return 1
1000
1000
1001 return
1001 return
1002
1002
1003 def update(self, node, allow=False, force=False, choose=None,
1003 def update(self, node, allow=False, force=False, choose=None,
1004 moddirstate=True):
1004 moddirstate=True):
1005 pl = self.dirstate.parents()
1005 pl = self.dirstate.parents()
1006 if not force and pl[1] != nullid:
1006 if not force and pl[1] != nullid:
1007 self.ui.warn("aborting: outstanding uncommitted merges\n")
1007 self.ui.warn("aborting: outstanding uncommitted merges\n")
1008 return 1
1008 return 1
1009
1009
1010 p1, p2 = pl[0], node
1010 p1, p2 = pl[0], node
1011 pa = self.changelog.ancestor(p1, p2)
1011 pa = self.changelog.ancestor(p1, p2)
1012 m1n = self.changelog.read(p1)[0]
1012 m1n = self.changelog.read(p1)[0]
1013 m2n = self.changelog.read(p2)[0]
1013 m2n = self.changelog.read(p2)[0]
1014 man = self.manifest.ancestor(m1n, m2n)
1014 man = self.manifest.ancestor(m1n, m2n)
1015 m1 = self.manifest.read(m1n)
1015 m1 = self.manifest.read(m1n)
1016 mf1 = self.manifest.readflags(m1n)
1016 mf1 = self.manifest.readflags(m1n)
1017 m2 = self.manifest.read(m2n)
1017 m2 = self.manifest.read(m2n)
1018 mf2 = self.manifest.readflags(m2n)
1018 mf2 = self.manifest.readflags(m2n)
1019 ma = self.manifest.read(man)
1019 ma = self.manifest.read(man)
1020 mfa = self.manifest.readflags(man)
1020 mfa = self.manifest.readflags(man)
1021
1021
1022 (c, a, d, u) = self.changes()
1022 (c, a, d, u) = self.changes()
1023
1023
1024 # is this a jump, or a merge? i.e. is there a linear path
1024 # is this a jump, or a merge? i.e. is there a linear path
1025 # from p1 to p2?
1025 # from p1 to p2?
1026 linear_path = (pa == p1 or pa == p2)
1026 linear_path = (pa == p1 or pa == p2)
1027
1027
1028 # resolve the manifest to determine which files
1028 # resolve the manifest to determine which files
1029 # we care about merging
1029 # we care about merging
1030 self.ui.note("resolving manifests\n")
1030 self.ui.note("resolving manifests\n")
1031 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1031 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1032 (force, allow, moddirstate, linear_path))
1032 (force, allow, moddirstate, linear_path))
1033 self.ui.debug(" ancestor %s local %s remote %s\n" %
1033 self.ui.debug(" ancestor %s local %s remote %s\n" %
1034 (short(man), short(m1n), short(m2n)))
1034 (short(man), short(m1n), short(m2n)))
1035
1035
1036 merge = {}
1036 merge = {}
1037 get = {}
1037 get = {}
1038 remove = []
1038 remove = []
1039
1039
1040 # construct a working dir manifest
1040 # construct a working dir manifest
1041 mw = m1.copy()
1041 mw = m1.copy()
1042 mfw = mf1.copy()
1042 mfw = mf1.copy()
1043 umap = dict.fromkeys(u)
1043 umap = dict.fromkeys(u)
1044
1044
1045 for f in a + c + u:
1045 for f in a + c + u:
1046 mw[f] = ""
1046 mw[f] = ""
1047 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1047 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1048
1048
1049 for f in d:
1049 for f in d:
1050 if f in mw: del mw[f]
1050 if f in mw: del mw[f]
1051
1051
1052 # If we're jumping between revisions (as opposed to merging),
1052 # If we're jumping between revisions (as opposed to merging),
1053 # and if neither the working directory nor the target rev has
1053 # and if neither the working directory nor the target rev has
1054 # the file, then we need to remove it from the dirstate, to
1054 # the file, then we need to remove it from the dirstate, to
1055 # prevent the dirstate from listing the file when it is no
1055 # prevent the dirstate from listing the file when it is no
1056 # longer in the manifest.
1056 # longer in the manifest.
1057 if moddirstate and linear_path and f not in m2:
1057 if moddirstate and linear_path and f not in m2:
1058 self.dirstate.forget((f,))
1058 self.dirstate.forget((f,))
1059
1059
1060 # Compare manifests
1060 # Compare manifests
1061 for f, n in mw.iteritems():
1061 for f, n in mw.iteritems():
1062 if choose and not choose(f): continue
1062 if choose and not choose(f): continue
1063 if f in m2:
1063 if f in m2:
1064 s = 0
1064 s = 0
1065
1065
1066 # is the wfile new since m1, and match m2?
1066 # is the wfile new since m1, and match m2?
1067 if f not in m1:
1067 if f not in m1:
1068 t1 = self.wread(f)
1068 t1 = self.wread(f)
1069 t2 = self.file(f).read(m2[f])
1069 t2 = self.file(f).read(m2[f])
1070 if cmp(t1, t2) == 0:
1070 if cmp(t1, t2) == 0:
1071 n = m2[f]
1071 n = m2[f]
1072 del t1, t2
1072 del t1, t2
1073
1073
1074 # are files different?
1074 # are files different?
1075 if n != m2[f]:
1075 if n != m2[f]:
1076 a = ma.get(f, nullid)
1076 a = ma.get(f, nullid)
1077 # are both different from the ancestor?
1077 # are both different from the ancestor?
1078 if n != a and m2[f] != a:
1078 if n != a and m2[f] != a:
1079 self.ui.debug(" %s versions differ, resolve\n" % f)
1079 self.ui.debug(" %s versions differ, resolve\n" % f)
1080 # merge executable bits
1080 # merge executable bits
1081 # "if we changed or they changed, change in merge"
1081 # "if we changed or they changed, change in merge"
1082 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1082 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1083 mode = ((a^b) | (a^c)) ^ a
1083 mode = ((a^b) | (a^c)) ^ a
1084 merge[f] = (m1.get(f, nullid), m2[f], mode)
1084 merge[f] = (m1.get(f, nullid), m2[f], mode)
1085 s = 1
1085 s = 1
1086 # are we clobbering?
1086 # are we clobbering?
1087 # is remote's version newer?
1087 # is remote's version newer?
1088 # or are we going back in time?
1088 # or are we going back in time?
1089 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1089 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1090 self.ui.debug(" remote %s is newer, get\n" % f)
1090 self.ui.debug(" remote %s is newer, get\n" % f)
1091 get[f] = m2[f]
1091 get[f] = m2[f]
1092 s = 1
1092 s = 1
1093 elif f in umap:
1093 elif f in umap:
1094 # this unknown file is the same as the checkout
1094 # this unknown file is the same as the checkout
1095 get[f] = m2[f]
1095 get[f] = m2[f]
1096
1096
1097 if not s and mfw[f] != mf2[f]:
1097 if not s and mfw[f] != mf2[f]:
1098 if force:
1098 if force:
1099 self.ui.debug(" updating permissions for %s\n" % f)
1099 self.ui.debug(" updating permissions for %s\n" % f)
1100 util.set_exec(self.wjoin(f), mf2[f])
1100 util.set_exec(self.wjoin(f), mf2[f])
1101 else:
1101 else:
1102 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1102 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1103 mode = ((a^b) | (a^c)) ^ a
1103 mode = ((a^b) | (a^c)) ^ a
1104 if mode != b:
1104 if mode != b:
1105 self.ui.debug(" updating permissions for %s\n" % f)
1105 self.ui.debug(" updating permissions for %s\n" % f)
1106 util.set_exec(self.wjoin(f), mode)
1106 util.set_exec(self.wjoin(f), mode)
1107 del m2[f]
1107 del m2[f]
1108 elif f in ma:
1108 elif f in ma:
1109 if n != ma[f]:
1109 if n != ma[f]:
1110 r = "d"
1110 r = "d"
1111 if not force and (linear_path or allow):
1111 if not force and (linear_path or allow):
1112 r = self.ui.prompt(
1112 r = self.ui.prompt(
1113 (" local changed %s which remote deleted\n" % f) +
1113 (" local changed %s which remote deleted\n" % f) +
1114 "(k)eep or (d)elete?", "[kd]", "k")
1114 "(k)eep or (d)elete?", "[kd]", "k")
1115 if r == "d":
1115 if r == "d":
1116 remove.append(f)
1116 remove.append(f)
1117 else:
1117 else:
1118 self.ui.debug("other deleted %s\n" % f)
1118 self.ui.debug("other deleted %s\n" % f)
1119 remove.append(f) # other deleted it
1119 remove.append(f) # other deleted it
1120 else:
1120 else:
1121 if n == m1.get(f, nullid): # same as parent
1121 if n == m1.get(f, nullid): # same as parent
1122 if p2 == pa: # going backwards?
1122 if p2 == pa: # going backwards?
1123 self.ui.debug("remote deleted %s\n" % f)
1123 self.ui.debug("remote deleted %s\n" % f)
1124 remove.append(f)
1124 remove.append(f)
1125 else:
1125 else:
1126 self.ui.debug("local created %s, keeping\n" % f)
1126 self.ui.debug("local created %s, keeping\n" % f)
1127 else:
1127 else:
1128 self.ui.debug("working dir created %s, keeping\n" % f)
1128 self.ui.debug("working dir created %s, keeping\n" % f)
1129
1129
1130 for f, n in m2.iteritems():
1130 for f, n in m2.iteritems():
1131 if choose and not choose(f): continue
1131 if choose and not choose(f): continue
1132 if f[0] == "/": continue
1132 if f[0] == "/": continue
1133 if f in ma and n != ma[f]:
1133 if f in ma and n != ma[f]:
1134 r = "k"
1134 r = "k"
1135 if not force and (linear_path or allow):
1135 if not force and (linear_path or allow):
1136 r = self.ui.prompt(
1136 r = self.ui.prompt(
1137 ("remote changed %s which local deleted\n" % f) +
1137 ("remote changed %s which local deleted\n" % f) +
1138 "(k)eep or (d)elete?", "[kd]", "k")
1138 "(k)eep or (d)elete?", "[kd]", "k")
1139 if r == "k": get[f] = n
1139 if r == "k": get[f] = n
1140 elif f not in ma:
1140 elif f not in ma:
1141 self.ui.debug("remote created %s\n" % f)
1141 self.ui.debug("remote created %s\n" % f)
1142 get[f] = n
1142 get[f] = n
1143 else:
1143 else:
1144 if force or p2 == pa: # going backwards?
1144 if force or p2 == pa: # going backwards?
1145 self.ui.debug("local deleted %s, recreating\n" % f)
1145 self.ui.debug("local deleted %s, recreating\n" % f)
1146 get[f] = n
1146 get[f] = n
1147 else:
1147 else:
1148 self.ui.debug("local deleted %s\n" % f)
1148 self.ui.debug("local deleted %s\n" % f)
1149
1149
1150 del mw, m1, m2, ma
1150 del mw, m1, m2, ma
1151
1151
1152 if force:
1152 if force:
1153 for f in merge:
1153 for f in merge:
1154 get[f] = merge[f][1]
1154 get[f] = merge[f][1]
1155 merge = {}
1155 merge = {}
1156
1156
1157 if linear_path or force:
1157 if linear_path or force:
1158 # we don't need to do any magic, just jump to the new rev
1158 # we don't need to do any magic, just jump to the new rev
1159 branch_merge = False
1159 branch_merge = False
1160 p1, p2 = p2, nullid
1160 p1, p2 = p2, nullid
1161 else:
1161 else:
1162 if not allow:
1162 if not allow:
1163 self.ui.status("this update spans a branch" +
1163 self.ui.status("this update spans a branch" +
1164 " affecting the following files:\n")
1164 " affecting the following files:\n")
1165 fl = merge.keys() + get.keys()
1165 fl = merge.keys() + get.keys()
1166 fl.sort()
1166 fl.sort()
1167 for f in fl:
1167 for f in fl:
1168 cf = ""
1168 cf = ""
1169 if f in merge: cf = " (resolve)"
1169 if f in merge: cf = " (resolve)"
1170 self.ui.status(" %s%s\n" % (f, cf))
1170 self.ui.status(" %s%s\n" % (f, cf))
1171 self.ui.warn("aborting update spanning branches!\n")
1171 self.ui.warn("aborting update spanning branches!\n")
1172 self.ui.status("(use update -m to merge across branches" +
1172 self.ui.status("(use update -m to merge across branches" +
1173 " or -C to lose changes)\n")
1173 " or -C to lose changes)\n")
1174 return 1
1174 return 1
1175 branch_merge = True
1175 branch_merge = True
1176
1176
1177 if moddirstate:
1177 if moddirstate:
1178 self.dirstate.setparents(p1, p2)
1178 self.dirstate.setparents(p1, p2)
1179
1179
1180 # get the files we don't need to change
1180 # get the files we don't need to change
1181 files = get.keys()
1181 files = get.keys()
1182 files.sort()
1182 files.sort()
1183 for f in files:
1183 for f in files:
1184 if f[0] == "/": continue
1184 if f[0] == "/": continue
1185 self.ui.note("getting %s\n" % f)
1185 self.ui.note("getting %s\n" % f)
1186 t = self.file(f).read(get[f])
1186 t = self.file(f).read(get[f])
1187 try:
1187 try:
1188 self.wwrite(f, t)
1188 self.wwrite(f, t)
1189 except IOError:
1189 except IOError:
1190 os.makedirs(os.path.dirname(self.wjoin(f)))
1190 os.makedirs(os.path.dirname(self.wjoin(f)))
1191 self.wwrite(f, t)
1191 self.wwrite(f, t)
1192 util.set_exec(self.wjoin(f), mf2[f])
1192 util.set_exec(self.wjoin(f), mf2[f])
1193 if moddirstate:
1193 if moddirstate:
1194 if branch_merge:
1194 if branch_merge:
1195 self.dirstate.update([f], 'n', st_mtime=-1)
1195 self.dirstate.update([f], 'n', st_mtime=-1)
1196 else:
1196 else:
1197 self.dirstate.update([f], 'n')
1197 self.dirstate.update([f], 'n')
1198
1198
1199 # merge the tricky bits
1199 # merge the tricky bits
1200 files = merge.keys()
1200 files = merge.keys()
1201 files.sort()
1201 files.sort()
1202 for f in files:
1202 for f in files:
1203 self.ui.status("merging %s\n" % f)
1203 self.ui.status("merging %s\n" % f)
1204 my, other, flag = merge[f]
1204 my, other, flag = merge[f]
1205 self.merge3(f, my, other)
1205 self.merge3(f, my, other)
1206 util.set_exec(self.wjoin(f), flag)
1206 util.set_exec(self.wjoin(f), flag)
1207 if moddirstate:
1207 if moddirstate:
1208 if branch_merge:
1208 if branch_merge:
1209 # We've done a branch merge, mark this file as merged
1209 # We've done a branch merge, mark this file as merged
1210 # so that we properly record the merger later
1210 # so that we properly record the merger later
1211 self.dirstate.update([f], 'm')
1211 self.dirstate.update([f], 'm')
1212 else:
1212 else:
1213 # We've update-merged a locally modified file, so
1213 # We've update-merged a locally modified file, so
1214 # we set the dirstate to emulate a normal checkout
1214 # we set the dirstate to emulate a normal checkout
1215 # of that file some time in the past. Thus our
1215 # of that file some time in the past. Thus our
1216 # merge will appear as a normal local file
1216 # merge will appear as a normal local file
1217 # modification.
1217 # modification.
1218 f_len = len(self.file(f).read(other))
1218 f_len = len(self.file(f).read(other))
1219 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1219 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1220
1220
1221 remove.sort()
1221 remove.sort()
1222 for f in remove:
1222 for f in remove:
1223 self.ui.note("removing %s\n" % f)
1223 self.ui.note("removing %s\n" % f)
1224 try:
1224 try:
1225 os.unlink(self.wjoin(f))
1225 os.unlink(self.wjoin(f))
1226 except OSError, inst:
1226 except OSError, inst:
1227 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1227 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1228 # try removing directories that might now be empty
1228 # try removing directories that might now be empty
1229 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1229 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1230 except: pass
1230 except: pass
1231 if moddirstate:
1231 if moddirstate:
1232 if branch_merge:
1232 if branch_merge:
1233 self.dirstate.update(remove, 'r')
1233 self.dirstate.update(remove, 'r')
1234 else:
1234 else:
1235 self.dirstate.forget(remove)
1235 self.dirstate.forget(remove)
1236
1236
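The exec-bit handling inside update() above ("if we changed or they changed, change in merge") is a bitwise three-way merge of the ancestor, working-copy and remote flags. A minimal standalone sketch of that rule, using made-up flag values rather than real manifest data:

def merge_exec_bit(ancestor, local, remote):
    # a bit ends up flipped when either side changed it relative to
    # the ancestor; otherwise the ancestor's value is kept
    return ((ancestor ^ local) | (ancestor ^ remote)) ^ ancestor

assert merge_exec_bit(0, 1, 0) == 1   # local set +x, remote untouched -> stays +x
assert merge_exec_bit(1, 1, 0) == 0   # remote cleared +x, local untouched -> cleared
assert merge_exec_bit(1, 1, 1) == 1   # nobody changed it -> unchanged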
1237 def merge3(self, fn, my, other):
1237 def merge3(self, fn, my, other):
1238 """perform a 3-way merge in the working directory"""
1238 """perform a 3-way merge in the working directory"""
1239
1239
1240 def temp(prefix, node):
1240 def temp(prefix, node):
1241 pre = "%s~%s." % (os.path.basename(fn), prefix)
1241 pre = "%s~%s." % (os.path.basename(fn), prefix)
1242 (fd, name) = tempfile.mkstemp("", pre)
1242 (fd, name) = tempfile.mkstemp("", pre)
1243 f = os.fdopen(fd, "wb")
1243 f = os.fdopen(fd, "wb")
1244 self.wwrite(fn, fl.read(node), f)
1244 self.wwrite(fn, fl.read(node), f)
1245 f.close()
1245 f.close()
1246 return name
1246 return name
1247
1247
1248 fl = self.file(fn)
1248 fl = self.file(fn)
1249 base = fl.ancestor(my, other)
1249 base = fl.ancestor(my, other)
1250 a = self.wjoin(fn)
1250 a = self.wjoin(fn)
1251 b = temp("base", base)
1251 b = temp("base", base)
1252 c = temp("other", other)
1252 c = temp("other", other)
1253
1253
1254 self.ui.note("resolving %s\n" % fn)
1254 self.ui.note("resolving %s\n" % fn)
1255 self.ui.debug("file %s: other %s ancestor %s\n" %
1255 self.ui.debug("file %s: other %s ancestor %s\n" %
1256 (fn, short(other), short(base)))
1256 (fn, short(other), short(base)))
1257
1257
1258 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1258 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1259 or "hgmerge")
1259 or "hgmerge")
1260 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1260 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1261 if r:
1261 if r:
1262 self.ui.warn("merging %s failed!\n" % fn)
1262 self.ui.warn("merging %s failed!\n" % fn)
1263
1263
1264 os.unlink(b)
1264 os.unlink(b)
1265 os.unlink(c)
1265 os.unlink(c)
1266
1266
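merge3() hands the actual content merge to an external command: the working-copy file plus temporary copies of the common ancestor and the other revision are passed to $HGMERGE, or to the ui.merge setting, or to the stock hgmerge script. A hedged sketch of that calling convention (the tool name and file paths here are illustrative only):

import os

def external_merge3(local, base, other, tool=None):
    # resolve the merge command the same way merge3 does above
    cmd = tool or os.environ.get("HGMERGE") or "hgmerge"
    # the tool rewrites `local` in place; a non-zero status means the
    # merge failed or left conflicts behind
    return os.system("%s %s %s %s" % (cmd, local, base, other))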
1267 def verify(self):
1267 def verify(self):
1268 filelinkrevs = {}
1268 filelinkrevs = {}
1269 filenodes = {}
1269 filenodes = {}
1270 changesets = revisions = files = 0
1270 changesets = revisions = files = 0
1271 errors = 0
1271 errors = 0
1272
1272
1273 seen = {}
1273 seen = {}
1274 self.ui.status("checking changesets\n")
1274 self.ui.status("checking changesets\n")
1275 for i in range(self.changelog.count()):
1275 for i in range(self.changelog.count()):
1276 changesets += 1
1276 changesets += 1
1277 n = self.changelog.node(i)
1277 n = self.changelog.node(i)
1278 if n in seen:
1278 if n in seen:
1279 self.ui.warn("duplicate changeset at revision %d\n" % i)
1279 self.ui.warn("duplicate changeset at revision %d\n" % i)
1280 errors += 1
1280 errors += 1
1281 seen[n] = 1
1281 seen[n] = 1
1282
1282
1283 for p in self.changelog.parents(n):
1283 for p in self.changelog.parents(n):
1284 if p not in self.changelog.nodemap:
1284 if p not in self.changelog.nodemap:
1285 self.ui.warn("changeset %s has unknown parent %s\n" %
1285 self.ui.warn("changeset %s has unknown parent %s\n" %
1286 (short(n), short(p)))
1286 (short(n), short(p)))
1287 errors += 1
1287 errors += 1
1288 try:
1288 try:
1289 changes = self.changelog.read(n)
1289 changes = self.changelog.read(n)
1290 except Exception, inst:
1290 except Exception, inst:
1291 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1291 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1292 errors += 1
1292 errors += 1
1293
1293
1294 for f in changes[3]:
1294 for f in changes[3]:
1295 filelinkrevs.setdefault(f, []).append(i)
1295 filelinkrevs.setdefault(f, []).append(i)
1296
1296
1297 seen = {}
1297 seen = {}
1298 self.ui.status("checking manifests\n")
1298 self.ui.status("checking manifests\n")
1299 for i in range(self.manifest.count()):
1299 for i in range(self.manifest.count()):
1300 n = self.manifest.node(i)
1300 n = self.manifest.node(i)
1301 if n in seen:
1301 if n in seen:
1302 self.ui.warn("duplicate manifest at revision %d\n" % i)
1302 self.ui.warn("duplicate manifest at revision %d\n" % i)
1303 errors += 1
1303 errors += 1
1304 seen[n] = 1
1304 seen[n] = 1
1305
1305
1306 for p in self.manifest.parents(n):
1306 for p in self.manifest.parents(n):
1307 if p not in self.manifest.nodemap:
1307 if p not in self.manifest.nodemap:
1308 self.ui.warn("manifest %s has unknown parent %s\n" %
1308 self.ui.warn("manifest %s has unknown parent %s\n" %
1309 (short(n), short(p)))
1309 (short(n), short(p)))
1310 errors += 1
1310 errors += 1
1311
1311
1312 try:
1312 try:
1313 delta = mdiff.patchtext(self.manifest.delta(n))
1313 delta = mdiff.patchtext(self.manifest.delta(n))
1314 except KeyboardInterrupt:
1314 except KeyboardInterrupt:
1315 self.ui.warn("interrupted")
1315 self.ui.warn("interrupted")
1316 raise
1316 raise
1317 except Exception, inst:
1317 except Exception, inst:
1318 self.ui.warn("unpacking manifest %s: %s\n"
1318 self.ui.warn("unpacking manifest %s: %s\n"
1319 % (short(n), inst))
1319 % (short(n), inst))
1320 errors += 1
1320 errors += 1
1321
1321
1322 ff = [ l.split('\0') for l in delta.splitlines() ]
1322 ff = [ l.split('\0') for l in delta.splitlines() ]
1323 for f, fn in ff:
1323 for f, fn in ff:
1324 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1324 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1325
1325
1326 self.ui.status("crosschecking files in changesets and manifests\n")
1326 self.ui.status("crosschecking files in changesets and manifests\n")
1327 for f in filenodes:
1327 for f in filenodes:
1328 if f not in filelinkrevs:
1328 if f not in filelinkrevs:
1329 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1329 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1330 errors += 1
1330 errors += 1
1331
1331
1332 for f in filelinkrevs:
1332 for f in filelinkrevs:
1333 if f not in filenodes:
1333 if f not in filenodes:
1334 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1334 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1335 errors += 1
1335 errors += 1
1336
1336
1337 self.ui.status("checking files\n")
1337 self.ui.status("checking files\n")
1338 ff = filenodes.keys()
1338 ff = filenodes.keys()
1339 ff.sort()
1339 ff.sort()
1340 for f in ff:
1340 for f in ff:
1341 if f == "/dev/null": continue
1341 if f == "/dev/null": continue
1342 files += 1
1342 files += 1
1343 fl = self.file(f)
1343 fl = self.file(f)
1344 nodes = { nullid: 1 }
1344 nodes = { nullid: 1 }
1345 seen = {}
1345 seen = {}
1346 for i in range(fl.count()):
1346 for i in range(fl.count()):
1347 revisions += 1
1347 revisions += 1
1348 n = fl.node(i)
1348 n = fl.node(i)
1349
1349
1350 if n in seen:
1350 if n in seen:
1351 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1351 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1352 errors += 1
1352 errors += 1
1353
1353
1354 if n not in filenodes[f]:
1354 if n not in filenodes[f]:
1355 self.ui.warn("%s: %d:%s not in manifests\n"
1355 self.ui.warn("%s: %d:%s not in manifests\n"
1356 % (f, i, short(n)))
1356 % (f, i, short(n)))
1357 errors += 1
1357 errors += 1
1358 else:
1358 else:
1359 del filenodes[f][n]
1359 del filenodes[f][n]
1360
1360
1361 flr = fl.linkrev(n)
1361 flr = fl.linkrev(n)
1362 if flr not in filelinkrevs[f]:
1362 if flr not in filelinkrevs[f]:
1363 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1363 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1364 % (f, short(n), fl.linkrev(n)))
1364 % (f, short(n), fl.linkrev(n)))
1365 errors += 1
1365 errors += 1
1366 else:
1366 else:
1367 filelinkrevs[f].remove(flr)
1367 filelinkrevs[f].remove(flr)
1368
1368
1369 # verify contents
1369 # verify contents
1370 try:
1370 try:
1371 t = fl.read(n)
1371 t = fl.read(n)
1372 except Exception, inst:
1372 except Exception, inst:
1373 self.ui.warn("unpacking file %s %s: %s\n"
1373 self.ui.warn("unpacking file %s %s: %s\n"
1374 % (f, short(n), inst))
1374 % (f, short(n), inst))
1375 errors += 1
1375 errors += 1
1376
1376
1377 # verify parents
1377 # verify parents
1378 (p1, p2) = fl.parents(n)
1378 (p1, p2) = fl.parents(n)
1379 if p1 not in nodes:
1379 if p1 not in nodes:
1380 self.ui.warn("file %s:%s unknown parent 1 %s" %
1380 self.ui.warn("file %s:%s unknown parent 1 %s" %
1381 (f, short(n), short(p1)))
1381 (f, short(n), short(p1)))
1382 errors += 1
1382 errors += 1
1383 if p2 not in nodes:
1383 if p2 not in nodes:
1384 self.ui.warn("file %s:%s unknown parent 2 %s" %
1384 self.ui.warn("file %s:%s unknown parent 2 %s" %
1385 (f, short(n), short(p1)))
1385 (f, short(n), short(p1)))
1386 errors += 1
1386 errors += 1
1387 nodes[n] = 1
1387 nodes[n] = 1
1388
1388
1389 # cross-check
1389 # cross-check
1390 for node in filenodes[f]:
1390 for node in filenodes[f]:
1391 self.ui.warn("node %s in manifests not in %s\n"
1391 self.ui.warn("node %s in manifests not in %s\n"
1392 % (hex(node), f))
1392 % (hex(node), f))
1393 errors += 1
1393 errors += 1
1394
1394
1395 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1395 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1396 (files, changesets, revisions))
1396 (files, changesets, revisions))
1397
1397
1398 if errors:
1398 if errors:
1399 self.ui.warn("%d integrity errors encountered!\n" % errors)
1399 self.ui.warn("%d integrity errors encountered!\n" % errors)
1400 return 1
1400 return 1
@@ -1,358 +1,356 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
13 import os, errno
14 from demandload import *
14 from demandload import *
15 demandload(globals(), "re")
15 demandload(globals(), "re")
16
16
17 def binary(s):
17 def binary(s):
18 """return true if a string is binary data using diff's heuristic"""
18 """return true if a string is binary data using diff's heuristic"""
19 if s and '\0' in s[:4096]:
19 if s and '\0' in s[:4096]:
20 return True
20 return True
21 return False
21 return False
22
22
23 def unique(g):
23 def unique(g):
24 """return the uniq elements of iterable g"""
24 """return the uniq elements of iterable g"""
25 seen = {}
25 seen = {}
26 for f in g:
26 for f in g:
27 if f not in seen:
27 if f not in seen:
28 seen[f] = 1
28 seen[f] = 1
29 yield f
29 yield f
30
30
31 class Abort(Exception):
31 class Abort(Exception):
32 """Raised if a command needs to print an error and exit."""
32 """Raised if a command needs to print an error and exit."""
33
33
34 def always(fn): return True
34 def always(fn): return True
35 def never(fn): return False
35 def never(fn): return False
36
36
37 def globre(pat, head='^', tail='$'):
37 def globre(pat, head='^', tail='$'):
38 "convert a glob pattern into a regexp"
38 "convert a glob pattern into a regexp"
39 i, n = 0, len(pat)
39 i, n = 0, len(pat)
40 res = ''
40 res = ''
41 group = False
41 group = False
42 def peek(): return i < n and pat[i]
42 def peek(): return i < n and pat[i]
43 while i < n:
43 while i < n:
44 c = pat[i]
44 c = pat[i]
45 i = i+1
45 i = i+1
46 if c == '*':
46 if c == '*':
47 if peek() == '*':
47 if peek() == '*':
48 i += 1
48 i += 1
49 res += '.*'
49 res += '.*'
50 else:
50 else:
51 res += '[^/]*'
51 res += '[^/]*'
52 elif c == '?':
52 elif c == '?':
53 res += '.'
53 res += '.'
54 elif c == '[':
54 elif c == '[':
55 j = i
55 j = i
56 if j < n and pat[j] in '!]':
56 if j < n and pat[j] in '!]':
57 j += 1
57 j += 1
58 while j < n and pat[j] != ']':
58 while j < n and pat[j] != ']':
59 j += 1
59 j += 1
60 if j >= n:
60 if j >= n:
61 res += '\\['
61 res += '\\['
62 else:
62 else:
63 stuff = pat[i:j].replace('\\','\\\\')
63 stuff = pat[i:j].replace('\\','\\\\')
64 i = j + 1
64 i = j + 1
65 if stuff[0] == '!':
65 if stuff[0] == '!':
66 stuff = '^' + stuff[1:]
66 stuff = '^' + stuff[1:]
67 elif stuff[0] == '^':
67 elif stuff[0] == '^':
68 stuff = '\\' + stuff
68 stuff = '\\' + stuff
69 res = '%s[%s]' % (res, stuff)
69 res = '%s[%s]' % (res, stuff)
70 elif c == '{':
70 elif c == '{':
71 group = True
71 group = True
72 res += '(?:'
72 res += '(?:'
73 elif c == '}' and group:
73 elif c == '}' and group:
74 res += ')'
74 res += ')'
75 group = False
75 group = False
76 elif c == ',' and group:
76 elif c == ',' and group:
77 res += '|'
77 res += '|'
78 else:
78 else:
79 res += re.escape(c)
79 res += re.escape(c)
80 return head + res + tail
80 return head + res + tail
81
81
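To see what globre() produces, the resulting pattern can be compiled directly with re. A small sketch, assuming globre from this module is in scope and using made-up paths:

import re

# '*' stops at path separators, '**' crosses them, '{c,h}' becomes an alternation
pat = re.compile(globre('src/**/*.{c,h}'))
assert pat.match('src/util/hash.c')
assert not pat.match('doc/readme.txt')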
82 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
82 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
83
83
84 def pathto(n1, n2):
84 def pathto(n1, n2):
85 '''return the relative path from one place to another.
85 '''return the relative path from one place to another.
86 this returns a path in the form used by the local filesystem, not hg.'''
86 this returns a path in the form used by the local filesystem, not hg.'''
87 if not n1: return localpath(n2)
87 if not n1: return localpath(n2)
88 a, b = n1.split('/'), n2.split('/')
88 a, b = n1.split('/'), n2.split('/')
89 a.reverse(), b.reverse()
89 a.reverse(), b.reverse()
90 while a and b and a[-1] == b[-1]:
90 while a and b and a[-1] == b[-1]:
91 a.pop(), b.pop()
91 a.pop(), b.pop()
92 b.reverse()
92 b.reverse()
93 return os.sep.join((['..'] * len(a)) + b)
93 return os.sep.join((['..'] * len(a)) + b)
94
94
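For instance, with hypothetical repository-relative paths (and pathto from this module in scope), pathto() yields the hop from one location to another in local filesystem form:

import os

# from web/static over to web/templates/map
rel = pathto('web/static', 'web/templates/map')
assert rel == os.sep.join(['..', 'templates', 'map'])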
95 def canonpath(root, cwd, myname):
95 def canonpath(root, cwd, myname):
96 """return the canonical path of myname, given cwd and root"""
96 """return the canonical path of myname, given cwd and root"""
97 rootsep = root + os.sep
97 rootsep = root + os.sep
98 name = myname
98 name = myname
99 if not name.startswith(os.sep):
99 if not name.startswith(os.sep):
100 name = os.path.join(root, cwd, name)
100 name = os.path.join(root, cwd, name)
101 name = os.path.normpath(name)
101 name = os.path.normpath(name)
102 if name.startswith(rootsep):
102 if name.startswith(rootsep):
103 return pconvert(name[len(rootsep):])
103 return pconvert(name[len(rootsep):])
104 elif name == root:
104 elif name == root:
105 return ''
105 return ''
106 else:
106 else:
107 raise Abort('%s not under root' % myname)
107 raise Abort('%s not under root' % myname)
108
108
109 def matcher(canonroot, cwd, names, inc, exc, head=''):
109 def matcher(canonroot, cwd, names, inc, exc, head=''):
110 """build a function to match a set of file patterns
110 """build a function to match a set of file patterns
111
111
112 arguments:
112 arguments:
113 canonroot - the canonical root of the tree you're matching against
113 canonroot - the canonical root of the tree you're matching against
114 cwd - the current working directory, if relevant
114 cwd - the current working directory, if relevant
115 names - patterns to find
115 names - patterns to find
116 inc - patterns to include
116 inc - patterns to include
117 exc - patterns to exclude
117 exc - patterns to exclude
118 head - a regex to prepend to patterns to control whether a match is rooted
118 head - a regex to prepend to patterns to control whether a match is rooted
119
119
120 a pattern is one of:
120 a pattern is one of:
121 're:<regex>'
121 're:<regex>'
122 'glob:<shellglob>'
122 'glob:<shellglob>'
123 'path:<explicit path>'
123 'path:<explicit path>'
124 'relpath:<relative path>'
124 'relpath:<relative path>'
125 '<relative path>'
125 '<relative path>'
126
126
127 returns:
127 returns:
128 a 3-tuple containing
128 a 3-tuple containing
129 - list of explicit non-pattern names passed in
129 - list of explicit non-pattern names passed in
130 - a bool match(filename) function
130 - a bool match(filename) function
131 - a bool indicating if any patterns were passed in
131 - a bool indicating if any patterns were passed in
132
132
133 todo:
133 todo:
134 make head regex a rooted bool
134 make head regex a rooted bool
135 """
135 """
136
136
137 def patkind(name):
137 def patkind(name):
138 for prefix in 're:', 'glob:', 'path:', 'relpath:':
138 for prefix in 're:', 'glob:', 'path:', 'relpath:':
139 if name.startswith(prefix): return name.split(':', 1)
139 if name.startswith(prefix): return name.split(':', 1)
140 for c in name:
140 for c in name:
141 if c in _globchars: return 'glob', name
141 if c in _globchars: return 'glob', name
142 return 'relpath', name
142 return 'relpath', name
143
143
144 def regex(kind, name, tail):
144 def regex(kind, name, tail):
145 '''convert a pattern into a regular expression'''
145 '''convert a pattern into a regular expression'''
146 if kind == 're':
146 if kind == 're':
147 return name
147 return name
148 elif kind == 'path':
148 elif kind == 'path':
149 return '^' + re.escape(name) + '(?:/|$)'
149 return '^' + re.escape(name) + '(?:/|$)'
150 elif kind == 'relpath':
150 elif kind == 'relpath':
151 return head + re.escape(name) + tail
151 return head + re.escape(name) + tail
152 return head + globre(name, '', tail)
152 return head + globre(name, '', tail)
153
153
154 def matchfn(pats, tail):
154 def matchfn(pats, tail):
155 """build a matching function from a set of patterns"""
155 """build a matching function from a set of patterns"""
156 if pats:
156 if pats:
157 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
157 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
158 return re.compile(pat).match
158 return re.compile(pat).match
159
159
160 def globprefix(pat):
160 def globprefix(pat):
161 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
161 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
162 root = []
162 root = []
163 for p in pat.split(os.sep):
163 for p in pat.split(os.sep):
164 if patkind(p)[0] == 'glob': break
164 if patkind(p)[0] == 'glob': break
165 root.append(p)
165 root.append(p)
166 return '/'.join(root)
166 return '/'.join(root)
167
167
168 pats = []
168 pats = []
169 files = []
169 files = []
170 roots = []
170 roots = []
171 for kind, name in map(patkind, names):
171 for kind, name in map(patkind, names):
172 if kind in ('glob', 'relpath'):
172 if kind in ('glob', 'relpath'):
173 name = canonpath(canonroot, cwd, name)
173 name = canonpath(canonroot, cwd, name)
174 if name == '':
174 if name == '':
175 kind, name = 'glob', '**'
175 kind, name = 'glob', '**'
176 if kind in ('glob', 'path', 're'):
176 if kind in ('glob', 'path', 're'):
177 pats.append((kind, name))
177 pats.append((kind, name))
178 if kind == 'glob':
178 if kind == 'glob':
179 root = globprefix(name)
179 root = globprefix(name)
180 if root: roots.append(root)
180 if root: roots.append(root)
181 elif kind == 'relpath':
181 elif kind == 'relpath':
182 files.append((kind, name))
182 files.append((kind, name))
183 roots.append(name)
183 roots.append(name)
184
184
185 patmatch = matchfn(pats, '$') or always
185 patmatch = matchfn(pats, '$') or always
186 filematch = matchfn(files, '(?:/|$)') or always
186 filematch = matchfn(files, '(?:/|$)') or always
187 incmatch = always
187 incmatch = always
188 if inc:
188 if inc:
189 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
189 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
190 excmatch = lambda fn: False
190 excmatch = lambda fn: False
191 if exc:
191 if exc:
192 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
192 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
193
193
194 return (roots,
194 return (roots,
195 lambda fn: (incmatch(fn) and not excmatch(fn) and
195 lambda fn: (incmatch(fn) and not excmatch(fn) and
196 (fn.endswith('/') or
196 (fn.endswith('/') or
197 (not pats and not files) or
197 (not pats and not files) or
198 (pats and patmatch(fn)) or
198 (pats and patmatch(fn)) or
199 (files and filematch(fn)))),
199 (files and filematch(fn)))),
200 (inc or exc or (pats and pats != [('glob', '**')])) and True)
200 (inc or exc or (pats and pats != [('glob', '**')])) and True)
201
201
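A usage sketch of the matcher() API documented above, on a POSIX layout with made-up paths (matcher and its helpers from this module are assumed to be in scope):

# roots: directories worth walking; match: bool(filename); anypats: were patterns given?
roots, match, anypats = matcher('/repo', '', ['glob:src/**.py'], [], [])
assert roots == ['src']            # non-glob prefix of the pattern
assert match('src/pkg/mod.py')     # caught by the glob
assert not match('README')         # outside the pattern
assert anypats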
202 def system(cmd, errprefix=None):
202 def system(cmd, errprefix=None):
203 """execute a shell command that must succeed"""
203 """execute a shell command that must succeed"""
204 rc = os.system(cmd)
204 rc = os.system(cmd)
205 if rc:
205 if rc:
206 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
206 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
207 explain_exit(rc)[0])
207 explain_exit(rc)[0])
208 if errprefix:
208 if errprefix:
209 errmsg = "%s: %s" % (errprefix, errmsg)
209 errmsg = "%s: %s" % (errprefix, errmsg)
210 raise Abort(errmsg)
210 raise Abort(errmsg)
211
211
212 def rename(src, dst):
212 def rename(src, dst):
213 """forcibly rename a file"""
213 """forcibly rename a file"""
214 try:
214 try:
215 os.rename(src, dst)
215 os.rename(src, dst)
216 except:
216 except:
217 os.unlink(dst)
217 os.unlink(dst)
218 os.rename(src, dst)
218 os.rename(src, dst)
219
219
220 def copytree(src, dst, copyfile):
220 def copytree(src, dst, copyfile):
221 """Copy a directory tree, files are copied using 'copyfile'."""
221 """Copy a directory tree, files are copied using 'copyfile'."""
222 names = os.listdir(src)
222 names = os.listdir(src)
223 os.mkdir(dst)
223 os.mkdir(dst)
224
224
225 for name in names:
225 for name in names:
226 srcname = os.path.join(src, name)
226 srcname = os.path.join(src, name)
227 dstname = os.path.join(dst, name)
227 dstname = os.path.join(dst, name)
228 if os.path.isdir(srcname):
228 if os.path.isdir(srcname):
229 copytree(srcname, dstname, copyfile)
229 copytree(srcname, dstname, copyfile)
230 elif os.path.isfile(srcname):
230 elif os.path.isfile(srcname):
231 copyfile(srcname, dstname)
231 copyfile(srcname, dstname)
232 else:
232 else:
233 pass
233 pass
234
234
235 def opener(base):
235 def opener(base):
236 """
236 """
237 return a function that opens files relative to base
237 return a function that opens files relative to base
238
238
239 this function is used to hide the details of COW semantics and
239 this function is used to hide the details of COW semantics and
240 remote file access from higher level code.
240 remote file access from higher level code.
241
242 todo: separate remote file access into a separate function
243 """
241 """
244 p = base
242 p = base
245 def o(path, mode="r"):
243 def o(path, mode="r"):
246 f = os.path.join(p, path)
244 f = os.path.join(p, path)
247
245
248 mode += "b" # for that other OS
246 mode += "b" # for that other OS
249
247
250 if mode[0] != "r":
248 if mode[0] != "r":
251 try:
249 try:
252 s = os.stat(f)
250 s = os.stat(f)
253 except OSError:
251 except OSError:
254 d = os.path.dirname(f)
252 d = os.path.dirname(f)
255 if not os.path.isdir(d):
253 if not os.path.isdir(d):
256 os.makedirs(d)
254 os.makedirs(d)
257 else:
255 else:
258 if s.st_nlink > 1:
256 if s.st_nlink > 1:
259 file(f + ".tmp", "wb").write(file(f, "rb").read())
257 file(f + ".tmp", "wb").write(file(f, "rb").read())
260 rename(f+".tmp", f)
258 rename(f+".tmp", f)
261
259
262 return file(f, mode)
260 return file(f, mode)
263
261
264 return o
262 return o
265
263
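The opener() factory above returns a function that opens files relative to a base directory, creating missing parent directories and breaking hardlinks (copy-on-write) before writes. A small usage sketch with a hypothetical base path:

op = opener('/tmp/example-repo/.hg')
f = op('data/notes.txt', 'w')       # parent directories are created on demand
f.write('hello\n')
f.close()
assert op('data/notes.txt').read() == 'hello\n'   # reads are binary-mode, base-relative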
266 def _makelock_file(info, pathname):
264 def _makelock_file(info, pathname):
267 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
265 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
268 os.write(ld, info)
266 os.write(ld, info)
269 os.close(ld)
267 os.close(ld)
270
268
271 def _readlock_file(pathname):
269 def _readlock_file(pathname):
272 return file(pathname).read()
270 return file(pathname).read()
273
271
274 # Platform specific variants
272 # Platform specific variants
275 if os.name == 'nt':
273 if os.name == 'nt':
276 nulldev = 'NUL:'
274 nulldev = 'NUL:'
277
275
278 def is_exec(f, last):
276 def is_exec(f, last):
279 return last
277 return last
280
278
281 def set_exec(f, mode):
279 def set_exec(f, mode):
282 pass
280 pass
283
281
284 def pconvert(path):
282 def pconvert(path):
285 return path.replace("\\", "/")
283 return path.replace("\\", "/")
286
284
287 def localpath(path):
285 def localpath(path):
288 return path.replace('/', '\\')
286 return path.replace('/', '\\')
289
287
290 def normpath(path):
288 def normpath(path):
291 return pconvert(os.path.normpath(path))
289 return pconvert(os.path.normpath(path))
292
290
293 makelock = _makelock_file
291 makelock = _makelock_file
294 readlock = _readlock_file
292 readlock = _readlock_file
295
293
296 def explain_exit(code):
294 def explain_exit(code):
297 return "exited with status %d" % code, code
295 return "exited with status %d" % code, code
298
296
299 else:
297 else:
300 nulldev = '/dev/null'
298 nulldev = '/dev/null'
301
299
302 def is_exec(f, last):
300 def is_exec(f, last):
303 """check whether a file is executable"""
301 """check whether a file is executable"""
304 return (os.stat(f).st_mode & 0100 != 0)
302 return (os.stat(f).st_mode & 0100 != 0)
305
303
306 def set_exec(f, mode):
304 def set_exec(f, mode):
307 s = os.stat(f).st_mode
305 s = os.stat(f).st_mode
308 if (s & 0100 != 0) == mode:
306 if (s & 0100 != 0) == mode:
309 return
307 return
310 if mode:
308 if mode:
311 # Turn on +x for every +r bit when making a file executable
309 # Turn on +x for every +r bit when making a file executable
312 # and obey umask.
310 # and obey umask.
313 umask = os.umask(0)
311 umask = os.umask(0)
314 os.umask(umask)
312 os.umask(umask)
315 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
313 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
316 else:
314 else:
317 os.chmod(f, s & 0666)
315 os.chmod(f, s & 0666)
318
316
319 def pconvert(path):
317 def pconvert(path):
320 return path
318 return path
321
319
322 def localpath(path):
320 def localpath(path):
323 return path
321 return path
324
322
325 normpath = os.path.normpath
323 normpath = os.path.normpath
326
324
327 def makelock(info, pathname):
325 def makelock(info, pathname):
328 try:
326 try:
329 os.symlink(info, pathname)
327 os.symlink(info, pathname)
330 except OSError, why:
328 except OSError, why:
331 if why.errno == errno.EEXIST:
329 if why.errno == errno.EEXIST:
332 raise
330 raise
333 else:
331 else:
334 _makelock_file(info, pathname)
332 _makelock_file(info, pathname)
335
333
336 def readlock(pathname):
334 def readlock(pathname):
337 try:
335 try:
338 return os.readlink(pathname)
336 return os.readlink(pathname)
339 except OSError, why:
337 except OSError, why:
340 if why.errno == errno.EINVAL:
338 if why.errno == errno.EINVAL:
341 return _readlock_file(pathname)
339 return _readlock_file(pathname)
342 else:
340 else:
343 raise
341 raise
344
342
345 def explain_exit(code):
343 def explain_exit(code):
346 """return a 2-tuple (desc, code) describing a process's status"""
344 """return a 2-tuple (desc, code) describing a process's status"""
347 if os.name == 'nt': # os.WIFxx is not supported on windows
345 if os.name == 'nt': # os.WIFxx is not supported on windows
348 return "aborted with error." , -1
346 return "aborted with error." , -1
349 if os.WIFEXITED(code):
347 if os.WIFEXITED(code):
350 val = os.WEXITSTATUS(code)
348 val = os.WEXITSTATUS(code)
351 return "exited with status %d" % val, val
349 return "exited with status %d" % val, val
352 elif os.WIFSIGNALED(code):
350 elif os.WIFSIGNALED(code):
353 val = os.WTERMSIG(code)
351 val = os.WTERMSIG(code)
354 return "killed by signal %d" % val, val
352 return "killed by signal %d" % val, val
355 elif os.WIFSTOPPED(code):
353 elif os.WIFSTOPPED(code):
356 val = os.WSTOPSIG(code)
354 val = os.WSTOPSIG(code)
357 return "stopped by signal %d" % val, val
355 return "stopped by signal %d" % val, val
358 raise ValueError("invalid exit code")
356 raise ValueError("invalid exit code")