Separate out old-http support...
mpm@selenic.com
r1101:2cf5c8a4 default
statichttprepo.py (new file)
@@ -0,0 +1,35 @@
# statichttprepo.py - simple http repository class for mercurial
#
# This provides read-only repo access to repositories exported via static http
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, urllib
import localrepo, httprangereader, filelog, manifest, changelog

def opener(base):
    """return a function that opens files over http"""
    p = base
    def o(path, mode="r"):
        f = os.path.join(p, urllib.quote(path))
        return httprangereader.httprangereader(f)
    return o

class statichttprepository(localrepo.localrepository):
    def __init__(self, ui, path):
        self.path = (path + "/.hg")
        self.ui = ui
        self.opener = opener(self.path)
        self.manifest = manifest.manifest(self.opener)
        self.changelog = changelog.changelog(self.opener)
        self.tagscache = None
        self.nodetagscache = None

    def dev(self):
        return -1

    def local(self):
        return False
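The class above reuses localrepository's machinery and only swaps in an opener that reads files over HTTP, which is what makes a dumb static export of a .hg directory usable as a read-only repository. A minimal sketch of the same opener pattern using urllib directly rather than the httprangereader class imported above (the URL and file name are hypothetical):

import urllib

def httpopener(base):
    """return a function that reads files from a statically exported .hg"""
    def o(path, mode="r"):
        # quote the path and fetch it relative to the published .hg directory
        return urllib.urlopen(base + "/" + urllib.quote(path))
    return o

# hypothetical: a repository whose .hg directory is served by any web server
files = httpopener("http://example.com/repo/.hg")
data = files("00changelog.i").read()

httprangereader layers HTTP Range requests on top of this idea so that revlogs can be read piecemeal instead of being downloaded whole.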
hg.py
@@ -1,29 +1,29 @@
 # hg.py - repository classes for mercurial
 #
 # Copyright 2005 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
 import util
 from node import *
 from repo import *
 from demandload import *
-demandload(globals(), "localrepo httprepo sshrepo")
+demandload(globals(), "localrepo httprepo sshrepo statichttprepo")
 
 def repository(ui, path=None, create=0):
     if path:
         if path.startswith("http://"):
             return httprepo.httprepository(ui, path)
         if path.startswith("https://"):
             return httprepo.httpsrepository(ui, path)
         if path.startswith("hg://"):
             return httprepo.httprepository(
                 ui, path.replace("hg://", "http://"))
         if path.startswith("old-http://"):
-            return localrepo.localrepository(
-                ui, util.opener, path.replace("old-http://", "http://"))
+            return statichttprepo.statichttprepository(
+                ui, path.replace("old-http://", "http://"))
         if path.startswith("ssh://"):
             return sshrepo.sshrepository(ui, path)
 
     return localrepo.localrepository(ui, util.opener, path, create)
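With that dispatch in place, the URL scheme alone selects the repository class; a rough sketch of what callers get back (the ui object and the paths are placeholders):

# scheme -> repository class, per repository() above
r1 = repository(ui, "http://example.com/repo")           # httprepository (talks to hgweb/hg serve)
r2 = repository(ui, "old-http://example.com/repo")       # statichttprepository (plain static export)
r3 = repository(ui, "ssh://example.com//repos/project")  # sshrepository
r4 = repository(ui, "/home/user/project")                 # localrepository

This is what lets a command such as hg pull old-http://example.com/repo read history from a server that does nothing but serve the .hg directory as static files.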
localrepo.py
@@ -1,1407 +1,1400 @@
 # localrepo.py - read/write repository class for mercurial
 #
 # Copyright 2005 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
 import struct, os, util
 import filelog, manifest, changelog, dirstate, repo
 from node import *
 from demandload import *
 demandload(globals(), "re lock transaction tempfile stat mdiff")
 
 class localrepository:
     def __init__(self, ui, opener, path=None, create=0):
-        self.remote = 0
-        if path and path.startswith("http://"):
-            self.remote = 1
-            self.path = path
-        else:
-            if not path:
-                p = os.getcwd()
-                while not os.path.isdir(os.path.join(p, ".hg")):
-                    oldp = p
-                    p = os.path.dirname(p)
-                    if p == oldp: raise repo.RepoError("no repo found")
-                path = p
-            self.path = os.path.join(path, ".hg")
+        if not path:
+            p = os.getcwd()
+            while not os.path.isdir(os.path.join(p, ".hg")):
+                oldp = p
+                p = os.path.dirname(p)
+                if p == oldp: raise repo.RepoError("no repo found")
+            path = p
+        self.path = os.path.join(path, ".hg")
 
         if not create and not os.path.isdir(self.path):
             raise repo.RepoError("repository %s not found" % self.path)
 
         self.root = os.path.abspath(path)
         self.ui = ui
 
         if create:
             os.mkdir(self.path)
             os.mkdir(self.join("data"))
 
         self.opener = opener(self.path)
         self.wopener = opener(self.root)
         self.manifest = manifest.manifest(self.opener)
         self.changelog = changelog.changelog(self.opener)
         self.tagscache = None
         self.nodetagscache = None
 
-        if not self.remote:
-            self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
+        self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
         try:
             self.ui.readconfig(self.opener("hgrc"))
         except IOError: pass
 
53 def hook(self, name, **args):
47 def hook(self, name, **args):
54 s = self.ui.config("hooks", name)
48 s = self.ui.config("hooks", name)
55 if s:
49 if s:
56 self.ui.note("running hook %s: %s\n" % (name, s))
50 self.ui.note("running hook %s: %s\n" % (name, s))
57 old = {}
51 old = {}
58 for k, v in args.items():
52 for k, v in args.items():
59 k = k.upper()
53 k = k.upper()
60 old[k] = os.environ.get(k, None)
54 old[k] = os.environ.get(k, None)
61 os.environ[k] = v
55 os.environ[k] = v
62
56
63 r = os.system(s)
57 r = os.system(s)
64
58
65 for k, v in old.items():
59 for k, v in old.items():
66 if v != None:
60 if v != None:
67 os.environ[k] = v
61 os.environ[k] = v
68 else:
62 else:
69 del os.environ[k]
63 del os.environ[k]
70
64
71 if r:
65 if r:
72 self.ui.warn("abort: %s hook failed with status %d!\n" %
66 self.ui.warn("abort: %s hook failed with status %d!\n" %
73 (name, r))
67 (name, r))
74 return False
68 return False
75 return True
69 return True
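Hooks are shell commands looked up in the [hooks] configuration section; the keyword arguments passed to hook() are exported as uppercase environment variables while the command runs. A hypothetical .hg/hgrc entry for the commit hook invoked later in this file (it is called with node=hex(n), so the command sees NODE):

[hooks]
commit = echo "new changeset $NODE" >> /tmp/commits.log

A non-zero exit status makes hook() print the abort warning and return False, which callers such as the precommit check in commit() treat as a veto.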
76
70
77 def tags(self):
71 def tags(self):
78 '''return a mapping of tag to node'''
72 '''return a mapping of tag to node'''
79 if not self.tagscache:
73 if not self.tagscache:
80 self.tagscache = {}
74 self.tagscache = {}
81 def addtag(self, k, n):
75 def addtag(self, k, n):
82 try:
76 try:
83 bin_n = bin(n)
77 bin_n = bin(n)
84 except TypeError:
78 except TypeError:
85 bin_n = ''
79 bin_n = ''
86 self.tagscache[k.strip()] = bin_n
80 self.tagscache[k.strip()] = bin_n
87
81
88 try:
82 try:
89 # read each head of the tags file, ending with the tip
83 # read each head of the tags file, ending with the tip
90 # and add each tag found to the map, with "newer" ones
84 # and add each tag found to the map, with "newer" ones
91 # taking precedence
85 # taking precedence
92 fl = self.file(".hgtags")
86 fl = self.file(".hgtags")
93 h = fl.heads()
87 h = fl.heads()
94 h.reverse()
88 h.reverse()
95 for r in h:
89 for r in h:
96 for l in fl.read(r).splitlines():
90 for l in fl.read(r).splitlines():
97 if l:
91 if l:
98 n, k = l.split(" ", 1)
92 n, k = l.split(" ", 1)
99 addtag(self, k, n)
93 addtag(self, k, n)
100 except KeyError:
94 except KeyError:
101 pass
95 pass
102
96
103 try:
97 try:
104 f = self.opener("localtags")
98 f = self.opener("localtags")
105 for l in f:
99 for l in f:
106 n, k = l.split(" ", 1)
100 n, k = l.split(" ", 1)
107 addtag(self, k, n)
101 addtag(self, k, n)
108 except IOError:
102 except IOError:
109 pass
103 pass
110
104
111 self.tagscache['tip'] = self.changelog.tip()
105 self.tagscache['tip'] = self.changelog.tip()
112
106
113 return self.tagscache
107 return self.tagscache
114
108
115 def tagslist(self):
109 def tagslist(self):
116 '''return a list of tags ordered by revision'''
110 '''return a list of tags ordered by revision'''
117 l = []
111 l = []
118 for t, n in self.tags().items():
112 for t, n in self.tags().items():
119 try:
113 try:
120 r = self.changelog.rev(n)
114 r = self.changelog.rev(n)
121 except:
115 except:
122 r = -2 # sort to the beginning of the list if unknown
116 r = -2 # sort to the beginning of the list if unknown
123 l.append((r,t,n))
117 l.append((r,t,n))
124 l.sort()
118 l.sort()
125 return [(t,n) for r,t,n in l]
119 return [(t,n) for r,t,n in l]
126
120
127 def nodetags(self, node):
121 def nodetags(self, node):
128 '''return the tags associated with a node'''
122 '''return the tags associated with a node'''
129 if not self.nodetagscache:
123 if not self.nodetagscache:
130 self.nodetagscache = {}
124 self.nodetagscache = {}
131 for t,n in self.tags().items():
125 for t,n in self.tags().items():
132 self.nodetagscache.setdefault(n,[]).append(t)
126 self.nodetagscache.setdefault(n,[]).append(t)
133 return self.nodetagscache.get(node, [])
127 return self.nodetagscache.get(node, [])
134
128
135 def lookup(self, key):
129 def lookup(self, key):
136 try:
130 try:
137 return self.tags()[key]
131 return self.tags()[key]
138 except KeyError:
132 except KeyError:
139 try:
133 try:
140 return self.changelog.lookup(key)
134 return self.changelog.lookup(key)
141 except:
135 except:
142 raise repo.RepoError("unknown revision '%s'" % key)
136 raise repo.RepoError("unknown revision '%s'" % key)
143
137
     def dev(self):
-        if self.remote: return -1
         return os.stat(self.path).st_dev
 
     def local(self):
-        return not self.remote
+        return True
150
143
151 def join(self, f):
144 def join(self, f):
152 return os.path.join(self.path, f)
145 return os.path.join(self.path, f)
153
146
154 def wjoin(self, f):
147 def wjoin(self, f):
155 return os.path.join(self.root, f)
148 return os.path.join(self.root, f)
156
149
157 def file(self, f):
150 def file(self, f):
158 if f[0] == '/': f = f[1:]
151 if f[0] == '/': f = f[1:]
159 return filelog.filelog(self.opener, f)
152 return filelog.filelog(self.opener, f)
160
153
161 def getcwd(self):
154 def getcwd(self):
162 return self.dirstate.getcwd()
155 return self.dirstate.getcwd()
163
156
164 def wfile(self, f, mode='r'):
157 def wfile(self, f, mode='r'):
165 return self.wopener(f, mode)
158 return self.wopener(f, mode)
166
159
167 def wread(self, filename):
160 def wread(self, filename):
168 return self.wopener(filename, 'r').read()
161 return self.wopener(filename, 'r').read()
169
162
170 def wwrite(self, filename, data, fd=None):
163 def wwrite(self, filename, data, fd=None):
171 if fd:
164 if fd:
172 return fd.write(data)
165 return fd.write(data)
173 return self.wopener(filename, 'w').write(data)
166 return self.wopener(filename, 'w').write(data)
174
167
175 def transaction(self):
168 def transaction(self):
176 # save dirstate for undo
169 # save dirstate for undo
177 try:
170 try:
178 ds = self.opener("dirstate").read()
171 ds = self.opener("dirstate").read()
179 except IOError:
172 except IOError:
180 ds = ""
173 ds = ""
181 self.opener("journal.dirstate", "w").write(ds)
174 self.opener("journal.dirstate", "w").write(ds)
182
175
183 def after():
176 def after():
184 util.rename(self.join("journal"), self.join("undo"))
177 util.rename(self.join("journal"), self.join("undo"))
185 util.rename(self.join("journal.dirstate"),
178 util.rename(self.join("journal.dirstate"),
186 self.join("undo.dirstate"))
179 self.join("undo.dirstate"))
187
180
188 return transaction.transaction(self.ui.warn, self.opener,
181 return transaction.transaction(self.ui.warn, self.opener,
189 self.join("journal"), after)
182 self.join("journal"), after)
190
183
191 def recover(self):
184 def recover(self):
192 lock = self.lock()
185 lock = self.lock()
193 if os.path.exists(self.join("journal")):
186 if os.path.exists(self.join("journal")):
194 self.ui.status("rolling back interrupted transaction\n")
187 self.ui.status("rolling back interrupted transaction\n")
195 return transaction.rollback(self.opener, self.join("journal"))
188 return transaction.rollback(self.opener, self.join("journal"))
196 else:
189 else:
197 self.ui.warn("no interrupted transaction available\n")
190 self.ui.warn("no interrupted transaction available\n")
198
191
199 def undo(self):
192 def undo(self):
200 lock = self.lock()
193 lock = self.lock()
201 if os.path.exists(self.join("undo")):
194 if os.path.exists(self.join("undo")):
202 self.ui.status("rolling back last transaction\n")
195 self.ui.status("rolling back last transaction\n")
203 transaction.rollback(self.opener, self.join("undo"))
196 transaction.rollback(self.opener, self.join("undo"))
204 self.dirstate = None
197 self.dirstate = None
205 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
198 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
206 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
199 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
207 else:
200 else:
208 self.ui.warn("no undo information available\n")
201 self.ui.warn("no undo information available\n")
209
202
210 def lock(self, wait=1):
203 def lock(self, wait=1):
211 try:
204 try:
212 return lock.lock(self.join("lock"), 0)
205 return lock.lock(self.join("lock"), 0)
213 except lock.LockHeld, inst:
206 except lock.LockHeld, inst:
214 if wait:
207 if wait:
215 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
208 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
216 return lock.lock(self.join("lock"), wait)
209 return lock.lock(self.join("lock"), wait)
217 raise inst
210 raise inst
218
211
219 def rawcommit(self, files, text, user, date, p1=None, p2=None):
212 def rawcommit(self, files, text, user, date, p1=None, p2=None):
220 orig_parent = self.dirstate.parents()[0] or nullid
213 orig_parent = self.dirstate.parents()[0] or nullid
221 p1 = p1 or self.dirstate.parents()[0] or nullid
214 p1 = p1 or self.dirstate.parents()[0] or nullid
222 p2 = p2 or self.dirstate.parents()[1] or nullid
215 p2 = p2 or self.dirstate.parents()[1] or nullid
223 c1 = self.changelog.read(p1)
216 c1 = self.changelog.read(p1)
224 c2 = self.changelog.read(p2)
217 c2 = self.changelog.read(p2)
225 m1 = self.manifest.read(c1[0])
218 m1 = self.manifest.read(c1[0])
226 mf1 = self.manifest.readflags(c1[0])
219 mf1 = self.manifest.readflags(c1[0])
227 m2 = self.manifest.read(c2[0])
220 m2 = self.manifest.read(c2[0])
228 changed = []
221 changed = []
229
222
230 if orig_parent == p1:
223 if orig_parent == p1:
231 update_dirstate = 1
224 update_dirstate = 1
232 else:
225 else:
233 update_dirstate = 0
226 update_dirstate = 0
234
227
235 tr = self.transaction()
228 tr = self.transaction()
236 mm = m1.copy()
229 mm = m1.copy()
237 mfm = mf1.copy()
230 mfm = mf1.copy()
238 linkrev = self.changelog.count()
231 linkrev = self.changelog.count()
239 for f in files:
232 for f in files:
240 try:
233 try:
241 t = self.wread(f)
234 t = self.wread(f)
242 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
235 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
243 r = self.file(f)
236 r = self.file(f)
244 mfm[f] = tm
237 mfm[f] = tm
245
238
246 fp1 = m1.get(f, nullid)
239 fp1 = m1.get(f, nullid)
247 fp2 = m2.get(f, nullid)
240 fp2 = m2.get(f, nullid)
248
241
249 # is the same revision on two branches of a merge?
242 # is the same revision on two branches of a merge?
250 if fp2 == fp1:
243 if fp2 == fp1:
251 fp2 = nullid
244 fp2 = nullid
252
245
253 if fp2 != nullid:
246 if fp2 != nullid:
254 # is one parent an ancestor of the other?
247 # is one parent an ancestor of the other?
255 fpa = r.ancestor(fp1, fp2)
248 fpa = r.ancestor(fp1, fp2)
256 if fpa == fp1:
249 if fpa == fp1:
257 fp1, fp2 = fp2, nullid
250 fp1, fp2 = fp2, nullid
258 elif fpa == fp2:
251 elif fpa == fp2:
259 fp2 = nullid
252 fp2 = nullid
260
253
261 # is the file unmodified from the parent?
254 # is the file unmodified from the parent?
262 if t == r.read(fp1):
255 if t == r.read(fp1):
263 # record the proper existing parent in manifest
256 # record the proper existing parent in manifest
264 # no need to add a revision
257 # no need to add a revision
265 mm[f] = fp1
258 mm[f] = fp1
266 continue
259 continue
267
260
268 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
261 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
269 changed.append(f)
262 changed.append(f)
270 if update_dirstate:
263 if update_dirstate:
271 self.dirstate.update([f], "n")
264 self.dirstate.update([f], "n")
272 except IOError:
265 except IOError:
273 try:
266 try:
274 del mm[f]
267 del mm[f]
275 del mfm[f]
268 del mfm[f]
276 if update_dirstate:
269 if update_dirstate:
277 self.dirstate.forget([f])
270 self.dirstate.forget([f])
278 except:
271 except:
279 # deleted from p2?
272 # deleted from p2?
280 pass
273 pass
281
274
282 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
275 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
283 user = user or self.ui.username()
276 user = user or self.ui.username()
284 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
277 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
285 tr.close()
278 tr.close()
286 if update_dirstate:
279 if update_dirstate:
287 self.dirstate.setparents(n, nullid)
280 self.dirstate.setparents(n, nullid)
288
281
289 def commit(self, files = None, text = "", user = None, date = None,
282 def commit(self, files = None, text = "", user = None, date = None,
290 match = util.always, force=False):
283 match = util.always, force=False):
291 commit = []
284 commit = []
292 remove = []
285 remove = []
293 changed = []
286 changed = []
294
287
295 if files:
288 if files:
296 for f in files:
289 for f in files:
297 s = self.dirstate.state(f)
290 s = self.dirstate.state(f)
298 if s in 'nmai':
291 if s in 'nmai':
299 commit.append(f)
292 commit.append(f)
300 elif s == 'r':
293 elif s == 'r':
301 remove.append(f)
294 remove.append(f)
302 else:
295 else:
303 self.ui.warn("%s not tracked!\n" % f)
296 self.ui.warn("%s not tracked!\n" % f)
304 else:
297 else:
305 (c, a, d, u) = self.changes(match=match)
298 (c, a, d, u) = self.changes(match=match)
306 commit = c + a
299 commit = c + a
307 remove = d
300 remove = d
308
301
309 p1, p2 = self.dirstate.parents()
302 p1, p2 = self.dirstate.parents()
310 c1 = self.changelog.read(p1)
303 c1 = self.changelog.read(p1)
311 c2 = self.changelog.read(p2)
304 c2 = self.changelog.read(p2)
312 m1 = self.manifest.read(c1[0])
305 m1 = self.manifest.read(c1[0])
313 mf1 = self.manifest.readflags(c1[0])
306 mf1 = self.manifest.readflags(c1[0])
314 m2 = self.manifest.read(c2[0])
307 m2 = self.manifest.read(c2[0])
315
308
316 if not commit and not remove and not force and p2 == nullid:
309 if not commit and not remove and not force and p2 == nullid:
317 self.ui.status("nothing changed\n")
310 self.ui.status("nothing changed\n")
318 return None
311 return None
319
312
320 if not self.hook("precommit"):
313 if not self.hook("precommit"):
321 return None
314 return None
322
315
323 lock = self.lock()
316 lock = self.lock()
324 tr = self.transaction()
317 tr = self.transaction()
325
318
326 # check in files
319 # check in files
327 new = {}
320 new = {}
328 linkrev = self.changelog.count()
321 linkrev = self.changelog.count()
329 commit.sort()
322 commit.sort()
330 for f in commit:
323 for f in commit:
331 self.ui.note(f + "\n")
324 self.ui.note(f + "\n")
332 try:
325 try:
333 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
326 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
334 t = self.wread(f)
327 t = self.wread(f)
335 except IOError:
328 except IOError:
336 self.ui.warn("trouble committing %s!\n" % f)
329 self.ui.warn("trouble committing %s!\n" % f)
337 raise
330 raise
338
331
339 meta = {}
332 meta = {}
340 cp = self.dirstate.copied(f)
333 cp = self.dirstate.copied(f)
341 if cp:
334 if cp:
342 meta["copy"] = cp
335 meta["copy"] = cp
343 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
336 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
344 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
337 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
345
338
346 r = self.file(f)
339 r = self.file(f)
347 fp1 = m1.get(f, nullid)
340 fp1 = m1.get(f, nullid)
348 fp2 = m2.get(f, nullid)
341 fp2 = m2.get(f, nullid)
349
342
350 # is the same revision on two branches of a merge?
343 # is the same revision on two branches of a merge?
351 if fp2 == fp1:
344 if fp2 == fp1:
352 fp2 = nullid
345 fp2 = nullid
353
346
354 if fp2 != nullid:
347 if fp2 != nullid:
355 # is one parent an ancestor of the other?
348 # is one parent an ancestor of the other?
356 fpa = r.ancestor(fp1, fp2)
349 fpa = r.ancestor(fp1, fp2)
357 if fpa == fp1:
350 if fpa == fp1:
358 fp1, fp2 = fp2, nullid
351 fp1, fp2 = fp2, nullid
359 elif fpa == fp2:
352 elif fpa == fp2:
360 fp2 = nullid
353 fp2 = nullid
361
354
362 # is the file unmodified from the parent?
355 # is the file unmodified from the parent?
363 if not meta and t == r.read(fp1):
356 if not meta and t == r.read(fp1):
364 # record the proper existing parent in manifest
357 # record the proper existing parent in manifest
365 # no need to add a revision
358 # no need to add a revision
366 new[f] = fp1
359 new[f] = fp1
367 continue
360 continue
368
361
369 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
362 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
370 # remember what we've added so that we can later calculate
363 # remember what we've added so that we can later calculate
371 # the files to pull from a set of changesets
364 # the files to pull from a set of changesets
372 changed.append(f)
365 changed.append(f)
373
366
374 # update manifest
367 # update manifest
375 m1.update(new)
368 m1.update(new)
376 for f in remove:
369 for f in remove:
377 if f in m1:
370 if f in m1:
378 del m1[f]
371 del m1[f]
379 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
372 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
380 (new, remove))
373 (new, remove))
381
374
382 # add changeset
375 # add changeset
383 new = new.keys()
376 new = new.keys()
384 new.sort()
377 new.sort()
385
378
386 if not text:
379 if not text:
387 edittext = ""
380 edittext = ""
388 if p2 != nullid:
381 if p2 != nullid:
389 edittext += "HG: branch merge\n"
382 edittext += "HG: branch merge\n"
390 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
383 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
391 edittext += "".join(["HG: changed %s\n" % f for f in changed])
384 edittext += "".join(["HG: changed %s\n" % f for f in changed])
392 edittext += "".join(["HG: removed %s\n" % f for f in remove])
385 edittext += "".join(["HG: removed %s\n" % f for f in remove])
393 if not changed and not remove:
386 if not changed and not remove:
394 edittext += "HG: no files changed\n"
387 edittext += "HG: no files changed\n"
395 edittext = self.ui.edit(edittext)
388 edittext = self.ui.edit(edittext)
396 if not edittext.rstrip():
389 if not edittext.rstrip():
397 return None
390 return None
398 text = edittext
391 text = edittext
399
392
400 user = user or self.ui.username()
393 user = user or self.ui.username()
401 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
394 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
402 tr.close()
395 tr.close()
403
396
404 self.dirstate.setparents(n)
397 self.dirstate.setparents(n)
405 self.dirstate.update(new, "n")
398 self.dirstate.update(new, "n")
406 self.dirstate.forget(remove)
399 self.dirstate.forget(remove)
407
400
408 if not self.hook("commit", node=hex(n)):
401 if not self.hook("commit", node=hex(n)):
409 return None
402 return None
410 return n
403 return n
411
404
412 def walk(self, node=None, files=[], match=util.always):
405 def walk(self, node=None, files=[], match=util.always):
413 if node:
406 if node:
414 for fn in self.manifest.read(self.changelog.read(node)[0]):
407 for fn in self.manifest.read(self.changelog.read(node)[0]):
415 if match(fn): yield 'm', fn
408 if match(fn): yield 'm', fn
416 else:
409 else:
417 for src, fn in self.dirstate.walk(files, match):
410 for src, fn in self.dirstate.walk(files, match):
418 yield src, fn
411 yield src, fn
419
412
420 def changes(self, node1 = None, node2 = None, files = [],
413 def changes(self, node1 = None, node2 = None, files = [],
421 match = util.always):
414 match = util.always):
422 mf2, u = None, []
415 mf2, u = None, []
423
416
424 def fcmp(fn, mf):
417 def fcmp(fn, mf):
425 t1 = self.wread(fn)
418 t1 = self.wread(fn)
426 t2 = self.file(fn).read(mf.get(fn, nullid))
419 t2 = self.file(fn).read(mf.get(fn, nullid))
427 return cmp(t1, t2)
420 return cmp(t1, t2)
428
421
429 def mfmatches(node):
422 def mfmatches(node):
430 mf = dict(self.manifest.read(node))
423 mf = dict(self.manifest.read(node))
431 for fn in mf.keys():
424 for fn in mf.keys():
432 if not match(fn):
425 if not match(fn):
433 del mf[fn]
426 del mf[fn]
434 return mf
427 return mf
435
428
436 # are we comparing the working directory?
429 # are we comparing the working directory?
437 if not node2:
430 if not node2:
438 l, c, a, d, u = self.dirstate.changes(files, match)
431 l, c, a, d, u = self.dirstate.changes(files, match)
439
432
440 # are we comparing working dir against its parent?
433 # are we comparing working dir against its parent?
441 if not node1:
434 if not node1:
442 if l:
435 if l:
443 # do a full compare of any files that might have changed
436 # do a full compare of any files that might have changed
444 change = self.changelog.read(self.dirstate.parents()[0])
437 change = self.changelog.read(self.dirstate.parents()[0])
445 mf2 = mfmatches(change[0])
438 mf2 = mfmatches(change[0])
446 for f in l:
439 for f in l:
447 if fcmp(f, mf2):
440 if fcmp(f, mf2):
448 c.append(f)
441 c.append(f)
449
442
450 for l in c, a, d, u:
443 for l in c, a, d, u:
451 l.sort()
444 l.sort()
452
445
453 return (c, a, d, u)
446 return (c, a, d, u)
454
447
455 # are we comparing working dir against non-tip?
448 # are we comparing working dir against non-tip?
456 # generate a pseudo-manifest for the working dir
449 # generate a pseudo-manifest for the working dir
457 if not node2:
450 if not node2:
458 if not mf2:
451 if not mf2:
459 change = self.changelog.read(self.dirstate.parents()[0])
452 change = self.changelog.read(self.dirstate.parents()[0])
460 mf2 = mfmatches(change[0])
453 mf2 = mfmatches(change[0])
461 for f in a + c + l:
454 for f in a + c + l:
462 mf2[f] = ""
455 mf2[f] = ""
463 for f in d:
456 for f in d:
464 if f in mf2: del mf2[f]
457 if f in mf2: del mf2[f]
465 else:
458 else:
466 change = self.changelog.read(node2)
459 change = self.changelog.read(node2)
467 mf2 = mfmatches(change[0])
460 mf2 = mfmatches(change[0])
468
461
469 # flush lists from dirstate before comparing manifests
462 # flush lists from dirstate before comparing manifests
470 c, a = [], []
463 c, a = [], []
471
464
472 change = self.changelog.read(node1)
465 change = self.changelog.read(node1)
473 mf1 = mfmatches(change[0])
466 mf1 = mfmatches(change[0])
474
467
475 for fn in mf2:
468 for fn in mf2:
476 if mf1.has_key(fn):
469 if mf1.has_key(fn):
477 if mf1[fn] != mf2[fn]:
470 if mf1[fn] != mf2[fn]:
478 if mf2[fn] != "" or fcmp(fn, mf1):
471 if mf2[fn] != "" or fcmp(fn, mf1):
479 c.append(fn)
472 c.append(fn)
480 del mf1[fn]
473 del mf1[fn]
481 else:
474 else:
482 a.append(fn)
475 a.append(fn)
483
476
484 d = mf1.keys()
477 d = mf1.keys()
485
478
486 for l in c, a, d, u:
479 for l in c, a, d, u:
487 l.sort()
480 l.sort()
488
481
489 return (c, a, d, u)
482 return (c, a, d, u)
490
483
491 def add(self, list):
484 def add(self, list):
492 for f in list:
485 for f in list:
493 p = self.wjoin(f)
486 p = self.wjoin(f)
494 if not os.path.exists(p):
487 if not os.path.exists(p):
495 self.ui.warn("%s does not exist!\n" % f)
488 self.ui.warn("%s does not exist!\n" % f)
496 elif not os.path.isfile(p):
489 elif not os.path.isfile(p):
497 self.ui.warn("%s not added: only files supported currently\n" % f)
490 self.ui.warn("%s not added: only files supported currently\n" % f)
498 elif self.dirstate.state(f) in 'an':
491 elif self.dirstate.state(f) in 'an':
499 self.ui.warn("%s already tracked!\n" % f)
492 self.ui.warn("%s already tracked!\n" % f)
500 else:
493 else:
501 self.dirstate.update([f], "a")
494 self.dirstate.update([f], "a")
502
495
503 def forget(self, list):
496 def forget(self, list):
504 for f in list:
497 for f in list:
505 if self.dirstate.state(f) not in 'ai':
498 if self.dirstate.state(f) not in 'ai':
506 self.ui.warn("%s not added!\n" % f)
499 self.ui.warn("%s not added!\n" % f)
507 else:
500 else:
508 self.dirstate.forget([f])
501 self.dirstate.forget([f])
509
502
510 def remove(self, list):
503 def remove(self, list):
511 for f in list:
504 for f in list:
512 p = self.wjoin(f)
505 p = self.wjoin(f)
513 if os.path.exists(p):
506 if os.path.exists(p):
514 self.ui.warn("%s still exists!\n" % f)
507 self.ui.warn("%s still exists!\n" % f)
515 elif self.dirstate.state(f) == 'a':
508 elif self.dirstate.state(f) == 'a':
516 self.ui.warn("%s never committed!\n" % f)
509 self.ui.warn("%s never committed!\n" % f)
517 self.dirstate.forget([f])
510 self.dirstate.forget([f])
518 elif f not in self.dirstate:
511 elif f not in self.dirstate:
519 self.ui.warn("%s not tracked!\n" % f)
512 self.ui.warn("%s not tracked!\n" % f)
520 else:
513 else:
521 self.dirstate.update([f], "r")
514 self.dirstate.update([f], "r")
522
515
523 def copy(self, source, dest):
516 def copy(self, source, dest):
524 p = self.wjoin(dest)
517 p = self.wjoin(dest)
525 if not os.path.exists(p):
518 if not os.path.exists(p):
526 self.ui.warn("%s does not exist!\n" % dest)
519 self.ui.warn("%s does not exist!\n" % dest)
527 elif not os.path.isfile(p):
520 elif not os.path.isfile(p):
528 self.ui.warn("copy failed: %s is not a file\n" % dest)
521 self.ui.warn("copy failed: %s is not a file\n" % dest)
529 else:
522 else:
530 if self.dirstate.state(dest) == '?':
523 if self.dirstate.state(dest) == '?':
531 self.dirstate.update([dest], "a")
524 self.dirstate.update([dest], "a")
532 self.dirstate.copy(source, dest)
525 self.dirstate.copy(source, dest)
533
526
534 def heads(self):
527 def heads(self):
535 return self.changelog.heads()
528 return self.changelog.heads()
536
529
537 # branchlookup returns a dict giving a list of branches for
530 # branchlookup returns a dict giving a list of branches for
538 # each head. A branch is defined as the tag of a node or
531 # each head. A branch is defined as the tag of a node or
539 # the branch of the node's parents. If a node has multiple
532 # the branch of the node's parents. If a node has multiple
540 # branch tags, tags are eliminated if they are visible from other
533 # branch tags, tags are eliminated if they are visible from other
541 # branch tags.
534 # branch tags.
542 #
535 #
543 # So, for this graph: a->b->c->d->e
536 # So, for this graph: a->b->c->d->e
544 # \ /
537 # \ /
545 # aa -----/
538 # aa -----/
546 # a has tag 2.6.12
539 # a has tag 2.6.12
547 # d has tag 2.6.13
540 # d has tag 2.6.13
548 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
541 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
549 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
542 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
550 # from the list.
543 # from the list.
551 #
544 #
552 # It is possible that more than one head will have the same branch tag.
545 # It is possible that more than one head will have the same branch tag.
553 # callers need to check the result for multiple heads under the same
546 # callers need to check the result for multiple heads under the same
554 # branch tag if that is a problem for them (ie checkout of a specific
547 # branch tag if that is a problem for them (ie checkout of a specific
555 # branch).
548 # branch).
556 #
549 #
557 # passing in a specific branch will limit the depth of the search
550 # passing in a specific branch will limit the depth of the search
558 # through the parents. It won't limit the branches returned in the
551 # through the parents. It won't limit the branches returned in the
559 # result though.
552 # result though.
560 def branchlookup(self, heads=None, branch=None):
553 def branchlookup(self, heads=None, branch=None):
561 if not heads:
554 if not heads:
562 heads = self.heads()
555 heads = self.heads()
563 headt = [ h for h in heads ]
556 headt = [ h for h in heads ]
564 chlog = self.changelog
557 chlog = self.changelog
565 branches = {}
558 branches = {}
566 merges = []
559 merges = []
567 seenmerge = {}
560 seenmerge = {}
568
561
569 # traverse the tree once for each head, recording in the branches
562 # traverse the tree once for each head, recording in the branches
570 # dict which tags are visible from this head. The branches
563 # dict which tags are visible from this head. The branches
571 # dict also records which tags are visible from each tag
564 # dict also records which tags are visible from each tag
572 # while we traverse.
565 # while we traverse.
573 while headt or merges:
566 while headt or merges:
574 if merges:
567 if merges:
575 n, found = merges.pop()
568 n, found = merges.pop()
576 visit = [n]
569 visit = [n]
577 else:
570 else:
578 h = headt.pop()
571 h = headt.pop()
579 visit = [h]
572 visit = [h]
580 found = [h]
573 found = [h]
581 seen = {}
574 seen = {}
582 while visit:
575 while visit:
583 n = visit.pop()
576 n = visit.pop()
584 if n in seen:
577 if n in seen:
585 continue
578 continue
586 pp = chlog.parents(n)
579 pp = chlog.parents(n)
587 tags = self.nodetags(n)
580 tags = self.nodetags(n)
588 if tags:
581 if tags:
589 for x in tags:
582 for x in tags:
590 if x == 'tip':
583 if x == 'tip':
591 continue
584 continue
592 for f in found:
585 for f in found:
593 branches.setdefault(f, {})[n] = 1
586 branches.setdefault(f, {})[n] = 1
594 branches.setdefault(n, {})[n] = 1
587 branches.setdefault(n, {})[n] = 1
595 break
588 break
596 if n not in found:
589 if n not in found:
597 found.append(n)
590 found.append(n)
598 if branch in tags:
591 if branch in tags:
599 continue
592 continue
600 seen[n] = 1
593 seen[n] = 1
601 if pp[1] != nullid and n not in seenmerge:
594 if pp[1] != nullid and n not in seenmerge:
602 merges.append((pp[1], [x for x in found]))
595 merges.append((pp[1], [x for x in found]))
603 seenmerge[n] = 1
596 seenmerge[n] = 1
604 if pp[0] != nullid:
597 if pp[0] != nullid:
605 visit.append(pp[0])
598 visit.append(pp[0])
606 # traverse the branches dict, eliminating branch tags from each
599 # traverse the branches dict, eliminating branch tags from each
607 # head that are visible from another branch tag for that head.
600 # head that are visible from another branch tag for that head.
608 out = {}
601 out = {}
609 viscache = {}
602 viscache = {}
610 for h in heads:
603 for h in heads:
611 def visible(node):
604 def visible(node):
612 if node in viscache:
605 if node in viscache:
613 return viscache[node]
606 return viscache[node]
614 ret = {}
607 ret = {}
615 visit = [node]
608 visit = [node]
616 while visit:
609 while visit:
617 x = visit.pop()
610 x = visit.pop()
618 if x in viscache:
611 if x in viscache:
619 ret.update(viscache[x])
612 ret.update(viscache[x])
620 elif x not in ret:
613 elif x not in ret:
621 ret[x] = 1
614 ret[x] = 1
622 if x in branches:
615 if x in branches:
623 visit[len(visit):] = branches[x].keys()
616 visit[len(visit):] = branches[x].keys()
624 viscache[node] = ret
617 viscache[node] = ret
625 return ret
618 return ret
626 if h not in branches:
619 if h not in branches:
627 continue
620 continue
628 # O(n^2), but somewhat limited. This only searches the
621 # O(n^2), but somewhat limited. This only searches the
629 # tags visible from a specific head, not all the tags in the
622 # tags visible from a specific head, not all the tags in the
630 # whole repo.
623 # whole repo.
631 for b in branches[h]:
624 for b in branches[h]:
632 vis = False
625 vis = False
633 for bb in branches[h].keys():
626 for bb in branches[h].keys():
634 if b != bb:
627 if b != bb:
635 if b in visible(bb):
628 if b in visible(bb):
636 vis = True
629 vis = True
637 break
630 break
638 if not vis:
631 if not vis:
639 l = out.setdefault(h, [])
632 l = out.setdefault(h, [])
640 l[len(l):] = self.nodetags(b)
633 l[len(l):] = self.nodetags(b)
641 return out
634 return out
642
635
643 def branches(self, nodes):
636 def branches(self, nodes):
644 if not nodes: nodes = [self.changelog.tip()]
637 if not nodes: nodes = [self.changelog.tip()]
645 b = []
638 b = []
646 for n in nodes:
639 for n in nodes:
647 t = n
640 t = n
648 while n:
641 while n:
649 p = self.changelog.parents(n)
642 p = self.changelog.parents(n)
650 if p[1] != nullid or p[0] == nullid:
643 if p[1] != nullid or p[0] == nullid:
651 b.append((t, n, p[0], p[1]))
644 b.append((t, n, p[0], p[1]))
652 break
645 break
653 n = p[0]
646 n = p[0]
654 return b
647 return b
655
648
656 def between(self, pairs):
649 def between(self, pairs):
657 r = []
650 r = []
658
651
659 for top, bottom in pairs:
652 for top, bottom in pairs:
660 n, l, i = top, [], 0
653 n, l, i = top, [], 0
661 f = 1
654 f = 1
662
655
663 while n != bottom:
656 while n != bottom:
664 p = self.changelog.parents(n)[0]
657 p = self.changelog.parents(n)[0]
665 if i == f:
658 if i == f:
666 l.append(n)
659 l.append(n)
667 f = f * 2
660 f = f * 2
668 n = p
661 n = p
669 i += 1
662 i += 1
670
663
671 r.append(l)
664 r.append(l)
672
665
673 return r
666 return r
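The loop above walks first parents from top toward bottom and records the nodes lying 1, 2, 4, 8, ... steps below top; findincoming's "binary search" phase later narrows between these exponentially spaced samples. A toy sketch over a purely linear history, where each revision's parent is simply rev - 1 (the numbers are made up):

def sample_between(top, bottom):
    # collect revs at distances 1, 2, 4, 8, ... below top, stopping at bottom
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n -= 1      # the "first parent" in this toy linear history
        i += 1
    return l

sample_between(100, 80)   # -> [99, 98, 96, 92, 84]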
674
667
675 def newer(self, nodes):
668 def newer(self, nodes):
676 m = {}
669 m = {}
677 nl = []
670 nl = []
678 pm = {}
671 pm = {}
679 cl = self.changelog
672 cl = self.changelog
680 t = l = cl.count()
673 t = l = cl.count()
681
674
682 # find the lowest numbered node
675 # find the lowest numbered node
683 for n in nodes:
676 for n in nodes:
684 l = min(l, cl.rev(n))
677 l = min(l, cl.rev(n))
685 m[n] = 1
678 m[n] = 1
686
679
687 for i in xrange(l, t):
680 for i in xrange(l, t):
688 n = cl.node(i)
681 n = cl.node(i)
689 if n in m: # explicitly listed
682 if n in m: # explicitly listed
690 pm[n] = 1
683 pm[n] = 1
691 nl.append(n)
684 nl.append(n)
692 continue
685 continue
693 for p in cl.parents(n):
686 for p in cl.parents(n):
694 if p in pm: # parent listed
687 if p in pm: # parent listed
695 pm[n] = 1
688 pm[n] = 1
696 nl.append(n)
689 nl.append(n)
697 break
690 break
698
691
699 return nl
692 return nl
700
693
701 def findincoming(self, remote, base=None, heads=None):
694 def findincoming(self, remote, base=None, heads=None):
702 m = self.changelog.nodemap
695 m = self.changelog.nodemap
703 search = []
696 search = []
704 fetch = {}
697 fetch = {}
705 seen = {}
698 seen = {}
706 seenbranch = {}
699 seenbranch = {}
707 if base == None:
700 if base == None:
708 base = {}
701 base = {}
709
702
710 # assume we're closer to the tip than the root
703 # assume we're closer to the tip than the root
711 # and start by examining the heads
704 # and start by examining the heads
712 self.ui.status("searching for changes\n")
705 self.ui.status("searching for changes\n")
713
706
714 if not heads:
707 if not heads:
715 heads = remote.heads()
708 heads = remote.heads()
716
709
717 unknown = []
710 unknown = []
718 for h in heads:
711 for h in heads:
719 if h not in m:
712 if h not in m:
720 unknown.append(h)
713 unknown.append(h)
721 else:
714 else:
722 base[h] = 1
715 base[h] = 1
723
716
724 if not unknown:
717 if not unknown:
725 return None
718 return None
726
719
727 rep = {}
720 rep = {}
728 reqcnt = 0
721 reqcnt = 0
729
722
730 # search through remote branches
723 # search through remote branches
731 # a 'branch' here is a linear segment of history, with four parts:
724 # a 'branch' here is a linear segment of history, with four parts:
732 # head, root, first parent, second parent
725 # head, root, first parent, second parent
733 # (a branch always has two parents (or none) by definition)
726 # (a branch always has two parents (or none) by definition)
734 unknown = remote.branches(unknown)
727 unknown = remote.branches(unknown)
735 while unknown:
728 while unknown:
736 r = []
729 r = []
737 while unknown:
730 while unknown:
738 n = unknown.pop(0)
731 n = unknown.pop(0)
739 if n[0] in seen:
732 if n[0] in seen:
740 continue
733 continue
741
734
742 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
735 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
743 if n[0] == nullid:
736 if n[0] == nullid:
744 break
737 break
745 if n in seenbranch:
738 if n in seenbranch:
746 self.ui.debug("branch already found\n")
739 self.ui.debug("branch already found\n")
747 continue
740 continue
748 if n[1] and n[1] in m: # do we know the base?
741 if n[1] and n[1] in m: # do we know the base?
749 self.ui.debug("found incomplete branch %s:%s\n"
742 self.ui.debug("found incomplete branch %s:%s\n"
750 % (short(n[0]), short(n[1])))
743 % (short(n[0]), short(n[1])))
751 search.append(n) # schedule branch range for scanning
744 search.append(n) # schedule branch range for scanning
752 seenbranch[n] = 1
745 seenbranch[n] = 1
753 else:
746 else:
754 if n[1] not in seen and n[1] not in fetch:
747 if n[1] not in seen and n[1] not in fetch:
755 if n[2] in m and n[3] in m:
748 if n[2] in m and n[3] in m:
756 self.ui.debug("found new changeset %s\n" %
749 self.ui.debug("found new changeset %s\n" %
757 short(n[1]))
750 short(n[1]))
758 fetch[n[1]] = 1 # earliest unknown
751 fetch[n[1]] = 1 # earliest unknown
759 base[n[2]] = 1 # latest known
752 base[n[2]] = 1 # latest known
760 continue
753 continue
761
754
762 for a in n[2:4]:
755 for a in n[2:4]:
763 if a not in rep:
756 if a not in rep:
764 r.append(a)
757 r.append(a)
765 rep[a] = 1
758 rep[a] = 1
766
759
767 seen[n[0]] = 1
760 seen[n[0]] = 1
768
761
769 if r:
762 if r:
770 reqcnt += 1
763 reqcnt += 1
771 self.ui.debug("request %d: %s\n" %
764 self.ui.debug("request %d: %s\n" %
772 (reqcnt, " ".join(map(short, r))))
765 (reqcnt, " ".join(map(short, r))))
773 for p in range(0, len(r), 10):
766 for p in range(0, len(r), 10):
774 for b in remote.branches(r[p:p+10]):
767 for b in remote.branches(r[p:p+10]):
775 self.ui.debug("received %s:%s\n" %
768 self.ui.debug("received %s:%s\n" %
776 (short(b[0]), short(b[1])))
769 (short(b[0]), short(b[1])))
777 if b[0] in m:
770 if b[0] in m:
778 self.ui.debug("found base node %s\n" % short(b[0]))
771 self.ui.debug("found base node %s\n" % short(b[0]))
779 base[b[0]] = 1
772 base[b[0]] = 1
780 elif b[0] not in seen:
773 elif b[0] not in seen:
781 unknown.append(b)
774 unknown.append(b)
782
775
783 # do binary search on the branches we found
776 # do binary search on the branches we found
784 while search:
777 while search:
785 n = search.pop(0)
778 n = search.pop(0)
786 reqcnt += 1
779 reqcnt += 1
787 l = remote.between([(n[0], n[1])])[0]
780 l = remote.between([(n[0], n[1])])[0]
788 l.append(n[1])
781 l.append(n[1])
789 p = n[0]
782 p = n[0]
790 f = 1
783 f = 1
791 for i in l:
784 for i in l:
792 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
785 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
793 if i in m:
786 if i in m:
794 if f <= 2:
787 if f <= 2:
795 self.ui.debug("found new branch changeset %s\n" %
788 self.ui.debug("found new branch changeset %s\n" %
796 short(p))
789 short(p))
797 fetch[p] = 1
790 fetch[p] = 1
798 base[i] = 1
791 base[i] = 1
799 else:
792 else:
800 self.ui.debug("narrowed branch search to %s:%s\n"
793 self.ui.debug("narrowed branch search to %s:%s\n"
801 % (short(p), short(i)))
794 % (short(p), short(i)))
802 search.append((p, i))
795 search.append((p, i))
803 break
796 break
804 p, f = i, f * 2
797 p, f = i, f * 2
805
798
806 # sanity check our fetch list
799 # sanity check our fetch list
807 for f in fetch.keys():
800 for f in fetch.keys():
808 if f in m:
801 if f in m:
809 raise repo.RepoError("already have changeset " + short(f[:4]))
802 raise repo.RepoError("already have changeset " + short(f[:4]))
810
803
811 if base.keys() == [nullid]:
804 if base.keys() == [nullid]:
812 self.ui.warn("warning: pulling from an unrelated repository!\n")
805 self.ui.warn("warning: pulling from an unrelated repository!\n")
813
806
814 self.ui.note("found new changesets starting at " +
807 self.ui.note("found new changesets starting at " +
815 " ".join([short(f) for f in fetch]) + "\n")
808 " ".join([short(f) for f in fetch]) + "\n")
816
809
817 self.ui.debug("%d total queries\n" % reqcnt)
810 self.ui.debug("%d total queries\n" % reqcnt)
818
811
819 return fetch.keys()
812 return fetch.keys()
820
813
821 def findoutgoing(self, remote, base=None, heads=None):
814 def findoutgoing(self, remote, base=None, heads=None):
822 if base == None:
815 if base == None:
823 base = {}
816 base = {}
824 self.findincoming(remote, base, heads)
817 self.findincoming(remote, base, heads)
825
818
826 self.ui.debug("common changesets up to "
819 self.ui.debug("common changesets up to "
827 + " ".join(map(short, base.keys())) + "\n")
820 + " ".join(map(short, base.keys())) + "\n")
828
821
829 remain = dict.fromkeys(self.changelog.nodemap)
822 remain = dict.fromkeys(self.changelog.nodemap)
830
823
831 # prune everything remote has from the tree
824 # prune everything remote has from the tree
832 del remain[nullid]
825 del remain[nullid]
833 remove = base.keys()
826 remove = base.keys()
834 while remove:
827 while remove:
835 n = remove.pop(0)
828 n = remove.pop(0)
836 if n in remain:
829 if n in remain:
837 del remain[n]
830 del remain[n]
838 for p in self.changelog.parents(n):
831 for p in self.changelog.parents(n):
839 remove.append(p)
832 remove.append(p)
840
833
841 # find every node whose parents have been pruned
834 # find every node whose parents have been pruned
842 subset = []
835 subset = []
843 for n in remain:
836 for n in remain:
844 p1, p2 = self.changelog.parents(n)
837 p1, p2 = self.changelog.parents(n)
845 if p1 not in remain and p2 not in remain:
838 if p1 not in remain and p2 not in remain:
846 subset.append(n)
839 subset.append(n)
847
840
848 # this is the set of all roots we have to push
841 # this is the set of all roots we have to push
849 return subset
842 return subset
850
843
851 def pull(self, remote):
844 def pull(self, remote):
852 lock = self.lock()
845 lock = self.lock()
853
846
854 # if we have an empty repo, fetch everything
847 # if we have an empty repo, fetch everything
855 if self.changelog.tip() == nullid:
848 if self.changelog.tip() == nullid:
856 self.ui.status("requesting all changes\n")
849 self.ui.status("requesting all changes\n")
857 fetch = [nullid]
850 fetch = [nullid]
858 else:
851 else:
859 fetch = self.findincoming(remote)
852 fetch = self.findincoming(remote)
860
853
861 if not fetch:
854 if not fetch:
862 self.ui.status("no changes found\n")
855 self.ui.status("no changes found\n")
863 return 1
856 return 1
864
857
865 cg = remote.changegroup(fetch)
858 cg = remote.changegroup(fetch)
866 return self.addchangegroup(cg)
859 return self.addchangegroup(cg)
867
860
868 def push(self, remote, force=False):
861 def push(self, remote, force=False):
869 lock = remote.lock()
862 lock = remote.lock()
870
863
871 base = {}
864 base = {}
872 heads = remote.heads()
865 heads = remote.heads()
873 inc = self.findincoming(remote, base, heads)
866 inc = self.findincoming(remote, base, heads)
874 if not force and inc:
867 if not force and inc:
875 self.ui.warn("abort: unsynced remote changes!\n")
868 self.ui.warn("abort: unsynced remote changes!\n")
876 self.ui.status("(did you forget to sync? use push -f to force)\n")
869 self.ui.status("(did you forget to sync? use push -f to force)\n")
877 return 1
870 return 1
878
871
879 update = self.findoutgoing(remote, base)
872 update = self.findoutgoing(remote, base)
880 if not update:
873 if not update:
881 self.ui.status("no changes found\n")
874 self.ui.status("no changes found\n")
882 return 1
875 return 1
883 elif not force:
876 elif not force:
884 if len(heads) < len(self.changelog.heads()):
877 if len(heads) < len(self.changelog.heads()):
885 self.ui.warn("abort: push creates new remote branches!\n")
878 self.ui.warn("abort: push creates new remote branches!\n")
886 self.ui.status("(did you forget to merge?" +
879 self.ui.status("(did you forget to merge?" +
887 " use push -f to force)\n")
880 " use push -f to force)\n")
888 return 1
881 return 1
889
882
890 cg = self.changegroup(update)
883 cg = self.changegroup(update)
891 return remote.addchangegroup(cg)
884 return remote.addchangegroup(cg)
892
885
893 def changegroup(self, basenodes):
886 def changegroup(self, basenodes):
894 class genread:
887 class genread:
895 def __init__(self, generator):
888 def __init__(self, generator):
896 self.g = generator
889 self.g = generator
897 self.buf = ""
890 self.buf = ""
898 def fillbuf(self):
891 def fillbuf(self):
899 self.buf += "".join(self.g)
892 self.buf += "".join(self.g)
900
893
901 def read(self, l):
894 def read(self, l):
902 while l > len(self.buf):
895 while l > len(self.buf):
903 try:
896 try:
904 self.buf += self.g.next()
897 self.buf += self.g.next()
905 except StopIteration:
898 except StopIteration:
906 break
899 break
907 d, self.buf = self.buf[:l], self.buf[l:]
900 d, self.buf = self.buf[:l], self.buf[l:]
908 return d
901 return d
909
902
910 def gengroup():
903 def gengroup():
911 nodes = self.newer(basenodes)
904 nodes = self.newer(basenodes)
912
905
913 # construct the link map
906 # construct the link map
914 linkmap = {}
907 linkmap = {}
915 for n in nodes:
908 for n in nodes:
916 linkmap[self.changelog.rev(n)] = n
909 linkmap[self.changelog.rev(n)] = n
917
910
918 # construct a list of all changed files
911 # construct a list of all changed files
919 changed = {}
912 changed = {}
920 for n in nodes:
913 for n in nodes:
921 c = self.changelog.read(n)
914 c = self.changelog.read(n)
922 for f in c[3]:
915 for f in c[3]:
923 changed[f] = 1
916 changed[f] = 1
924 changed = changed.keys()
917 changed = changed.keys()
925 changed.sort()
918 changed.sort()
926
919
927 # the changegroup is changesets + manifests + all file revs
920 # the changegroup is changesets + manifests + all file revs
928 revs = [ self.changelog.rev(n) for n in nodes ]
921 revs = [ self.changelog.rev(n) for n in nodes ]
929
922
930 for y in self.changelog.group(linkmap): yield y
923 for y in self.changelog.group(linkmap): yield y
931 for y in self.manifest.group(linkmap): yield y
924 for y in self.manifest.group(linkmap): yield y
932 for f in changed:
925 for f in changed:
933 yield struct.pack(">l", len(f) + 4) + f
926 yield struct.pack(">l", len(f) + 4) + f
934 g = self.file(f).group(linkmap)
927 g = self.file(f).group(linkmap)
935 for y in g:
928 for y in g:
936 yield y
929 yield y
937
930
938 yield struct.pack(">l", 0)
931 yield struct.pack(">l", 0)
939
932
940 return genread(gengroup())
933 return genread(gengroup())
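Each file name emitted above is framed the same way as the revlog groups: a 4-byte big-endian length that counts its own four bytes, followed by the payload, with a length of zero (or anything <= 4) ending a group. A small self-contained sketch of that framing, mirroring the getchunk helper in addchangegroup below:

import struct, StringIO

def putchunk(data):
    # the length field includes its own four bytes; an "empty" chunk ends a group
    return struct.pack(">l", len(data) + 4) + data

def getchunk(stream):
    d = stream.read(4)
    if not d: return ""
    l = struct.unpack(">l", d)[0]
    if l <= 4: return ""
    return stream.read(l - 4)

s = StringIO.StringIO(putchunk("foo/bar.c") + struct.pack(">l", 0))
getchunk(s)   # -> "foo/bar.c"
getchunk(s)   # -> "" (end of group)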
941
934
942 def addchangegroup(self, source):
935 def addchangegroup(self, source):
943
936
944 def getchunk():
937 def getchunk():
945 d = source.read(4)
938 d = source.read(4)
946 if not d: return ""
939 if not d: return ""
947 l = struct.unpack(">l", d)[0]
940 l = struct.unpack(">l", d)[0]
948 if l <= 4: return ""
941 if l <= 4: return ""
949 return source.read(l - 4)
942 return source.read(l - 4)
950
943
951 def getgroup():
944 def getgroup():
952 while 1:
945 while 1:
953 c = getchunk()
946 c = getchunk()
954 if not c: break
947 if not c: break
955 yield c
948 yield c
956
949
957 def csmap(x):
950 def csmap(x):
958 self.ui.debug("add changeset %s\n" % short(x))
951 self.ui.debug("add changeset %s\n" % short(x))
959 return self.changelog.count()
952 return self.changelog.count()
960
953
961 def revmap(x):
954 def revmap(x):
962 return self.changelog.rev(x)
955 return self.changelog.rev(x)
963
956
964 if not source: return
957 if not source: return
965 changesets = files = revisions = 0
958 changesets = files = revisions = 0
966
959
967 tr = self.transaction()
960 tr = self.transaction()
968
961
969 oldheads = len(self.changelog.heads())
962 oldheads = len(self.changelog.heads())
970
963
971 # pull off the changeset group
964 # pull off the changeset group
972 self.ui.status("adding changesets\n")
965 self.ui.status("adding changesets\n")
973 co = self.changelog.tip()
966 co = self.changelog.tip()
974 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
967 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
975 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
968 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
976
969
977 # pull off the manifest group
970 # pull off the manifest group
978 self.ui.status("adding manifests\n")
971 self.ui.status("adding manifests\n")
979 mm = self.manifest.tip()
972 mm = self.manifest.tip()
980 mo = self.manifest.addgroup(getgroup(), revmap, tr)
973 mo = self.manifest.addgroup(getgroup(), revmap, tr)
981
974
982 # process the files
975 # process the files
983 self.ui.status("adding file changes\n")
976 self.ui.status("adding file changes\n")
984 while 1:
977 while 1:
985 f = getchunk()
978 f = getchunk()
986 if not f: break
979 if not f: break
987 self.ui.debug("adding %s revisions\n" % f)
980 self.ui.debug("adding %s revisions\n" % f)
988 fl = self.file(f)
981 fl = self.file(f)
989 o = fl.count()
982 o = fl.count()
990 n = fl.addgroup(getgroup(), revmap, tr)
983 n = fl.addgroup(getgroup(), revmap, tr)
991 revisions += fl.count() - o
984 revisions += fl.count() - o
992 files += 1
985 files += 1
993
986
994 newheads = len(self.changelog.heads())
987 newheads = len(self.changelog.heads())
995 heads = ""
988 heads = ""
996 if oldheads and newheads > oldheads:
989 if oldheads and newheads > oldheads:
997 heads = " (+%d heads)" % (newheads - oldheads)
990 heads = " (+%d heads)" % (newheads - oldheads)
998
991
999 self.ui.status(("added %d changesets" +
992 self.ui.status(("added %d changesets" +
1000 " with %d changes to %d files%s\n")
993 " with %d changes to %d files%s\n")
1001 % (changesets, revisions, files, heads))
994 % (changesets, revisions, files, heads))
1002
995
1003 tr.close()
996 tr.close()
1004
997
1005 if not self.hook("changegroup"):
998 if not self.hook("changegroup"):
1006 return 1
999 return 1
1007
1000
1008 return
1001 return
1009
1002
1010 def update(self, node, allow=False, force=False, choose=None,
1003 def update(self, node, allow=False, force=False, choose=None,
1011 moddirstate=True):
1004 moddirstate=True):
1012 pl = self.dirstate.parents()
1005 pl = self.dirstate.parents()
1013 if not force and pl[1] != nullid:
1006 if not force and pl[1] != nullid:
1014 self.ui.warn("aborting: outstanding uncommitted merges\n")
1007 self.ui.warn("aborting: outstanding uncommitted merges\n")
1015 return 1
1008 return 1
1016
1009
1017 p1, p2 = pl[0], node
1010 p1, p2 = pl[0], node
1018 pa = self.changelog.ancestor(p1, p2)
1011 pa = self.changelog.ancestor(p1, p2)
1019 m1n = self.changelog.read(p1)[0]
1012 m1n = self.changelog.read(p1)[0]
1020 m2n = self.changelog.read(p2)[0]
1013 m2n = self.changelog.read(p2)[0]
1021 man = self.manifest.ancestor(m1n, m2n)
1014 man = self.manifest.ancestor(m1n, m2n)
1022 m1 = self.manifest.read(m1n)
1015 m1 = self.manifest.read(m1n)
1023 mf1 = self.manifest.readflags(m1n)
1016 mf1 = self.manifest.readflags(m1n)
1024 m2 = self.manifest.read(m2n)
1017 m2 = self.manifest.read(m2n)
1025 mf2 = self.manifest.readflags(m2n)
1018 mf2 = self.manifest.readflags(m2n)
1026 ma = self.manifest.read(man)
1019 ma = self.manifest.read(man)
1027 mfa = self.manifest.readflags(man)
1020 mfa = self.manifest.readflags(man)
1028
1021
1029 (c, a, d, u) = self.changes()
1022 (c, a, d, u) = self.changes()
1030
1023
1031 # is this a jump, or a merge? i.e. is there a linear path
1024 # is this a jump, or a merge? i.e. is there a linear path
1032 # from p1 to p2?
1025 # from p1 to p2?
1033 linear_path = (pa == p1 or pa == p2)
1026 linear_path = (pa == p1 or pa == p2)
1034
1027
1035 # resolve the manifest to determine which files
1028 # resolve the manifest to determine which files
1036 # we care about merging
1029 # we care about merging
1037 self.ui.note("resolving manifests\n")
1030 self.ui.note("resolving manifests\n")
1038 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1031 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1039 (force, allow, moddirstate, linear_path))
1032 (force, allow, moddirstate, linear_path))
1040 self.ui.debug(" ancestor %s local %s remote %s\n" %
1033 self.ui.debug(" ancestor %s local %s remote %s\n" %
1041 (short(man), short(m1n), short(m2n)))
1034 (short(man), short(m1n), short(m2n)))
1042
1035
1043 merge = {}
1036 merge = {}
1044 get = {}
1037 get = {}
1045 remove = []
1038 remove = []
1046
1039
1047 # construct a working dir manifest
1040 # construct a working dir manifest
1048 mw = m1.copy()
1041 mw = m1.copy()
1049 mfw = mf1.copy()
1042 mfw = mf1.copy()
1050 umap = dict.fromkeys(u)
1043 umap = dict.fromkeys(u)
1051
1044
1052 for f in a + c + u:
1045 for f in a + c + u:
1053 mw[f] = ""
1046 mw[f] = ""
1054 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1047 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1055
1048
1056 for f in d:
1049 for f in d:
1057 if f in mw: del mw[f]
1050 if f in mw: del mw[f]
1058
1051
1059 # If we're jumping between revisions (as opposed to merging),
1052 # If we're jumping between revisions (as opposed to merging),
1060 # and if neither the working directory nor the target rev has
1053 # and if neither the working directory nor the target rev has
1061 # the file, then we need to remove it from the dirstate, to
1054 # the file, then we need to remove it from the dirstate, to
1062 # prevent the dirstate from listing the file when it is no
1055 # prevent the dirstate from listing the file when it is no
1063 # longer in the manifest.
1056 # longer in the manifest.
1064 if moddirstate and linear_path and f not in m2:
1057 if moddirstate and linear_path and f not in m2:
1065 self.dirstate.forget((f,))
1058 self.dirstate.forget((f,))
1066
1059
1067 # Compare manifests
1060 # Compare manifests
1068 for f, n in mw.iteritems():
1061 for f, n in mw.iteritems():
1069 if choose and not choose(f): continue
1062 if choose and not choose(f): continue
1070 if f in m2:
1063 if f in m2:
1071 s = 0
1064 s = 0
1072
1065
1073 # is the wfile new since m1, and match m2?
1066 # is the wfile new since m1, and match m2?
1074 if f not in m1:
1067 if f not in m1:
1075 t1 = self.wread(f)
1068 t1 = self.wread(f)
1076 t2 = self.file(f).read(m2[f])
1069 t2 = self.file(f).read(m2[f])
1077 if cmp(t1, t2) == 0:
1070 if cmp(t1, t2) == 0:
1078 n = m2[f]
1071 n = m2[f]
1079 del t1, t2
1072 del t1, t2
1080
1073
1081 # are files different?
1074 # are files different?
1082 if n != m2[f]:
1075 if n != m2[f]:
1083 a = ma.get(f, nullid)
1076 a = ma.get(f, nullid)
1084 # are both different from the ancestor?
1077 # are both different from the ancestor?
1085 if n != a and m2[f] != a:
1078 if n != a and m2[f] != a:
1086 self.ui.debug(" %s versions differ, resolve\n" % f)
1079 self.ui.debug(" %s versions differ, resolve\n" % f)
1087 # merge executable bits
1080 # merge executable bits
1088 # "if we changed or they changed, change in merge"
1081 # "if we changed or they changed, change in merge"
1089 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1082 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1090 mode = ((a^b) | (a^c)) ^ a
1083 mode = ((a^b) | (a^c)) ^ a
1091 merge[f] = (m1.get(f, nullid), m2[f], mode)
1084 merge[f] = (m1.get(f, nullid), m2[f], mode)
1092 s = 1
1085 s = 1
1093 # are we clobbering?
1086 # are we clobbering?
1094 # is remote's version newer?
1087 # is remote's version newer?
1095 # or are we going back in time?
1088 # or are we going back in time?
1096 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1089 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1097 self.ui.debug(" remote %s is newer, get\n" % f)
1090 self.ui.debug(" remote %s is newer, get\n" % f)
1098 get[f] = m2[f]
1091 get[f] = m2[f]
1099 s = 1
1092 s = 1
1100 elif f in umap:
1093 elif f in umap:
1101 # this unknown file is the same as the checkout
1094 # this unknown file is the same as the checkout
1102 get[f] = m2[f]
1095 get[f] = m2[f]
1103
1096
1104 if not s and mfw[f] != mf2[f]:
1097 if not s and mfw[f] != mf2[f]:
1105 if force:
1098 if force:
1106 self.ui.debug(" updating permissions for %s\n" % f)
1099 self.ui.debug(" updating permissions for %s\n" % f)
1107 util.set_exec(self.wjoin(f), mf2[f])
1100 util.set_exec(self.wjoin(f), mf2[f])
1108 else:
1101 else:
1109 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1102 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1110 mode = ((a^b) | (a^c)) ^ a
1103 mode = ((a^b) | (a^c)) ^ a
1111 if mode != b:
1104 if mode != b:
1112 self.ui.debug(" updating permissions for %s\n" % f)
1105 self.ui.debug(" updating permissions for %s\n" % f)
1113 util.set_exec(self.wjoin(f), mode)
1106 util.set_exec(self.wjoin(f), mode)
1114 del m2[f]
1107 del m2[f]
1115 elif f in ma:
1108 elif f in ma:
1116 if n != ma[f]:
1109 if n != ma[f]:
1117 r = "d"
1110 r = "d"
1118 if not force and (linear_path or allow):
1111 if not force and (linear_path or allow):
1119 r = self.ui.prompt(
1112 r = self.ui.prompt(
1120 (" local changed %s which remote deleted\n" % f) +
1113 (" local changed %s which remote deleted\n" % f) +
1121 "(k)eep or (d)elete?", "[kd]", "k")
1114 "(k)eep or (d)elete?", "[kd]", "k")
1122 if r == "d":
1115 if r == "d":
1123 remove.append(f)
1116 remove.append(f)
1124 else:
1117 else:
1125 self.ui.debug("other deleted %s\n" % f)
1118 self.ui.debug("other deleted %s\n" % f)
1126 remove.append(f) # other deleted it
1119 remove.append(f) # other deleted it
1127 else:
1120 else:
1128 if n == m1.get(f, nullid): # same as parent
1121 if n == m1.get(f, nullid): # same as parent
1129 if p2 == pa: # going backwards?
1122 if p2 == pa: # going backwards?
1130 self.ui.debug("remote deleted %s\n" % f)
1123 self.ui.debug("remote deleted %s\n" % f)
1131 remove.append(f)
1124 remove.append(f)
1132 else:
1125 else:
1133 self.ui.debug("local created %s, keeping\n" % f)
1126 self.ui.debug("local created %s, keeping\n" % f)
1134 else:
1127 else:
1135 self.ui.debug("working dir created %s, keeping\n" % f)
1128 self.ui.debug("working dir created %s, keeping\n" % f)
1136
1129
1137 for f, n in m2.iteritems():
1130 for f, n in m2.iteritems():
1138 if choose and not choose(f): continue
1131 if choose and not choose(f): continue
1139 if f[0] == "/": continue
1132 if f[0] == "/": continue
1140 if f in ma and n != ma[f]:
1133 if f in ma and n != ma[f]:
1141 r = "k"
1134 r = "k"
1142 if not force and (linear_path or allow):
1135 if not force and (linear_path or allow):
1143 r = self.ui.prompt(
1136 r = self.ui.prompt(
1144 ("remote changed %s which local deleted\n" % f) +
1137 ("remote changed %s which local deleted\n" % f) +
1145 "(k)eep or (d)elete?", "[kd]", "k")
1138 "(k)eep or (d)elete?", "[kd]", "k")
1146 if r == "k": get[f] = n
1139 if r == "k": get[f] = n
1147 elif f not in ma:
1140 elif f not in ma:
1148 self.ui.debug("remote created %s\n" % f)
1141 self.ui.debug("remote created %s\n" % f)
1149 get[f] = n
1142 get[f] = n
1150 else:
1143 else:
1151 if force or p2 == pa: # going backwards?
1144 if force or p2 == pa: # going backwards?
1152 self.ui.debug("local deleted %s, recreating\n" % f)
1145 self.ui.debug("local deleted %s, recreating\n" % f)
1153 get[f] = n
1146 get[f] = n
1154 else:
1147 else:
1155 self.ui.debug("local deleted %s\n" % f)
1148 self.ui.debug("local deleted %s\n" % f)
1156
1149
1157 del mw, m1, m2, ma
1150 del mw, m1, m2, ma
1158
1151
1159 if force:
1152 if force:
1160 for f in merge:
1153 for f in merge:
1161 get[f] = merge[f][1]
1154 get[f] = merge[f][1]
1162 merge = {}
1155 merge = {}
1163
1156
1164 if linear_path or force:
1157 if linear_path or force:
1165 # we don't need to do any magic, just jump to the new rev
1158 # we don't need to do any magic, just jump to the new rev
1166 branch_merge = False
1159 branch_merge = False
1167 p1, p2 = p2, nullid
1160 p1, p2 = p2, nullid
1168 else:
1161 else:
1169 if not allow:
1162 if not allow:
1170 self.ui.status("this update spans a branch" +
1163 self.ui.status("this update spans a branch" +
1171 " affecting the following files:\n")
1164 " affecting the following files:\n")
1172 fl = merge.keys() + get.keys()
1165 fl = merge.keys() + get.keys()
1173 fl.sort()
1166 fl.sort()
1174 for f in fl:
1167 for f in fl:
1175 cf = ""
1168 cf = ""
1176 if f in merge: cf = " (resolve)"
1169 if f in merge: cf = " (resolve)"
1177 self.ui.status(" %s%s\n" % (f, cf))
1170 self.ui.status(" %s%s\n" % (f, cf))
1178 self.ui.warn("aborting update spanning branches!\n")
1171 self.ui.warn("aborting update spanning branches!\n")
1179 self.ui.status("(use update -m to merge across branches" +
1172 self.ui.status("(use update -m to merge across branches" +
1180 " or -C to lose changes)\n")
1173 " or -C to lose changes)\n")
1181 return 1
1174 return 1
1182 branch_merge = True
1175 branch_merge = True
1183
1176
1184 if moddirstate:
1177 if moddirstate:
1185 self.dirstate.setparents(p1, p2)
1178 self.dirstate.setparents(p1, p2)
1186
1179
1187 # get the files we don't need to change
1180 # get the files we don't need to change
1188 files = get.keys()
1181 files = get.keys()
1189 files.sort()
1182 files.sort()
1190 for f in files:
1183 for f in files:
1191 if f[0] == "/": continue
1184 if f[0] == "/": continue
1192 self.ui.note("getting %s\n" % f)
1185 self.ui.note("getting %s\n" % f)
1193 t = self.file(f).read(get[f])
1186 t = self.file(f).read(get[f])
1194 try:
1187 try:
1195 self.wwrite(f, t)
1188 self.wwrite(f, t)
1196 except IOError:
1189 except IOError:
1197 os.makedirs(os.path.dirname(self.wjoin(f)))
1190 os.makedirs(os.path.dirname(self.wjoin(f)))
1198 self.wwrite(f, t)
1191 self.wwrite(f, t)
1199 util.set_exec(self.wjoin(f), mf2[f])
1192 util.set_exec(self.wjoin(f), mf2[f])
1200 if moddirstate:
1193 if moddirstate:
1201 if branch_merge:
1194 if branch_merge:
1202 self.dirstate.update([f], 'n', st_mtime=-1)
1195 self.dirstate.update([f], 'n', st_mtime=-1)
1203 else:
1196 else:
1204 self.dirstate.update([f], 'n')
1197 self.dirstate.update([f], 'n')
1205
1198
1206 # merge the tricky bits
1199 # merge the tricky bits
1207 files = merge.keys()
1200 files = merge.keys()
1208 files.sort()
1201 files.sort()
1209 for f in files:
1202 for f in files:
1210 self.ui.status("merging %s\n" % f)
1203 self.ui.status("merging %s\n" % f)
1211 my, other, flag = merge[f]
1204 my, other, flag = merge[f]
1212 self.merge3(f, my, other)
1205 self.merge3(f, my, other)
1213 util.set_exec(self.wjoin(f), flag)
1206 util.set_exec(self.wjoin(f), flag)
1214 if moddirstate:
1207 if moddirstate:
1215 if branch_merge:
1208 if branch_merge:
1216 # We've done a branch merge, mark this file as merged
1209 # We've done a branch merge, mark this file as merged
1217 # so that we properly record the merge later
1210 # so that we properly record the merge later
1218 self.dirstate.update([f], 'm')
1211 self.dirstate.update([f], 'm')
1219 else:
1212 else:
1220 # We've update-merged a locally modified file, so
1213 # We've update-merged a locally modified file, so
1221 # we set the dirstate to emulate a normal checkout
1214 # we set the dirstate to emulate a normal checkout
1222 # of that file some time in the past. Thus our
1215 # of that file some time in the past. Thus our
1223 # merge will appear as a normal local file
1216 # merge will appear as a normal local file
1224 # modification.
1217 # modification.
1225 f_len = len(self.file(f).read(other))
1218 f_len = len(self.file(f).read(other))
1226 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1219 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1227
1220
1228 remove.sort()
1221 remove.sort()
1229 for f in remove:
1222 for f in remove:
1230 self.ui.note("removing %s\n" % f)
1223 self.ui.note("removing %s\n" % f)
1231 try:
1224 try:
1232 os.unlink(self.wjoin(f))
1225 os.unlink(self.wjoin(f))
1233 except OSError, inst:
1226 except OSError, inst:
1234 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1227 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1235 # try removing directories that might now be empty
1228 # try removing directories that might now be empty
1236 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1229 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1237 except: pass
1230 except: pass
1238 if moddirstate:
1231 if moddirstate:
1239 if branch_merge:
1232 if branch_merge:
1240 self.dirstate.update(remove, 'r')
1233 self.dirstate.update(remove, 'r')
1241 else:
1234 else:
1242 self.dirstate.forget(remove)
1235 self.dirstate.forget(remove)
1243
1236
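The permission handling in update() merges the executable bit with mode = ((a^b) | (a^c)) ^ a, where a is the ancestor's bit, b the working copy's and c the remote's. A quick way to convince yourself this implements "if we changed or they changed, change in merge" is to table all eight combinations; a small self-contained check:

def mergebit(a, b, c):
    # a = ancestor, b = local/working, c = remote; each 0 or 1
    return ((a ^ b) | (a ^ c)) ^ a

for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            got = mergebit(a, b, c)
            # expected: keep a unless one side changed it; take the changed
            # value (when both sides changed, they agree on "not a")
            expected = b if b != a else c
            assert got == expected, (a, b, c, got)
print("exec-bit merge rule behaves as the comment claims")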
1244 def merge3(self, fn, my, other):
1237 def merge3(self, fn, my, other):
1245 """perform a 3-way merge in the working directory"""
1238 """perform a 3-way merge in the working directory"""
1246
1239
1247 def temp(prefix, node):
1240 def temp(prefix, node):
1248 pre = "%s~%s." % (os.path.basename(fn), prefix)
1241 pre = "%s~%s." % (os.path.basename(fn), prefix)
1249 (fd, name) = tempfile.mkstemp("", pre)
1242 (fd, name) = tempfile.mkstemp("", pre)
1250 f = os.fdopen(fd, "wb")
1243 f = os.fdopen(fd, "wb")
1251 self.wwrite(fn, fl.read(node), f)
1244 self.wwrite(fn, fl.read(node), f)
1252 f.close()
1245 f.close()
1253 return name
1246 return name
1254
1247
1255 fl = self.file(fn)
1248 fl = self.file(fn)
1256 base = fl.ancestor(my, other)
1249 base = fl.ancestor(my, other)
1257 a = self.wjoin(fn)
1250 a = self.wjoin(fn)
1258 b = temp("base", base)
1251 b = temp("base", base)
1259 c = temp("other", other)
1252 c = temp("other", other)
1260
1253
1261 self.ui.note("resolving %s\n" % fn)
1254 self.ui.note("resolving %s\n" % fn)
1262 self.ui.debug("file %s: other %s ancestor %s\n" %
1255 self.ui.debug("file %s: other %s ancestor %s\n" %
1263 (fn, short(other), short(base)))
1256 (fn, short(other), short(base)))
1264
1257
1265 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1258 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1266 or "hgmerge")
1259 or "hgmerge")
1267 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1260 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1268 if r:
1261 if r:
1269 self.ui.warn("merging %s failed!\n" % fn)
1262 self.ui.warn("merging %s failed!\n" % fn)
1270
1263
1271 os.unlink(b)
1264 os.unlink(b)
1272 os.unlink(c)
1265 os.unlink(c)
1273
1266
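merge3() picks the external merge tool with a simple fallback chain: the HGMERGE environment variable wins, then the ui "merge" configuration value, then the literal command hgmerge, and the chosen command is invoked with the local file, the base temporary and the other temporary as arguments. Because the expression relies on `or`, an empty value at any stage falls through to the next; a minimal sketch of the same idiom (the config lookup is stubbed out here, it is not the real ui object):

import os

def pick_merge_tool(config_value):
    # mirrors: os.environ.get("HGMERGE") or ui.config("ui", "merge") or "hgmerge"
    return os.environ.get("HGMERGE") or config_value or "hgmerge"

print(pick_merge_tool(None))        # -> "hgmerge" unless HGMERGE is set
print(pick_merge_tool("kdiff3"))    # -> "kdiff3" unless HGMERGE is set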
1274 def verify(self):
1267 def verify(self):
1275 filelinkrevs = {}
1268 filelinkrevs = {}
1276 filenodes = {}
1269 filenodes = {}
1277 changesets = revisions = files = 0
1270 changesets = revisions = files = 0
1278 errors = 0
1271 errors = 0
1279
1272
1280 seen = {}
1273 seen = {}
1281 self.ui.status("checking changesets\n")
1274 self.ui.status("checking changesets\n")
1282 for i in range(self.changelog.count()):
1275 for i in range(self.changelog.count()):
1283 changesets += 1
1276 changesets += 1
1284 n = self.changelog.node(i)
1277 n = self.changelog.node(i)
1285 if n in seen:
1278 if n in seen:
1286 self.ui.warn("duplicate changeset at revision %d\n" % i)
1279 self.ui.warn("duplicate changeset at revision %d\n" % i)
1287 errors += 1
1280 errors += 1
1288 seen[n] = 1
1281 seen[n] = 1
1289
1282
1290 for p in self.changelog.parents(n):
1283 for p in self.changelog.parents(n):
1291 if p not in self.changelog.nodemap:
1284 if p not in self.changelog.nodemap:
1292 self.ui.warn("changeset %s has unknown parent %s\n" %
1285 self.ui.warn("changeset %s has unknown parent %s\n" %
1293 (short(n), short(p)))
1286 (short(n), short(p)))
1294 errors += 1
1287 errors += 1
1295 try:
1288 try:
1296 changes = self.changelog.read(n)
1289 changes = self.changelog.read(n)
1297 except Exception, inst:
1290 except Exception, inst:
1298 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1291 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1299 errors += 1
1292 errors += 1
1300
1293
1301 for f in changes[3]:
1294 for f in changes[3]:
1302 filelinkrevs.setdefault(f, []).append(i)
1295 filelinkrevs.setdefault(f, []).append(i)
1303
1296
1304 seen = {}
1297 seen = {}
1305 self.ui.status("checking manifests\n")
1298 self.ui.status("checking manifests\n")
1306 for i in range(self.manifest.count()):
1299 for i in range(self.manifest.count()):
1307 n = self.manifest.node(i)
1300 n = self.manifest.node(i)
1308 if n in seen:
1301 if n in seen:
1309 self.ui.warn("duplicate manifest at revision %d\n" % i)
1302 self.ui.warn("duplicate manifest at revision %d\n" % i)
1310 errors += 1
1303 errors += 1
1311 seen[n] = 1
1304 seen[n] = 1
1312
1305
1313 for p in self.manifest.parents(n):
1306 for p in self.manifest.parents(n):
1314 if p not in self.manifest.nodemap:
1307 if p not in self.manifest.nodemap:
1315 self.ui.warn("manifest %s has unknown parent %s\n" %
1308 self.ui.warn("manifest %s has unknown parent %s\n" %
1316 (short(n), short(p)))
1309 (short(n), short(p)))
1317 errors += 1
1310 errors += 1
1318
1311
1319 try:
1312 try:
1320 delta = mdiff.patchtext(self.manifest.delta(n))
1313 delta = mdiff.patchtext(self.manifest.delta(n))
1321 except KeyboardInterrupt:
1314 except KeyboardInterrupt:
1322 self.ui.warn("interrupted")
1315 self.ui.warn("interrupted")
1323 raise
1316 raise
1324 except Exception, inst:
1317 except Exception, inst:
1325 self.ui.warn("unpacking manifest %s: %s\n"
1318 self.ui.warn("unpacking manifest %s: %s\n"
1326 % (short(n), inst))
1319 % (short(n), inst))
1327 errors += 1
1320 errors += 1
1328
1321
1329 ff = [ l.split('\0') for l in delta.splitlines() ]
1322 ff = [ l.split('\0') for l in delta.splitlines() ]
1330 for f, fn in ff:
1323 for f, fn in ff:
1331 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1324 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1332
1325
1333 self.ui.status("crosschecking files in changesets and manifests\n")
1326 self.ui.status("crosschecking files in changesets and manifests\n")
1334 for f in filenodes:
1327 for f in filenodes:
1335 if f not in filelinkrevs:
1328 if f not in filelinkrevs:
1336 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1329 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1337 errors += 1
1330 errors += 1
1338
1331
1339 for f in filelinkrevs:
1332 for f in filelinkrevs:
1340 if f not in filenodes:
1333 if f not in filenodes:
1341 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1334 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1342 errors += 1
1335 errors += 1
1343
1336
1344 self.ui.status("checking files\n")
1337 self.ui.status("checking files\n")
1345 ff = filenodes.keys()
1338 ff = filenodes.keys()
1346 ff.sort()
1339 ff.sort()
1347 for f in ff:
1340 for f in ff:
1348 if f == "/dev/null": continue
1341 if f == "/dev/null": continue
1349 files += 1
1342 files += 1
1350 fl = self.file(f)
1343 fl = self.file(f)
1351 nodes = { nullid: 1 }
1344 nodes = { nullid: 1 }
1352 seen = {}
1345 seen = {}
1353 for i in range(fl.count()):
1346 for i in range(fl.count()):
1354 revisions += 1
1347 revisions += 1
1355 n = fl.node(i)
1348 n = fl.node(i)
1356
1349
1357 if n in seen:
1350 if n in seen:
1358 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1351 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1359 errors += 1
1352 errors += 1
1360
1353
1361 if n not in filenodes[f]:
1354 if n not in filenodes[f]:
1362 self.ui.warn("%s: %d:%s not in manifests\n"
1355 self.ui.warn("%s: %d:%s not in manifests\n"
1363 % (f, i, short(n)))
1356 % (f, i, short(n)))
1364 errors += 1
1357 errors += 1
1365 else:
1358 else:
1366 del filenodes[f][n]
1359 del filenodes[f][n]
1367
1360
1368 flr = fl.linkrev(n)
1361 flr = fl.linkrev(n)
1369 if flr not in filelinkrevs[f]:
1362 if flr not in filelinkrevs[f]:
1370 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1363 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1371 % (f, short(n), fl.linkrev(n)))
1364 % (f, short(n), fl.linkrev(n)))
1372 errors += 1
1365 errors += 1
1373 else:
1366 else:
1374 filelinkrevs[f].remove(flr)
1367 filelinkrevs[f].remove(flr)
1375
1368
1376 # verify contents
1369 # verify contents
1377 try:
1370 try:
1378 t = fl.read(n)
1371 t = fl.read(n)
1379 except Exception, inst:
1372 except Exception, inst:
1380 self.ui.warn("unpacking file %s %s: %s\n"
1373 self.ui.warn("unpacking file %s %s: %s\n"
1381 % (f, short(n), inst))
1374 % (f, short(n), inst))
1382 errors += 1
1375 errors += 1
1383
1376
1384 # verify parents
1377 # verify parents
1385 (p1, p2) = fl.parents(n)
1378 (p1, p2) = fl.parents(n)
1386 if p1 not in nodes:
1379 if p1 not in nodes:
1387 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1380 self.ui.warn("file %s:%s unknown parent 1 %s\n" %
1388 (f, short(n), short(p1)))
1381 (f, short(n), short(p1)))
1389 errors += 1
1382 errors += 1
1390 if p2 not in nodes:
1383 if p2 not in nodes:
1391 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1384 self.ui.warn("file %s:%s unknown parent 2 %s\n" %
1392 (f, short(n), short(p2)))
1385 (f, short(n), short(p2)))
1393 errors += 1
1386 errors += 1
1394 nodes[n] = 1
1387 nodes[n] = 1
1395
1388
1396 # cross-check
1389 # cross-check
1397 for node in filenodes[f]:
1390 for node in filenodes[f]:
1398 self.ui.warn("node %s in manifests not in %s\n"
1391 self.ui.warn("node %s in manifests not in %s\n"
1399 % (hex(node), f))
1392 % (hex(node), f))
1400 errors += 1
1393 errors += 1
1401
1394
1402 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1395 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1403 (files, changesets, revisions))
1396 (files, changesets, revisions))
1404
1397
1405 if errors:
1398 if errors:
1406 self.ui.warn("%d integrity errors encountered!\n" % errors)
1399 self.ui.warn("%d integrity errors encountered!\n" % errors)
1407 return 1
1400 return 1
@@ -1,362 +1,358
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
13 import os, errno
14 from demandload import *
14 from demandload import *
15 demandload(globals(), "re")
15 demandload(globals(), "re")
16
16
17 def binary(s):
17 def binary(s):
18 """return true if a string is binary data using diff's heuristic"""
18 """return true if a string is binary data using diff's heuristic"""
19 if s and '\0' in s[:4096]:
19 if s and '\0' in s[:4096]:
20 return True
20 return True
21 return False
21 return False
22
22
23 def unique(g):
23 def unique(g):
24 """return the uniq elements of iterable g"""
24 """return the uniq elements of iterable g"""
25 seen = {}
25 seen = {}
26 for f in g:
26 for f in g:
27 if f not in seen:
27 if f not in seen:
28 seen[f] = 1
28 seen[f] = 1
29 yield f
29 yield f
30
30
31 class Abort(Exception):
31 class Abort(Exception):
32 """Raised if a command needs to print an error and exit."""
32 """Raised if a command needs to print an error and exit."""
33
33
34 def always(fn): return True
34 def always(fn): return True
35 def never(fn): return False
35 def never(fn): return False
36
36
37 def globre(pat, head='^', tail='$'):
37 def globre(pat, head='^', tail='$'):
38 "convert a glob pattern into a regexp"
38 "convert a glob pattern into a regexp"
39 i, n = 0, len(pat)
39 i, n = 0, len(pat)
40 res = ''
40 res = ''
41 group = False
41 group = False
42 def peek(): return i < n and pat[i]
42 def peek(): return i < n and pat[i]
43 while i < n:
43 while i < n:
44 c = pat[i]
44 c = pat[i]
45 i = i+1
45 i = i+1
46 if c == '*':
46 if c == '*':
47 if peek() == '*':
47 if peek() == '*':
48 i += 1
48 i += 1
49 res += '.*'
49 res += '.*'
50 else:
50 else:
51 res += '[^/]*'
51 res += '[^/]*'
52 elif c == '?':
52 elif c == '?':
53 res += '.'
53 res += '.'
54 elif c == '[':
54 elif c == '[':
55 j = i
55 j = i
56 if j < n and pat[j] in '!]':
56 if j < n and pat[j] in '!]':
57 j += 1
57 j += 1
58 while j < n and pat[j] != ']':
58 while j < n and pat[j] != ']':
59 j += 1
59 j += 1
60 if j >= n:
60 if j >= n:
61 res += '\\['
61 res += '\\['
62 else:
62 else:
63 stuff = pat[i:j].replace('\\','\\\\')
63 stuff = pat[i:j].replace('\\','\\\\')
64 i = j + 1
64 i = j + 1
65 if stuff[0] == '!':
65 if stuff[0] == '!':
66 stuff = '^' + stuff[1:]
66 stuff = '^' + stuff[1:]
67 elif stuff[0] == '^':
67 elif stuff[0] == '^':
68 stuff = '\\' + stuff
68 stuff = '\\' + stuff
69 res = '%s[%s]' % (res, stuff)
69 res = '%s[%s]' % (res, stuff)
70 elif c == '{':
70 elif c == '{':
71 group = True
71 group = True
72 res += '(?:'
72 res += '(?:'
73 elif c == '}' and group:
73 elif c == '}' and group:
74 res += ')'
74 res += ')'
75 group = False
75 group = False
76 elif c == ',' and group:
76 elif c == ',' and group:
77 res += '|'
77 res += '|'
78 else:
78 else:
79 res += re.escape(c)
79 res += re.escape(c)
80 return head + res + tail
80 return head + res + tail
81
81
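globre() turns a shell-style glob into an anchored regular expression: '*' stays within one path component, '**' crosses slashes, and '{a,b}' becomes an alternation. A small check of that behaviour, assuming this util.py is importable as "util":

import re, util   # assumes this util.py is importable as "util"

single = re.compile(util.globre('*.py')).match
double = re.compile(util.globre('**.py')).match
group  = re.compile(util.globre('*.{c,h}')).match

assert single('setup.py') and not single('mercurial/util.py')   # '*' stops at '/'
assert double('mercurial/util.py')                               # '**' crosses '/'
assert group('hg.c') and group('hg.h') and not group('hg.o')     # '{c,h}' alternates
print("globre behaves as expected")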
82 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
82 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
83
83
84 def pathto(n1, n2):
84 def pathto(n1, n2):
85 '''return the relative path from one place to another.
85 '''return the relative path from one place to another.
86 this returns a path in the form used by the local filesystem, not hg.'''
86 this returns a path in the form used by the local filesystem, not hg.'''
87 if not n1: return localpath(n2)
87 if not n1: return localpath(n2)
88 a, b = n1.split('/'), n2.split('/')
88 a, b = n1.split('/'), n2.split('/')
89 a.reverse(), b.reverse()
89 a.reverse(), b.reverse()
90 while a and b and a[-1] == b[-1]:
90 while a and b and a[-1] == b[-1]:
91 a.pop(), b.pop()
91 a.pop(), b.pop()
92 b.reverse()
92 b.reverse()
93 return os.sep.join((['..'] * len(a)) + b)
93 return os.sep.join((['..'] * len(a)) + b)
94
94
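pathto() computes a filesystem-style relative path by dropping the common prefix of the two names and climbing out of what remains of the source. A short illustration, again assuming util is importable and os.sep is '/':

import util   # assumes this util.py is importable as "util"

# common prefix 'a' is dropped; one '..' climbs out of 'b', then down into 'c/d'
print(util.pathto('a/b', 'a/c/d'))   # -> '../c/d' with os.sep == '/'
print(util.pathto('', 'a/b'))        # -> 'a/b' (empty source: just localpath)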
95 def canonpath(root, cwd, myname):
95 def canonpath(root, cwd, myname):
96 """return the canonical path of myname, given cwd and root"""
96 """return the canonical path of myname, given cwd and root"""
97 rootsep = root + os.sep
97 rootsep = root + os.sep
98 name = myname
98 name = myname
99 if not name.startswith(os.sep):
99 if not name.startswith(os.sep):
100 name = os.path.join(root, cwd, name)
100 name = os.path.join(root, cwd, name)
101 name = os.path.normpath(name)
101 name = os.path.normpath(name)
102 if name.startswith(rootsep):
102 if name.startswith(rootsep):
103 return pconvert(name[len(rootsep):])
103 return pconvert(name[len(rootsep):])
104 elif name == root:
104 elif name == root:
105 return ''
105 return ''
106 else:
106 else:
107 raise Abort('%s not under root' % myname)
107 raise Abort('%s not under root' % myname)
108
108
109 def matcher(canonroot, cwd, names, inc, exc, head=''):
109 def matcher(canonroot, cwd, names, inc, exc, head=''):
110 """build a function to match a set of file patterns
110 """build a function to match a set of file patterns
111
111
112 arguments:
112 arguments:
113 canonroot - the canonical root of the tree you're matching against
113 canonroot - the canonical root of the tree you're matching against
114 cwd - the current working directory, if relevant
114 cwd - the current working directory, if relevant
115 names - patterns to find
115 names - patterns to find
116 inc - patterns to include
116 inc - patterns to include
117 exc - patterns to exclude
117 exc - patterns to exclude
118 head - a regex to prepend to patterns to control whether a match is rooted
118 head - a regex to prepend to patterns to control whether a match is rooted
119
119
120 a pattern is one of:
120 a pattern is one of:
121 're:<regex>'
121 're:<regex>'
122 'glob:<shellglob>'
122 'glob:<shellglob>'
123 'path:<explicit path>'
123 'path:<explicit path>'
124 'relpath:<relative path>'
124 'relpath:<relative path>'
125 '<relative path>'
125 '<relative path>'
126
126
127 returns:
127 returns:
128 a 3-tuple containing
128 a 3-tuple containing
129 - list of explicit non-pattern names passed in
129 - list of explicit non-pattern names passed in
130 - a bool match(filename) function
130 - a bool match(filename) function
131 - a bool indicating if any patterns were passed in
131 - a bool indicating if any patterns were passed in
132
132
133 todo:
133 todo:
134 make head regex a rooted bool
134 make head regex a rooted bool
135 """
135 """
136
136
137 def patkind(name):
137 def patkind(name):
138 for prefix in 're:', 'glob:', 'path:', 'relpath:':
138 for prefix in 're:', 'glob:', 'path:', 'relpath:':
139 if name.startswith(prefix): return name.split(':', 1)
139 if name.startswith(prefix): return name.split(':', 1)
140 for c in name:
140 for c in name:
141 if c in _globchars: return 'glob', name
141 if c in _globchars: return 'glob', name
142 return 'relpath', name
142 return 'relpath', name
143
143
144 def regex(kind, name, tail):
144 def regex(kind, name, tail):
145 '''convert a pattern into a regular expression'''
145 '''convert a pattern into a regular expression'''
146 if kind == 're':
146 if kind == 're':
147 return name
147 return name
148 elif kind == 'path':
148 elif kind == 'path':
149 return '^' + re.escape(name) + '(?:/|$)'
149 return '^' + re.escape(name) + '(?:/|$)'
150 elif kind == 'relpath':
150 elif kind == 'relpath':
151 return head + re.escape(name) + tail
151 return head + re.escape(name) + tail
152 return head + globre(name, '', tail)
152 return head + globre(name, '', tail)
153
153
154 def matchfn(pats, tail):
154 def matchfn(pats, tail):
155 """build a matching function from a set of patterns"""
155 """build a matching function from a set of patterns"""
156 if pats:
156 if pats:
157 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
157 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
158 return re.compile(pat).match
158 return re.compile(pat).match
159
159
160 def globprefix(pat):
160 def globprefix(pat):
161 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
161 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
162 root = []
162 root = []
163 for p in pat.split(os.sep):
163 for p in pat.split(os.sep):
164 if patkind(p)[0] == 'glob': break
164 if patkind(p)[0] == 'glob': break
165 root.append(p)
165 root.append(p)
166 return '/'.join(root)
166 return '/'.join(root)
167
167
168 pats = []
168 pats = []
169 files = []
169 files = []
170 roots = []
170 roots = []
171 for kind, name in map(patkind, names):
171 for kind, name in map(patkind, names):
172 if kind in ('glob', 'relpath'):
172 if kind in ('glob', 'relpath'):
173 name = canonpath(canonroot, cwd, name)
173 name = canonpath(canonroot, cwd, name)
174 if name == '':
174 if name == '':
175 kind, name = 'glob', '**'
175 kind, name = 'glob', '**'
176 if kind in ('glob', 'path', 're'):
176 if kind in ('glob', 'path', 're'):
177 pats.append((kind, name))
177 pats.append((kind, name))
178 if kind == 'glob':
178 if kind == 'glob':
179 root = globprefix(name)
179 root = globprefix(name)
180 if root: roots.append(root)
180 if root: roots.append(root)
181 elif kind == 'relpath':
181 elif kind == 'relpath':
182 files.append((kind, name))
182 files.append((kind, name))
183 roots.append(name)
183 roots.append(name)
184
184
185 patmatch = matchfn(pats, '$') or always
185 patmatch = matchfn(pats, '$') or always
186 filematch = matchfn(files, '(?:/|$)') or always
186 filematch = matchfn(files, '(?:/|$)') or always
187 incmatch = always
187 incmatch = always
188 if inc:
188 if inc:
189 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
189 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
190 excmatch = lambda fn: False
190 excmatch = lambda fn: False
191 if exc:
191 if exc:
192 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
192 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
193
193
194 return (roots,
194 return (roots,
195 lambda fn: (incmatch(fn) and not excmatch(fn) and
195 lambda fn: (incmatch(fn) and not excmatch(fn) and
196 (fn.endswith('/') or
196 (fn.endswith('/') or
197 (not pats and not files) or
197 (not pats and not files) or
198 (pats and patmatch(fn)) or
198 (pats and patmatch(fn)) or
199 (files and filematch(fn)))),
199 (files and filematch(fn)))),
200 (inc or exc or (pats and pats != [('glob', '**')])) and True)
200 (inc or exc or (pats and pats != [('glob', '**')])) and True)
201
201
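matcher() bundles the pattern machinery above into the triple its docstring describes. A minimal illustration of calling it (assuming util is importable and a POSIX path layout; the repository root here is a placeholder):

import util   # assumes this util.py is importable as "util"

roots, match, anypats = util.matcher('/repo', '', ['glob:*.py'], [], [])
# roots   - directories worth walking (empty here: the glob has no fixed prefix)
# match   - predicate over repo-relative file names
# anypats - true because a real pattern, not just a file name, was given

print(bool(match('setup.py')))            # True: matches a top-level .py file
print(bool(match('mercurial/util.py')))   # False: '*' does not cross '/'
print(anypats)                            # True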
202 def system(cmd, errprefix=None):
202 def system(cmd, errprefix=None):
203 """execute a shell command that must succeed"""
203 """execute a shell command that must succeed"""
204 rc = os.system(cmd)
204 rc = os.system(cmd)
205 if rc:
205 if rc:
206 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
206 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
207 explain_exit(rc)[0])
207 explain_exit(rc)[0])
208 if errprefix:
208 if errprefix:
209 errmsg = "%s: %s" % (errprefix, errmsg)
209 errmsg = "%s: %s" % (errprefix, errmsg)
210 raise Abort(errmsg)
210 raise Abort(errmsg)
211
211
212 def rename(src, dst):
212 def rename(src, dst):
213 """forcibly rename a file"""
213 """forcibly rename a file"""
214 try:
214 try:
215 os.rename(src, dst)
215 os.rename(src, dst)
216 except:
216 except:
217 os.unlink(dst)
217 os.unlink(dst)
218 os.rename(src, dst)
218 os.rename(src, dst)
219
219
220 def copytree(src, dst, copyfile):
220 def copytree(src, dst, copyfile):
221 """Copy a directory tree, files are copied using 'copyfile'."""
221 """Copy a directory tree, files are copied using 'copyfile'."""
222 names = os.listdir(src)
222 names = os.listdir(src)
223 os.mkdir(dst)
223 os.mkdir(dst)
224
224
225 for name in names:
225 for name in names:
226 srcname = os.path.join(src, name)
226 srcname = os.path.join(src, name)
227 dstname = os.path.join(dst, name)
227 dstname = os.path.join(dst, name)
228 if os.path.isdir(srcname):
228 if os.path.isdir(srcname):
229 copytree(srcname, dstname, copyfile)
229 copytree(srcname, dstname, copyfile)
230 elif os.path.isfile(srcname):
230 elif os.path.isfile(srcname):
231 copyfile(srcname, dstname)
231 copyfile(srcname, dstname)
232 else:
232 else:
233 pass
233 pass
234
234
235 def opener(base):
235 def opener(base):
236 """
236 """
237 return a function that opens files relative to base
237 return a function that opens files relative to base
238
238
239 this function is used to hide the details of COW semantics and
239 this function is used to hide the details of COW semantics and
240 remote file access from higher level code.
240 remote file access from higher level code.
241
241
242 todo: separate remote file access into a separate function
242 todo: separate remote file access into a separate function
243 """
243 """
244 p = base
244 p = base
245 def o(path, mode="r"):
245 def o(path, mode="r"):
246 if p.startswith("http://"):
247 f = os.path.join(p, urllib.quote(path))
248 return httprangereader.httprangereader(f)
249
250 f = os.path.join(p, path)
246 f = os.path.join(p, path)
251
247
252 mode += "b" # for that other OS
248 mode += "b" # for that other OS
253
249
254 if mode[0] != "r":
250 if mode[0] != "r":
255 try:
251 try:
256 s = os.stat(f)
252 s = os.stat(f)
257 except OSError:
253 except OSError:
258 d = os.path.dirname(f)
254 d = os.path.dirname(f)
259 if not os.path.isdir(d):
255 if not os.path.isdir(d):
260 os.makedirs(d)
256 os.makedirs(d)
261 else:
257 else:
262 if s.st_nlink > 1:
258 if s.st_nlink > 1:
263 file(f + ".tmp", "wb").write(file(f, "rb").read())
259 file(f + ".tmp", "wb").write(file(f, "rb").read())
264 rename(f+".tmp", f)
260 rename(f+".tmp", f)
265
261
266 return file(f, mode)
262 return file(f, mode)
267
263
268 return o
264 return o
269
265
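Besides creating missing directories on demand, the opener breaks hard links before a write: if the target has more than one link (st_nlink > 1), it is copied to a temporary file and renamed back, so writing through one repository does not modify another that shares its store via hard links. The same copy-on-write step in isolation (a self-contained Python 2 sketch for POSIX, not the opener itself):

import os, tempfile

def break_hardlink(path):
    """If path is hard-linked elsewhere, replace it with a private copy."""
    if os.stat(path).st_nlink > 1:
        data = file(path, "rb").read()
        file(path + ".tmp", "wb").write(data)
        os.rename(path + ".tmp", path)    # rename is atomic on POSIX

# demo in a scratch directory: two names for one inode, then they diverge
d = tempfile.mkdtemp()
a, b = os.path.join(d, "a"), os.path.join(d, "b")
file(a, "wb").write("shared\n")
os.link(a, b)
break_hardlink(a)
file(a, "ab").write("only in a\n")
print(os.stat(b).st_nlink)   # 1: "b" no longer shares "a"'s inode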
270 def _makelock_file(info, pathname):
266 def _makelock_file(info, pathname):
271 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
267 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
272 os.write(ld, info)
268 os.write(ld, info)
273 os.close(ld)
269 os.close(ld)
274
270
275 def _readlock_file(pathname):
271 def _readlock_file(pathname):
276 return file(pathname).read()
272 return file(pathname).read()
277
273
278 # Platform specific variants
274 # Platform specific variants
279 if os.name == 'nt':
275 if os.name == 'nt':
280 nulldev = 'NUL:'
276 nulldev = 'NUL:'
281
277
282 def is_exec(f, last):
278 def is_exec(f, last):
283 return last
279 return last
284
280
285 def set_exec(f, mode):
281 def set_exec(f, mode):
286 pass
282 pass
287
283
288 def pconvert(path):
284 def pconvert(path):
289 return path.replace("\\", "/")
285 return path.replace("\\", "/")
290
286
291 def localpath(path):
287 def localpath(path):
292 return path.replace('/', '\\')
288 return path.replace('/', '\\')
293
289
294 def normpath(path):
290 def normpath(path):
295 return pconvert(os.path.normpath(path))
291 return pconvert(os.path.normpath(path))
296
292
297 makelock = _makelock_file
293 makelock = _makelock_file
298 readlock = _readlock_file
294 readlock = _readlock_file
299
295
300 def explain_exit(code):
296 def explain_exit(code):
301 return "exited with status %d" % code, code
297 return "exited with status %d" % code, code
302
298
303 else:
299 else:
304 nulldev = '/dev/null'
300 nulldev = '/dev/null'
305
301
306 def is_exec(f, last):
302 def is_exec(f, last):
307 """check whether a file is executable"""
303 """check whether a file is executable"""
308 return (os.stat(f).st_mode & 0100 != 0)
304 return (os.stat(f).st_mode & 0100 != 0)
309
305
310 def set_exec(f, mode):
306 def set_exec(f, mode):
311 s = os.stat(f).st_mode
307 s = os.stat(f).st_mode
312 if (s & 0100 != 0) == mode:
308 if (s & 0100 != 0) == mode:
313 return
309 return
314 if mode:
310 if mode:
315 # Turn on +x for every +r bit when making a file executable
311 # Turn on +x for every +r bit when making a file executable
316 # and obey umask.
312 # and obey umask.
317 umask = os.umask(0)
313 umask = os.umask(0)
318 os.umask(umask)
314 os.umask(umask)
319 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
315 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
320 else:
316 else:
321 os.chmod(f, s & 0666)
317 os.chmod(f, s & 0666)
322
318
323 def pconvert(path):
319 def pconvert(path):
324 return path
320 return path
325
321
326 def localpath(path):
322 def localpath(path):
327 return path
323 return path
328
324
329 normpath = os.path.normpath
325 normpath = os.path.normpath
330
326
331 def makelock(info, pathname):
327 def makelock(info, pathname):
332 try:
328 try:
333 os.symlink(info, pathname)
329 os.symlink(info, pathname)
334 except OSError, why:
330 except OSError, why:
335 if why.errno == errno.EEXIST:
331 if why.errno == errno.EEXIST:
336 raise
332 raise
337 else:
333 else:
338 _makelock_file(info, pathname)
334 _makelock_file(info, pathname)
339
335
340 def readlock(pathname):
336 def readlock(pathname):
341 try:
337 try:
342 return os.readlink(pathname)
338 return os.readlink(pathname)
343 except OSError, why:
339 except OSError, why:
344 if why.errno == errno.EINVAL:
340 if why.errno == errno.EINVAL:
345 return _readlock_file(pathname)
341 return _readlock_file(pathname)
346 else:
342 else:
347 raise
343 raise
348
344
349 def explain_exit(code):
345 def explain_exit(code):
350 """return a 2-tuple (desc, code) describing a process's status"""
346 """return a 2-tuple (desc, code) describing a process's status"""
351 if os.name == 'nt': # os.WIFxx is not supported on windows
347 if os.name == 'nt': # os.WIFxx is not supported on windows
352 return "aborted with error." , -1
348 return "aborted with error." , -1
353 if os.WIFEXITED(code):
349 if os.WIFEXITED(code):
354 val = os.WEXITSTATUS(code)
350 val = os.WEXITSTATUS(code)
355 return "exited with status %d" % val, val
351 return "exited with status %d" % val, val
356 elif os.WIFSIGNALED(code):
352 elif os.WIFSIGNALED(code):
357 val = os.WTERMSIG(code)
353 val = os.WTERMSIG(code)
358 return "killed by signal %d" % val, val
354 return "killed by signal %d" % val, val
359 elif os.WIFSTOPPED(code):
355 elif os.WIFSTOPPED(code):
360 val = os.WSTOPSIG(code)
356 val = os.WSTOPSIG(code)
361 return "stopped by signal %d" % val, val
357 return "stopped by signal %d" % val, val
362 raise ValueError("invalid exit code")
358 raise ValueError("invalid exit code")
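explain_exit() turns the raw status from os.system()/wait() into a human-readable description plus a plain exit code, hiding the WIFEXITED/WIFSIGNALED decoding from callers such as util.system(). A usage sketch, assuming util is importable and a POSIX platform:

import os, util   # assumes this util.py is importable as "util"

status = os.system("exit 3")          # raw wait status on POSIX
desc, code = util.explain_exit(status)
print(desc)    # e.g. "exited with status 3"
print(code)    # 3

status = os.system("kill -TERM $$")   # the shell kills itself
print(util.explain_exit(status)[0])   # e.g. "killed by signal 15"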