rebase: do not crash in panic when cwd disappears in the process (issue4121)...
Pierre-Yves David
r20335:e4052064 stable
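The crash this commit addresses comes from os.getcwd() itself: once the directory the process is sitting in has been removed (which can happen to the user's shell cwd while a rebase strips and recreates changesets), the call raises OSError, and the rebase aborted with a traceback instead of finishing. A minimal illustration of the failure mode, separate from the patch itself (Linux behaviour; the scratch directory is only for the example):

    import os
    import tempfile

    scratch = tempfile.mkdtemp()
    os.chdir(scratch)
    os.rmdir(scratch)   # the directory we are sitting in goes away, much like
                        # the user's cwd can during a rebase (issue4121)
    os.getcwd()         # raises OSError(errno.ENOENT) once the cwd is gone

The dirstate hunk below avoids re-running that call every time the working directory is asked for.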
@@ -1,852 +1,856 @@ mercurial/dirstate.py
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 import errno
7 import errno
8
8
9 from node import nullid
9 from node import nullid
10 from i18n import _
10 from i18n import _
11 import scmutil, util, ignore, osutil, parsers, encoding, pathutil
11 import scmutil, util, ignore, osutil, parsers, encoding, pathutil
12 import os, stat, errno, gc
12 import os, stat, errno, gc
13
13
14 propertycache = util.propertycache
14 propertycache = util.propertycache
15 filecache = scmutil.filecache
15 filecache = scmutil.filecache
16 _rangemask = 0x7fffffff
16 _rangemask = 0x7fffffff
17
17
18 class repocache(filecache):
18 class repocache(filecache):
19 """filecache for files in .hg/"""
19 """filecache for files in .hg/"""
20 def join(self, obj, fname):
20 def join(self, obj, fname):
21 return obj._opener.join(fname)
21 return obj._opener.join(fname)
22
22
23 class rootcache(filecache):
23 class rootcache(filecache):
24 """filecache for files in the repository root"""
24 """filecache for files in the repository root"""
25 def join(self, obj, fname):
25 def join(self, obj, fname):
26 return obj._join(fname)
26 return obj._join(fname)
27
27
28 class dirstate(object):
28 class dirstate(object):
29
29
30 def __init__(self, opener, ui, root, validate):
30 def __init__(self, opener, ui, root, validate):
31 '''Create a new dirstate object.
31 '''Create a new dirstate object.
32
32
33 opener is an open()-like callable that can be used to open the
33 opener is an open()-like callable that can be used to open the
34 dirstate file; root is the root of the directory tracked by
34 dirstate file; root is the root of the directory tracked by
35 the dirstate.
35 the dirstate.
36 '''
36 '''
37 self._opener = opener
37 self._opener = opener
38 self._validate = validate
38 self._validate = validate
39 self._root = root
39 self._root = root
40 self._rootdir = os.path.join(root, '')
40 self._rootdir = os.path.join(root, '')
41 self._dirty = False
41 self._dirty = False
42 self._dirtypl = False
42 self._dirtypl = False
43 self._lastnormaltime = 0
43 self._lastnormaltime = 0
44 self._ui = ui
44 self._ui = ui
45 self._filecache = {}
45 self._filecache = {}
46
46
47 @propertycache
47 @propertycache
48 def _map(self):
48 def _map(self):
49 '''Return the dirstate contents as a map from filename to
49 '''Return the dirstate contents as a map from filename to
50 (state, mode, size, time).'''
50 (state, mode, size, time).'''
51 self._read()
51 self._read()
52 return self._map
52 return self._map
53
53
54 @propertycache
54 @propertycache
55 def _copymap(self):
55 def _copymap(self):
56 self._read()
56 self._read()
57 return self._copymap
57 return self._copymap
58
58
59 @propertycache
59 @propertycache
60 def _foldmap(self):
60 def _foldmap(self):
61 f = {}
61 f = {}
62 for name, s in self._map.iteritems():
62 for name, s in self._map.iteritems():
63 if s[0] != 'r':
63 if s[0] != 'r':
64 f[util.normcase(name)] = name
64 f[util.normcase(name)] = name
65 for name in self._dirs:
65 for name in self._dirs:
66 f[util.normcase(name)] = name
66 f[util.normcase(name)] = name
67 f['.'] = '.' # prevents useless util.fspath() invocation
67 f['.'] = '.' # prevents useless util.fspath() invocation
68 return f
68 return f
69
69
70 @repocache('branch')
70 @repocache('branch')
71 def _branch(self):
71 def _branch(self):
72 try:
72 try:
73 return self._opener.read("branch").strip() or "default"
73 return self._opener.read("branch").strip() or "default"
74 except IOError, inst:
74 except IOError, inst:
75 if inst.errno != errno.ENOENT:
75 if inst.errno != errno.ENOENT:
76 raise
76 raise
77 return "default"
77 return "default"
78
78
79 @propertycache
79 @propertycache
80 def _pl(self):
80 def _pl(self):
81 try:
81 try:
82 fp = self._opener("dirstate")
82 fp = self._opener("dirstate")
83 st = fp.read(40)
83 st = fp.read(40)
84 fp.close()
84 fp.close()
85 l = len(st)
85 l = len(st)
86 if l == 40:
86 if l == 40:
87 return st[:20], st[20:40]
87 return st[:20], st[20:40]
88 elif l > 0 and l < 40:
88 elif l > 0 and l < 40:
89 raise util.Abort(_('working directory state appears damaged!'))
89 raise util.Abort(_('working directory state appears damaged!'))
90 except IOError, err:
90 except IOError, err:
91 if err.errno != errno.ENOENT:
91 if err.errno != errno.ENOENT:
92 raise
92 raise
93 return [nullid, nullid]
93 return [nullid, nullid]
94
94
95 @propertycache
95 @propertycache
96 def _dirs(self):
96 def _dirs(self):
97 return scmutil.dirs(self._map, 'r')
97 return scmutil.dirs(self._map, 'r')
98
98
99 def dirs(self):
99 def dirs(self):
100 return self._dirs
100 return self._dirs
101
101
102 @rootcache('.hgignore')
102 @rootcache('.hgignore')
103 def _ignore(self):
103 def _ignore(self):
104 files = [self._join('.hgignore')]
104 files = [self._join('.hgignore')]
105 for name, path in self._ui.configitems("ui"):
105 for name, path in self._ui.configitems("ui"):
106 if name == 'ignore' or name.startswith('ignore.'):
106 if name == 'ignore' or name.startswith('ignore.'):
107 files.append(util.expandpath(path))
107 files.append(util.expandpath(path))
108 return ignore.ignore(self._root, files, self._ui.warn)
108 return ignore.ignore(self._root, files, self._ui.warn)
109
109
110 @propertycache
110 @propertycache
111 def _slash(self):
111 def _slash(self):
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
113
113
114 @propertycache
114 @propertycache
115 def _checklink(self):
115 def _checklink(self):
116 return util.checklink(self._root)
116 return util.checklink(self._root)
117
117
118 @propertycache
118 @propertycache
119 def _checkexec(self):
119 def _checkexec(self):
120 return util.checkexec(self._root)
120 return util.checkexec(self._root)
121
121
122 @propertycache
122 @propertycache
123 def _checkcase(self):
123 def _checkcase(self):
124 return not util.checkcase(self._join('.hg'))
124 return not util.checkcase(self._join('.hg'))
125
125
126 def _join(self, f):
126 def _join(self, f):
127 # much faster than os.path.join()
127 # much faster than os.path.join()
128 # it's safe because f is always a relative path
128 # it's safe because f is always a relative path
129 return self._rootdir + f
129 return self._rootdir + f
130
130
131 def flagfunc(self, buildfallback):
131 def flagfunc(self, buildfallback):
132 if self._checklink and self._checkexec:
132 if self._checklink and self._checkexec:
133 def f(x):
133 def f(x):
134 try:
134 try:
135 st = os.lstat(self._join(x))
135 st = os.lstat(self._join(x))
136 if util.statislink(st):
136 if util.statislink(st):
137 return 'l'
137 return 'l'
138 if util.statisexec(st):
138 if util.statisexec(st):
139 return 'x'
139 return 'x'
140 except OSError:
140 except OSError:
141 pass
141 pass
142 return ''
142 return ''
143 return f
143 return f
144
144
145 fallback = buildfallback()
145 fallback = buildfallback()
146 if self._checklink:
146 if self._checklink:
147 def f(x):
147 def f(x):
148 if os.path.islink(self._join(x)):
148 if os.path.islink(self._join(x)):
149 return 'l'
149 return 'l'
150 if 'x' in fallback(x):
150 if 'x' in fallback(x):
151 return 'x'
151 return 'x'
152 return ''
152 return ''
153 return f
153 return f
154 if self._checkexec:
154 if self._checkexec:
155 def f(x):
155 def f(x):
156 if 'l' in fallback(x):
156 if 'l' in fallback(x):
157 return 'l'
157 return 'l'
158 if util.isexec(self._join(x)):
158 if util.isexec(self._join(x)):
159 return 'x'
159 return 'x'
160 return ''
160 return ''
161 return f
161 return f
162 else:
162 else:
163 return fallback
163 return fallback
164
164
165 + @propertycache
166 + def _cwd(self):
167 + return os.getcwd()
168 +
165 def getcwd(self):
169 def getcwd(self):
166 - cwd = os.getcwd()
170 + cwd = self._cwd
167 if cwd == self._root:
171 if cwd == self._root:
168 return ''
172 return ''
169 # self._root ends with a path separator if self._root is '/' or 'C:\'
173 # self._root ends with a path separator if self._root is '/' or 'C:\'
170 rootsep = self._root
174 rootsep = self._root
171 if not util.endswithsep(rootsep):
175 if not util.endswithsep(rootsep):
172 rootsep += os.sep
176 rootsep += os.sep
173 if cwd.startswith(rootsep):
177 if cwd.startswith(rootsep):
174 return cwd[len(rootsep):]
178 return cwd[len(rootsep):]
175 else:
179 else:
176 # we're outside the repo. return an absolute path.
180 # we're outside the repo. return an absolute path.
177 return cwd
181 return cwd
178
182
179 def pathto(self, f, cwd=None):
183 def pathto(self, f, cwd=None):
180 if cwd is None:
184 if cwd is None:
181 cwd = self.getcwd()
185 cwd = self.getcwd()
182 path = util.pathto(self._root, cwd, f)
186 path = util.pathto(self._root, cwd, f)
183 if self._slash:
187 if self._slash:
184 return util.pconvert(path)
188 return util.pconvert(path)
185 return path
189 return path
186
190
187 def __getitem__(self, key):
191 def __getitem__(self, key):
188 '''Return the current state of key (a filename) in the dirstate.
192 '''Return the current state of key (a filename) in the dirstate.
189
193
190 States are:
194 States are:
191 n normal
195 n normal
192 m needs merging
196 m needs merging
193 r marked for removal
197 r marked for removal
194 a marked for addition
198 a marked for addition
195 ? not tracked
199 ? not tracked
196 '''
200 '''
197 return self._map.get(key, ("?",))[0]
201 return self._map.get(key, ("?",))[0]
198
202
199 def __contains__(self, key):
203 def __contains__(self, key):
200 return key in self._map
204 return key in self._map
201
205
202 def __iter__(self):
206 def __iter__(self):
203 for x in sorted(self._map):
207 for x in sorted(self._map):
204 yield x
208 yield x
205
209
206 def iteritems(self):
210 def iteritems(self):
207 return self._map.iteritems()
211 return self._map.iteritems()
208
212
209 def parents(self):
213 def parents(self):
210 return [self._validate(p) for p in self._pl]
214 return [self._validate(p) for p in self._pl]
211
215
212 def p1(self):
216 def p1(self):
213 return self._validate(self._pl[0])
217 return self._validate(self._pl[0])
214
218
215 def p2(self):
219 def p2(self):
216 return self._validate(self._pl[1])
220 return self._validate(self._pl[1])
217
221
218 def branch(self):
222 def branch(self):
219 return encoding.tolocal(self._branch)
223 return encoding.tolocal(self._branch)
220
224
221 def setparents(self, p1, p2=nullid):
225 def setparents(self, p1, p2=nullid):
222 """Set dirstate parents to p1 and p2.
226 """Set dirstate parents to p1 and p2.
223
227
224 When moving from two parents to one, 'm' merged entries a
228 When moving from two parents to one, 'm' merged entries a
225 adjusted to normal and previous copy records discarded and
229 adjusted to normal and previous copy records discarded and
226 returned by the call.
230 returned by the call.
227
231
228 See localrepo.setparents()
232 See localrepo.setparents()
229 """
233 """
230 self._dirty = self._dirtypl = True
234 self._dirty = self._dirtypl = True
231 oldp2 = self._pl[1]
235 oldp2 = self._pl[1]
232 self._pl = p1, p2
236 self._pl = p1, p2
233 copies = {}
237 copies = {}
234 if oldp2 != nullid and p2 == nullid:
238 if oldp2 != nullid and p2 == nullid:
235 # Discard 'm' markers when moving away from a merge state
239 # Discard 'm' markers when moving away from a merge state
236 for f, s in self._map.iteritems():
240 for f, s in self._map.iteritems():
237 if s[0] == 'm':
241 if s[0] == 'm':
238 if f in self._copymap:
242 if f in self._copymap:
239 copies[f] = self._copymap[f]
243 copies[f] = self._copymap[f]
240 self.normallookup(f)
244 self.normallookup(f)
241 return copies
245 return copies
242
246
243 def setbranch(self, branch):
247 def setbranch(self, branch):
244 self._branch = encoding.fromlocal(branch)
248 self._branch = encoding.fromlocal(branch)
245 f = self._opener('branch', 'w', atomictemp=True)
249 f = self._opener('branch', 'w', atomictemp=True)
246 try:
250 try:
247 f.write(self._branch + '\n')
251 f.write(self._branch + '\n')
248 f.close()
252 f.close()
249
253
250 # make sure filecache has the correct stat info for _branch after
254 # make sure filecache has the correct stat info for _branch after
251 # replacing the underlying file
255 # replacing the underlying file
252 ce = self._filecache['_branch']
256 ce = self._filecache['_branch']
253 if ce:
257 if ce:
254 ce.refresh()
258 ce.refresh()
255 except: # re-raises
259 except: # re-raises
256 f.discard()
260 f.discard()
257 raise
261 raise
258
262
259 def _read(self):
263 def _read(self):
260 self._map = {}
264 self._map = {}
261 self._copymap = {}
265 self._copymap = {}
262 try:
266 try:
263 st = self._opener.read("dirstate")
267 st = self._opener.read("dirstate")
264 except IOError, err:
268 except IOError, err:
265 if err.errno != errno.ENOENT:
269 if err.errno != errno.ENOENT:
266 raise
270 raise
267 return
271 return
268 if not st:
272 if not st:
269 return
273 return
270
274
271 # Python's garbage collector triggers a GC each time a certain number
275 # Python's garbage collector triggers a GC each time a certain number
272 # of container objects (the number being defined by
276 # of container objects (the number being defined by
273 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
277 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
274 # for each file in the dirstate. The C version then immediately marks
278 # for each file in the dirstate. The C version then immediately marks
275 # them as not to be tracked by the collector. However, this has no
279 # them as not to be tracked by the collector. However, this has no
276 # effect on when GCs are triggered, only on what objects the GC looks
280 # effect on when GCs are triggered, only on what objects the GC looks
277 # into. This means that O(number of files) GCs are unavoidable.
281 # into. This means that O(number of files) GCs are unavoidable.
278 # Depending on when in the process's lifetime the dirstate is parsed,
282 # Depending on when in the process's lifetime the dirstate is parsed,
279 # this can get very expensive. As a workaround, disable GC while
283 # this can get very expensive. As a workaround, disable GC while
280 # parsing the dirstate.
284 # parsing the dirstate.
281 gcenabled = gc.isenabled()
285 gcenabled = gc.isenabled()
282 gc.disable()
286 gc.disable()
283 try:
287 try:
284 p = parsers.parse_dirstate(self._map, self._copymap, st)
288 p = parsers.parse_dirstate(self._map, self._copymap, st)
285 finally:
289 finally:
286 if gcenabled:
290 if gcenabled:
287 gc.enable()
291 gc.enable()
288 if not self._dirtypl:
292 if not self._dirtypl:
289 self._pl = p
293 self._pl = p
290
294
291 def invalidate(self):
295 def invalidate(self):
292 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
296 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
293 "_ignore"):
297 "_ignore"):
294 if a in self.__dict__:
298 if a in self.__dict__:
295 delattr(self, a)
299 delattr(self, a)
296 self._lastnormaltime = 0
300 self._lastnormaltime = 0
297 self._dirty = False
301 self._dirty = False
298
302
299 def copy(self, source, dest):
303 def copy(self, source, dest):
300 """Mark dest as a copy of source. Unmark dest if source is None."""
304 """Mark dest as a copy of source. Unmark dest if source is None."""
301 if source == dest:
305 if source == dest:
302 return
306 return
303 self._dirty = True
307 self._dirty = True
304 if source is not None:
308 if source is not None:
305 self._copymap[dest] = source
309 self._copymap[dest] = source
306 elif dest in self._copymap:
310 elif dest in self._copymap:
307 del self._copymap[dest]
311 del self._copymap[dest]
308
312
309 def copied(self, file):
313 def copied(self, file):
310 return self._copymap.get(file, None)
314 return self._copymap.get(file, None)
311
315
312 def copies(self):
316 def copies(self):
313 return self._copymap
317 return self._copymap
314
318
315 def _droppath(self, f):
319 def _droppath(self, f):
316 if self[f] not in "?r" and "_dirs" in self.__dict__:
320 if self[f] not in "?r" and "_dirs" in self.__dict__:
317 self._dirs.delpath(f)
321 self._dirs.delpath(f)
318
322
319 def _addpath(self, f, state, mode, size, mtime):
323 def _addpath(self, f, state, mode, size, mtime):
320 oldstate = self[f]
324 oldstate = self[f]
321 if state == 'a' or oldstate == 'r':
325 if state == 'a' or oldstate == 'r':
322 scmutil.checkfilename(f)
326 scmutil.checkfilename(f)
323 if f in self._dirs:
327 if f in self._dirs:
324 raise util.Abort(_('directory %r already in dirstate') % f)
328 raise util.Abort(_('directory %r already in dirstate') % f)
325 # shadows
329 # shadows
326 for d in scmutil.finddirs(f):
330 for d in scmutil.finddirs(f):
327 if d in self._dirs:
331 if d in self._dirs:
328 break
332 break
329 if d in self._map and self[d] != 'r':
333 if d in self._map and self[d] != 'r':
330 raise util.Abort(
334 raise util.Abort(
331 _('file %r in dirstate clashes with %r') % (d, f))
335 _('file %r in dirstate clashes with %r') % (d, f))
332 if oldstate in "?r" and "_dirs" in self.__dict__:
336 if oldstate in "?r" and "_dirs" in self.__dict__:
333 self._dirs.addpath(f)
337 self._dirs.addpath(f)
334 self._dirty = True
338 self._dirty = True
335 self._map[f] = (state, mode, size, mtime)
339 self._map[f] = (state, mode, size, mtime)
336
340
337 def normal(self, f):
341 def normal(self, f):
338 '''Mark a file normal and clean.'''
342 '''Mark a file normal and clean.'''
339 s = os.lstat(self._join(f))
343 s = os.lstat(self._join(f))
340 mtime = int(s.st_mtime)
344 mtime = int(s.st_mtime)
341 self._addpath(f, 'n', s.st_mode,
345 self._addpath(f, 'n', s.st_mode,
342 s.st_size & _rangemask, mtime & _rangemask)
346 s.st_size & _rangemask, mtime & _rangemask)
343 if f in self._copymap:
347 if f in self._copymap:
344 del self._copymap[f]
348 del self._copymap[f]
345 if mtime > self._lastnormaltime:
349 if mtime > self._lastnormaltime:
346 # Remember the most recent modification timeslot for status(),
350 # Remember the most recent modification timeslot for status(),
347 # to make sure we won't miss future size-preserving file content
351 # to make sure we won't miss future size-preserving file content
348 # modifications that happen within the same timeslot.
352 # modifications that happen within the same timeslot.
349 self._lastnormaltime = mtime
353 self._lastnormaltime = mtime
350
354
351 def normallookup(self, f):
355 def normallookup(self, f):
352 '''Mark a file normal, but possibly dirty.'''
356 '''Mark a file normal, but possibly dirty.'''
353 if self._pl[1] != nullid and f in self._map:
357 if self._pl[1] != nullid and f in self._map:
354 # if there is a merge going on and the file was either
358 # if there is a merge going on and the file was either
355 # in state 'm' (-1) or coming from other parent (-2) before
359 # in state 'm' (-1) or coming from other parent (-2) before
356 # being removed, restore that state.
360 # being removed, restore that state.
357 entry = self._map[f]
361 entry = self._map[f]
358 if entry[0] == 'r' and entry[2] in (-1, -2):
362 if entry[0] == 'r' and entry[2] in (-1, -2):
359 source = self._copymap.get(f)
363 source = self._copymap.get(f)
360 if entry[2] == -1:
364 if entry[2] == -1:
361 self.merge(f)
365 self.merge(f)
362 elif entry[2] == -2:
366 elif entry[2] == -2:
363 self.otherparent(f)
367 self.otherparent(f)
364 if source:
368 if source:
365 self.copy(source, f)
369 self.copy(source, f)
366 return
370 return
367 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
371 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
368 return
372 return
369 self._addpath(f, 'n', 0, -1, -1)
373 self._addpath(f, 'n', 0, -1, -1)
370 if f in self._copymap:
374 if f in self._copymap:
371 del self._copymap[f]
375 del self._copymap[f]
372
376
373 def otherparent(self, f):
377 def otherparent(self, f):
374 '''Mark as coming from the other parent, always dirty.'''
378 '''Mark as coming from the other parent, always dirty.'''
375 if self._pl[1] == nullid:
379 if self._pl[1] == nullid:
376 raise util.Abort(_("setting %r to other parent "
380 raise util.Abort(_("setting %r to other parent "
377 "only allowed in merges") % f)
381 "only allowed in merges") % f)
378 self._addpath(f, 'n', 0, -2, -1)
382 self._addpath(f, 'n', 0, -2, -1)
379 if f in self._copymap:
383 if f in self._copymap:
380 del self._copymap[f]
384 del self._copymap[f]
381
385
382 def add(self, f):
386 def add(self, f):
383 '''Mark a file added.'''
387 '''Mark a file added.'''
384 self._addpath(f, 'a', 0, -1, -1)
388 self._addpath(f, 'a', 0, -1, -1)
385 if f in self._copymap:
389 if f in self._copymap:
386 del self._copymap[f]
390 del self._copymap[f]
387
391
388 def remove(self, f):
392 def remove(self, f):
389 '''Mark a file removed.'''
393 '''Mark a file removed.'''
390 self._dirty = True
394 self._dirty = True
391 self._droppath(f)
395 self._droppath(f)
392 size = 0
396 size = 0
393 if self._pl[1] != nullid and f in self._map:
397 if self._pl[1] != nullid and f in self._map:
394 # backup the previous state
398 # backup the previous state
395 entry = self._map[f]
399 entry = self._map[f]
396 if entry[0] == 'm': # merge
400 if entry[0] == 'm': # merge
397 size = -1
401 size = -1
398 elif entry[0] == 'n' and entry[2] == -2: # other parent
402 elif entry[0] == 'n' and entry[2] == -2: # other parent
399 size = -2
403 size = -2
400 self._map[f] = ('r', 0, size, 0)
404 self._map[f] = ('r', 0, size, 0)
401 if size == 0 and f in self._copymap:
405 if size == 0 and f in self._copymap:
402 del self._copymap[f]
406 del self._copymap[f]
403
407
404 def merge(self, f):
408 def merge(self, f):
405 '''Mark a file merged.'''
409 '''Mark a file merged.'''
406 if self._pl[1] == nullid:
410 if self._pl[1] == nullid:
407 return self.normallookup(f)
411 return self.normallookup(f)
408 s = os.lstat(self._join(f))
412 s = os.lstat(self._join(f))
409 self._addpath(f, 'm', s.st_mode,
413 self._addpath(f, 'm', s.st_mode,
410 s.st_size & _rangemask, int(s.st_mtime) & _rangemask)
414 s.st_size & _rangemask, int(s.st_mtime) & _rangemask)
411 if f in self._copymap:
415 if f in self._copymap:
412 del self._copymap[f]
416 del self._copymap[f]
413
417
414 def drop(self, f):
418 def drop(self, f):
415 '''Drop a file from the dirstate'''
419 '''Drop a file from the dirstate'''
416 if f in self._map:
420 if f in self._map:
417 self._dirty = True
421 self._dirty = True
418 self._droppath(f)
422 self._droppath(f)
419 del self._map[f]
423 del self._map[f]
420
424
421 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
425 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
422 normed = util.normcase(path)
426 normed = util.normcase(path)
423 folded = self._foldmap.get(normed, None)
427 folded = self._foldmap.get(normed, None)
424 if folded is None:
428 if folded is None:
425 if isknown:
429 if isknown:
426 folded = path
430 folded = path
427 else:
431 else:
428 if exists is None:
432 if exists is None:
429 exists = os.path.lexists(os.path.join(self._root, path))
433 exists = os.path.lexists(os.path.join(self._root, path))
430 if not exists:
434 if not exists:
431 # Maybe a path component exists
435 # Maybe a path component exists
432 if not ignoremissing and '/' in path:
436 if not ignoremissing and '/' in path:
433 d, f = path.rsplit('/', 1)
437 d, f = path.rsplit('/', 1)
434 d = self._normalize(d, isknown, ignoremissing, None)
438 d = self._normalize(d, isknown, ignoremissing, None)
435 folded = d + "/" + f
439 folded = d + "/" + f
436 else:
440 else:
437 # No path components, preserve original case
441 # No path components, preserve original case
438 folded = path
442 folded = path
439 else:
443 else:
440 # recursively normalize leading directory components
444 # recursively normalize leading directory components
441 # against dirstate
445 # against dirstate
442 if '/' in normed:
446 if '/' in normed:
443 d, f = normed.rsplit('/', 1)
447 d, f = normed.rsplit('/', 1)
444 d = self._normalize(d, isknown, ignoremissing, True)
448 d = self._normalize(d, isknown, ignoremissing, True)
445 r = self._root + "/" + d
449 r = self._root + "/" + d
446 folded = d + "/" + util.fspath(f, r)
450 folded = d + "/" + util.fspath(f, r)
447 else:
451 else:
448 folded = util.fspath(normed, self._root)
452 folded = util.fspath(normed, self._root)
449 self._foldmap[normed] = folded
453 self._foldmap[normed] = folded
450
454
451 return folded
455 return folded
452
456
453 def normalize(self, path, isknown=False, ignoremissing=False):
457 def normalize(self, path, isknown=False, ignoremissing=False):
454 '''
458 '''
455 normalize the case of a pathname when on a casefolding filesystem
459 normalize the case of a pathname when on a casefolding filesystem
456
460
457 isknown specifies whether the filename came from walking the
461 isknown specifies whether the filename came from walking the
458 disk, to avoid extra filesystem access.
462 disk, to avoid extra filesystem access.
459
463
460 If ignoremissing is True, missing path are returned
464 If ignoremissing is True, missing path are returned
461 unchanged. Otherwise, we try harder to normalize possibly
465 unchanged. Otherwise, we try harder to normalize possibly
462 existing path components.
466 existing path components.
463
467
464 The normalized case is determined based on the following precedence:
468 The normalized case is determined based on the following precedence:
465
469
466 - version of name already stored in the dirstate
470 - version of name already stored in the dirstate
467 - version of name stored on disk
471 - version of name stored on disk
468 - version provided via command arguments
472 - version provided via command arguments
469 '''
473 '''
470
474
471 if self._checkcase:
475 if self._checkcase:
472 return self._normalize(path, isknown, ignoremissing)
476 return self._normalize(path, isknown, ignoremissing)
473 return path
477 return path
474
478
475 def clear(self):
479 def clear(self):
476 self._map = {}
480 self._map = {}
477 if "_dirs" in self.__dict__:
481 if "_dirs" in self.__dict__:
478 delattr(self, "_dirs")
482 delattr(self, "_dirs")
479 self._copymap = {}
483 self._copymap = {}
480 self._pl = [nullid, nullid]
484 self._pl = [nullid, nullid]
481 self._lastnormaltime = 0
485 self._lastnormaltime = 0
482 self._dirty = True
486 self._dirty = True
483
487
484 def rebuild(self, parent, allfiles, changedfiles=None):
488 def rebuild(self, parent, allfiles, changedfiles=None):
485 changedfiles = changedfiles or allfiles
489 changedfiles = changedfiles or allfiles
486 oldmap = self._map
490 oldmap = self._map
487 self.clear()
491 self.clear()
488 for f in allfiles:
492 for f in allfiles:
489 if f not in changedfiles:
493 if f not in changedfiles:
490 self._map[f] = oldmap[f]
494 self._map[f] = oldmap[f]
491 else:
495 else:
492 if 'x' in allfiles.flags(f):
496 if 'x' in allfiles.flags(f):
493 self._map[f] = ('n', 0777, -1, 0)
497 self._map[f] = ('n', 0777, -1, 0)
494 else:
498 else:
495 self._map[f] = ('n', 0666, -1, 0)
499 self._map[f] = ('n', 0666, -1, 0)
496 self._pl = (parent, nullid)
500 self._pl = (parent, nullid)
497 self._dirty = True
501 self._dirty = True
498
502
499 def write(self):
503 def write(self):
500 if not self._dirty:
504 if not self._dirty:
501 return
505 return
502 st = self._opener("dirstate", "w", atomictemp=True)
506 st = self._opener("dirstate", "w", atomictemp=True)
503
507
504 def finish(s):
508 def finish(s):
505 st.write(s)
509 st.write(s)
506 st.close()
510 st.close()
507 self._lastnormaltime = 0
511 self._lastnormaltime = 0
508 self._dirty = self._dirtypl = False
512 self._dirty = self._dirtypl = False
509
513
510 # use the modification time of the newly created temporary file as the
514 # use the modification time of the newly created temporary file as the
511 # filesystem's notion of 'now'
515 # filesystem's notion of 'now'
512 now = util.fstat(st).st_mtime
516 now = util.fstat(st).st_mtime
513 finish(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
517 finish(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
514
518
515 def _dirignore(self, f):
519 def _dirignore(self, f):
516 if f == '.':
520 if f == '.':
517 return False
521 return False
518 if self._ignore(f):
522 if self._ignore(f):
519 return True
523 return True
520 for p in scmutil.finddirs(f):
524 for p in scmutil.finddirs(f):
521 if self._ignore(p):
525 if self._ignore(p):
522 return True
526 return True
523 return False
527 return False
524
528
525 def _walkexplicit(self, match, subrepos):
529 def _walkexplicit(self, match, subrepos):
526 '''Get stat data about the files explicitly specified by match.
530 '''Get stat data about the files explicitly specified by match.
527
531
528 Return a triple (results, dirsfound, dirsnotfound).
532 Return a triple (results, dirsfound, dirsnotfound).
529 - results is a mapping from filename to stat result. It also contains
533 - results is a mapping from filename to stat result. It also contains
530 listings mapping subrepos and .hg to None.
534 listings mapping subrepos and .hg to None.
531 - dirsfound is a list of files found to be directories.
535 - dirsfound is a list of files found to be directories.
532 - dirsnotfound is a list of files that the dirstate thinks are
536 - dirsnotfound is a list of files that the dirstate thinks are
533 directories and that were not found.'''
537 directories and that were not found.'''
534
538
535 def badtype(mode):
539 def badtype(mode):
536 kind = _('unknown')
540 kind = _('unknown')
537 if stat.S_ISCHR(mode):
541 if stat.S_ISCHR(mode):
538 kind = _('character device')
542 kind = _('character device')
539 elif stat.S_ISBLK(mode):
543 elif stat.S_ISBLK(mode):
540 kind = _('block device')
544 kind = _('block device')
541 elif stat.S_ISFIFO(mode):
545 elif stat.S_ISFIFO(mode):
542 kind = _('fifo')
546 kind = _('fifo')
543 elif stat.S_ISSOCK(mode):
547 elif stat.S_ISSOCK(mode):
544 kind = _('socket')
548 kind = _('socket')
545 elif stat.S_ISDIR(mode):
549 elif stat.S_ISDIR(mode):
546 kind = _('directory')
550 kind = _('directory')
547 return _('unsupported file type (type is %s)') % kind
551 return _('unsupported file type (type is %s)') % kind
548
552
549 matchedir = match.explicitdir
553 matchedir = match.explicitdir
550 badfn = match.bad
554 badfn = match.bad
551 dmap = self._map
555 dmap = self._map
552 normpath = util.normpath
556 normpath = util.normpath
553 lstat = os.lstat
557 lstat = os.lstat
554 getkind = stat.S_IFMT
558 getkind = stat.S_IFMT
555 dirkind = stat.S_IFDIR
559 dirkind = stat.S_IFDIR
556 regkind = stat.S_IFREG
560 regkind = stat.S_IFREG
557 lnkkind = stat.S_IFLNK
561 lnkkind = stat.S_IFLNK
558 join = self._join
562 join = self._join
559 dirsfound = []
563 dirsfound = []
560 foundadd = dirsfound.append
564 foundadd = dirsfound.append
561 dirsnotfound = []
565 dirsnotfound = []
562 notfoundadd = dirsnotfound.append
566 notfoundadd = dirsnotfound.append
563
567
564 if match.matchfn != match.exact and self._checkcase:
568 if match.matchfn != match.exact and self._checkcase:
565 normalize = self._normalize
569 normalize = self._normalize
566 else:
570 else:
567 normalize = None
571 normalize = None
568
572
569 files = sorted(match.files())
573 files = sorted(match.files())
570 subrepos.sort()
574 subrepos.sort()
571 i, j = 0, 0
575 i, j = 0, 0
572 while i < len(files) and j < len(subrepos):
576 while i < len(files) and j < len(subrepos):
573 subpath = subrepos[j] + "/"
577 subpath = subrepos[j] + "/"
574 if files[i] < subpath:
578 if files[i] < subpath:
575 i += 1
579 i += 1
576 continue
580 continue
577 while i < len(files) and files[i].startswith(subpath):
581 while i < len(files) and files[i].startswith(subpath):
578 del files[i]
582 del files[i]
579 j += 1
583 j += 1
580
584
581 if not files or '.' in files:
585 if not files or '.' in files:
582 files = ['']
586 files = ['']
583 results = dict.fromkeys(subrepos)
587 results = dict.fromkeys(subrepos)
584 results['.hg'] = None
588 results['.hg'] = None
585
589
586 for ff in files:
590 for ff in files:
587 if normalize:
591 if normalize:
588 nf = normalize(normpath(ff), False, True)
592 nf = normalize(normpath(ff), False, True)
589 else:
593 else:
590 nf = normpath(ff)
594 nf = normpath(ff)
591 if nf in results:
595 if nf in results:
592 continue
596 continue
593
597
594 try:
598 try:
595 st = lstat(join(nf))
599 st = lstat(join(nf))
596 kind = getkind(st.st_mode)
600 kind = getkind(st.st_mode)
597 if kind == dirkind:
601 if kind == dirkind:
598 if nf in dmap:
602 if nf in dmap:
599 #file deleted on disk but still in dirstate
603 #file deleted on disk but still in dirstate
600 results[nf] = None
604 results[nf] = None
601 if matchedir:
605 if matchedir:
602 matchedir(nf)
606 matchedir(nf)
603 foundadd(nf)
607 foundadd(nf)
604 elif kind == regkind or kind == lnkkind:
608 elif kind == regkind or kind == lnkkind:
605 results[nf] = st
609 results[nf] = st
606 else:
610 else:
607 badfn(ff, badtype(kind))
611 badfn(ff, badtype(kind))
608 if nf in dmap:
612 if nf in dmap:
609 results[nf] = None
613 results[nf] = None
610 except OSError, inst:
614 except OSError, inst:
611 if nf in dmap: # does it exactly match a file?
615 if nf in dmap: # does it exactly match a file?
612 results[nf] = None
616 results[nf] = None
613 else: # does it match a directory?
617 else: # does it match a directory?
614 prefix = nf + "/"
618 prefix = nf + "/"
615 for fn in dmap:
619 for fn in dmap:
616 if fn.startswith(prefix):
620 if fn.startswith(prefix):
617 if matchedir:
621 if matchedir:
618 matchedir(nf)
622 matchedir(nf)
619 notfoundadd(nf)
623 notfoundadd(nf)
620 break
624 break
621 else:
625 else:
622 badfn(ff, inst.strerror)
626 badfn(ff, inst.strerror)
623
627
624 return results, dirsfound, dirsnotfound
628 return results, dirsfound, dirsnotfound
625
629
626 def walk(self, match, subrepos, unknown, ignored, full=True):
630 def walk(self, match, subrepos, unknown, ignored, full=True):
627 '''
631 '''
628 Walk recursively through the directory tree, finding all files
632 Walk recursively through the directory tree, finding all files
629 matched by match.
633 matched by match.
630
634
631 If full is False, maybe skip some known-clean files.
635 If full is False, maybe skip some known-clean files.
632
636
633 Return a dict mapping filename to stat-like object (either
637 Return a dict mapping filename to stat-like object (either
634 mercurial.osutil.stat instance or return value of os.stat()).
638 mercurial.osutil.stat instance or return value of os.stat()).
635
639
636 '''
640 '''
637 # full is a flag that extensions that hook into walk can use -- this
641 # full is a flag that extensions that hook into walk can use -- this
638 # implementation doesn't use it at all. This satisfies the contract
642 # implementation doesn't use it at all. This satisfies the contract
639 # because we only guarantee a "maybe".
643 # because we only guarantee a "maybe".
640
644
641 def fwarn(f, msg):
645 def fwarn(f, msg):
642 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
646 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
643 return False
647 return False
644
648
645 ignore = self._ignore
649 ignore = self._ignore
646 dirignore = self._dirignore
650 dirignore = self._dirignore
647 if ignored:
651 if ignored:
648 ignore = util.never
652 ignore = util.never
649 dirignore = util.never
653 dirignore = util.never
650 elif not unknown:
654 elif not unknown:
651 # if unknown and ignored are False, skip step 2
655 # if unknown and ignored are False, skip step 2
652 ignore = util.always
656 ignore = util.always
653 dirignore = util.always
657 dirignore = util.always
654
658
655 matchfn = match.matchfn
659 matchfn = match.matchfn
656 matchalways = match.always()
660 matchalways = match.always()
657 matchtdir = match.traversedir
661 matchtdir = match.traversedir
658 dmap = self._map
662 dmap = self._map
659 listdir = osutil.listdir
663 listdir = osutil.listdir
660 lstat = os.lstat
664 lstat = os.lstat
661 dirkind = stat.S_IFDIR
665 dirkind = stat.S_IFDIR
662 regkind = stat.S_IFREG
666 regkind = stat.S_IFREG
663 lnkkind = stat.S_IFLNK
667 lnkkind = stat.S_IFLNK
664 join = self._join
668 join = self._join
665
669
666 exact = skipstep3 = False
670 exact = skipstep3 = False
667 if matchfn == match.exact: # match.exact
671 if matchfn == match.exact: # match.exact
668 exact = True
672 exact = True
669 dirignore = util.always # skip step 2
673 dirignore = util.always # skip step 2
670 elif match.files() and not match.anypats(): # match.match, no patterns
674 elif match.files() and not match.anypats(): # match.match, no patterns
671 skipstep3 = True
675 skipstep3 = True
672
676
673 if not exact and self._checkcase:
677 if not exact and self._checkcase:
674 normalize = self._normalize
678 normalize = self._normalize
675 skipstep3 = False
679 skipstep3 = False
676 else:
680 else:
677 normalize = None
681 normalize = None
678
682
679 # step 1: find all explicit files
683 # step 1: find all explicit files
680 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
684 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
681
685
682 skipstep3 = skipstep3 and not (work or dirsnotfound)
686 skipstep3 = skipstep3 and not (work or dirsnotfound)
683 work = [d for d in work if not dirignore(d)]
687 work = [d for d in work if not dirignore(d)]
684 wadd = work.append
688 wadd = work.append
685
689
686 # step 2: visit subdirectories
690 # step 2: visit subdirectories
687 while work:
691 while work:
688 nd = work.pop()
692 nd = work.pop()
689 skip = None
693 skip = None
690 if nd == '.':
694 if nd == '.':
691 nd = ''
695 nd = ''
692 else:
696 else:
693 skip = '.hg'
697 skip = '.hg'
694 try:
698 try:
695 entries = listdir(join(nd), stat=True, skip=skip)
699 entries = listdir(join(nd), stat=True, skip=skip)
696 except OSError, inst:
700 except OSError, inst:
697 if inst.errno in (errno.EACCES, errno.ENOENT):
701 if inst.errno in (errno.EACCES, errno.ENOENT):
698 fwarn(nd, inst.strerror)
702 fwarn(nd, inst.strerror)
699 continue
703 continue
700 raise
704 raise
701 for f, kind, st in entries:
705 for f, kind, st in entries:
702 if normalize:
706 if normalize:
703 nf = normalize(nd and (nd + "/" + f) or f, True, True)
707 nf = normalize(nd and (nd + "/" + f) or f, True, True)
704 else:
708 else:
705 nf = nd and (nd + "/" + f) or f
709 nf = nd and (nd + "/" + f) or f
706 if nf not in results:
710 if nf not in results:
707 if kind == dirkind:
711 if kind == dirkind:
708 if not ignore(nf):
712 if not ignore(nf):
709 if matchtdir:
713 if matchtdir:
710 matchtdir(nf)
714 matchtdir(nf)
711 wadd(nf)
715 wadd(nf)
712 if nf in dmap and (matchalways or matchfn(nf)):
716 if nf in dmap and (matchalways or matchfn(nf)):
713 results[nf] = None
717 results[nf] = None
714 elif kind == regkind or kind == lnkkind:
718 elif kind == regkind or kind == lnkkind:
715 if nf in dmap:
719 if nf in dmap:
716 if matchalways or matchfn(nf):
720 if matchalways or matchfn(nf):
717 results[nf] = st
721 results[nf] = st
718 elif (matchalways or matchfn(nf)) and not ignore(nf):
722 elif (matchalways or matchfn(nf)) and not ignore(nf):
719 results[nf] = st
723 results[nf] = st
720 elif nf in dmap and (matchalways or matchfn(nf)):
724 elif nf in dmap and (matchalways or matchfn(nf)):
721 results[nf] = None
725 results[nf] = None
722
726
723 for s in subrepos:
727 for s in subrepos:
724 del results[s]
728 del results[s]
725 del results['.hg']
729 del results['.hg']
726
730
727 # step 3: report unseen items in the dmap hash
731 # step 3: report unseen items in the dmap hash
728 if not skipstep3 and not exact:
732 if not skipstep3 and not exact:
729 if not results and matchalways:
733 if not results and matchalways:
730 visit = dmap.keys()
734 visit = dmap.keys()
731 else:
735 else:
732 visit = [f for f in dmap if f not in results and matchfn(f)]
736 visit = [f for f in dmap if f not in results and matchfn(f)]
733 visit.sort()
737 visit.sort()
734
738
735 if unknown:
739 if unknown:
736 # unknown == True means we walked the full directory tree above.
740 # unknown == True means we walked the full directory tree above.
737 # So if a file is not seen it was either a) not matching matchfn
741 # So if a file is not seen it was either a) not matching matchfn
738 # b) ignored, c) missing, or d) under a symlink directory.
742 # b) ignored, c) missing, or d) under a symlink directory.
739 audit_path = pathutil.pathauditor(self._root)
743 audit_path = pathutil.pathauditor(self._root)
740
744
741 for nf in iter(visit):
745 for nf in iter(visit):
742 # Report ignored items in the dmap as long as they are not
746 # Report ignored items in the dmap as long as they are not
743 # under a symlink directory.
747 # under a symlink directory.
744 if audit_path.check(nf):
748 if audit_path.check(nf):
745 try:
749 try:
746 results[nf] = lstat(join(nf))
750 results[nf] = lstat(join(nf))
747 except OSError:
751 except OSError:
748 # file doesn't exist
752 # file doesn't exist
749 results[nf] = None
753 results[nf] = None
750 else:
754 else:
751 # It's either missing or under a symlink directory
755 # It's either missing or under a symlink directory
752 results[nf] = None
756 results[nf] = None
753 else:
757 else:
754 # We may not have walked the full directory tree above,
758 # We may not have walked the full directory tree above,
755 # so stat everything we missed.
759 # so stat everything we missed.
756 nf = iter(visit).next
760 nf = iter(visit).next
757 for st in util.statfiles([join(i) for i in visit]):
761 for st in util.statfiles([join(i) for i in visit]):
758 results[nf()] = st
762 results[nf()] = st
759 return results
763 return results
760
764
761 def status(self, match, subrepos, ignored, clean, unknown):
765 def status(self, match, subrepos, ignored, clean, unknown):
762 '''Determine the status of the working copy relative to the
766 '''Determine the status of the working copy relative to the
763 dirstate and return a tuple of lists (unsure, modified, added,
767 dirstate and return a tuple of lists (unsure, modified, added,
764 removed, deleted, unknown, ignored, clean), where:
768 removed, deleted, unknown, ignored, clean), where:
765
769
766 unsure:
770 unsure:
767 files that might have been modified since the dirstate was
771 files that might have been modified since the dirstate was
768 written, but need to be read to be sure (size is the same
772 written, but need to be read to be sure (size is the same
769 but mtime differs)
773 but mtime differs)
770 modified:
774 modified:
771 files that have definitely been modified since the dirstate
775 files that have definitely been modified since the dirstate
772 was written (different size or mode)
776 was written (different size or mode)
773 added:
777 added:
774 files that have been explicitly added with hg add
778 files that have been explicitly added with hg add
775 removed:
779 removed:
776 files that have been explicitly removed with hg remove
780 files that have been explicitly removed with hg remove
777 deleted:
781 deleted:
778 files that have been deleted through other means ("missing")
782 files that have been deleted through other means ("missing")
779 unknown:
783 unknown:
780 files not in the dirstate that are not ignored
784 files not in the dirstate that are not ignored
781 ignored:
785 ignored:
782 files not in the dirstate that are ignored
786 files not in the dirstate that are ignored
783 (by _dirignore())
787 (by _dirignore())
784 clean:
788 clean:
785 files that have definitely not been modified since the
789 files that have definitely not been modified since the
786 dirstate was written
790 dirstate was written
787 '''
791 '''
788 listignored, listclean, listunknown = ignored, clean, unknown
792 listignored, listclean, listunknown = ignored, clean, unknown
789 lookup, modified, added, unknown, ignored = [], [], [], [], []
793 lookup, modified, added, unknown, ignored = [], [], [], [], []
790 removed, deleted, clean = [], [], []
794 removed, deleted, clean = [], [], []
791
795
792 dmap = self._map
796 dmap = self._map
793 ladd = lookup.append # aka "unsure"
797 ladd = lookup.append # aka "unsure"
794 madd = modified.append
798 madd = modified.append
795 aadd = added.append
799 aadd = added.append
796 uadd = unknown.append
800 uadd = unknown.append
797 iadd = ignored.append
801 iadd = ignored.append
798 radd = removed.append
802 radd = removed.append
799 dadd = deleted.append
803 dadd = deleted.append
800 cadd = clean.append
804 cadd = clean.append
801 mexact = match.exact
805 mexact = match.exact
802 dirignore = self._dirignore
806 dirignore = self._dirignore
803 checkexec = self._checkexec
807 checkexec = self._checkexec
804 copymap = self._copymap
808 copymap = self._copymap
805 lastnormaltime = self._lastnormaltime
809 lastnormaltime = self._lastnormaltime
806
810
807 # We need to do full walks when either
811 # We need to do full walks when either
808 # - we're listing all clean files, or
812 # - we're listing all clean files, or
809 # - match.traversedir does something, because match.traversedir should
813 # - match.traversedir does something, because match.traversedir should
810 # be called for every dir in the working dir
814 # be called for every dir in the working dir
811 full = listclean or match.traversedir is not None
815 full = listclean or match.traversedir is not None
812 for fn, st in self.walk(match, subrepos, listunknown, listignored,
816 for fn, st in self.walk(match, subrepos, listunknown, listignored,
813 full=full).iteritems():
817 full=full).iteritems():
814 if fn not in dmap:
818 if fn not in dmap:
815 if (listignored or mexact(fn)) and dirignore(fn):
819 if (listignored or mexact(fn)) and dirignore(fn):
816 if listignored:
820 if listignored:
817 iadd(fn)
821 iadd(fn)
818 else:
822 else:
819 uadd(fn)
823 uadd(fn)
820 continue
824 continue
821
825
822 state, mode, size, time = dmap[fn]
826 state, mode, size, time = dmap[fn]
823
827
824 if not st and state in "nma":
828 if not st and state in "nma":
825 dadd(fn)
829 dadd(fn)
826 elif state == 'n':
830 elif state == 'n':
827 mtime = int(st.st_mtime)
831 mtime = int(st.st_mtime)
828 if (size >= 0 and
832 if (size >= 0 and
829 ((size != st.st_size and size != st.st_size & _rangemask)
833 ((size != st.st_size and size != st.st_size & _rangemask)
830 or ((mode ^ st.st_mode) & 0100 and checkexec))
834 or ((mode ^ st.st_mode) & 0100 and checkexec))
831 or size == -2 # other parent
835 or size == -2 # other parent
832 or fn in copymap):
836 or fn in copymap):
833 madd(fn)
837 madd(fn)
834 elif time != mtime and time != mtime & _rangemask:
838 elif time != mtime and time != mtime & _rangemask:
835 ladd(fn)
839 ladd(fn)
836 elif mtime == lastnormaltime:
840 elif mtime == lastnormaltime:
837 # fn may have been changed in the same timeslot without
841 # fn may have been changed in the same timeslot without
838 # changing its size. This can happen if we quickly do
842 # changing its size. This can happen if we quickly do
839 # multiple commits in a single transaction.
843 # multiple commits in a single transaction.
840 # Force lookup, so we don't miss such a racy file change.
844 # Force lookup, so we don't miss such a racy file change.
841 ladd(fn)
845 ladd(fn)
842 elif listclean:
846 elif listclean:
843 cadd(fn)
847 cadd(fn)
844 elif state == 'm':
848 elif state == 'm':
845 madd(fn)
849 madd(fn)
846 elif state == 'a':
850 elif state == 'a':
847 aadd(fn)
851 aadd(fn)
848 elif state == 'r':
852 elif state == 'r':
849 radd(fn)
853 radd(fn)
850
854
851 return (lookup, modified, added, removed, deleted, unknown, ignored,
855 return (lookup, modified, added, removed, deleted, unknown, ignored,
852 clean)
856 clean)
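The hunk above is the whole fix on the dirstate side: getcwd() now returns self._cwd, a propertycache, instead of calling os.getcwd() on every invocation, so the value is computed the first time it is needed and simply reused afterwards, even if the directory has vanished in the meantime. A simplified sketch of how that caching behaves; the real descriptor is util.propertycache in mercurial/util.py, and the classes below are only stand-ins for illustration:

    import os

    class propertycache(object):
        # Non-data descriptor: compute the value once, then store it on the
        # instance so later attribute lookups never call the function again.
        def __init__(self, func):
            self.func = func
            self.name = func.__name__
        def __get__(self, obj, objtype=None):
            value = self.func(obj)
            setattr(obj, self.name, value)  # shadows the descriptor from now on
            return value

    class cwdcache(object):
        @propertycache
        def _cwd(self):
            return os.getcwd()
        def getcwd(self):
            # after the first access, os.getcwd() is never called again, so a
            # working directory removed later cannot make this raise OSError
            return self._cwd

The hunk that follows touches a rebase test file; per its header it adds 25 lines, presumably the regression test for this scenario, although those added lines fall outside the excerpt shown here.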
@@ -1,651 +1,676 @@
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extensions]
2 > [extensions]
3 > rebase=
3 > rebase=
4 >
4 >
5 > [phases]
5 > [phases]
6 > publish=False
6 > publish=False
7 >
7 >
8 > [alias]
8 > [alias]
9 > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
9 > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
10 > EOF
10 > EOF
11
11
12
12
13 $ hg init a
13 $ hg init a
14 $ cd a
14 $ cd a
15 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
15 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
16 adding changesets
16 adding changesets
17 adding manifests
17 adding manifests
18 adding file changes
18 adding file changes
19 added 8 changesets with 7 changes to 7 files (+2 heads)
19 added 8 changesets with 7 changes to 7 files (+2 heads)
20 (run 'hg heads' to see heads, 'hg merge' to merge)
20 (run 'hg heads' to see heads, 'hg merge' to merge)
21 $ hg up tip
21 $ hg up tip
22 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 $ cd ..
23 $ cd ..
24
24
25
25
26 Rebasing
26 Rebasing
27 D onto H - simple rebase:
27 D onto H - simple rebase:
28
28
29 $ hg clone -q -u . a a1
29 $ hg clone -q -u . a a1
30 $ cd a1
30 $ cd a1
31
31
32 $ hg tglog
32 $ hg tglog
33 @ 7: 'H'
33 @ 7: 'H'
34 |
34 |
35 | o 6: 'G'
35 | o 6: 'G'
36 |/|
36 |/|
37 o | 5: 'F'
37 o | 5: 'F'
38 | |
38 | |
39 | o 4: 'E'
39 | o 4: 'E'
40 |/
40 |/
41 | o 3: 'D'
41 | o 3: 'D'
42 | |
42 | |
43 | o 2: 'C'
43 | o 2: 'C'
44 | |
44 | |
45 | o 1: 'B'
45 | o 1: 'B'
46 |/
46 |/
47 o 0: 'A'
47 o 0: 'A'
48
48
49
49
50 $ hg rebase -s 3 -d 7
50 $ hg rebase -s 3 -d 7
51 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
51 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
52
52
53 $ hg tglog
53 $ hg tglog
54 o 7: 'D'
54 o 7: 'D'
55 |
55 |
56 @ 6: 'H'
56 @ 6: 'H'
57 |
57 |
58 | o 5: 'G'
58 | o 5: 'G'
59 |/|
59 |/|
60 o | 4: 'F'
60 o | 4: 'F'
61 | |
61 | |
62 | o 3: 'E'
62 | o 3: 'E'
63 |/
63 |/
64 | o 2: 'C'
64 | o 2: 'C'
65 | |
65 | |
66 | o 1: 'B'
66 | o 1: 'B'
67 |/
67 |/
68 o 0: 'A'
68 o 0: 'A'
69
69
70 $ cd ..
70 $ cd ..
71
71
72
72
73 D onto F - intermediate point:
73 D onto F - intermediate point:
74
74
75 $ hg clone -q -u . a a2
75 $ hg clone -q -u . a a2
76 $ cd a2
76 $ cd a2
77
77
78 $ hg rebase -s 3 -d 5
78 $ hg rebase -s 3 -d 5
79 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
79 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
80
80
81 $ hg tglog
81 $ hg tglog
82 o 7: 'D'
82 o 7: 'D'
83 |
83 |
84 | @ 6: 'H'
84 | @ 6: 'H'
85 |/
85 |/
86 | o 5: 'G'
86 | o 5: 'G'
87 |/|
87 |/|
88 o | 4: 'F'
88 o | 4: 'F'
89 | |
89 | |
90 | o 3: 'E'
90 | o 3: 'E'
91 |/
91 |/
92 | o 2: 'C'
92 | o 2: 'C'
93 | |
93 | |
94 | o 1: 'B'
94 | o 1: 'B'
95 |/
95 |/
96 o 0: 'A'
96 o 0: 'A'
97
97
98 $ cd ..
98 $ cd ..
99
99
100
100
101 E onto H - skip of G:
101 E onto H - skip of G:
102
102
103 $ hg clone -q -u . a a3
103 $ hg clone -q -u . a a3
104 $ cd a3
104 $ cd a3
105
105
106 $ hg rebase -s 4 -d 7
106 $ hg rebase -s 4 -d 7
107 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
107 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
108
108
109 $ hg tglog
109 $ hg tglog
110 o 6: 'E'
110 o 6: 'E'
111 |
111 |
112 @ 5: 'H'
112 @ 5: 'H'
113 |
113 |
114 o 4: 'F'
114 o 4: 'F'
115 |
115 |
116 | o 3: 'D'
116 | o 3: 'D'
117 | |
117 | |
118 | o 2: 'C'
118 | o 2: 'C'
119 | |
119 | |
120 | o 1: 'B'
120 | o 1: 'B'
121 |/
121 |/
122 o 0: 'A'
122 o 0: 'A'
123
123
124 $ cd ..
124 $ cd ..
125
125
126
126
127 F onto E - rebase of a branching point (skip G):
127 F onto E - rebase of a branching point (skip G):
128
128
129 $ hg clone -q -u . a a4
129 $ hg clone -q -u . a a4
130 $ cd a4
130 $ cd a4
131
131
132 $ hg rebase -s 5 -d 4
132 $ hg rebase -s 5 -d 4
133 saved backup bundle to $TESTTMP/a4/.hg/strip-backup/*-backup.hg (glob)
133 saved backup bundle to $TESTTMP/a4/.hg/strip-backup/*-backup.hg (glob)
134
134
135 $ hg tglog
135 $ hg tglog
136 @ 6: 'H'
136 @ 6: 'H'
137 |
137 |
138 o 5: 'F'
138 o 5: 'F'
139 |
139 |
140 o 4: 'E'
140 o 4: 'E'
141 |
141 |
142 | o 3: 'D'
142 | o 3: 'D'
143 | |
143 | |
144 | o 2: 'C'
144 | o 2: 'C'
145 | |
145 | |
146 | o 1: 'B'
146 | o 1: 'B'
147 |/
147 |/
148 o 0: 'A'
148 o 0: 'A'
149
149
150 $ cd ..
150 $ cd ..
151
151
152
152
153 G onto H - merged revision having a parent in ancestors of target:
153 G onto H - merged revision having a parent in ancestors of target:
154
154
155 $ hg clone -q -u . a a5
155 $ hg clone -q -u . a a5
156 $ cd a5
156 $ cd a5
157
157
158 $ hg rebase -s 6 -d 7
158 $ hg rebase -s 6 -d 7
159 saved backup bundle to $TESTTMP/a5/.hg/strip-backup/*-backup.hg (glob)
159 saved backup bundle to $TESTTMP/a5/.hg/strip-backup/*-backup.hg (glob)
160
160
161 $ hg tglog
161 $ hg tglog
162 o 7: 'G'
162 o 7: 'G'
163 |\
163 |\
164 | @ 6: 'H'
164 | @ 6: 'H'
165 | |
165 | |
166 | o 5: 'F'
166 | o 5: 'F'
167 | |
167 | |
168 o | 4: 'E'
168 o | 4: 'E'
169 |/
169 |/
170 | o 3: 'D'
170 | o 3: 'D'
171 | |
171 | |
172 | o 2: 'C'
172 | o 2: 'C'
173 | |
173 | |
174 | o 1: 'B'
174 | o 1: 'B'
175 |/
175 |/
176 o 0: 'A'
176 o 0: 'A'
177
177
178 $ cd ..
178 $ cd ..
179
179
180
180
181 F onto B - G maintains E as parent:
181 F onto B - G maintains E as parent:
182
182
183 $ hg clone -q -u . a a6
183 $ hg clone -q -u . a a6
184 $ cd a6
184 $ cd a6
185
185
186 $ hg rebase -s 5 -d 1
186 $ hg rebase -s 5 -d 1
187 saved backup bundle to $TESTTMP/a6/.hg/strip-backup/*-backup.hg (glob)
187 saved backup bundle to $TESTTMP/a6/.hg/strip-backup/*-backup.hg (glob)
188
188
189 $ hg tglog
189 $ hg tglog
190 @ 7: 'H'
190 @ 7: 'H'
191 |
191 |
192 | o 6: 'G'
192 | o 6: 'G'
193 |/|
193 |/|
194 o | 5: 'F'
194 o | 5: 'F'
195 | |
195 | |
196 | o 4: 'E'
196 | o 4: 'E'
197 | |
197 | |
198 | | o 3: 'D'
198 | | o 3: 'D'
199 | | |
199 | | |
200 +---o 2: 'C'
200 +---o 2: 'C'
201 | |
201 | |
202 o | 1: 'B'
202 o | 1: 'B'
203 |/
203 |/
204 o 0: 'A'
204 o 0: 'A'
205
205
206 $ cd ..
206 $ cd ..
207
207
208
208
209 These will fail (using --source):
209 These will fail (using --source):
210
210
211 G onto F - rebase onto an ancestor:
211 G onto F - rebase onto an ancestor:
212
212
213 $ hg clone -q -u . a a7
213 $ hg clone -q -u . a a7
214 $ cd a7
214 $ cd a7
215
215
216 $ hg rebase -s 6 -d 5
216 $ hg rebase -s 6 -d 5
217 nothing to rebase
217 nothing to rebase
218 [1]
218 [1]
219
219
220 F onto G - rebase onto a descendant:
220 F onto G - rebase onto a descendant:
221
221
222 $ hg rebase -s 5 -d 6
222 $ hg rebase -s 5 -d 6
223 abort: source is ancestor of destination
223 abort: source is ancestor of destination
224 [255]
224 [255]
225
225
226 G onto B - merge revision with both parents not in ancestors of target:
226 G onto B - merge revision with both parents not in ancestors of target:
227
227
228 $ hg rebase -s 6 -d 1
228 $ hg rebase -s 6 -d 1
229 abort: cannot use revision 6 as base, result would have 3 parents
229 abort: cannot use revision 6 as base, result would have 3 parents
230 [255]
230 [255]
231
231
232
232
These will abort gracefully (using --base):

G onto G - rebase onto same changeset:

$ hg rebase -b 6 -d 6
nothing to rebase - eea13746799a is both "base" and destination
[1]

G onto F - rebase onto an ancestor:

$ hg rebase -b 6 -d 5
nothing to rebase
[1]

F onto G - rebase onto a descendant:

$ hg rebase -b 5 -d 6
nothing to rebase - "base" 24b6387c8c8c is already an ancestor of destination eea13746799a
[1]

C onto A - rebase onto an ancestor:

$ hg rebase -d 0 -s 2
saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-backup.hg (glob)
$ hg tglog
o 7: 'D'
|
o 6: 'C'
|
| @ 5: 'H'
| |
| | o 4: 'G'
| |/|
| o | 3: 'F'
|/ /
| o 2: 'E'
|/
| o 1: 'B'
|/
o 0: 'A'


Check rebasing a public changeset

$ hg pull --config phases.publish=True -q -r 6 . # update phase of 6
$ hg rebase -d 0 -b 6
nothing to rebase
[1]
$ hg rebase -d 5 -b 6
abort: can't rebase immutable changeset e1c4361dd923
(see hg help phases for details)
[255]

$ hg rebase -d 5 -b 6 --keep

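(Public changesets are immutable, so rebase refuses to move them; with --keep the rebase is allowed because the originals are kept and copies are created on the destination.)
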
Check rebasing mutable changesets
Source phase greater than or equal to destination phase: the new changesets get the phase of the source:
$ hg rebase -s9 -d0
saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-backup.hg (glob)
$ hg log --template "{phase}\n" -r 9
draft
$ hg rebase -s9 -d1
saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-backup.hg (glob)
$ hg log --template "{phase}\n" -r 9
draft
$ hg phase --force --secret 9
$ hg rebase -s9 -d0
saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-backup.hg (glob)
$ hg log --template "{phase}\n" -r 9
secret
$ hg rebase -s9 -d1
saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-backup.hg (glob)
$ hg log --template "{phase}\n" -r 9
secret
Source phase lower than destination phase: the new changesets get the phase of the destination:
$ hg rebase -s8 -d9
saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-backup.hg (glob)
$ hg log --template "{phase}\n" -r 'rev(9)'
secret
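
(Taken together, the two checks above say that a rebased changeset never ends up in a lower phase than the destination: the resulting phase is effectively the maximum of the source and destination phases.)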

$ cd ..

Tests for revsets

We need a slightly different graph
All destinations are B

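(For reference, a rough gloss of the revsets used below, see 'hg help revsets': 'X::Y' is the DAG range, i.e. descendants of X that are also ancestors of Y; '+' is set union; a trailing '::' as in '6::' means all descendants of the revision, including itself.)
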
$ hg init ah
$ cd ah
$ hg unbundle "$TESTDIR/bundles/rebase-revset.hg"
adding changesets
adding manifests
adding file changes
added 9 changesets with 9 changes to 9 files (+2 heads)
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg tglog
o 8: 'I'
|
o 7: 'H'
|
o 6: 'G'
|
| o 5: 'F'
| |
| o 4: 'E'
|/
o 3: 'D'
|
o 2: 'C'
|
| o 1: 'B'
|/
o 0: 'A'

$ cd ..


Simple case with keep:

The source has two descendant heads but we ask to rebase only one of them

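(In the cases below the rebased set leaves descendants behind, for instance E and F on top of D, so the original changesets cannot be stripped and the rebase is only allowed with --keep.)
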
$ hg clone -q -u . ah ah1
$ cd ah1
$ hg rebase -r '2::8' -d 1
abort: can't remove original changesets with unrebased descendants
(use --keep to keep original changesets)
[255]
$ hg rebase -r '2::8' -d 1 --keep
$ hg tglog
o 13: 'I'
|
o 12: 'H'
|
o 11: 'G'
|
o 10: 'D'
|
o 9: 'C'
|
| o 8: 'I'
| |
| o 7: 'H'
| |
| o 6: 'G'
| |
| | o 5: 'F'
| | |
| | o 4: 'E'
| |/
| o 3: 'D'
| |
| o 2: 'C'
| |
o | 1: 'B'
|/
o 0: 'A'


$ cd ..

The base has the one descendant head we ask for, but the common ancestor has two

$ hg clone -q -u . ah ah2
$ cd ah2
$ hg rebase -r '3::8' -d 1
abort: can't remove original changesets with unrebased descendants
(use --keep to keep original changesets)
[255]
$ hg rebase -r '3::8' -d 1 --keep
$ hg tglog
o 12: 'I'
|
o 11: 'H'
|
o 10: 'G'
|
o 9: 'D'
|
| o 8: 'I'
| |
| o 7: 'H'
| |
| o 6: 'G'
| |
| | o 5: 'F'
| | |
| | o 4: 'E'
| |/
| o 3: 'D'
| |
| o 2: 'C'
| |
o | 1: 'B'
|/
o 0: 'A'


$ cd ..

rebase subset

$ hg clone -q -u . ah ah3
$ cd ah3
$ hg rebase -r '3::7' -d 1
abort: can't remove original changesets with unrebased descendants
(use --keep to keep original changesets)
[255]
$ hg rebase -r '3::7' -d 1 --keep
$ hg tglog
o 11: 'H'
|
o 10: 'G'
|
o 9: 'D'
|
| o 8: 'I'
| |
| o 7: 'H'
| |
| o 6: 'G'
| |
| | o 5: 'F'
| | |
| | o 4: 'E'
| |/
| o 3: 'D'
| |
| o 2: 'C'
| |
o | 1: 'B'
|/
o 0: 'A'


$ cd ..

rebase subset with multiple heads

$ hg clone -q -u . ah ah4
$ cd ah4
$ hg rebase -r '3::(7+5)' -d 1
abort: can't remove original changesets with unrebased descendants
(use --keep to keep original changesets)
[255]
$ hg rebase -r '3::(7+5)' -d 1 --keep
$ hg tglog
o 13: 'H'
|
o 12: 'G'
|
| o 11: 'F'
| |
| o 10: 'E'
|/
o 9: 'D'
|
| o 8: 'I'
| |
| o 7: 'H'
| |
| o 6: 'G'
| |
| | o 5: 'F'
| | |
| | o 4: 'E'
| |/
| o 3: 'D'
| |
| o 2: 'C'
| |
o | 1: 'B'
|/
o 0: 'A'


$ cd ..

More advanced tests

rebase on ancestor with revset

$ hg clone -q -u . ah ah5
$ cd ah5
$ hg rebase -r '6::' -d 2
saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-backup.hg (glob)
$ hg tglog
o 8: 'I'
|
o 7: 'H'
|
o 6: 'G'
|
| o 5: 'F'
| |
| o 4: 'E'
| |
| o 3: 'D'
|/
o 2: 'C'
|
| o 1: 'B'
|/
o 0: 'A'

$ cd ..


rebase with multiple roots.
We rebase E and G onto B
We would expect the heads to be I and F if this were supported

$ hg clone -q -u . ah ah6
$ cd ah6
$ hg rebase -r '(4+6)::' -d 1
saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-backup.hg (glob)
$ hg tglog
o 8: 'I'
|
o 7: 'H'
|
o 6: 'G'
|
| o 5: 'F'
| |
| o 4: 'E'
|/
| o 3: 'D'
| |
| o 2: 'C'
| |
o | 1: 'B'
|/
o 0: 'A'

$ cd ..

More complex rebase with multiple roots
each root has a different common ancestor with the destination, and this is a detach

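(Roughly speaking, "detach" means each rebased root is pulled off its original parent and reattached onto the destination, even though that parent is not part of the rebased set.)
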
(setup)

$ hg clone -q -u . a a8
$ cd a8
$ echo I > I
$ hg add I
$ hg commit -m I
$ hg up 4
1 files updated, 0 files merged, 3 files removed, 0 files unresolved
$ echo I > J
$ hg add J
$ hg commit -m J
created new head
$ echo I > K
$ hg add K
$ hg commit -m K
$ hg tglog
@ 10: 'K'
|
o 9: 'J'
|
| o 8: 'I'
| |
| o 7: 'H'
| |
+---o 6: 'G'
| |/
| o 5: 'F'
| |
o | 4: 'E'
|/
| o 3: 'D'
| |
| o 2: 'C'
| |
| o 1: 'B'
|/
o 0: 'A'

(actual test)

$ hg rebase --dest 'desc(G)' --rev 'desc(K) + desc(I)'
saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-backup.hg (glob)
$ hg log --rev 'children(desc(G))'
changeset: 9:adb617877056
parent: 6:eea13746799a
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: I

changeset: 10:882431a34a0e
tag: tip
parent: 6:eea13746799a
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: K

$ hg tglog
@ 10: 'K'
|
| o 9: 'I'
|/
| o 8: 'J'
| |
| | o 7: 'H'
| | |
o---+ 6: 'G'
|/ /
| o 5: 'F'
| |
o | 4: 'E'
|/
| o 3: 'D'
| |
| o 2: 'C'
| |
| o 1: 'B'
|/
o 0: 'A'


Test that rebase is not confused by $CWD disappearing during rebase (issue4121)
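
(The rebase below is started from inside 'subdir', a directory that exists only in the source changesets; while rebasing, the working directory is updated and that directory vanishes, which used to make rebase crash.)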

$ cd ..
$ hg init cwd-vanish
$ cd cwd-vanish
$ touch initial-file
$ hg add initial-file
$ hg commit -m 'initial commit'
$ touch dest-file
$ hg add dest-file
$ hg commit -m 'dest commit'
$ hg up 0
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ touch other-file
$ hg add other-file
$ hg commit -m 'first source commit'
created new head
$ mkdir subdir
$ cd subdir
$ touch subfile
$ hg add subfile
$ hg commit -m 'second source with subdir'
$ hg rebase -b . -d 1 --traceback
saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-backup.hg (glob)