##// END OF EJS Templates
context: make sure file is not deleted while checking path conflicts...
Pulkit Goyal -
r39168:873f3a56 default
parent child Browse files
Show More
@@ -1,2566 +1,2566 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirfilenodeids,
24 wdirfilenodeids,
25 wdirid,
25 wdirid,
26 )
26 )
27 from . import (
27 from . import (
28 dagop,
28 dagop,
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 obsolete as obsmod,
33 obsolete as obsmod,
34 patch,
34 patch,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 repoview,
38 repoview,
39 revlog,
39 revlog,
40 scmutil,
40 scmutil,
41 sparse,
41 sparse,
42 subrepo,
42 subrepo,
43 subrepoutil,
43 subrepoutil,
44 util,
44 util,
45 )
45 )
46 from .utils import (
46 from .utils import (
47 dateutil,
47 dateutil,
48 stringutil,
48 stringutil,
49 )
49 )
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""

    def __init__(self, repo):
        # the localrepo this context belongs to; subclasses are expected to
        # set self._rev / self._node / self._manifest themselves
        self._repo = repo

    def __bytes__(self):
        # short hex form of the node, e.g. b'873f3a56'
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        # equal iff same concrete context type and same revision; contexts
        # lacking _rev (partially initialized) compare unequal
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        # membership test is on file paths in this context's manifest
        return key in self._manifest

    def __getitem__(self, key):
        # ctx[path] -> file context for that path
        return self.filectx(key)

    def __iter__(self):
        # iterate file paths in manifest order
        return iter(self._manifest)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """This internal method provides a way for child objects to override the
        match operator.
        """
        return match

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        's' is an initial scmutil.status (deleted/unknown/ignored come from
        it); returns a new scmutil.status comparing 'other' (base) to self.
        """
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        # NOTE: manifests expose a py2-style iteritems() API regardless of the
        # running Python version (Mercurial's own dict-like types provide it)
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirfilenodeids:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1 and
                       (not match or match(fn))]
            ignored = [fn for fn in ignored if fn not in mf1 and
                       (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

    @propertycache
    def substate(self):
        # subrepo state (.hgsubstate) parsed for this context
        return subrepoutil.state(self, self._repo.ui)

    def subrev(self, subpath):
        # revision recorded for a subrepo path in .hgsubstate
        return self.substate[subpath][1]

    def rev(self):
        # revision number; may be None for contexts not in the changelog
        # (see the "is not None" guard in _buildstatus)
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        # full 40-char hex node id
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def manifestctx(self):
        return self._manifestctx
    def repo(self):
        return self._repo
    def phasestr(self):
        # human-readable phase name ('public', 'draft', 'secret', ...)
        return phases.phasenames[self.phase()]
    def mutable(self):
        # anything above 'public' is mutable
        return self.phase() > phases.public

    def matchfileset(self, expr, badfn=None):
        # build a matcher from a fileset expression evaluated in this context
        return fileset.match(self, expr, badfn=badfn)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def orphan(self):
        """True if the changeset is not obsolete but it's ancestor are"""
        return self.rev() in obsmod.getrevs(self._repo, 'orphan')

    def phasedivergent(self):
        """True if the changeset try to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')

    def contentdivergent(self):
        """Is a successors of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')

    def isunstable(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        instabilities = []
        if self.orphan():
            instabilities.append('orphan')
        if self.phasedivergent():
            instabilities.append('phase-divergent')
        if self.contentdivergent():
            instabilities.append('content-divergent')
        return instabilities

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        # second parent, or a null changectx when there is only one parent
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        """Return (filenode, flags) for path, raising ManifestLookupError
        when the path is not in this context's manifest.

        Tries, in order: an already-loaded full manifest, the cached
        manifest delta, and finally a targeted find() on the manifestlog
        (avoids parsing the whole manifest for a single path).
        """
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        # flags ('l', 'x', or '') for path; missing paths yield ''
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        """Build a matcher for this context (no filesystem auditing)."""
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, changes=None, opts=None,
             losedatafn=None, prefix='', relroot='', copy=None,
             hunksfilterfn=None):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
                          opts=opts, losedatafn=losedatafn, prefix=prefix,
                          relroot=relroot, copy=copy,
                          hunksfilterfn=hunksfilterfn)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        #
        # NOTE(review): 'reversed' shadows the builtin; harmless in this
        # scope, but renaming it would be cleaner.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        # drop anything outside the narrowspec, then normalize ordering
        narrowmatch = self._repo.narrowmatch()
        if not narrowmatch.always():
            for l in r:
                l[:] = list(filter(narrowmatch, l))
        for l in r:
            l.sort()

        return r
383
383
384 class changectx(basectx):
384 class changectx(basectx):
385 """A changecontext object makes access to data related to a particular
385 """A changecontext object makes access to data related to a particular
386 changeset convenient. It represents a read-only context already present in
386 changeset convenient. It represents a read-only context already present in
387 the repo."""
387 the repo."""
    def __init__(self, repo, changeid='.'):
        """changeid is a revision number, node, or tag"""
        super(changectx, self).__init__(repo)

        # Resolution order is tuned for the common cases; each successful
        # branch sets self._node/self._rev and returns. Filtered-revision
        # errors are funnelled into FilteredRepoLookupError below, and any
        # fall-through ends in RepoLookupError.
        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            elif changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            elif changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            elif (changeid == '.'
                  or repo.local() and changeid == repo.dirstate.p1()):
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            elif len(changeid) == 20:
                # binary node id
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredLookupError:
                    changeid = hex(changeid)  # for the error message
                    raise
                except LookupError:
                    # check if it might have come from damaged dirstate
                    #
                    # XXX we could avoid the unfiltered if we had a recognizable
                    # exception for filtered changeset access
                    if (repo.local()
                        and changeid in repo.unfiltered().dirstate.parents()):
                        msg = _("working directory has unknown parent '%s'!")
                        raise error.Abort(msg % short(changeid))
                    changeid = hex(changeid)  # for the error message

            elif len(changeid) == 40:
                # full hex node id
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    # not a valid hex string, or unknown node: fall through
                    pass
            else:
                raise error.ProgrammingError(
                    "unsupported changeid '%s' of type %s" %
                    (changeid, type(changeid)))

        except (error.FilteredIndexError, error.FilteredLookupError):
            raise error.FilteredRepoLookupError(_("filtered revision '%s'")
                                                % pycompat.bytestr(changeid))
        except error.FilteredRepoLookupError:
            raise
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)
454
454
455 def __hash__(self):
455 def __hash__(self):
456 try:
456 try:
457 return hash(self._rev)
457 return hash(self._rev)
458 except AttributeError:
458 except AttributeError:
459 return id(self)
459 return id(self)
460
460
461 def __nonzero__(self):
461 def __nonzero__(self):
462 return self._rev != nullrev
462 return self._rev != nullrev
463
463
464 __bool__ = __nonzero__
464 __bool__ = __nonzero__
465
465
466 @propertycache
466 @propertycache
467 def _changeset(self):
467 def _changeset(self):
468 return self._repo.changelog.changelogrevision(self.rev())
468 return self._repo.changelog.changelogrevision(self.rev())
469
469
470 @propertycache
470 @propertycache
471 def _manifest(self):
471 def _manifest(self):
472 return self._manifestctx.read()
472 return self._manifestctx.read()
473
473
474 @property
474 @property
475 def _manifestctx(self):
475 def _manifestctx(self):
476 return self._repo.manifestlog[self._changeset.manifest]
476 return self._repo.manifestlog[self._changeset.manifest]
477
477
478 @propertycache
478 @propertycache
479 def _manifestdelta(self):
479 def _manifestdelta(self):
480 return self._manifestctx.readdelta()
480 return self._manifestctx.readdelta()
481
481
482 @propertycache
482 @propertycache
483 def _parents(self):
483 def _parents(self):
484 repo = self._repo
484 repo = self._repo
485 p1, p2 = repo.changelog.parentrevs(self._rev)
485 p1, p2 = repo.changelog.parentrevs(self._rev)
486 if p2 == nullrev:
486 if p2 == nullrev:
487 return [changectx(repo, p1)]
487 return [changectx(repo, p1)]
488 return [changectx(repo, p1), changectx(repo, p2)]
488 return [changectx(repo, p1), changectx(repo, p2)]
489
489
490 def changeset(self):
490 def changeset(self):
491 c = self._changeset
491 c = self._changeset
492 return (
492 return (
493 c.manifest,
493 c.manifest,
494 c.user,
494 c.user,
495 c.date,
495 c.date,
496 c.files,
496 c.files,
497 c.description,
497 c.description,
498 c.extra,
498 c.extra,
499 )
499 )
500 def manifestnode(self):
500 def manifestnode(self):
501 return self._changeset.manifest
501 return self._changeset.manifest
502
502
503 def user(self):
503 def user(self):
504 return self._changeset.user
504 return self._changeset.user
505 def date(self):
505 def date(self):
506 return self._changeset.date
506 return self._changeset.date
507 def files(self):
507 def files(self):
508 return self._changeset.files
508 return self._changeset.files
509 def description(self):
509 def description(self):
510 return self._changeset.description
510 return self._changeset.description
511 def branch(self):
511 def branch(self):
512 return encoding.tolocal(self._changeset.extra.get("branch"))
512 return encoding.tolocal(self._changeset.extra.get("branch"))
513 def closesbranch(self):
513 def closesbranch(self):
514 return 'close' in self._changeset.extra
514 return 'close' in self._changeset.extra
515 def extra(self):
515 def extra(self):
516 """Return a dict of extra information."""
516 """Return a dict of extra information."""
517 return self._changeset.extra
517 return self._changeset.extra
518 def tags(self):
518 def tags(self):
519 """Return a list of byte tag names"""
519 """Return a list of byte tag names"""
520 return self._repo.nodetags(self._node)
520 return self._repo.nodetags(self._node)
521 def bookmarks(self):
521 def bookmarks(self):
522 """Return a list of byte bookmark names."""
522 """Return a list of byte bookmark names."""
523 return self._repo.nodebookmarks(self._node)
523 return self._repo.nodebookmarks(self._node)
524 def phase(self):
524 def phase(self):
525 return self._repo._phasecache.phase(self._repo, self._rev)
525 return self._repo._phasecache.phase(self._repo, self._rev)
526 def hidden(self):
526 def hidden(self):
527 return self._rev in repoview.filterrevs(self._repo, 'visible')
527 return self._rev in repoview.filterrevs(self._repo, 'visible')
528
528
529 def isinmemory(self):
529 def isinmemory(self):
530 return False
530 return False
531
531
532 def children(self):
532 def children(self):
533 """return list of changectx contexts for each child changeset.
533 """return list of changectx contexts for each child changeset.
534
534
535 This returns only the immediate child changesets. Use descendants() to
535 This returns only the immediate child changesets. Use descendants() to
536 recursively walk children.
536 recursively walk children.
537 """
537 """
538 c = self._repo.changelog.children(self._node)
538 c = self._repo.changelog.children(self._node)
539 return [changectx(self._repo, x) for x in c]
539 return [changectx(self._repo, x) for x in c]
540
540
541 def ancestors(self):
541 def ancestors(self):
542 for a in self._repo.changelog.ancestors([self._rev]):
542 for a in self._repo.changelog.ancestors([self._rev]):
543 yield changectx(self._repo, a)
543 yield changectx(self._repo, a)
544
544
545 def descendants(self):
545 def descendants(self):
546 """Recursively yield all children of the changeset.
546 """Recursively yield all children of the changeset.
547
547
548 For just the immediate children, use children()
548 For just the immediate children, use children()
549 """
549 """
550 for d in self._repo.changelog.descendants([self._rev]):
550 for d in self._repo.changelog.descendants([self._rev]):
551 yield changectx(self._repo, d)
551 yield changectx(self._repo, d)
552
552
553 def filectx(self, path, fileid=None, filelog=None):
553 def filectx(self, path, fileid=None, filelog=None):
554 """get a file context from this changeset"""
554 """get a file context from this changeset"""
555 if fileid is None:
555 if fileid is None:
556 fileid = self.filenode(path)
556 fileid = self.filenode(path)
557 return filectx(self._repo, path, fileid=fileid,
557 return filectx(self._repo, path, fileid=fileid,
558 changectx=self, filelog=filelog)
558 changectx=self, filelog=filelog)
559
559
560 def ancestor(self, c2, warn=False):
560 def ancestor(self, c2, warn=False):
561 """return the "best" ancestor context of self and c2
561 """return the "best" ancestor context of self and c2
562
562
563 If there are multiple candidates, it will show a message and check
563 If there are multiple candidates, it will show a message and check
564 merge.preferancestor configuration before falling back to the
564 merge.preferancestor configuration before falling back to the
565 revlog ancestor."""
565 revlog ancestor."""
566 # deal with workingctxs
566 # deal with workingctxs
567 n2 = c2._node
567 n2 = c2._node
568 if n2 is None:
568 if n2 is None:
569 n2 = c2._parents[0]._node
569 n2 = c2._parents[0]._node
570 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
570 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
571 if not cahs:
571 if not cahs:
572 anc = nullid
572 anc = nullid
573 elif len(cahs) == 1:
573 elif len(cahs) == 1:
574 anc = cahs[0]
574 anc = cahs[0]
575 else:
575 else:
576 # experimental config: merge.preferancestor
576 # experimental config: merge.preferancestor
577 for r in self._repo.ui.configlist('merge', 'preferancestor'):
577 for r in self._repo.ui.configlist('merge', 'preferancestor'):
578 try:
578 try:
579 ctx = scmutil.revsymbol(self._repo, r)
579 ctx = scmutil.revsymbol(self._repo, r)
580 except error.RepoLookupError:
580 except error.RepoLookupError:
581 continue
581 continue
582 anc = ctx.node()
582 anc = ctx.node()
583 if anc in cahs:
583 if anc in cahs:
584 break
584 break
585 else:
585 else:
586 anc = self._repo.changelog.ancestor(self._node, n2)
586 anc = self._repo.changelog.ancestor(self._node, n2)
587 if warn:
587 if warn:
588 self._repo.ui.status(
588 self._repo.ui.status(
589 (_("note: using %s as ancestor of %s and %s\n") %
589 (_("note: using %s as ancestor of %s and %s\n") %
590 (short(anc), short(self._node), short(n2))) +
590 (short(anc), short(self._node), short(n2))) +
591 ''.join(_(" alternatively, use --config "
591 ''.join(_(" alternatively, use --config "
592 "merge.preferancestor=%s\n") %
592 "merge.preferancestor=%s\n") %
593 short(n) for n in sorted(cahs) if n != anc))
593 short(n) for n in sorted(cahs) if n != anc))
594 return changectx(self._repo, anc)
594 return changectx(self._repo, anc)
595
595
596 def isancestorof(self, other):
596 def isancestorof(self, other):
597 """True if this changeset is an ancestor of other"""
597 """True if this changeset is an ancestor of other"""
598 return self._repo.changelog.isancestorrev(self._rev, other._rev)
598 return self._repo.changelog.isancestorrev(self._rev, other._rev)
599
599
600 def walk(self, match):
600 def walk(self, match):
601 '''Generates matching file names.'''
601 '''Generates matching file names.'''
602
602
603 # Wrap match.bad method to have message with nodeid
603 # Wrap match.bad method to have message with nodeid
604 def bad(fn, msg):
604 def bad(fn, msg):
605 # The manifest doesn't know about subrepos, so don't complain about
605 # The manifest doesn't know about subrepos, so don't complain about
606 # paths into valid subrepos.
606 # paths into valid subrepos.
607 if any(fn == s or fn.startswith(s + '/')
607 if any(fn == s or fn.startswith(s + '/')
608 for s in self.substate):
608 for s in self.substate):
609 return
609 return
610 match.bad(fn, _('no such file in rev %s') % self)
610 match.bad(fn, _('no such file in rev %s') % self)
611
611
612 m = matchmod.badmatch(match, bad)
612 m = matchmod.badmatch(match, bad)
613 return self._manifest.walk(m)
613 return self._manifest.walk(m)
614
614
615 def matches(self, match):
615 def matches(self, match):
616 return self.walk(match)
616 return self.walk(match)
617
617
class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory,
    overlayfilectx: duplicate another filecontext with some fields overridden.
    """
    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if r'_changeid' in self.__dict__:
            return self._changeid
        elif r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            # filenode unresolvable: fall back to identity hashing
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def obsolete(self):
        return self._changectx.obsolete()
    def instabilities(self):
        return self._changectx.instabilities()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        return len(self.data())

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return stringutil.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        return False

    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True

    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result is crash somewhere else at to some point.
        return lkr

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
        if noctx or self.rev() == lkr:
            return self.linkrev()
        return self._adjustlinkrev(self.rev(), inclusive=True)

    def introfilectx(self):
        """Return filectx having identical contents, but pointing to the
        changeset revision where this filectx was introduced"""
        introrev = self.introrev()
        if self.rev() == introrev:
            return self
        return self.filectx(self.filenode(), changeid=introrev)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if r'_changeid' in vars(self) or r'_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif r'_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            # be replaced with the rename information. This parent is -always-
            # the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
        # no second parent: return a null filectx on the same filelog
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, skiprevs=None, diffopts=None):
        """Returns a list of annotateline objects for each line in the file

        - line.fctx is the filectx of the node where that line was last changed
        - line.lineno is the line number at the first appearance in the managed
          file
        - line.text is the data on that line (including newline character)
        """
        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not r'_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self.introfilectx()
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if base.rev() is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors([p.rev() for p in base.parents()],
                                  inclusive=True)
            else:
                ac = cl.ancestors([base.rev()], inclusive=True)
            base._ancestrycontext = ac

        return dagop.annotate(base, parents, skiprevs=skiprevs,
                              diffopts=diffopts)

    def ancestors(self, followfirst=False):
        # walk filelog ancestry, yielding the "latest" candidate first
        # (max by (linkrev, filenode) among the pending visits)
        visit = {}
        c = self
        if followfirst:
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            c = visit.pop(max(visit))
            yield c

    def decodeddata(self):
        """Returns `data()` after running repository decoding filters.

        This is often equivalent to how the data would be expressed on disk.
        """
        return self._repo.wwritedata(self.path(), self.data())
968
968
969 class filectx(basefilectx):
969 class filectx(basefilectx):
970 """A filecontext object makes access to data related to a particular
970 """A filecontext object makes access to data related to a particular
971 filerevision convenient."""
971 filerevision convenient."""
972 def __init__(self, repo, path, changeid=None, fileid=None,
972 def __init__(self, repo, path, changeid=None, fileid=None,
973 filelog=None, changectx=None):
973 filelog=None, changectx=None):
974 """changeid can be a changeset revision, node, or tag.
974 """changeid can be a changeset revision, node, or tag.
975 fileid can be a file revision or node."""
975 fileid can be a file revision or node."""
976 self._repo = repo
976 self._repo = repo
977 self._path = path
977 self._path = path
978
978
979 assert (changeid is not None
979 assert (changeid is not None
980 or fileid is not None
980 or fileid is not None
981 or changectx is not None), \
981 or changectx is not None), \
982 ("bad args: changeid=%r, fileid=%r, changectx=%r"
982 ("bad args: changeid=%r, fileid=%r, changectx=%r"
983 % (changeid, fileid, changectx))
983 % (changeid, fileid, changectx))
984
984
985 if filelog is not None:
985 if filelog is not None:
986 self._filelog = filelog
986 self._filelog = filelog
987
987
988 if changeid is not None:
988 if changeid is not None:
989 self._changeid = changeid
989 self._changeid = changeid
990 if changectx is not None:
990 if changectx is not None:
991 self._changectx = changectx
991 self._changectx = changectx
992 if fileid is not None:
992 if fileid is not None:
993 self._fileid = fileid
993 self._fileid = fileid
994
994
995 @propertycache
995 @propertycache
996 def _changectx(self):
996 def _changectx(self):
997 try:
997 try:
998 return changectx(self._repo, self._changeid)
998 return changectx(self._repo, self._changeid)
999 except error.FilteredRepoLookupError:
999 except error.FilteredRepoLookupError:
1000 # Linkrev may point to any revision in the repository. When the
1000 # Linkrev may point to any revision in the repository. When the
1001 # repository is filtered this may lead to `filectx` trying to build
1001 # repository is filtered this may lead to `filectx` trying to build
1002 # `changectx` for filtered revision. In such case we fallback to
1002 # `changectx` for filtered revision. In such case we fallback to
1003 # creating `changectx` on the unfiltered version of the reposition.
1003 # creating `changectx` on the unfiltered version of the reposition.
1004 # This fallback should not be an issue because `changectx` from
1004 # This fallback should not be an issue because `changectx` from
1005 # `filectx` are not used in complex operations that care about
1005 # `filectx` are not used in complex operations that care about
1006 # filtering.
1006 # filtering.
1007 #
1007 #
1008 # This fallback is a cheap and dirty fix that prevent several
1008 # This fallback is a cheap and dirty fix that prevent several
1009 # crashes. It does not ensure the behavior is correct. However the
1009 # crashes. It does not ensure the behavior is correct. However the
1010 # behavior was not correct before filtering either and "incorrect
1010 # behavior was not correct before filtering either and "incorrect
1011 # behavior" is seen as better as "crash"
1011 # behavior" is seen as better as "crash"
1012 #
1012 #
1013 # Linkrevs have several serious troubles with filtering that are
1013 # Linkrevs have several serious troubles with filtering that are
1014 # complicated to solve. Proper handling of the issue here should be
1014 # complicated to solve. Proper handling of the issue here should be
1015 # considered when solving linkrev issue are on the table.
1015 # considered when solving linkrev issue are on the table.
1016 return changectx(self._repo.unfiltered(), self._changeid)
1016 return changectx(self._repo.unfiltered(), self._changeid)
1017
1017
1018 def filectx(self, fileid, changeid=None):
1018 def filectx(self, fileid, changeid=None):
1019 '''opens an arbitrary revision of the file without
1019 '''opens an arbitrary revision of the file without
1020 opening a new filelog'''
1020 opening a new filelog'''
1021 return filectx(self._repo, self._path, fileid=fileid,
1021 return filectx(self._repo, self._path, fileid=fileid,
1022 filelog=self._filelog, changeid=changeid)
1022 filelog=self._filelog, changeid=changeid)
1023
1023
1024 def rawdata(self):
1024 def rawdata(self):
1025 return self._filelog.revision(self._filenode, raw=True)
1025 return self._filelog.revision(self._filenode, raw=True)
1026
1026
1027 def rawflags(self):
1027 def rawflags(self):
1028 """low-level revlog flags"""
1028 """low-level revlog flags"""
1029 return self._filelog.flags(self._filerev)
1029 return self._filelog.flags(self._filerev)
1030
1030
1031 def data(self):
1031 def data(self):
1032 try:
1032 try:
1033 return self._filelog.read(self._filenode)
1033 return self._filelog.read(self._filenode)
1034 except error.CensoredNodeError:
1034 except error.CensoredNodeError:
1035 if self._repo.ui.config("censor", "policy") == "ignore":
1035 if self._repo.ui.config("censor", "policy") == "ignore":
1036 return ""
1036 return ""
1037 raise error.Abort(_("censored node: %s") % short(self._filenode),
1037 raise error.Abort(_("censored node: %s") % short(self._filenode),
1038 hint=_("set censor.policy to ignore errors"))
1038 hint=_("set censor.policy to ignore errors"))
1039
1039
1040 def size(self):
1040 def size(self):
1041 return self._filelog.size(self._filerev)
1041 return self._filelog.size(self._filerev)
1042
1042
1043 @propertycache
1043 @propertycache
1044 def _copied(self):
1044 def _copied(self):
1045 """check if file was actually renamed in this changeset revision
1045 """check if file was actually renamed in this changeset revision
1046
1046
1047 If rename logged in file revision, we report copy for changeset only
1047 If rename logged in file revision, we report copy for changeset only
1048 if file revisions linkrev points back to the changeset in question
1048 if file revisions linkrev points back to the changeset in question
1049 or both changeset parents contain different file revisions.
1049 or both changeset parents contain different file revisions.
1050 """
1050 """
1051
1051
1052 renamed = self._filelog.renamed(self._filenode)
1052 renamed = self._filelog.renamed(self._filenode)
1053 if not renamed:
1053 if not renamed:
1054 return renamed
1054 return renamed
1055
1055
1056 if self.rev() == self.linkrev():
1056 if self.rev() == self.linkrev():
1057 return renamed
1057 return renamed
1058
1058
1059 name = self.path()
1059 name = self.path()
1060 fnode = self._filenode
1060 fnode = self._filenode
1061 for p in self._changectx.parents():
1061 for p in self._changectx.parents():
1062 try:
1062 try:
1063 if fnode == p.filenode(name):
1063 if fnode == p.filenode(name):
1064 return None
1064 return None
1065 except error.LookupError:
1065 except error.LookupError:
1066 pass
1066 pass
1067 return renamed
1067 return renamed
1068
1068
1069 def children(self):
1069 def children(self):
1070 # hard for renames
1070 # hard for renames
1071 c = self._filelog.children(self._filenode)
1071 c = self._filelog.children(self._filenode)
1072 return [filectx(self._repo, self._path, fileid=x,
1072 return [filectx(self._repo, self._path, fileid=x,
1073 filelog=self._filelog) for x in c]
1073 filelog=self._filelog) for x in c]
1074
1074
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(committablectx, self).__init__(repo)
        self._rev = None
        self._node = None
        self._text = text
        # date/user/changes are optional; when absent the corresponding
        # propertycaches below compute a default lazily
        if date:
            self._date = dateutil.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
            if self._extra['branch'] == '':
                self._extra['branch'] = 'default'

    def __bytes__(self):
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = dateutil.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        ds = self._repo.dirstate
        return sorted(f for f in ds.matches(match) if ds[f] != 'r')

    def ancestors(self):
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
1276
1276
1277 class workingctx(committablectx):
1277 class workingctx(committablectx):
1278 """A workingctx object makes access to data related to
1278 """A workingctx object makes access to data related to
1279 the current working directory convenient.
1279 the current working directory convenient.
1280 date - any valid date string or (unixtime, offset), or None.
1280 date - any valid date string or (unixtime, offset), or None.
1281 user - username string, or None.
1281 user - username string, or None.
1282 extra - a dictionary of extra values, or None.
1282 extra - a dictionary of extra values, or None.
1283 changes - a list of file lists as returned by localrepo.status()
1283 changes - a list of file lists as returned by localrepo.status()
1284 or None to use the repository status.
1284 or None to use the repository status.
1285 """
1285 """
1286 def __init__(self, repo, text="", user=None, date=None, extra=None,
1286 def __init__(self, repo, text="", user=None, date=None, extra=None,
1287 changes=None):
1287 changes=None):
1288 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1288 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1289
1289
1290 def __iter__(self):
1290 def __iter__(self):
1291 d = self._repo.dirstate
1291 d = self._repo.dirstate
1292 for f in d:
1292 for f in d:
1293 if d[f] != 'r':
1293 if d[f] != 'r':
1294 yield f
1294 yield f
1295
1295
1296 def __contains__(self, key):
1296 def __contains__(self, key):
1297 return self._repo.dirstate[key] not in "?r"
1297 return self._repo.dirstate[key] not in "?r"
1298
1298
1299 def hex(self):
1299 def hex(self):
1300 return hex(wdirid)
1300 return hex(wdirid)
1301
1301
1302 @propertycache
1302 @propertycache
1303 def _parents(self):
1303 def _parents(self):
1304 p = self._repo.dirstate.parents()
1304 p = self._repo.dirstate.parents()
1305 if p[1] == nullid:
1305 if p[1] == nullid:
1306 p = p[:-1]
1306 p = p[:-1]
1307 return [changectx(self._repo, x) for x in p]
1307 return [changectx(self._repo, x) for x in p]
1308
1308
1309 def _fileinfo(self, path):
1309 def _fileinfo(self, path):
1310 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1310 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1311 self._manifest
1311 self._manifest
1312 return super(workingctx, self)._fileinfo(path)
1312 return super(workingctx, self)._fileinfo(path)
1313
1313
1314 def filectx(self, path, filelog=None):
1314 def filectx(self, path, filelog=None):
1315 """get a file context from the working directory"""
1315 """get a file context from the working directory"""
1316 return workingfilectx(self._repo, path, workingctx=self,
1316 return workingfilectx(self._repo, path, workingctx=self,
1317 filelog=filelog)
1317 filelog=filelog)
1318
1318
1319 def dirty(self, missing=False, merge=True, branch=True):
1319 def dirty(self, missing=False, merge=True, branch=True):
1320 "check whether a working directory is modified"
1320 "check whether a working directory is modified"
1321 # check subrepos first
1321 # check subrepos first
1322 for s in sorted(self.substate):
1322 for s in sorted(self.substate):
1323 if self.sub(s).dirty(missing=missing):
1323 if self.sub(s).dirty(missing=missing):
1324 return True
1324 return True
1325 # check current working dir
1325 # check current working dir
1326 return ((merge and self.p2()) or
1326 return ((merge and self.p2()) or
1327 (branch and self.branch() != self.p1().branch()) or
1327 (branch and self.branch() != self.p1().branch()) or
1328 self.modified() or self.added() or self.removed() or
1328 self.modified() or self.added() or self.removed() or
1329 (missing and self.deleted()))
1329 (missing and self.deleted()))
1330
1330
1331 def add(self, list, prefix=""):
1331 def add(self, list, prefix=""):
1332 with self._repo.wlock():
1332 with self._repo.wlock():
1333 ui, ds = self._repo.ui, self._repo.dirstate
1333 ui, ds = self._repo.ui, self._repo.dirstate
1334 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1334 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1335 rejected = []
1335 rejected = []
1336 lstat = self._repo.wvfs.lstat
1336 lstat = self._repo.wvfs.lstat
1337 for f in list:
1337 for f in list:
1338 # ds.pathto() returns an absolute file when this is invoked from
1338 # ds.pathto() returns an absolute file when this is invoked from
1339 # the keyword extension. That gets flagged as non-portable on
1339 # the keyword extension. That gets flagged as non-portable on
1340 # Windows, since it contains the drive letter and colon.
1340 # Windows, since it contains the drive letter and colon.
1341 scmutil.checkportable(ui, os.path.join(prefix, f))
1341 scmutil.checkportable(ui, os.path.join(prefix, f))
1342 try:
1342 try:
1343 st = lstat(f)
1343 st = lstat(f)
1344 except OSError:
1344 except OSError:
1345 ui.warn(_("%s does not exist!\n") % uipath(f))
1345 ui.warn(_("%s does not exist!\n") % uipath(f))
1346 rejected.append(f)
1346 rejected.append(f)
1347 continue
1347 continue
1348 limit = ui.configbytes('ui', 'large-file-limit')
1348 limit = ui.configbytes('ui', 'large-file-limit')
1349 if limit != 0 and st.st_size > limit:
1349 if limit != 0 and st.st_size > limit:
1350 ui.warn(_("%s: up to %d MB of RAM may be required "
1350 ui.warn(_("%s: up to %d MB of RAM may be required "
1351 "to manage this file\n"
1351 "to manage this file\n"
1352 "(use 'hg revert %s' to cancel the "
1352 "(use 'hg revert %s' to cancel the "
1353 "pending addition)\n")
1353 "pending addition)\n")
1354 % (f, 3 * st.st_size // 1000000, uipath(f)))
1354 % (f, 3 * st.st_size // 1000000, uipath(f)))
1355 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1355 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1356 ui.warn(_("%s not added: only files and symlinks "
1356 ui.warn(_("%s not added: only files and symlinks "
1357 "supported currently\n") % uipath(f))
1357 "supported currently\n") % uipath(f))
1358 rejected.append(f)
1358 rejected.append(f)
1359 elif ds[f] in 'amn':
1359 elif ds[f] in 'amn':
1360 ui.warn(_("%s already tracked!\n") % uipath(f))
1360 ui.warn(_("%s already tracked!\n") % uipath(f))
1361 elif ds[f] == 'r':
1361 elif ds[f] == 'r':
1362 ds.normallookup(f)
1362 ds.normallookup(f)
1363 else:
1363 else:
1364 ds.add(f)
1364 ds.add(f)
1365 return rejected
1365 return rejected
1366
1366
1367 def forget(self, files, prefix=""):
1367 def forget(self, files, prefix=""):
1368 with self._repo.wlock():
1368 with self._repo.wlock():
1369 ds = self._repo.dirstate
1369 ds = self._repo.dirstate
1370 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1370 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1371 rejected = []
1371 rejected = []
1372 for f in files:
1372 for f in files:
1373 if f not in self._repo.dirstate:
1373 if f not in self._repo.dirstate:
1374 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1374 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1375 rejected.append(f)
1375 rejected.append(f)
1376 elif self._repo.dirstate[f] != 'a':
1376 elif self._repo.dirstate[f] != 'a':
1377 self._repo.dirstate.remove(f)
1377 self._repo.dirstate.remove(f)
1378 else:
1378 else:
1379 self._repo.dirstate.drop(f)
1379 self._repo.dirstate.drop(f)
1380 return rejected
1380 return rejected
1381
1381
1382 def undelete(self, list):
1382 def undelete(self, list):
1383 pctxs = self.parents()
1383 pctxs = self.parents()
1384 with self._repo.wlock():
1384 with self._repo.wlock():
1385 ds = self._repo.dirstate
1385 ds = self._repo.dirstate
1386 for f in list:
1386 for f in list:
1387 if self._repo.dirstate[f] != 'r':
1387 if self._repo.dirstate[f] != 'r':
1388 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1388 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1389 else:
1389 else:
1390 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1390 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1391 t = fctx.data()
1391 t = fctx.data()
1392 self._repo.wwrite(f, t, fctx.flags())
1392 self._repo.wwrite(f, t, fctx.flags())
1393 self._repo.dirstate.normal(f)
1393 self._repo.dirstate.normal(f)
1394
1394
1395 def copy(self, source, dest):
1395 def copy(self, source, dest):
1396 try:
1396 try:
1397 st = self._repo.wvfs.lstat(dest)
1397 st = self._repo.wvfs.lstat(dest)
1398 except OSError as err:
1398 except OSError as err:
1399 if err.errno != errno.ENOENT:
1399 if err.errno != errno.ENOENT:
1400 raise
1400 raise
1401 self._repo.ui.warn(_("%s does not exist!\n")
1401 self._repo.ui.warn(_("%s does not exist!\n")
1402 % self._repo.dirstate.pathto(dest))
1402 % self._repo.dirstate.pathto(dest))
1403 return
1403 return
1404 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1404 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1405 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1405 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1406 "symbolic link\n")
1406 "symbolic link\n")
1407 % self._repo.dirstate.pathto(dest))
1407 % self._repo.dirstate.pathto(dest))
1408 else:
1408 else:
1409 with self._repo.wlock():
1409 with self._repo.wlock():
1410 if self._repo.dirstate[dest] in '?':
1410 if self._repo.dirstate[dest] in '?':
1411 self._repo.dirstate.add(dest)
1411 self._repo.dirstate.add(dest)
1412 elif self._repo.dirstate[dest] in 'r':
1412 elif self._repo.dirstate[dest] in 'r':
1413 self._repo.dirstate.normallookup(dest)
1413 self._repo.dirstate.normallookup(dest)
1414 self._repo.dirstate.copy(source, dest)
1414 self._repo.dirstate.copy(source, dest)
1415
1415
1416 def match(self, pats=None, include=None, exclude=None, default='glob',
1416 def match(self, pats=None, include=None, exclude=None, default='glob',
1417 listsubrepos=False, badfn=None):
1417 listsubrepos=False, badfn=None):
1418 r = self._repo
1418 r = self._repo
1419
1419
1420 # Only a case insensitive filesystem needs magic to translate user input
1420 # Only a case insensitive filesystem needs magic to translate user input
1421 # to actual case in the filesystem.
1421 # to actual case in the filesystem.
1422 icasefs = not util.fscasesensitive(r.root)
1422 icasefs = not util.fscasesensitive(r.root)
1423 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1423 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1424 default, auditor=r.auditor, ctx=self,
1424 default, auditor=r.auditor, ctx=self,
1425 listsubrepos=listsubrepos, badfn=badfn,
1425 listsubrepos=listsubrepos, badfn=badfn,
1426 icasefs=icasefs)
1426 icasefs=icasefs)
1427
1427
1428 def _filtersuspectsymlink(self, files):
1428 def _filtersuspectsymlink(self, files):
1429 if not files or self._repo.dirstate._checklink:
1429 if not files or self._repo.dirstate._checklink:
1430 return files
1430 return files
1431
1431
1432 # Symlink placeholders may get non-symlink-like contents
1432 # Symlink placeholders may get non-symlink-like contents
1433 # via user error or dereferencing by NFS or Samba servers,
1433 # via user error or dereferencing by NFS or Samba servers,
1434 # so we filter out any placeholders that don't look like a
1434 # so we filter out any placeholders that don't look like a
1435 # symlink
1435 # symlink
1436 sane = []
1436 sane = []
1437 for f in files:
1437 for f in files:
1438 if self.flags(f) == 'l':
1438 if self.flags(f) == 'l':
1439 d = self[f].data()
1439 d = self[f].data()
1440 if (d == '' or len(d) >= 1024 or '\n' in d
1440 if (d == '' or len(d) >= 1024 or '\n' in d
1441 or stringutil.binary(d)):
1441 or stringutil.binary(d)):
1442 self._repo.ui.debug('ignoring suspect symlink placeholder'
1442 self._repo.ui.debug('ignoring suspect symlink placeholder'
1443 ' "%s"\n' % f)
1443 ' "%s"\n' % f)
1444 continue
1444 continue
1445 sane.append(f)
1445 sane.append(f)
1446 return sane
1446 return sane
1447
1447
1448 def _checklookup(self, files):
1448 def _checklookup(self, files):
1449 # check for any possibly clean files
1449 # check for any possibly clean files
1450 if not files:
1450 if not files:
1451 return [], [], []
1451 return [], [], []
1452
1452
1453 modified = []
1453 modified = []
1454 deleted = []
1454 deleted = []
1455 fixup = []
1455 fixup = []
1456 pctx = self._parents[0]
1456 pctx = self._parents[0]
1457 # do a full compare of any files that might have changed
1457 # do a full compare of any files that might have changed
1458 for f in sorted(files):
1458 for f in sorted(files):
1459 try:
1459 try:
1460 # This will return True for a file that got replaced by a
1460 # This will return True for a file that got replaced by a
1461 # directory in the interim, but fixing that is pretty hard.
1461 # directory in the interim, but fixing that is pretty hard.
1462 if (f not in pctx or self.flags(f) != pctx.flags(f)
1462 if (f not in pctx or self.flags(f) != pctx.flags(f)
1463 or pctx[f].cmp(self[f])):
1463 or pctx[f].cmp(self[f])):
1464 modified.append(f)
1464 modified.append(f)
1465 else:
1465 else:
1466 fixup.append(f)
1466 fixup.append(f)
1467 except (IOError, OSError):
1467 except (IOError, OSError):
1468 # A file become inaccessible in between? Mark it as deleted,
1468 # A file become inaccessible in between? Mark it as deleted,
1469 # matching dirstate behavior (issue5584).
1469 # matching dirstate behavior (issue5584).
1470 # The dirstate has more complex behavior around whether a
1470 # The dirstate has more complex behavior around whether a
1471 # missing file matches a directory, etc, but we don't need to
1471 # missing file matches a directory, etc, but we don't need to
1472 # bother with that: if f has made it to this point, we're sure
1472 # bother with that: if f has made it to this point, we're sure
1473 # it's in the dirstate.
1473 # it's in the dirstate.
1474 deleted.append(f)
1474 deleted.append(f)
1475
1475
1476 return modified, deleted, fixup
1476 return modified, deleted, fixup
1477
1477
1478 def _poststatusfixup(self, status, fixup):
1478 def _poststatusfixup(self, status, fixup):
1479 """update dirstate for files that are actually clean"""
1479 """update dirstate for files that are actually clean"""
1480 poststatus = self._repo.postdsstatus()
1480 poststatus = self._repo.postdsstatus()
1481 if fixup or poststatus:
1481 if fixup or poststatus:
1482 try:
1482 try:
1483 oldid = self._repo.dirstate.identity()
1483 oldid = self._repo.dirstate.identity()
1484
1484
1485 # updating the dirstate is optional
1485 # updating the dirstate is optional
1486 # so we don't wait on the lock
1486 # so we don't wait on the lock
1487 # wlock can invalidate the dirstate, so cache normal _after_
1487 # wlock can invalidate the dirstate, so cache normal _after_
1488 # taking the lock
1488 # taking the lock
1489 with self._repo.wlock(False):
1489 with self._repo.wlock(False):
1490 if self._repo.dirstate.identity() == oldid:
1490 if self._repo.dirstate.identity() == oldid:
1491 if fixup:
1491 if fixup:
1492 normal = self._repo.dirstate.normal
1492 normal = self._repo.dirstate.normal
1493 for f in fixup:
1493 for f in fixup:
1494 normal(f)
1494 normal(f)
1495 # write changes out explicitly, because nesting
1495 # write changes out explicitly, because nesting
1496 # wlock at runtime may prevent 'wlock.release()'
1496 # wlock at runtime may prevent 'wlock.release()'
1497 # after this block from doing so for subsequent
1497 # after this block from doing so for subsequent
1498 # changing files
1498 # changing files
1499 tr = self._repo.currenttransaction()
1499 tr = self._repo.currenttransaction()
1500 self._repo.dirstate.write(tr)
1500 self._repo.dirstate.write(tr)
1501
1501
1502 if poststatus:
1502 if poststatus:
1503 for ps in poststatus:
1503 for ps in poststatus:
1504 ps(self, status)
1504 ps(self, status)
1505 else:
1505 else:
1506 # in this case, writing changes out breaks
1506 # in this case, writing changes out breaks
1507 # consistency, because .hg/dirstate was
1507 # consistency, because .hg/dirstate was
1508 # already changed simultaneously after last
1508 # already changed simultaneously after last
1509 # caching (see also issue5584 for detail)
1509 # caching (see also issue5584 for detail)
1510 self._repo.ui.debug('skip updating dirstate: '
1510 self._repo.ui.debug('skip updating dirstate: '
1511 'identity mismatch\n')
1511 'identity mismatch\n')
1512 except error.LockError:
1512 except error.LockError:
1513 pass
1513 pass
1514 finally:
1514 finally:
1515 # Even if the wlock couldn't be grabbed, clear out the list.
1515 # Even if the wlock couldn't be grabbed, clear out the list.
1516 self._repo.clearpostdsstatus()
1516 self._repo.clearpostdsstatus()
1517
1517
    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        '''Gets the status from the dirstate -- internal use only.

        Returns a ``scmutil.status`` tuple.  Files the dirstate could not
        classify without comparing content are re-checked via
        ``_checklookup``, and the dirstate is opportunistically fixed up
        afterwards via ``_poststatusfixup``.
        '''
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        # 'cmp' is the list of files needing a content comparison to decide
        # whether they are really modified; 's' is the preliminary status
        cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
                                            clean=clean, unknown=unknown)

        # check for any possibly clean files
        fixup = []
        if cmp:
            # the compare list splits into really-modified, deleted, and
            # clean-but-stale-in-dirstate ('fixup') files
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

        if fixup and clean:
            s.clean.extend(fixup)

        # write the 'fixup' entries back to the dirstate (best effort)
        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s
1548
1548
1549 @propertycache
1549 @propertycache
1550 def _manifest(self):
1550 def _manifest(self):
1551 """generate a manifest corresponding to the values in self._status
1551 """generate a manifest corresponding to the values in self._status
1552
1552
1553 This reuse the file nodeid from parent, but we use special node
1553 This reuse the file nodeid from parent, but we use special node
1554 identifiers for added and modified files. This is used by manifests
1554 identifiers for added and modified files. This is used by manifests
1555 merge to see that files are different and by update logic to avoid
1555 merge to see that files are different and by update logic to avoid
1556 deleting newly added files.
1556 deleting newly added files.
1557 """
1557 """
1558 return self._buildstatusmanifest(self._status)
1558 return self._buildstatusmanifest(self._status)
1559
1559
1560 def _buildstatusmanifest(self, status):
1560 def _buildstatusmanifest(self, status):
1561 """Builds a manifest that includes the given status results."""
1561 """Builds a manifest that includes the given status results."""
1562 parents = self.parents()
1562 parents = self.parents()
1563
1563
1564 man = parents[0].manifest().copy()
1564 man = parents[0].manifest().copy()
1565
1565
1566 ff = self._flagfunc
1566 ff = self._flagfunc
1567 for i, l in ((addednodeid, status.added),
1567 for i, l in ((addednodeid, status.added),
1568 (modifiednodeid, status.modified)):
1568 (modifiednodeid, status.modified)):
1569 for f in l:
1569 for f in l:
1570 man[f] = i
1570 man[f] = i
1571 try:
1571 try:
1572 man.setflag(f, ff(f))
1572 man.setflag(f, ff(f))
1573 except OSError:
1573 except OSError:
1574 pass
1574 pass
1575
1575
1576 for f in status.deleted + status.removed:
1576 for f in status.deleted + status.removed:
1577 if f in man:
1577 if f in man:
1578 del man[f]
1578 del man[f]
1579
1579
1580 return man
1580 return man
1581
1581
1582 def _buildstatus(self, other, s, match, listignored, listclean,
1582 def _buildstatus(self, other, s, match, listignored, listclean,
1583 listunknown):
1583 listunknown):
1584 """build a status with respect to another context
1584 """build a status with respect to another context
1585
1585
1586 This includes logic for maintaining the fast path of status when
1586 This includes logic for maintaining the fast path of status when
1587 comparing the working directory against its parent, which is to skip
1587 comparing the working directory against its parent, which is to skip
1588 building a new manifest if self (working directory) is not comparing
1588 building a new manifest if self (working directory) is not comparing
1589 against its parent (repo['.']).
1589 against its parent (repo['.']).
1590 """
1590 """
1591 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1591 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1592 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1592 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1593 # might have accidentally ended up with the entire contents of the file
1593 # might have accidentally ended up with the entire contents of the file
1594 # they are supposed to be linking to.
1594 # they are supposed to be linking to.
1595 s.modified[:] = self._filtersuspectsymlink(s.modified)
1595 s.modified[:] = self._filtersuspectsymlink(s.modified)
1596 if other != self._repo['.']:
1596 if other != self._repo['.']:
1597 s = super(workingctx, self)._buildstatus(other, s, match,
1597 s = super(workingctx, self)._buildstatus(other, s, match,
1598 listignored, listclean,
1598 listignored, listclean,
1599 listunknown)
1599 listunknown)
1600 return s
1600 return s
1601
1601
1602 def _matchstatus(self, other, match):
1602 def _matchstatus(self, other, match):
1603 """override the match method with a filter for directory patterns
1603 """override the match method with a filter for directory patterns
1604
1604
1605 We use inheritance to customize the match.bad method only in cases of
1605 We use inheritance to customize the match.bad method only in cases of
1606 workingctx since it belongs only to the working directory when
1606 workingctx since it belongs only to the working directory when
1607 comparing against the parent changeset.
1607 comparing against the parent changeset.
1608
1608
1609 If we aren't comparing against the working directory's parent, then we
1609 If we aren't comparing against the working directory's parent, then we
1610 just use the default match object sent to us.
1610 just use the default match object sent to us.
1611 """
1611 """
1612 if other != self._repo['.']:
1612 if other != self._repo['.']:
1613 def bad(f, msg):
1613 def bad(f, msg):
1614 # 'f' may be a directory pattern from 'match.files()',
1614 # 'f' may be a directory pattern from 'match.files()',
1615 # so 'f not in ctx1' is not enough
1615 # so 'f not in ctx1' is not enough
1616 if f not in other and not other.hasdir(f):
1616 if f not in other and not other.hasdir(f):
1617 self._repo.ui.warn('%s: %s\n' %
1617 self._repo.ui.warn('%s: %s\n' %
1618 (self._repo.dirstate.pathto(f), msg))
1618 (self._repo.dirstate.pathto(f), msg))
1619 match.bad = bad
1619 match.bad = bad
1620 return match
1620 return match
1621
1621
    def markcommitted(self, node):
        """Perform post-commit bookkeeping once this context is ``node``."""
        super(workingctx, self).markcommitted(node)

        # give the sparse machinery a chance to react to the new commit
        sparse.aftercommit(self._repo, node)
1626
1626
class committablefilectx(basefilectx):
    """Shared behavior for file contexts that can be committed.

    Concrete subclasses include ``workingfilectx`` (files in the working
    directory) and ``memfilectx`` (in-memory files).
    """
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # a committable file context is always truthy
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def nodefor(ctx, name):
            return ctx._manifest.get(name, nullid)

        name = self._path
        flog = self._filelog
        ctxparents = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            # (source path, source filenode) from renamed(); no filelog
            candidates = [renamed + (None,)]
        else:
            candidates = [(name, nodefor(ctxparents[0], name), flog)]
        candidates.extend((name, nodefor(pctx, name), flog)
                          for pctx in ctxparents[1:])

        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in candidates if n != nullid]

    def children(self):
        # an uncommitted file has no committed children
        return []
1673
1673
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        # lazily bind to a fresh workingctx when one was not supplied
        return workingctx(self._repo)

    def data(self):
        """Return the file content as read from the working directory."""
        return self._repo.wread(self._path)
    def renamed(self):
        """Return (source path, source filenode in p1) if this file is a
        copy/rename recorded in the dirstate, else None."""
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        """Return the on-disk size (lstat, so symlinks are not followed)."""
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        """Return (mtime, tzoffset); falls back to the changectx's own
        timestamp if the file is missing from disk."""
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # file does not exist: reuse the changectx date
            return (t, tz)

    def exists(self):
        # exists() on the working vfs (follows symlinks)
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        # lexists() on the working vfs (does not follow symlinks)
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        """Run the working vfs path auditor on this path."""
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # optionally prune directories left empty by the removal
        rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
                                   rmdir=rmdir)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags,
                          backgroundclose=backgroundclose,
                          **kwargs)

    def markcopied(self, src):
        """marks this file a copy of `src`"""
        # only record the copy if the file is tracked ("nma" dirstate states)
        if self._repo.dirstate[self._path] in "nma":
            self._repo.dirstate.copy(src, self._path)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        if wvfs.isdir(f) and not wvfs.islink(f):
            # a real directory is squatting on our path; remove it
            wvfs.rmtree(f, forcibly=True)
        if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
            # an ancestor directory component may itself be a file or
            # symlink; unlink the closest such ancestor so the path can
            # be created
            for p in reversed(list(util.finddirs(f))):
                if wvfs.isfileorlink(p):
                    wvfs.unlink(p)
                    break

    def setflags(self, l, x):
        # update the on-disk link/exec flags for this file
        self._repo.wvfs.setflags(self._path, l, x)
1755
1755
1756 class overlayworkingctx(committablectx):
1756 class overlayworkingctx(committablectx):
1757 """Wraps another mutable context with a write-back cache that can be
1757 """Wraps another mutable context with a write-back cache that can be
1758 converted into a commit context.
1758 converted into a commit context.
1759
1759
1760 self._cache[path] maps to a dict with keys: {
1760 self._cache[path] maps to a dict with keys: {
1761 'exists': bool?
1761 'exists': bool?
1762 'date': date?
1762 'date': date?
1763 'data': str?
1763 'data': str?
1764 'flags': str?
1764 'flags': str?
1765 'copied': str? (path or None)
1765 'copied': str? (path or None)
1766 }
1766 }
1767 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1767 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1768 is `False`, the file was deleted.
1768 is `False`, the file was deleted.
1769 """
1769 """
1770
1770
1771 def __init__(self, repo):
1771 def __init__(self, repo):
1772 super(overlayworkingctx, self).__init__(repo)
1772 super(overlayworkingctx, self).__init__(repo)
1773 self.clean()
1773 self.clean()
1774
1774
1775 def setbase(self, wrappedctx):
1775 def setbase(self, wrappedctx):
1776 self._wrappedctx = wrappedctx
1776 self._wrappedctx = wrappedctx
1777 self._parents = [wrappedctx]
1777 self._parents = [wrappedctx]
1778 # Drop old manifest cache as it is now out of date.
1778 # Drop old manifest cache as it is now out of date.
1779 # This is necessary when, e.g., rebasing several nodes with one
1779 # This is necessary when, e.g., rebasing several nodes with one
1780 # ``overlayworkingctx`` (e.g. with --collapse).
1780 # ``overlayworkingctx`` (e.g. with --collapse).
1781 util.clearcachedproperty(self, '_manifest')
1781 util.clearcachedproperty(self, '_manifest')
1782
1782
1783 def data(self, path):
1783 def data(self, path):
1784 if self.isdirty(path):
1784 if self.isdirty(path):
1785 if self._cache[path]['exists']:
1785 if self._cache[path]['exists']:
1786 if self._cache[path]['data']:
1786 if self._cache[path]['data']:
1787 return self._cache[path]['data']
1787 return self._cache[path]['data']
1788 else:
1788 else:
1789 # Must fallback here, too, because we only set flags.
1789 # Must fallback here, too, because we only set flags.
1790 return self._wrappedctx[path].data()
1790 return self._wrappedctx[path].data()
1791 else:
1791 else:
1792 raise error.ProgrammingError("No such file or directory: %s" %
1792 raise error.ProgrammingError("No such file or directory: %s" %
1793 path)
1793 path)
1794 else:
1794 else:
1795 return self._wrappedctx[path].data()
1795 return self._wrappedctx[path].data()
1796
1796
1797 @propertycache
1797 @propertycache
1798 def _manifest(self):
1798 def _manifest(self):
1799 parents = self.parents()
1799 parents = self.parents()
1800 man = parents[0].manifest().copy()
1800 man = parents[0].manifest().copy()
1801
1801
1802 flag = self._flagfunc
1802 flag = self._flagfunc
1803 for path in self.added():
1803 for path in self.added():
1804 man[path] = addednodeid
1804 man[path] = addednodeid
1805 man.setflag(path, flag(path))
1805 man.setflag(path, flag(path))
1806 for path in self.modified():
1806 for path in self.modified():
1807 man[path] = modifiednodeid
1807 man[path] = modifiednodeid
1808 man.setflag(path, flag(path))
1808 man.setflag(path, flag(path))
1809 for path in self.removed():
1809 for path in self.removed():
1810 del man[path]
1810 del man[path]
1811 return man
1811 return man
1812
1812
1813 @propertycache
1813 @propertycache
1814 def _flagfunc(self):
1814 def _flagfunc(self):
1815 def f(path):
1815 def f(path):
1816 return self._cache[path]['flags']
1816 return self._cache[path]['flags']
1817 return f
1817 return f
1818
1818
1819 def files(self):
1819 def files(self):
1820 return sorted(self.added() + self.modified() + self.removed())
1820 return sorted(self.added() + self.modified() + self.removed())
1821
1821
1822 def modified(self):
1822 def modified(self):
1823 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1823 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1824 self._existsinparent(f)]
1824 self._existsinparent(f)]
1825
1825
1826 def added(self):
1826 def added(self):
1827 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1827 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1828 not self._existsinparent(f)]
1828 not self._existsinparent(f)]
1829
1829
1830 def removed(self):
1830 def removed(self):
1831 return [f for f in self._cache.keys() if
1831 return [f for f in self._cache.keys() if
1832 not self._cache[f]['exists'] and self._existsinparent(f)]
1832 not self._cache[f]['exists'] and self._existsinparent(f)]
1833
1833
1834 def isinmemory(self):
1834 def isinmemory(self):
1835 return True
1835 return True
1836
1836
1837 def filedate(self, path):
1837 def filedate(self, path):
1838 if self.isdirty(path):
1838 if self.isdirty(path):
1839 return self._cache[path]['date']
1839 return self._cache[path]['date']
1840 else:
1840 else:
1841 return self._wrappedctx[path].date()
1841 return self._wrappedctx[path].date()
1842
1842
1843 def markcopied(self, path, origin):
1843 def markcopied(self, path, origin):
1844 if self.isdirty(path):
1844 if self.isdirty(path):
1845 self._cache[path]['copied'] = origin
1845 self._cache[path]['copied'] = origin
1846 else:
1846 else:
1847 raise error.ProgrammingError('markcopied() called on clean context')
1847 raise error.ProgrammingError('markcopied() called on clean context')
1848
1848
1849 def copydata(self, path):
1849 def copydata(self, path):
1850 if self.isdirty(path):
1850 if self.isdirty(path):
1851 return self._cache[path]['copied']
1851 return self._cache[path]['copied']
1852 else:
1852 else:
1853 raise error.ProgrammingError('copydata() called on clean context')
1853 raise error.ProgrammingError('copydata() called on clean context')
1854
1854
1855 def flags(self, path):
1855 def flags(self, path):
1856 if self.isdirty(path):
1856 if self.isdirty(path):
1857 if self._cache[path]['exists']:
1857 if self._cache[path]['exists']:
1858 return self._cache[path]['flags']
1858 return self._cache[path]['flags']
1859 else:
1859 else:
1860 raise error.ProgrammingError("No such file or directory: %s" %
1860 raise error.ProgrammingError("No such file or directory: %s" %
1861 self._path)
1861 self._path)
1862 else:
1862 else:
1863 return self._wrappedctx[path].flags()
1863 return self._wrappedctx[path].flags()
1864
1864
1865 def _existsinparent(self, path):
1865 def _existsinparent(self, path):
1866 try:
1866 try:
1867 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
1867 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
1868 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
1868 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
1869 # with an ``exists()`` function.
1869 # with an ``exists()`` function.
1870 self._wrappedctx[path]
1870 self._wrappedctx[path]
1871 return True
1871 return True
1872 except error.ManifestLookupError:
1872 except error.ManifestLookupError:
1873 return False
1873 return False
1874
1874
1875 def _auditconflicts(self, path):
1875 def _auditconflicts(self, path):
1876 """Replicates conflict checks done by wvfs.write().
1876 """Replicates conflict checks done by wvfs.write().
1877
1877
1878 Since we never write to the filesystem and never call `applyupdates` in
1878 Since we never write to the filesystem and never call `applyupdates` in
1879 IMM, we'll never check that a path is actually writable -- e.g., because
1879 IMM, we'll never check that a path is actually writable -- e.g., because
1880 it adds `a/foo`, but `a` is actually a file in the other commit.
1880 it adds `a/foo`, but `a` is actually a file in the other commit.
1881 """
1881 """
1882 def fail(path, component):
1882 def fail(path, component):
1883 # p1() is the base and we're receiving "writes" for p2()'s
1883 # p1() is the base and we're receiving "writes" for p2()'s
1884 # files.
1884 # files.
1885 if 'l' in self.p1()[component].flags():
1885 if 'l' in self.p1()[component].flags():
1886 raise error.Abort("error: %s conflicts with symlink %s "
1886 raise error.Abort("error: %s conflicts with symlink %s "
1887 "in %s." % (path, component,
1887 "in %s." % (path, component,
1888 self.p1().rev()))
1888 self.p1().rev()))
1889 else:
1889 else:
1890 raise error.Abort("error: '%s' conflicts with file '%s' in "
1890 raise error.Abort("error: '%s' conflicts with file '%s' in "
1891 "%s." % (path, component,
1891 "%s." % (path, component,
1892 self.p1().rev()))
1892 self.p1().rev()))
1893
1893
1894 # Test that each new directory to be created to write this path from p2
1894 # Test that each new directory to be created to write this path from p2
1895 # is not a file in p1.
1895 # is not a file in p1.
1896 components = path.split('/')
1896 components = path.split('/')
1897 for i in pycompat.xrange(len(components)):
1897 for i in pycompat.xrange(len(components)):
1898 component = "/".join(components[0:i])
1898 component = "/".join(components[0:i])
1899 if component in self.p1():
1899 if component in self.p1() and self._cache[component]['exists']:
1900 fail(path, component)
1900 fail(path, component)
1901
1901
1902 # Test the other direction -- that this path from p2 isn't a directory
1902 # Test the other direction -- that this path from p2 isn't a directory
1903 # in p1 (test that p1 doesn't any paths matching `path/*`).
1903 # in p1 (test that p1 doesn't any paths matching `path/*`).
1904 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
1904 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
1905 matches = self.p1().manifest().matches(match)
1905 matches = self.p1().manifest().matches(match)
1906 if len(matches) > 0:
1906 if len(matches) > 0:
1907 if len(matches) == 1 and matches.keys()[0] == path:
1907 if len(matches) == 1 and matches.keys()[0] == path:
1908 return
1908 return
1909 raise error.Abort("error: file '%s' cannot be written because "
1909 raise error.Abort("error: file '%s' cannot be written because "
1910 " '%s/' is a folder in %s (containing %d "
1910 " '%s/' is a folder in %s (containing %d "
1911 "entries: %s)"
1911 "entries: %s)"
1912 % (path, path, self.p1(), len(matches),
1912 % (path, path, self.p1(), len(matches),
1913 ', '.join(matches.keys())))
1913 ', '.join(matches.keys())))
1914
1914
1915 def write(self, path, data, flags='', **kwargs):
1915 def write(self, path, data, flags='', **kwargs):
1916 if data is None:
1916 if data is None:
1917 raise error.ProgrammingError("data must be non-None")
1917 raise error.ProgrammingError("data must be non-None")
1918 self._auditconflicts(path)
1918 self._auditconflicts(path)
1919 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
1919 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
1920 flags=flags)
1920 flags=flags)
1921
1921
1922 def setflags(self, path, l, x):
1922 def setflags(self, path, l, x):
1923 flag = ''
1923 flag = ''
1924 if l:
1924 if l:
1925 flag = 'l'
1925 flag = 'l'
1926 elif x:
1926 elif x:
1927 flag = 'x'
1927 flag = 'x'
1928 self._markdirty(path, exists=True, date=dateutil.makedate(),
1928 self._markdirty(path, exists=True, date=dateutil.makedate(),
1929 flags=flag)
1929 flags=flag)
1930
1930
1931 def remove(self, path):
1931 def remove(self, path):
1932 self._markdirty(path, exists=False)
1932 self._markdirty(path, exists=False)
1933
1933
1934 def exists(self, path):
1934 def exists(self, path):
1935 """exists behaves like `lexists`, but needs to follow symlinks and
1935 """exists behaves like `lexists`, but needs to follow symlinks and
1936 return False if they are broken.
1936 return False if they are broken.
1937 """
1937 """
1938 if self.isdirty(path):
1938 if self.isdirty(path):
1939 # If this path exists and is a symlink, "follow" it by calling
1939 # If this path exists and is a symlink, "follow" it by calling
1940 # exists on the destination path.
1940 # exists on the destination path.
1941 if (self._cache[path]['exists'] and
1941 if (self._cache[path]['exists'] and
1942 'l' in self._cache[path]['flags']):
1942 'l' in self._cache[path]['flags']):
1943 return self.exists(self._cache[path]['data'].strip())
1943 return self.exists(self._cache[path]['data'].strip())
1944 else:
1944 else:
1945 return self._cache[path]['exists']
1945 return self._cache[path]['exists']
1946
1946
1947 return self._existsinparent(path)
1947 return self._existsinparent(path)
1948
1948
1949 def lexists(self, path):
1949 def lexists(self, path):
1950 """lexists returns True if the path exists"""
1950 """lexists returns True if the path exists"""
1951 if self.isdirty(path):
1951 if self.isdirty(path):
1952 return self._cache[path]['exists']
1952 return self._cache[path]['exists']
1953
1953
1954 return self._existsinparent(path)
1954 return self._existsinparent(path)
1955
1955
1956 def size(self, path):
1956 def size(self, path):
1957 if self.isdirty(path):
1957 if self.isdirty(path):
1958 if self._cache[path]['exists']:
1958 if self._cache[path]['exists']:
1959 return len(self._cache[path]['data'])
1959 return len(self._cache[path]['data'])
1960 else:
1960 else:
1961 raise error.ProgrammingError("No such file or directory: %s" %
1961 raise error.ProgrammingError("No such file or directory: %s" %
1962 self._path)
1962 self._path)
1963 return self._wrappedctx[path].size()
1963 return self._wrappedctx[path].size()
1964
1964
    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
                 user=None, editor=None):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
        # Default parents to the wrapped contexts' if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                # normalize to a (p1, p2) pair with an explicit null p2
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        # every cached path participates in the commit, including deletions
        files = self._cache.keys()
        def getfile(repo, memctx, path):
            if self._cache[path]['exists']:
                return memfilectx(repo, memctx, path,
                                  self._cache[path]['data'],
                                  'l' in self._cache[path]['flags'],
                                  'x' in self._cache[path]['flags'],
                                  self._cache[path]['copied'])
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None
        return memctx(self._repo, parents, text, files, getfile, date=date,
                      extra=extra, user=user, branch=branch, editor=editor)
1999
1999
2000 def isdirty(self, path):
2000 def isdirty(self, path):
2001 return path in self._cache
2001 return path in self._cache
2002
2002
2003 def isempty(self):
2003 def isempty(self):
2004 # We need to discard any keys that are actually clean before the empty
2004 # We need to discard any keys that are actually clean before the empty
2005 # commit check.
2005 # commit check.
2006 self._compact()
2006 self._compact()
2007 return len(self._cache) == 0
2007 return len(self._cache) == 0
2008
2008
2009 def clean(self):
2009 def clean(self):
2010 self._cache = {}
2010 self._cache = {}
2011
2011
2012 def _compact(self):
2012 def _compact(self):
2013 """Removes keys from the cache that are actually clean, by comparing
2013 """Removes keys from the cache that are actually clean, by comparing
2014 them with the underlying context.
2014 them with the underlying context.
2015
2015
2016 This can occur during the merge process, e.g. by passing --tool :local
2016 This can occur during the merge process, e.g. by passing --tool :local
2017 to resolve a conflict.
2017 to resolve a conflict.
2018 """
2018 """
2019 keys = []
2019 keys = []
2020 for path in self._cache.keys():
2020 for path in self._cache.keys():
2021 cache = self._cache[path]
2021 cache = self._cache[path]
2022 try:
2022 try:
2023 underlying = self._wrappedctx[path]
2023 underlying = self._wrappedctx[path]
2024 if (underlying.data() == cache['data'] and
2024 if (underlying.data() == cache['data'] and
2025 underlying.flags() == cache['flags']):
2025 underlying.flags() == cache['flags']):
2026 keys.append(path)
2026 keys.append(path)
2027 except error.ManifestLookupError:
2027 except error.ManifestLookupError:
2028 # Path not in the underlying manifest (created).
2028 # Path not in the underlying manifest (created).
2029 continue
2029 continue
2030
2030
2031 for path in keys:
2031 for path in keys:
2032 del self._cache[path]
2032 del self._cache[path]
2033 return keys
2033 return keys
2034
2034
2035 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2035 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2036 # data not provided, let's see if we already have some; if not, let's
2036 # data not provided, let's see if we already have some; if not, let's
2037 # grab it from our underlying context, so that we always have data if
2037 # grab it from our underlying context, so that we always have data if
2038 # the file is marked as existing.
2038 # the file is marked as existing.
2039 if exists and data is None:
2039 if exists and data is None:
2040 oldentry = self._cache.get(path) or {}
2040 oldentry = self._cache.get(path) or {}
2041 data = oldentry.get('data') or self._wrappedctx[path].data()
2041 data = oldentry.get('data') or self._wrappedctx[path].data()
2042
2042
2043 self._cache[path] = {
2043 self._cache[path] = {
2044 'exists': exists,
2044 'exists': exists,
2045 'data': data,
2045 'data': data,
2046 'date': date,
2046 'date': date,
2047 'flags': flags,
2047 'flags': flags,
2048 'copied': None,
2048 'copied': None,
2049 }
2049 }
2050
2050
def filectx(self, path, filelog=None):
    """Return an overlay-backed file context for ``path``."""
    return overlayworkingfilectx(self._repo, path, parent=self,
                                 filelog=filelog)
2054
2054
class overlayworkingfilectx(committablefilectx):
    """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``."""

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        """Compare by content only; flag changes are tracked separately."""
        return self.data() != fctx.data()

    def changectx(self):
        """Return the overlay context this file belongs to."""
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def renamed(self):
        """Return (source, filenode) if this file was copied, else None."""
        source = self._parent.copydata(self._path)
        if not source:
            return None
        p1manifest = self._changectx._parents[0]._manifest
        return (source, p1manifest.get(source, nullid))

    def size(self):
        return self._parent.size(self._path)

    def markcopied(self, origin):
        self._parent.markcopied(self._path, origin)

    def audit(self):
        # no working-directory path to audit for an in-memory file
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        return self._parent.write(self._path, data, flags, **kwargs)

    def remove(self, ignoremissing=False):
        return self._parent.remove(self._path)

    def clearunknown(self):
        # nothing on disk can conflict with an in-memory file
        pass
2113
2113
class workingcommitctx(workingctx):
    """Convenient access to data related to the revision being committed.

    Working-directory changes that are not part of this commit are
    hidden by this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # NOTE: super(workingctx, ...) deliberately skips
        # workingctx.__init__ and calls its base directly, passing the
        # precomputed 'changes' status along.
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            cleanfiles = [f for f in self._manifest
                          if f not in self._changedset]
        else:
            cleanfiles = []
        matched = lambda files: [f for f in files if match(f)]
        return scmutil.status(matched(self._status.modified),
                              matched(self._status.added),
                              matched(self._status.removed),
                              [], [], [], cleanfiles)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        s = self._status
        return set(s.modified) | set(s.added) | set(s.removed)
2149
2149
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the
    cache key and this creates a cycle since the arguments include the
    repo and memctx.
    """
    memo = {}

    def getfilectx(repo, memctx, path):
        if path in memo:
            return memo[path]
        value = func(repo, memctx, path)
        memo[path] = value
        return value

    return getfilectx
2165
2165
def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        # renamed() yields (source, filenode), but memfilectx tracks
        # only the source path of a single copy parent.
        copysource = fctx.renamed()
        if copysource:
            copysource = copysource[0]
        return memfilectx(repo, memctx, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copysource)

    return getfilectx
2184
2184
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a
    patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        # a None payload means the patch deletes this file
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(repo, memctx, path, data,
                          islink=islink, isexec=isexec, copied=copied)

    return getfilectx
2199
2199
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism.  'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn return None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by dateutil.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # a missing (None) parent is mapped to the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [self._repo[p] for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        # adapt non-callable stores (a patch store, or anything
        # supporting ctx[path] such as another context) to the
        # filectxfn(repo, memctx, path) protocol
        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            # hash the new content against the parent filenodes
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # added files have no file parents
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                # a truthy file context: the file exists in this
                # revision, and since a parent manages it, it counts as
                # modified
                modified.append(f)
            else:
                # filectx() returned None (see filectx() above): the
                # managed file is being removed
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2322
2322
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, changectx, path, data, islink=False,
                 isexec=False, copied=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, changectx)
        self._data = data
        # at most one of 'l' (symlink) / 'x' (executable) applies
        if islink:
            self._flags = 'l'
        elif isexec:
            self._flags = 'x'
        else:
            self._flags = ''
        # copy metadata is (source path, nullid); see overlayfilectx:
        # the real copy revision is recalculated by repo._filecommit
        self._copied = (copied, nullid) if copied else None

    def data(self):
        """Return the in-memory file content."""
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags, **kwargs):
        """wraps repo.wwrite"""
        self._data = data
2360
2360
class overlayfilectx(committablefilectx):
    """Like memfilectx but take an original filectx and optional parameters to
    override parts of it. This is useful when fctx.data() is expensive (i.e.
    flag processor is expensive) and raw data, flags, and filenode could be
    reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function to override data (file content). It is a
        function to be lazy. path, flags, copied, ctx: None or overridden value

        copied could be (path, rev), or False. copied could also be just path,
        and will be converted to (path, nullid). This simplifies some callers.
        """

        if path is None:
            path = originalfctx.path()
        # ctxmatch/copiedmatch lazily report whether the override equals
        # the original's value; they feed the "reusable" test below.
        if ctx is None:
            ctx = originalfctx.changectx()
            ctxmatch = lambda: True
        else:
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # When data, copied (could affect data), ctx (could affect filelog
        # parents) are not overridden, rawdata, rawflags, and filenode may be
        # reused (repo._filecommit should double check filelog parents).
        #
        # path, flags are not hashed in filelog (but in manifestlog) so they do
        # not affect reusable here.
        #
        # If ctx or copied is overridden to a same value with originalfctx,
        # still consider it's reusable. originalfctx.renamed() may be a bit
        # expensive so it's not called unless necessary. Assuming datafunc is
        # always expensive, do not call it for this "reusable" test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # copy extra fields from originalfctx
            attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
            for attr_ in attrs:
                if util.safehasattr(originalfctx, attr_):
                    setattr(self, attr_, getattr(originalfctx, attr_))

    def data(self):
        """Return the (possibly overridden) file content."""
        return self._datafunc()
2431
2431
2432 class metadataonlyctx(committablectx):
2432 class metadataonlyctx(committablectx):
2433 """Like memctx but it's reusing the manifest of different commit.
2433 """Like memctx but it's reusing the manifest of different commit.
2434 Intended to be used by lightweight operations that are creating
2434 Intended to be used by lightweight operations that are creating
2435 metadata-only changes.
2435 metadata-only changes.
2436
2436
2437 Revision information is supplied at initialization time. 'repo' is the
2437 Revision information is supplied at initialization time. 'repo' is the
2438 current localrepo, 'ctx' is original revision which manifest we're reuisng
2438 current localrepo, 'ctx' is original revision which manifest we're reuisng
2439 'parents' is a sequence of two parent revisions identifiers (pass None for
2439 'parents' is a sequence of two parent revisions identifiers (pass None for
2440 every missing parent), 'text' is the commit.
2440 every missing parent), 'text' is the commit.
2441
2441
2442 user receives the committer name and defaults to current repository
2442 user receives the committer name and defaults to current repository
2443 username, date is the commit date in any format supported by
2443 username, date is the commit date in any format supported by
2444 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2444 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2445 metadata or is left empty.
2445 metadata or is left empty.
2446 """
2446 """
2447 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2447 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2448 date=None, extra=None, editor=False):
2448 date=None, extra=None, editor=False):
2449 if text is None:
2449 if text is None:
2450 text = originalctx.description()
2450 text = originalctx.description()
2451 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2451 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2452 self._rev = None
2452 self._rev = None
2453 self._node = None
2453 self._node = None
2454 self._originalctx = originalctx
2454 self._originalctx = originalctx
2455 self._manifestnode = originalctx.manifestnode()
2455 self._manifestnode = originalctx.manifestnode()
2456 if parents is None:
2456 if parents is None:
2457 parents = originalctx.parents()
2457 parents = originalctx.parents()
2458 else:
2458 else:
2459 parents = [repo[p] for p in parents if p is not None]
2459 parents = [repo[p] for p in parents if p is not None]
2460 parents = parents[:]
2460 parents = parents[:]
2461 while len(parents) < 2:
2461 while len(parents) < 2:
2462 parents.append(repo[nullid])
2462 parents.append(repo[nullid])
2463 p1, p2 = self._parents = parents
2463 p1, p2 = self._parents = parents
2464
2464
2465 # sanity check to ensure that the reused manifest parents are
2465 # sanity check to ensure that the reused manifest parents are
2466 # manifests of our commit parents
2466 # manifests of our commit parents
2467 mp1, mp2 = self.manifestctx().parents
2467 mp1, mp2 = self.manifestctx().parents
2468 if p1 != nullid and p1.manifestnode() != mp1:
2468 if p1 != nullid and p1.manifestnode() != mp1:
2469 raise RuntimeError('can\'t reuse the manifest: '
2469 raise RuntimeError('can\'t reuse the manifest: '
2470 'its p1 doesn\'t match the new ctx p1')
2470 'its p1 doesn\'t match the new ctx p1')
2471 if p2 != nullid and p2.manifestnode() != mp2:
2471 if p2 != nullid and p2.manifestnode() != mp2:
2472 raise RuntimeError('can\'t reuse the manifest: '
2472 raise RuntimeError('can\'t reuse the manifest: '
2473 'its p2 doesn\'t match the new ctx p2')
2473 'its p2 doesn\'t match the new ctx p2')
2474
2474
2475 self._files = originalctx.files()
2475 self._files = originalctx.files()
2476 self.substate = {}
2476 self.substate = {}
2477
2477
2478 if editor:
2478 if editor:
2479 self._text = editor(self._repo, self, [])
2479 self._text = editor(self._repo, self, [])
2480 self._repo.savecommitmessage(self._text)
2480 self._repo.savecommitmessage(self._text)
2481
2481
2482 def manifestnode(self):
2482 def manifestnode(self):
2483 return self._manifestnode
2483 return self._manifestnode
2484
2484
2485 @property
2485 @property
2486 def _manifestctx(self):
2486 def _manifestctx(self):
2487 return self._repo.manifestlog[self._manifestnode]
2487 return self._repo.manifestlog[self._manifestnode]
2488
2488
2489 def filectx(self, path, filelog=None):
2489 def filectx(self, path, filelog=None):
2490 return self._originalctx.filectx(path, filelog=filelog)
2490 return self._originalctx.filectx(path, filelog=filelog)
2491
2491
2492 def commit(self):
2492 def commit(self):
2493 """commit context to the repo"""
2493 """commit context to the repo"""
2494 return self._repo.commitctx(self)
2494 return self._repo.commitctx(self)
2495
2495
2496 @property
2496 @property
2497 def _manifest(self):
2497 def _manifest(self):
2498 return self._originalctx.manifest()
2498 return self._originalctx.manifest()
2499
2499
2500 @propertycache
2500 @propertycache
2501 def _status(self):
2501 def _status(self):
2502 """Calculate exact status from ``files`` specified in the ``origctx``
2502 """Calculate exact status from ``files`` specified in the ``origctx``
2503 and parents manifests.
2503 and parents manifests.
2504 """
2504 """
2505 man1 = self.p1().manifest()
2505 man1 = self.p1().manifest()
2506 p2 = self._parents[1]
2506 p2 = self._parents[1]
2507 # "1 < len(self._parents)" can't be used for checking
2507 # "1 < len(self._parents)" can't be used for checking
2508 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2508 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2509 # explicitly initialized by the list, of which length is 2.
2509 # explicitly initialized by the list, of which length is 2.
2510 if p2.node() != nullid:
2510 if p2.node() != nullid:
2511 man2 = p2.manifest()
2511 man2 = p2.manifest()
2512 managing = lambda f: f in man1 or f in man2
2512 managing = lambda f: f in man1 or f in man2
2513 else:
2513 else:
2514 managing = lambda f: f in man1
2514 managing = lambda f: f in man1
2515
2515
2516 modified, added, removed = [], [], []
2516 modified, added, removed = [], [], []
2517 for f in self._files:
2517 for f in self._files:
2518 if not managing(f):
2518 if not managing(f):
2519 added.append(f)
2519 added.append(f)
2520 elif f in self:
2520 elif f in self:
2521 modified.append(f)
2521 modified.append(f)
2522 else:
2522 else:
2523 removed.append(f)
2523 removed.append(f)
2524
2524
2525 return scmutil.status(modified, added, removed, [], [], [], [])
2525 return scmutil.status(modified, added, removed, [], [], [], [])
2526
2526
2527 class arbitraryfilectx(object):
2527 class arbitraryfilectx(object):
2528 """Allows you to use filectx-like functions on a file in an arbitrary
2528 """Allows you to use filectx-like functions on a file in an arbitrary
2529 location on disk, possibly not in the working directory.
2529 location on disk, possibly not in the working directory.
2530 """
2530 """
2531 def __init__(self, path, repo=None):
2531 def __init__(self, path, repo=None):
2532 # Repo is optional because contrib/simplemerge uses this class.
2532 # Repo is optional because contrib/simplemerge uses this class.
2533 self._repo = repo
2533 self._repo = repo
2534 self._path = path
2534 self._path = path
2535
2535
2536 def cmp(self, fctx):
2536 def cmp(self, fctx):
2537 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2537 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2538 # path if either side is a symlink.
2538 # path if either side is a symlink.
2539 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2539 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2540 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2540 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2541 # Add a fast-path for merge if both sides are disk-backed.
2541 # Add a fast-path for merge if both sides are disk-backed.
2542 # Note that filecmp uses the opposite return values (True if same)
2542 # Note that filecmp uses the opposite return values (True if same)
2543 # from our cmp functions (True if different).
2543 # from our cmp functions (True if different).
2544 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2544 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2545 return self.data() != fctx.data()
2545 return self.data() != fctx.data()
2546
2546
2547 def path(self):
2547 def path(self):
2548 return self._path
2548 return self._path
2549
2549
2550 def flags(self):
2550 def flags(self):
2551 return ''
2551 return ''
2552
2552
2553 def data(self):
2553 def data(self):
2554 return util.readfile(self._path)
2554 return util.readfile(self._path)
2555
2555
2556 def decodeddata(self):
2556 def decodeddata(self):
2557 with open(self._path, "rb") as f:
2557 with open(self._path, "rb") as f:
2558 return f.read()
2558 return f.read()
2559
2559
2560 def remove(self):
2560 def remove(self):
2561 util.unlink(self._path)
2561 util.unlink(self._path)
2562
2562
2563 def write(self, data, flags, **kwargs):
2563 def write(self, data, flags, **kwargs):
2564 assert not flags
2564 assert not flags
2565 with open(self._path, "w") as f:
2565 with open(self._path, "w") as f:
2566 f.write(data)
2566 f.write(data)
@@ -1,566 +1,565 b''
1 #require symlink execbit
1 #require symlink execbit
2 $ cat << EOF >> $HGRCPATH
2 $ cat << EOF >> $HGRCPATH
3 > [extensions]
3 > [extensions]
4 > amend=
4 > amend=
5 > rebase=
5 > rebase=
6 > debugdrawdag=$TESTDIR/drawdag.py
6 > debugdrawdag=$TESTDIR/drawdag.py
7 > strip=
7 > strip=
8 > [rebase]
8 > [rebase]
9 > experimental.inmemory=1
9 > experimental.inmemory=1
10 > [diff]
10 > [diff]
11 > git=1
11 > git=1
12 > [alias]
12 > [alias]
13 > tglog = log -G --template "{rev}: {node|short} '{desc}'\n"
13 > tglog = log -G --template "{rev}: {node|short} '{desc}'\n"
14 > EOF
14 > EOF
15
15
16 Rebase a simple DAG:
16 Rebase a simple DAG:
17 $ hg init repo1
17 $ hg init repo1
18 $ cd repo1
18 $ cd repo1
19 $ hg debugdrawdag <<'EOS'
19 $ hg debugdrawdag <<'EOS'
20 > c b
20 > c b
21 > |/
21 > |/
22 > d
22 > d
23 > |
23 > |
24 > a
24 > a
25 > EOS
25 > EOS
26 $ hg up -C a
26 $ hg up -C a
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 $ hg tglog
28 $ hg tglog
29 o 3: 814f6bd05178 'c'
29 o 3: 814f6bd05178 'c'
30 |
30 |
31 | o 2: db0e82a16a62 'b'
31 | o 2: db0e82a16a62 'b'
32 |/
32 |/
33 o 1: 02952614a83d 'd'
33 o 1: 02952614a83d 'd'
34 |
34 |
35 @ 0: b173517d0057 'a'
35 @ 0: b173517d0057 'a'
36
36
37 $ hg cat -r 3 c
37 $ hg cat -r 3 c
38 c (no-eol)
38 c (no-eol)
39 $ hg cat -r 2 b
39 $ hg cat -r 2 b
40 b (no-eol)
40 b (no-eol)
41 $ hg rebase --debug -r b -d c | grep rebasing
41 $ hg rebase --debug -r b -d c | grep rebasing
42 rebasing in-memory
42 rebasing in-memory
43 rebasing 2:db0e82a16a62 "b" (b)
43 rebasing 2:db0e82a16a62 "b" (b)
44 $ hg tglog
44 $ hg tglog
45 o 3: ca58782ad1e4 'b'
45 o 3: ca58782ad1e4 'b'
46 |
46 |
47 o 2: 814f6bd05178 'c'
47 o 2: 814f6bd05178 'c'
48 |
48 |
49 o 1: 02952614a83d 'd'
49 o 1: 02952614a83d 'd'
50 |
50 |
51 @ 0: b173517d0057 'a'
51 @ 0: b173517d0057 'a'
52
52
53 $ hg cat -r 3 b
53 $ hg cat -r 3 b
54 b (no-eol)
54 b (no-eol)
55 $ hg cat -r 2 c
55 $ hg cat -r 2 c
56 c (no-eol)
56 c (no-eol)
57
57
58 Case 2:
58 Case 2:
59 $ hg init repo2
59 $ hg init repo2
60 $ cd repo2
60 $ cd repo2
61 $ hg debugdrawdag <<'EOS'
61 $ hg debugdrawdag <<'EOS'
62 > c b
62 > c b
63 > |/
63 > |/
64 > d
64 > d
65 > |
65 > |
66 > a
66 > a
67 > EOS
67 > EOS
68
68
69 Add a symlink and executable file:
69 Add a symlink and executable file:
70 $ hg up -C c
70 $ hg up -C c
71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
72 $ ln -s somefile e
72 $ ln -s somefile e
73 $ echo f > f
73 $ echo f > f
74 $ chmod +x f
74 $ chmod +x f
75 $ hg add e f
75 $ hg add e f
76 $ hg amend -q
76 $ hg amend -q
77 $ hg up -Cq a
77 $ hg up -Cq a
78
78
79 Write files to the working copy, and ensure they're still there after the rebase
79 Write files to the working copy, and ensure they're still there after the rebase
80 $ echo "abc" > a
80 $ echo "abc" > a
81 $ ln -s def b
81 $ ln -s def b
82 $ echo "ghi" > c
82 $ echo "ghi" > c
83 $ echo "jkl" > d
83 $ echo "jkl" > d
84 $ echo "mno" > e
84 $ echo "mno" > e
85 $ hg tglog
85 $ hg tglog
86 o 3: f56b71190a8f 'c'
86 o 3: f56b71190a8f 'c'
87 |
87 |
88 | o 2: db0e82a16a62 'b'
88 | o 2: db0e82a16a62 'b'
89 |/
89 |/
90 o 1: 02952614a83d 'd'
90 o 1: 02952614a83d 'd'
91 |
91 |
92 @ 0: b173517d0057 'a'
92 @ 0: b173517d0057 'a'
93
93
94 $ hg cat -r 3 c
94 $ hg cat -r 3 c
95 c (no-eol)
95 c (no-eol)
96 $ hg cat -r 2 b
96 $ hg cat -r 2 b
97 b (no-eol)
97 b (no-eol)
98 $ hg cat -r 3 e
98 $ hg cat -r 3 e
99 somefile (no-eol)
99 somefile (no-eol)
100 $ hg rebase --debug -s b -d a | grep rebasing
100 $ hg rebase --debug -s b -d a | grep rebasing
101 rebasing in-memory
101 rebasing in-memory
102 rebasing 2:db0e82a16a62 "b" (b)
102 rebasing 2:db0e82a16a62 "b" (b)
103 $ hg tglog
103 $ hg tglog
104 o 3: fc055c3b4d33 'b'
104 o 3: fc055c3b4d33 'b'
105 |
105 |
106 | o 2: f56b71190a8f 'c'
106 | o 2: f56b71190a8f 'c'
107 | |
107 | |
108 | o 1: 02952614a83d 'd'
108 | o 1: 02952614a83d 'd'
109 |/
109 |/
110 @ 0: b173517d0057 'a'
110 @ 0: b173517d0057 'a'
111
111
112 $ hg cat -r 2 c
112 $ hg cat -r 2 c
113 c (no-eol)
113 c (no-eol)
114 $ hg cat -r 3 b
114 $ hg cat -r 3 b
115 b (no-eol)
115 b (no-eol)
116 $ hg rebase --debug -s 1 -d 3 | grep rebasing
116 $ hg rebase --debug -s 1 -d 3 | grep rebasing
117 rebasing in-memory
117 rebasing in-memory
118 rebasing 1:02952614a83d "d" (d)
118 rebasing 1:02952614a83d "d" (d)
119 rebasing 2:f56b71190a8f "c"
119 rebasing 2:f56b71190a8f "c"
120 $ hg tglog
120 $ hg tglog
121 o 3: 753feb6fd12a 'c'
121 o 3: 753feb6fd12a 'c'
122 |
122 |
123 o 2: 09c044d2cb43 'd'
123 o 2: 09c044d2cb43 'd'
124 |
124 |
125 o 1: fc055c3b4d33 'b'
125 o 1: fc055c3b4d33 'b'
126 |
126 |
127 @ 0: b173517d0057 'a'
127 @ 0: b173517d0057 'a'
128
128
129 Ensure working copy files are still there:
129 Ensure working copy files are still there:
130 $ cat a
130 $ cat a
131 abc
131 abc
132 $ readlink.py b
132 $ readlink.py b
133 b -> def
133 b -> def
134 $ cat e
134 $ cat e
135 mno
135 mno
136
136
137 Ensure symlink and executable files were rebased properly:
137 Ensure symlink and executable files were rebased properly:
138 $ hg up -Cq 3
138 $ hg up -Cq 3
139 $ readlink.py e
139 $ readlink.py e
140 e -> somefile
140 e -> somefile
141 $ ls -l f | cut -c -10
141 $ ls -l f | cut -c -10
142 -rwxr-xr-x
142 -rwxr-xr-x
143
143
144 Rebase the working copy parent
144 Rebase the working copy parent
145 $ hg up -C 3
145 $ hg up -C 3
146 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
146 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
147 $ hg rebase -r 3 -d 0 --debug | grep rebasing
147 $ hg rebase -r 3 -d 0 --debug | grep rebasing
148 rebasing in-memory
148 rebasing in-memory
149 rebasing 3:753feb6fd12a "c" (tip)
149 rebasing 3:753feb6fd12a "c" (tip)
150 $ hg tglog
150 $ hg tglog
151 @ 3: 844a7de3e617 'c'
151 @ 3: 844a7de3e617 'c'
152 |
152 |
153 | o 2: 09c044d2cb43 'd'
153 | o 2: 09c044d2cb43 'd'
154 | |
154 | |
155 | o 1: fc055c3b4d33 'b'
155 | o 1: fc055c3b4d33 'b'
156 |/
156 |/
157 o 0: b173517d0057 'a'
157 o 0: b173517d0057 'a'
158
158
159
159
160 Test reporting of path conflicts
160 Test reporting of path conflicts
161
161
162 $ hg rm a
162 $ hg rm a
163 $ mkdir a
163 $ mkdir a
164 $ touch a/a
164 $ touch a/a
165 $ hg ci -Am "a/a"
165 $ hg ci -Am "a/a"
166 adding a/a
166 adding a/a
167 $ hg tglog
167 $ hg tglog
168 @ 4: daf7dfc139cb 'a/a'
168 @ 4: daf7dfc139cb 'a/a'
169 |
169 |
170 o 3: 844a7de3e617 'c'
170 o 3: 844a7de3e617 'c'
171 |
171 |
172 | o 2: 09c044d2cb43 'd'
172 | o 2: 09c044d2cb43 'd'
173 | |
173 | |
174 | o 1: fc055c3b4d33 'b'
174 | o 1: fc055c3b4d33 'b'
175 |/
175 |/
176 o 0: b173517d0057 'a'
176 o 0: b173517d0057 'a'
177
177
178 $ hg rebase -r . -d 2
178 $ hg rebase -r . -d 2
179 rebasing 4:daf7dfc139cb "a/a" (tip)
179 rebasing 4:daf7dfc139cb "a/a" (tip)
180 abort: error: 'a/a' conflicts with file 'a' in 2.
180 saved backup bundle to $TESTTMP/repo1/repo2/.hg/strip-backup/daf7dfc139cb-fdbfcf4f-rebase.hg
181 [255]
182
181
183 $ cd ..
182 $ cd ..
184
183
185 Test dry-run rebasing
184 Test dry-run rebasing
186
185
187 $ hg init repo3
186 $ hg init repo3
188 $ cd repo3
187 $ cd repo3
189 $ echo a>a
188 $ echo a>a
190 $ hg ci -Aqma
189 $ hg ci -Aqma
191 $ echo b>b
190 $ echo b>b
192 $ hg ci -Aqmb
191 $ hg ci -Aqmb
193 $ echo c>c
192 $ echo c>c
194 $ hg ci -Aqmc
193 $ hg ci -Aqmc
195 $ echo d>d
194 $ echo d>d
196 $ hg ci -Aqmd
195 $ hg ci -Aqmd
197 $ echo e>e
196 $ echo e>e
198 $ hg ci -Aqme
197 $ hg ci -Aqme
199
198
200 $ hg up 1 -q
199 $ hg up 1 -q
201 $ echo f>f
200 $ echo f>f
202 $ hg ci -Amf
201 $ hg ci -Amf
203 adding f
202 adding f
204 created new head
203 created new head
205 $ echo g>g
204 $ echo g>g
206 $ hg ci -Aqmg
205 $ hg ci -Aqmg
207 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
206 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
208 @ 6:baf10c5166d4 test
207 @ 6:baf10c5166d4 test
209 | g
208 | g
210 |
209 |
211 o 5:6343ca3eff20 test
210 o 5:6343ca3eff20 test
212 | f
211 | f
213 |
212 |
214 | o 4:e860deea161a test
213 | o 4:e860deea161a test
215 | | e
214 | | e
216 | |
215 | |
217 | o 3:055a42cdd887 test
216 | o 3:055a42cdd887 test
218 | | d
217 | | d
219 | |
218 | |
220 | o 2:177f92b77385 test
219 | o 2:177f92b77385 test
221 |/ c
220 |/ c
222 |
221 |
223 o 1:d2ae7f538514 test
222 o 1:d2ae7f538514 test
224 | b
223 | b
225 |
224 |
226 o 0:cb9a9f314b8b test
225 o 0:cb9a9f314b8b test
227 a
226 a
228
227
229 Make sure it throws error while passing --continue or --abort with --dry-run
228 Make sure it throws error while passing --continue or --abort with --dry-run
230 $ hg rebase -s 2 -d 6 -n --continue
229 $ hg rebase -s 2 -d 6 -n --continue
231 abort: cannot specify both --dry-run and --continue
230 abort: cannot specify both --dry-run and --continue
232 [255]
231 [255]
233 $ hg rebase -s 2 -d 6 -n --abort
232 $ hg rebase -s 2 -d 6 -n --abort
234 abort: cannot specify both --dry-run and --abort
233 abort: cannot specify both --dry-run and --abort
235 [255]
234 [255]
236
235
237 Check dryrun gives correct results when there is no conflict in rebasing
236 Check dryrun gives correct results when there is no conflict in rebasing
238 $ hg rebase -s 2 -d 6 -n
237 $ hg rebase -s 2 -d 6 -n
239 starting dry-run rebase; repository will not be changed
238 starting dry-run rebase; repository will not be changed
240 rebasing 2:177f92b77385 "c"
239 rebasing 2:177f92b77385 "c"
241 rebasing 3:055a42cdd887 "d"
240 rebasing 3:055a42cdd887 "d"
242 rebasing 4:e860deea161a "e"
241 rebasing 4:e860deea161a "e"
243 dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
242 dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
244
243
245 $ hg diff
244 $ hg diff
246 $ hg status
245 $ hg status
247
246
248 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
247 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
249 @ 6:baf10c5166d4 test
248 @ 6:baf10c5166d4 test
250 | g
249 | g
251 |
250 |
252 o 5:6343ca3eff20 test
251 o 5:6343ca3eff20 test
253 | f
252 | f
254 |
253 |
255 | o 4:e860deea161a test
254 | o 4:e860deea161a test
256 | | e
255 | | e
257 | |
256 | |
258 | o 3:055a42cdd887 test
257 | o 3:055a42cdd887 test
259 | | d
258 | | d
260 | |
259 | |
261 | o 2:177f92b77385 test
260 | o 2:177f92b77385 test
262 |/ c
261 |/ c
263 |
262 |
264 o 1:d2ae7f538514 test
263 o 1:d2ae7f538514 test
265 | b
264 | b
266 |
265 |
267 o 0:cb9a9f314b8b test
266 o 0:cb9a9f314b8b test
268 a
267 a
269
268
270 Check dryrun working with --collapse when there is no conflict
269 Check dryrun working with --collapse when there is no conflict
271 $ hg rebase -s 2 -d 6 -n --collapse
270 $ hg rebase -s 2 -d 6 -n --collapse
272 starting dry-run rebase; repository will not be changed
271 starting dry-run rebase; repository will not be changed
273 rebasing 2:177f92b77385 "c"
272 rebasing 2:177f92b77385 "c"
274 rebasing 3:055a42cdd887 "d"
273 rebasing 3:055a42cdd887 "d"
275 rebasing 4:e860deea161a "e"
274 rebasing 4:e860deea161a "e"
276 dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
275 dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
277
276
278 Check dryrun gives correct results when there is conflict in rebasing
277 Check dryrun gives correct results when there is conflict in rebasing
279 Make a conflict:
278 Make a conflict:
280 $ hg up 6 -q
279 $ hg up 6 -q
281 $ echo conflict>e
280 $ echo conflict>e
282 $ hg ci -Aqm "conflict with e"
281 $ hg ci -Aqm "conflict with e"
283 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
282 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
284 @ 7:d2c195b28050 test
283 @ 7:d2c195b28050 test
285 | conflict with e
284 | conflict with e
286 |
285 |
287 o 6:baf10c5166d4 test
286 o 6:baf10c5166d4 test
288 | g
287 | g
289 |
288 |
290 o 5:6343ca3eff20 test
289 o 5:6343ca3eff20 test
291 | f
290 | f
292 |
291 |
293 | o 4:e860deea161a test
292 | o 4:e860deea161a test
294 | | e
293 | | e
295 | |
294 | |
296 | o 3:055a42cdd887 test
295 | o 3:055a42cdd887 test
297 | | d
296 | | d
298 | |
297 | |
299 | o 2:177f92b77385 test
298 | o 2:177f92b77385 test
300 |/ c
299 |/ c
301 |
300 |
302 o 1:d2ae7f538514 test
301 o 1:d2ae7f538514 test
303 | b
302 | b
304 |
303 |
305 o 0:cb9a9f314b8b test
304 o 0:cb9a9f314b8b test
306 a
305 a
307
306
308 $ hg rebase -s 2 -d 7 -n
307 $ hg rebase -s 2 -d 7 -n
309 starting dry-run rebase; repository will not be changed
308 starting dry-run rebase; repository will not be changed
310 rebasing 2:177f92b77385 "c"
309 rebasing 2:177f92b77385 "c"
311 rebasing 3:055a42cdd887 "d"
310 rebasing 3:055a42cdd887 "d"
312 rebasing 4:e860deea161a "e"
311 rebasing 4:e860deea161a "e"
313 merging e
312 merging e
314 transaction abort!
313 transaction abort!
315 rollback completed
314 rollback completed
316 hit a merge conflict
315 hit a merge conflict
317 [1]
316 [1]
318 $ hg diff
317 $ hg diff
319 $ hg status
318 $ hg status
320 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
319 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
321 @ 7:d2c195b28050 test
320 @ 7:d2c195b28050 test
322 | conflict with e
321 | conflict with e
323 |
322 |
324 o 6:baf10c5166d4 test
323 o 6:baf10c5166d4 test
325 | g
324 | g
326 |
325 |
327 o 5:6343ca3eff20 test
326 o 5:6343ca3eff20 test
328 | f
327 | f
329 |
328 |
330 | o 4:e860deea161a test
329 | o 4:e860deea161a test
331 | | e
330 | | e
332 | |
331 | |
333 | o 3:055a42cdd887 test
332 | o 3:055a42cdd887 test
334 | | d
333 | | d
335 | |
334 | |
336 | o 2:177f92b77385 test
335 | o 2:177f92b77385 test
337 |/ c
336 |/ c
338 |
337 |
339 o 1:d2ae7f538514 test
338 o 1:d2ae7f538514 test
340 | b
339 | b
341 |
340 |
342 o 0:cb9a9f314b8b test
341 o 0:cb9a9f314b8b test
343 a
342 a
344
343
345 Check dryrun working with --collapse when there is conflicts
344 Check dryrun working with --collapse when there is conflicts
346 $ hg rebase -s 2 -d 7 -n --collapse
345 $ hg rebase -s 2 -d 7 -n --collapse
347 starting dry-run rebase; repository will not be changed
346 starting dry-run rebase; repository will not be changed
348 rebasing 2:177f92b77385 "c"
347 rebasing 2:177f92b77385 "c"
349 rebasing 3:055a42cdd887 "d"
348 rebasing 3:055a42cdd887 "d"
350 rebasing 4:e860deea161a "e"
349 rebasing 4:e860deea161a "e"
351 merging e
350 merging e
352 hit a merge conflict
351 hit a merge conflict
353 [1]
352 [1]
354
353
355 ==========================
354 ==========================
356 Test for --confirm option|
355 Test for --confirm option|
357 ==========================
356 ==========================
358 $ cd ..
357 $ cd ..
359 $ hg clone repo3 repo4 -q
358 $ hg clone repo3 repo4 -q
360 $ cd repo4
359 $ cd repo4
361 $ hg strip 7 -q
360 $ hg strip 7 -q
362 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
361 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
363 @ 6:baf10c5166d4 test
362 @ 6:baf10c5166d4 test
364 | g
363 | g
365 |
364 |
366 o 5:6343ca3eff20 test
365 o 5:6343ca3eff20 test
367 | f
366 | f
368 |
367 |
369 | o 4:e860deea161a test
368 | o 4:e860deea161a test
370 | | e
369 | | e
371 | |
370 | |
372 | o 3:055a42cdd887 test
371 | o 3:055a42cdd887 test
373 | | d
372 | | d
374 | |
373 | |
375 | o 2:177f92b77385 test
374 | o 2:177f92b77385 test
376 |/ c
375 |/ c
377 |
376 |
378 o 1:d2ae7f538514 test
377 o 1:d2ae7f538514 test
379 | b
378 | b
380 |
379 |
381 o 0:cb9a9f314b8b test
380 o 0:cb9a9f314b8b test
382 a
381 a
383
382
384 Check it gives error when both --dryrun and --confirm is used:
383 Check it gives error when both --dryrun and --confirm is used:
385 $ hg rebase -s 2 -d . --confirm --dry-run
384 $ hg rebase -s 2 -d . --confirm --dry-run
386 abort: cannot specify both --confirm and --dry-run
385 abort: cannot specify both --confirm and --dry-run
387 [255]
386 [255]
388 $ hg rebase -s 2 -d . --confirm --abort
387 $ hg rebase -s 2 -d . --confirm --abort
389 abort: cannot specify both --confirm and --abort
388 abort: cannot specify both --confirm and --abort
390 [255]
389 [255]
391 $ hg rebase -s 2 -d . --confirm --continue
390 $ hg rebase -s 2 -d . --confirm --continue
392 abort: cannot specify both --confirm and --continue
391 abort: cannot specify both --confirm and --continue
393 [255]
392 [255]
394
393
395 Test --confirm option when there are no conflicts:
394 Test --confirm option when there are no conflicts:
396 $ hg rebase -s 2 -d . --keep --config ui.interactive=True --confirm << EOF
395 $ hg rebase -s 2 -d . --keep --config ui.interactive=True --confirm << EOF
397 > n
396 > n
398 > EOF
397 > EOF
399 starting in-memory rebase
398 starting in-memory rebase
400 rebasing 2:177f92b77385 "c"
399 rebasing 2:177f92b77385 "c"
401 rebasing 3:055a42cdd887 "d"
400 rebasing 3:055a42cdd887 "d"
402 rebasing 4:e860deea161a "e"
401 rebasing 4:e860deea161a "e"
403 rebase completed successfully
402 rebase completed successfully
404 apply changes (yn)? n
403 apply changes (yn)? n
405 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
404 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
406 @ 6:baf10c5166d4 test
405 @ 6:baf10c5166d4 test
407 | g
406 | g
408 |
407 |
409 o 5:6343ca3eff20 test
408 o 5:6343ca3eff20 test
410 | f
409 | f
411 |
410 |
412 | o 4:e860deea161a test
411 | o 4:e860deea161a test
413 | | e
412 | | e
414 | |
413 | |
415 | o 3:055a42cdd887 test
414 | o 3:055a42cdd887 test
416 | | d
415 | | d
417 | |
416 | |
418 | o 2:177f92b77385 test
417 | o 2:177f92b77385 test
419 |/ c
418 |/ c
420 |
419 |
421 o 1:d2ae7f538514 test
420 o 1:d2ae7f538514 test
422 | b
421 | b
423 |
422 |
424 o 0:cb9a9f314b8b test
423 o 0:cb9a9f314b8b test
425 a
424 a
426
425
427 $ hg rebase -s 2 -d . --keep --config ui.interactive=True --confirm << EOF
426 $ hg rebase -s 2 -d . --keep --config ui.interactive=True --confirm << EOF
428 > y
427 > y
429 > EOF
428 > EOF
430 starting in-memory rebase
429 starting in-memory rebase
431 rebasing 2:177f92b77385 "c"
430 rebasing 2:177f92b77385 "c"
432 rebasing 3:055a42cdd887 "d"
431 rebasing 3:055a42cdd887 "d"
433 rebasing 4:e860deea161a "e"
432 rebasing 4:e860deea161a "e"
434 rebase completed successfully
433 rebase completed successfully
435 apply changes (yn)? y
434 apply changes (yn)? y
436 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
435 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
437 o 9:9fd28f55f6dc test
436 o 9:9fd28f55f6dc test
438 | e
437 | e
439 |
438 |
440 o 8:12cbf031f469 test
439 o 8:12cbf031f469 test
441 | d
440 | d
442 |
441 |
443 o 7:c83b1da5b1ae test
442 o 7:c83b1da5b1ae test
444 | c
443 | c
445 |
444 |
446 @ 6:baf10c5166d4 test
445 @ 6:baf10c5166d4 test
447 | g
446 | g
448 |
447 |
449 o 5:6343ca3eff20 test
448 o 5:6343ca3eff20 test
450 | f
449 | f
451 |
450 |
452 | o 4:e860deea161a test
451 | o 4:e860deea161a test
453 | | e
452 | | e
454 | |
453 | |
455 | o 3:055a42cdd887 test
454 | o 3:055a42cdd887 test
456 | | d
455 | | d
457 | |
456 | |
458 | o 2:177f92b77385 test
457 | o 2:177f92b77385 test
459 |/ c
458 |/ c
460 |
459 |
461 o 1:d2ae7f538514 test
460 o 1:d2ae7f538514 test
462 | b
461 | b
463 |
462 |
464 o 0:cb9a9f314b8b test
463 o 0:cb9a9f314b8b test
465 a
464 a
466
465
467 Test --confirm option when there is a conflict
466 Test --confirm option when there is a conflict
468 $ hg up tip -q
467 $ hg up tip -q
469 $ echo ee>e
468 $ echo ee>e
470 $ hg ci --amend -m "conflict with e" -q
469 $ hg ci --amend -m "conflict with e" -q
471 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
470 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
472 @ 9:906d72f66a59 test
471 @ 9:906d72f66a59 test
473 | conflict with e
472 | conflict with e
474 |
473 |
475 o 8:12cbf031f469 test
474 o 8:12cbf031f469 test
476 | d
475 | d
477 |
476 |
478 o 7:c83b1da5b1ae test
477 o 7:c83b1da5b1ae test
479 | c
478 | c
480 |
479 |
481 o 6:baf10c5166d4 test
480 o 6:baf10c5166d4 test
482 | g
481 | g
483 |
482 |
484 o 5:6343ca3eff20 test
483 o 5:6343ca3eff20 test
485 | f
484 | f
486 |
485 |
487 | o 4:e860deea161a test
486 | o 4:e860deea161a test
488 | | e
487 | | e
489 | |
488 | |
490 | o 3:055a42cdd887 test
489 | o 3:055a42cdd887 test
491 | | d
490 | | d
492 | |
491 | |
493 | o 2:177f92b77385 test
492 | o 2:177f92b77385 test
494 |/ c
493 |/ c
495 |
494 |
496 o 1:d2ae7f538514 test
495 o 1:d2ae7f538514 test
497 | b
496 | b
498 |
497 |
499 o 0:cb9a9f314b8b test
498 o 0:cb9a9f314b8b test
500 a
499 a
501
500
502 $ hg rebase -s 4 -d . --keep --confirm
501 $ hg rebase -s 4 -d . --keep --confirm
503 starting in-memory rebase
502 starting in-memory rebase
504 rebasing 4:e860deea161a "e"
503 rebasing 4:e860deea161a "e"
505 merging e
504 merging e
506 hit a merge conflict
505 hit a merge conflict
507 [1]
506 [1]
508 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
507 $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
509 @ 9:906d72f66a59 test
508 @ 9:906d72f66a59 test
510 | conflict with e
509 | conflict with e
511 |
510 |
512 o 8:12cbf031f469 test
511 o 8:12cbf031f469 test
513 | d
512 | d
514 |
513 |
515 o 7:c83b1da5b1ae test
514 o 7:c83b1da5b1ae test
516 | c
515 | c
517 |
516 |
518 o 6:baf10c5166d4 test
517 o 6:baf10c5166d4 test
519 | g
518 | g
520 |
519 |
521 o 5:6343ca3eff20 test
520 o 5:6343ca3eff20 test
522 | f
521 | f
523 |
522 |
524 | o 4:e860deea161a test
523 | o 4:e860deea161a test
525 | | e
524 | | e
526 | |
525 | |
527 | o 3:055a42cdd887 test
526 | o 3:055a42cdd887 test
528 | | d
527 | | d
529 | |
528 | |
530 | o 2:177f92b77385 test
529 | o 2:177f92b77385 test
531 |/ c
530 |/ c
532 |
531 |
533 o 1:d2ae7f538514 test
532 o 1:d2ae7f538514 test
534 | b
533 | b
535 |
534 |
536 o 0:cb9a9f314b8b test
535 o 0:cb9a9f314b8b test
537 a
536 a
538
537
539 #if execbit
538 #if execbit
540
539
541 Test a metadata-only in-memory merge
540 Test a metadata-only in-memory merge
542 $ cd $TESTTMP
541 $ cd $TESTTMP
543 $ hg init no_exception
542 $ hg init no_exception
544 $ cd no_exception
543 $ cd no_exception
545 # Produce the following graph:
544 # Produce the following graph:
546 # o 'add +x to foo.txt'
545 # o 'add +x to foo.txt'
547 # | o r1 (adds bar.txt, just for something to rebase to)
546 # | o r1 (adds bar.txt, just for something to rebase to)
548 # |/
547 # |/
549 # o r0 (adds foo.txt, no +x)
548 # o r0 (adds foo.txt, no +x)
550 $ echo hi > foo.txt
549 $ echo hi > foo.txt
551 $ hg ci -qAm r0
550 $ hg ci -qAm r0
552 $ echo hi > bar.txt
551 $ echo hi > bar.txt
553 $ hg ci -qAm r1
552 $ hg ci -qAm r1
554 $ hg co -qr ".^"
553 $ hg co -qr ".^"
555 $ chmod +x foo.txt
554 $ chmod +x foo.txt
556 $ hg ci -qAm 'add +x to foo.txt'
555 $ hg ci -qAm 'add +x to foo.txt'
557 issue5960: this was raising an AttributeError exception
556 issue5960: this was raising an AttributeError exception
558 $ hg rebase -r . -d 1
557 $ hg rebase -r . -d 1
559 rebasing 2:539b93e77479 "add +x to foo.txt" (tip)
558 rebasing 2:539b93e77479 "add +x to foo.txt" (tip)
560 saved backup bundle to $TESTTMP/no_exception/.hg/strip-backup/*.hg (glob)
559 saved backup bundle to $TESTTMP/no_exception/.hg/strip-backup/*.hg (glob)
561 $ hg diff -c tip
560 $ hg diff -c tip
562 diff --git a/foo.txt b/foo.txt
561 diff --git a/foo.txt b/foo.txt
563 old mode 100644
562 old mode 100644
564 new mode 100755
563 new mode 100755
565
564
566 #endif
565 #endif
General Comments 0
You need to be logged in to leave comments. Login now