##// END OF EJS Templates
merge: do not delete untracked files silently (issue5962)...
Yuya Nishihara -
r39232:8c6775e8 stable
parent child Browse files
Show More
@@ -1,2557 +1,2563 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirfilenodeids,
24 wdirfilenodeids,
25 wdirid,
25 wdirid,
26 )
26 )
27 from . import (
27 from . import (
28 dagop,
28 dagop,
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 obsolete as obsmod,
33 obsolete as obsmod,
34 patch,
34 patch,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 repoview,
38 repoview,
39 revlog,
39 revlog,
40 scmutil,
40 scmutil,
41 sparse,
41 sparse,
42 subrepo,
42 subrepo,
43 subrepoutil,
43 subrepoutil,
44 util,
44 util,
45 )
45 )
46 from .utils import (
46 from .utils import (
47 dateutil,
47 dateutil,
48 stringutil,
48 stringutil,
49 )
49 )
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
53 class basectx(object):
53 class basectx(object):
54 """A basectx object represents the common logic for its children:
54 """A basectx object represents the common logic for its children:
55 changectx: read-only context that is already present in the repo,
55 changectx: read-only context that is already present in the repo,
56 workingctx: a context that represents the working directory and can
56 workingctx: a context that represents the working directory and can
57 be committed,
57 be committed,
58 memctx: a context that represents changes in-memory and can also
58 memctx: a context that represents changes in-memory and can also
59 be committed."""
59 be committed."""
60
60
61 def __init__(self, repo):
61 def __init__(self, repo):
62 self._repo = repo
62 self._repo = repo
63
63
64 def __bytes__(self):
64 def __bytes__(self):
65 return short(self.node())
65 return short(self.node())
66
66
67 __str__ = encoding.strmethod(__bytes__)
67 __str__ = encoding.strmethod(__bytes__)
68
68
69 def __repr__(self):
69 def __repr__(self):
70 return r"<%s %s>" % (type(self).__name__, str(self))
70 return r"<%s %s>" % (type(self).__name__, str(self))
71
71
72 def __eq__(self, other):
72 def __eq__(self, other):
73 try:
73 try:
74 return type(self) == type(other) and self._rev == other._rev
74 return type(self) == type(other) and self._rev == other._rev
75 except AttributeError:
75 except AttributeError:
76 return False
76 return False
77
77
78 def __ne__(self, other):
78 def __ne__(self, other):
79 return not (self == other)
79 return not (self == other)
80
80
81 def __contains__(self, key):
81 def __contains__(self, key):
82 return key in self._manifest
82 return key in self._manifest
83
83
84 def __getitem__(self, key):
84 def __getitem__(self, key):
85 return self.filectx(key)
85 return self.filectx(key)
86
86
87 def __iter__(self):
87 def __iter__(self):
88 return iter(self._manifest)
88 return iter(self._manifest)
89
89
90 def _buildstatusmanifest(self, status):
90 def _buildstatusmanifest(self, status):
91 """Builds a manifest that includes the given status results, if this is
91 """Builds a manifest that includes the given status results, if this is
92 a working copy context. For non-working copy contexts, it just returns
92 a working copy context. For non-working copy contexts, it just returns
93 the normal manifest."""
93 the normal manifest."""
94 return self.manifest()
94 return self.manifest()
95
95
96 def _matchstatus(self, other, match):
96 def _matchstatus(self, other, match):
97 """This internal method provides a way for child objects to override the
97 """This internal method provides a way for child objects to override the
98 match operator.
98 match operator.
99 """
99 """
100 return match
100 return match
101
101
102 def _buildstatus(self, other, s, match, listignored, listclean,
102 def _buildstatus(self, other, s, match, listignored, listclean,
103 listunknown):
103 listunknown):
104 """build a status with respect to another context"""
104 """build a status with respect to another context"""
105 # Load earliest manifest first for caching reasons. More specifically,
105 # Load earliest manifest first for caching reasons. More specifically,
106 # if you have revisions 1000 and 1001, 1001 is probably stored as a
106 # if you have revisions 1000 and 1001, 1001 is probably stored as a
107 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
107 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
108 # 1000 and cache it so that when you read 1001, we just need to apply a
108 # 1000 and cache it so that when you read 1001, we just need to apply a
109 # delta to what's in the cache. So that's one full reconstruction + one
109 # delta to what's in the cache. So that's one full reconstruction + one
110 # delta application.
110 # delta application.
111 mf2 = None
111 mf2 = None
112 if self.rev() is not None and self.rev() < other.rev():
112 if self.rev() is not None and self.rev() < other.rev():
113 mf2 = self._buildstatusmanifest(s)
113 mf2 = self._buildstatusmanifest(s)
114 mf1 = other._buildstatusmanifest(s)
114 mf1 = other._buildstatusmanifest(s)
115 if mf2 is None:
115 if mf2 is None:
116 mf2 = self._buildstatusmanifest(s)
116 mf2 = self._buildstatusmanifest(s)
117
117
118 modified, added = [], []
118 modified, added = [], []
119 removed = []
119 removed = []
120 clean = []
120 clean = []
121 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
121 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
122 deletedset = set(deleted)
122 deletedset = set(deleted)
123 d = mf1.diff(mf2, match=match, clean=listclean)
123 d = mf1.diff(mf2, match=match, clean=listclean)
124 for fn, value in d.iteritems():
124 for fn, value in d.iteritems():
125 if fn in deletedset:
125 if fn in deletedset:
126 continue
126 continue
127 if value is None:
127 if value is None:
128 clean.append(fn)
128 clean.append(fn)
129 continue
129 continue
130 (node1, flag1), (node2, flag2) = value
130 (node1, flag1), (node2, flag2) = value
131 if node1 is None:
131 if node1 is None:
132 added.append(fn)
132 added.append(fn)
133 elif node2 is None:
133 elif node2 is None:
134 removed.append(fn)
134 removed.append(fn)
135 elif flag1 != flag2:
135 elif flag1 != flag2:
136 modified.append(fn)
136 modified.append(fn)
137 elif node2 not in wdirfilenodeids:
137 elif node2 not in wdirfilenodeids:
138 # When comparing files between two commits, we save time by
138 # When comparing files between two commits, we save time by
139 # not comparing the file contents when the nodeids differ.
139 # not comparing the file contents when the nodeids differ.
140 # Note that this means we incorrectly report a reverted change
140 # Note that this means we incorrectly report a reverted change
141 # to a file as a modification.
141 # to a file as a modification.
142 modified.append(fn)
142 modified.append(fn)
143 elif self[fn].cmp(other[fn]):
143 elif self[fn].cmp(other[fn]):
144 modified.append(fn)
144 modified.append(fn)
145 else:
145 else:
146 clean.append(fn)
146 clean.append(fn)
147
147
148 if removed:
148 if removed:
149 # need to filter files if they are already reported as removed
149 # need to filter files if they are already reported as removed
150 unknown = [fn for fn in unknown if fn not in mf1 and
150 unknown = [fn for fn in unknown if fn not in mf1 and
151 (not match or match(fn))]
151 (not match or match(fn))]
152 ignored = [fn for fn in ignored if fn not in mf1 and
152 ignored = [fn for fn in ignored if fn not in mf1 and
153 (not match or match(fn))]
153 (not match or match(fn))]
154 # if they're deleted, don't report them as removed
154 # if they're deleted, don't report them as removed
155 removed = [fn for fn in removed if fn not in deletedset]
155 removed = [fn for fn in removed if fn not in deletedset]
156
156
157 return scmutil.status(modified, added, removed, deleted, unknown,
157 return scmutil.status(modified, added, removed, deleted, unknown,
158 ignored, clean)
158 ignored, clean)
159
159
160 @propertycache
160 @propertycache
161 def substate(self):
161 def substate(self):
162 return subrepoutil.state(self, self._repo.ui)
162 return subrepoutil.state(self, self._repo.ui)
163
163
164 def subrev(self, subpath):
164 def subrev(self, subpath):
165 return self.substate[subpath][1]
165 return self.substate[subpath][1]
166
166
167 def rev(self):
167 def rev(self):
168 return self._rev
168 return self._rev
169 def node(self):
169 def node(self):
170 return self._node
170 return self._node
171 def hex(self):
171 def hex(self):
172 return hex(self.node())
172 return hex(self.node())
173 def manifest(self):
173 def manifest(self):
174 return self._manifest
174 return self._manifest
175 def manifestctx(self):
175 def manifestctx(self):
176 return self._manifestctx
176 return self._manifestctx
177 def repo(self):
177 def repo(self):
178 return self._repo
178 return self._repo
179 def phasestr(self):
179 def phasestr(self):
180 return phases.phasenames[self.phase()]
180 return phases.phasenames[self.phase()]
181 def mutable(self):
181 def mutable(self):
182 return self.phase() > phases.public
182 return self.phase() > phases.public
183
183
184 def matchfileset(self, expr, badfn=None):
184 def matchfileset(self, expr, badfn=None):
185 return fileset.match(self, expr, badfn=badfn)
185 return fileset.match(self, expr, badfn=badfn)
186
186
187 def obsolete(self):
187 def obsolete(self):
188 """True if the changeset is obsolete"""
188 """True if the changeset is obsolete"""
189 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
189 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
190
190
191 def extinct(self):
191 def extinct(self):
192 """True if the changeset is extinct"""
192 """True if the changeset is extinct"""
193 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
193 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
194
194
195 def orphan(self):
195 def orphan(self):
196 """True if the changeset is not obsolete but it's ancestor are"""
196 """True if the changeset is not obsolete but it's ancestor are"""
197 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
197 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
198
198
199 def phasedivergent(self):
199 def phasedivergent(self):
200 """True if the changeset try to be a successor of a public changeset
200 """True if the changeset try to be a successor of a public changeset
201
201
202 Only non-public and non-obsolete changesets may be bumped.
202 Only non-public and non-obsolete changesets may be bumped.
203 """
203 """
204 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
204 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
205
205
206 def contentdivergent(self):
206 def contentdivergent(self):
207 """Is a successors of a changeset with multiple possible successors set
207 """Is a successors of a changeset with multiple possible successors set
208
208
209 Only non-public and non-obsolete changesets may be divergent.
209 Only non-public and non-obsolete changesets may be divergent.
210 """
210 """
211 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
211 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
212
212
213 def isunstable(self):
213 def isunstable(self):
214 """True if the changeset is either unstable, bumped or divergent"""
214 """True if the changeset is either unstable, bumped or divergent"""
215 return self.orphan() or self.phasedivergent() or self.contentdivergent()
215 return self.orphan() or self.phasedivergent() or self.contentdivergent()
216
216
217 def instabilities(self):
217 def instabilities(self):
218 """return the list of instabilities affecting this changeset.
218 """return the list of instabilities affecting this changeset.
219
219
220 Instabilities are returned as strings. possible values are:
220 Instabilities are returned as strings. possible values are:
221 - orphan,
221 - orphan,
222 - phase-divergent,
222 - phase-divergent,
223 - content-divergent.
223 - content-divergent.
224 """
224 """
225 instabilities = []
225 instabilities = []
226 if self.orphan():
226 if self.orphan():
227 instabilities.append('orphan')
227 instabilities.append('orphan')
228 if self.phasedivergent():
228 if self.phasedivergent():
229 instabilities.append('phase-divergent')
229 instabilities.append('phase-divergent')
230 if self.contentdivergent():
230 if self.contentdivergent():
231 instabilities.append('content-divergent')
231 instabilities.append('content-divergent')
232 return instabilities
232 return instabilities
233
233
234 def parents(self):
234 def parents(self):
235 """return contexts for each parent changeset"""
235 """return contexts for each parent changeset"""
236 return self._parents
236 return self._parents
237
237
238 def p1(self):
238 def p1(self):
239 return self._parents[0]
239 return self._parents[0]
240
240
241 def p2(self):
241 def p2(self):
242 parents = self._parents
242 parents = self._parents
243 if len(parents) == 2:
243 if len(parents) == 2:
244 return parents[1]
244 return parents[1]
245 return changectx(self._repo, nullrev)
245 return changectx(self._repo, nullrev)
246
246
247 def _fileinfo(self, path):
247 def _fileinfo(self, path):
248 if r'_manifest' in self.__dict__:
248 if r'_manifest' in self.__dict__:
249 try:
249 try:
250 return self._manifest[path], self._manifest.flags(path)
250 return self._manifest[path], self._manifest.flags(path)
251 except KeyError:
251 except KeyError:
252 raise error.ManifestLookupError(self._node, path,
252 raise error.ManifestLookupError(self._node, path,
253 _('not found in manifest'))
253 _('not found in manifest'))
254 if r'_manifestdelta' in self.__dict__ or path in self.files():
254 if r'_manifestdelta' in self.__dict__ or path in self.files():
255 if path in self._manifestdelta:
255 if path in self._manifestdelta:
256 return (self._manifestdelta[path],
256 return (self._manifestdelta[path],
257 self._manifestdelta.flags(path))
257 self._manifestdelta.flags(path))
258 mfl = self._repo.manifestlog
258 mfl = self._repo.manifestlog
259 try:
259 try:
260 node, flag = mfl[self._changeset.manifest].find(path)
260 node, flag = mfl[self._changeset.manifest].find(path)
261 except KeyError:
261 except KeyError:
262 raise error.ManifestLookupError(self._node, path,
262 raise error.ManifestLookupError(self._node, path,
263 _('not found in manifest'))
263 _('not found in manifest'))
264
264
265 return node, flag
265 return node, flag
266
266
267 def filenode(self, path):
267 def filenode(self, path):
268 return self._fileinfo(path)[0]
268 return self._fileinfo(path)[0]
269
269
270 def flags(self, path):
270 def flags(self, path):
271 try:
271 try:
272 return self._fileinfo(path)[1]
272 return self._fileinfo(path)[1]
273 except error.LookupError:
273 except error.LookupError:
274 return ''
274 return ''
275
275
276 def sub(self, path, allowcreate=True):
276 def sub(self, path, allowcreate=True):
277 '''return a subrepo for the stored revision of path, never wdir()'''
277 '''return a subrepo for the stored revision of path, never wdir()'''
278 return subrepo.subrepo(self, path, allowcreate=allowcreate)
278 return subrepo.subrepo(self, path, allowcreate=allowcreate)
279
279
280 def nullsub(self, path, pctx):
280 def nullsub(self, path, pctx):
281 return subrepo.nullsubrepo(self, path, pctx)
281 return subrepo.nullsubrepo(self, path, pctx)
282
282
283 def workingsub(self, path):
283 def workingsub(self, path):
284 '''return a subrepo for the stored revision, or wdir if this is a wdir
284 '''return a subrepo for the stored revision, or wdir if this is a wdir
285 context.
285 context.
286 '''
286 '''
287 return subrepo.subrepo(self, path, allowwdir=True)
287 return subrepo.subrepo(self, path, allowwdir=True)
288
288
289 def match(self, pats=None, include=None, exclude=None, default='glob',
289 def match(self, pats=None, include=None, exclude=None, default='glob',
290 listsubrepos=False, badfn=None):
290 listsubrepos=False, badfn=None):
291 r = self._repo
291 r = self._repo
292 return matchmod.match(r.root, r.getcwd(), pats,
292 return matchmod.match(r.root, r.getcwd(), pats,
293 include, exclude, default,
293 include, exclude, default,
294 auditor=r.nofsauditor, ctx=self,
294 auditor=r.nofsauditor, ctx=self,
295 listsubrepos=listsubrepos, badfn=badfn)
295 listsubrepos=listsubrepos, badfn=badfn)
296
296
297 def diff(self, ctx2=None, match=None, changes=None, opts=None,
297 def diff(self, ctx2=None, match=None, changes=None, opts=None,
298 losedatafn=None, prefix='', relroot='', copy=None,
298 losedatafn=None, prefix='', relroot='', copy=None,
299 hunksfilterfn=None):
299 hunksfilterfn=None):
300 """Returns a diff generator for the given contexts and matcher"""
300 """Returns a diff generator for the given contexts and matcher"""
301 if ctx2 is None:
301 if ctx2 is None:
302 ctx2 = self.p1()
302 ctx2 = self.p1()
303 if ctx2 is not None:
303 if ctx2 is not None:
304 ctx2 = self._repo[ctx2]
304 ctx2 = self._repo[ctx2]
305 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
305 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
306 opts=opts, losedatafn=losedatafn, prefix=prefix,
306 opts=opts, losedatafn=losedatafn, prefix=prefix,
307 relroot=relroot, copy=copy,
307 relroot=relroot, copy=copy,
308 hunksfilterfn=hunksfilterfn)
308 hunksfilterfn=hunksfilterfn)
309
309
310 def dirs(self):
310 def dirs(self):
311 return self._manifest.dirs()
311 return self._manifest.dirs()
312
312
313 def hasdir(self, dir):
313 def hasdir(self, dir):
314 return self._manifest.hasdir(dir)
314 return self._manifest.hasdir(dir)
315
315
316 def status(self, other=None, match=None, listignored=False,
316 def status(self, other=None, match=None, listignored=False,
317 listclean=False, listunknown=False, listsubrepos=False):
317 listclean=False, listunknown=False, listsubrepos=False):
318 """return status of files between two nodes or node and working
318 """return status of files between two nodes or node and working
319 directory.
319 directory.
320
320
321 If other is None, compare this node with working directory.
321 If other is None, compare this node with working directory.
322
322
323 returns (modified, added, removed, deleted, unknown, ignored, clean)
323 returns (modified, added, removed, deleted, unknown, ignored, clean)
324 """
324 """
325
325
326 ctx1 = self
326 ctx1 = self
327 ctx2 = self._repo[other]
327 ctx2 = self._repo[other]
328
328
329 # This next code block is, admittedly, fragile logic that tests for
329 # This next code block is, admittedly, fragile logic that tests for
330 # reversing the contexts and wouldn't need to exist if it weren't for
330 # reversing the contexts and wouldn't need to exist if it weren't for
331 # the fast (and common) code path of comparing the working directory
331 # the fast (and common) code path of comparing the working directory
332 # with its first parent.
332 # with its first parent.
333 #
333 #
334 # What we're aiming for here is the ability to call:
334 # What we're aiming for here is the ability to call:
335 #
335 #
336 # workingctx.status(parentctx)
336 # workingctx.status(parentctx)
337 #
337 #
338 # If we always built the manifest for each context and compared those,
338 # If we always built the manifest for each context and compared those,
339 # then we'd be done. But the special case of the above call means we
339 # then we'd be done. But the special case of the above call means we
340 # just copy the manifest of the parent.
340 # just copy the manifest of the parent.
341 reversed = False
341 reversed = False
342 if (not isinstance(ctx1, changectx)
342 if (not isinstance(ctx1, changectx)
343 and isinstance(ctx2, changectx)):
343 and isinstance(ctx2, changectx)):
344 reversed = True
344 reversed = True
345 ctx1, ctx2 = ctx2, ctx1
345 ctx1, ctx2 = ctx2, ctx1
346
346
347 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
347 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
348 match = ctx2._matchstatus(ctx1, match)
348 match = ctx2._matchstatus(ctx1, match)
349 r = scmutil.status([], [], [], [], [], [], [])
349 r = scmutil.status([], [], [], [], [], [], [])
350 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
350 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
351 listunknown)
351 listunknown)
352
352
353 if reversed:
353 if reversed:
354 # Reverse added and removed. Clear deleted, unknown and ignored as
354 # Reverse added and removed. Clear deleted, unknown and ignored as
355 # these make no sense to reverse.
355 # these make no sense to reverse.
356 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
356 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
357 r.clean)
357 r.clean)
358
358
359 if listsubrepos:
359 if listsubrepos:
360 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
360 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
361 try:
361 try:
362 rev2 = ctx2.subrev(subpath)
362 rev2 = ctx2.subrev(subpath)
363 except KeyError:
363 except KeyError:
364 # A subrepo that existed in node1 was deleted between
364 # A subrepo that existed in node1 was deleted between
365 # node1 and node2 (inclusive). Thus, ctx2's substate
365 # node1 and node2 (inclusive). Thus, ctx2's substate
366 # won't contain that subpath. The best we can do ignore it.
366 # won't contain that subpath. The best we can do ignore it.
367 rev2 = None
367 rev2 = None
368 submatch = matchmod.subdirmatcher(subpath, match)
368 submatch = matchmod.subdirmatcher(subpath, match)
369 s = sub.status(rev2, match=submatch, ignored=listignored,
369 s = sub.status(rev2, match=submatch, ignored=listignored,
370 clean=listclean, unknown=listunknown,
370 clean=listclean, unknown=listunknown,
371 listsubrepos=True)
371 listsubrepos=True)
372 for rfiles, sfiles in zip(r, s):
372 for rfiles, sfiles in zip(r, s):
373 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
373 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
374
374
375 for l in r:
375 for l in r:
376 l.sort()
376 l.sort()
377
377
378 return r
378 return r
379
379
380 class changectx(basectx):
380 class changectx(basectx):
381 """A changecontext object makes access to data related to a particular
381 """A changecontext object makes access to data related to a particular
382 changeset convenient. It represents a read-only context already present in
382 changeset convenient. It represents a read-only context already present in
383 the repo."""
383 the repo."""
384 def __init__(self, repo, changeid='.'):
384 def __init__(self, repo, changeid='.'):
385 """changeid is a revision number, node, or tag"""
385 """changeid is a revision number, node, or tag"""
386 super(changectx, self).__init__(repo)
386 super(changectx, self).__init__(repo)
387
387
388 try:
388 try:
389 if isinstance(changeid, int):
389 if isinstance(changeid, int):
390 self._node = repo.changelog.node(changeid)
390 self._node = repo.changelog.node(changeid)
391 self._rev = changeid
391 self._rev = changeid
392 return
392 return
393 elif changeid == 'null':
393 elif changeid == 'null':
394 self._node = nullid
394 self._node = nullid
395 self._rev = nullrev
395 self._rev = nullrev
396 return
396 return
397 elif changeid == 'tip':
397 elif changeid == 'tip':
398 self._node = repo.changelog.tip()
398 self._node = repo.changelog.tip()
399 self._rev = repo.changelog.rev(self._node)
399 self._rev = repo.changelog.rev(self._node)
400 return
400 return
401 elif (changeid == '.'
401 elif (changeid == '.'
402 or repo.local() and changeid == repo.dirstate.p1()):
402 or repo.local() and changeid == repo.dirstate.p1()):
403 # this is a hack to delay/avoid loading obsmarkers
403 # this is a hack to delay/avoid loading obsmarkers
404 # when we know that '.' won't be hidden
404 # when we know that '.' won't be hidden
405 self._node = repo.dirstate.p1()
405 self._node = repo.dirstate.p1()
406 self._rev = repo.unfiltered().changelog.rev(self._node)
406 self._rev = repo.unfiltered().changelog.rev(self._node)
407 return
407 return
408 elif len(changeid) == 20:
408 elif len(changeid) == 20:
409 try:
409 try:
410 self._node = changeid
410 self._node = changeid
411 self._rev = repo.changelog.rev(changeid)
411 self._rev = repo.changelog.rev(changeid)
412 return
412 return
413 except error.FilteredLookupError:
413 except error.FilteredLookupError:
414 changeid = hex(changeid) # for the error message
414 changeid = hex(changeid) # for the error message
415 raise
415 raise
416 except LookupError:
416 except LookupError:
417 # check if it might have come from damaged dirstate
417 # check if it might have come from damaged dirstate
418 #
418 #
419 # XXX we could avoid the unfiltered if we had a recognizable
419 # XXX we could avoid the unfiltered if we had a recognizable
420 # exception for filtered changeset access
420 # exception for filtered changeset access
421 if (repo.local()
421 if (repo.local()
422 and changeid in repo.unfiltered().dirstate.parents()):
422 and changeid in repo.unfiltered().dirstate.parents()):
423 msg = _("working directory has unknown parent '%s'!")
423 msg = _("working directory has unknown parent '%s'!")
424 raise error.Abort(msg % short(changeid))
424 raise error.Abort(msg % short(changeid))
425 changeid = hex(changeid) # for the error message
425 changeid = hex(changeid) # for the error message
426
426
427 elif len(changeid) == 40:
427 elif len(changeid) == 40:
428 try:
428 try:
429 self._node = bin(changeid)
429 self._node = bin(changeid)
430 self._rev = repo.changelog.rev(self._node)
430 self._rev = repo.changelog.rev(self._node)
431 return
431 return
432 except error.FilteredLookupError:
432 except error.FilteredLookupError:
433 raise
433 raise
434 except (TypeError, LookupError):
434 except (TypeError, LookupError):
435 pass
435 pass
436 else:
436 else:
437 raise error.ProgrammingError(
437 raise error.ProgrammingError(
438 "unsupported changeid '%s' of type %s" %
438 "unsupported changeid '%s' of type %s" %
439 (changeid, type(changeid)))
439 (changeid, type(changeid)))
440
440
441 # lookup failed
441 # lookup failed
442 except (error.FilteredIndexError, error.FilteredLookupError):
442 except (error.FilteredIndexError, error.FilteredLookupError):
443 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
443 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
444 % pycompat.bytestr(changeid))
444 % pycompat.bytestr(changeid))
445 except error.FilteredRepoLookupError:
445 except error.FilteredRepoLookupError:
446 raise
446 raise
447 except IndexError:
447 except IndexError:
448 pass
448 pass
449 raise error.RepoLookupError(
449 raise error.RepoLookupError(
450 _("unknown revision '%s'") % changeid)
450 _("unknown revision '%s'") % changeid)
451
451
452 def __hash__(self):
452 def __hash__(self):
453 try:
453 try:
454 return hash(self._rev)
454 return hash(self._rev)
455 except AttributeError:
455 except AttributeError:
456 return id(self)
456 return id(self)
457
457
458 def __nonzero__(self):
458 def __nonzero__(self):
459 return self._rev != nullrev
459 return self._rev != nullrev
460
460
461 __bool__ = __nonzero__
461 __bool__ = __nonzero__
462
462
463 @propertycache
463 @propertycache
464 def _changeset(self):
464 def _changeset(self):
465 return self._repo.changelog.changelogrevision(self.rev())
465 return self._repo.changelog.changelogrevision(self.rev())
466
466
467 @propertycache
467 @propertycache
468 def _manifest(self):
468 def _manifest(self):
469 return self._manifestctx.read()
469 return self._manifestctx.read()
470
470
471 @property
471 @property
472 def _manifestctx(self):
472 def _manifestctx(self):
473 return self._repo.manifestlog[self._changeset.manifest]
473 return self._repo.manifestlog[self._changeset.manifest]
474
474
475 @propertycache
475 @propertycache
476 def _manifestdelta(self):
476 def _manifestdelta(self):
477 return self._manifestctx.readdelta()
477 return self._manifestctx.readdelta()
478
478
479 @propertycache
479 @propertycache
480 def _parents(self):
480 def _parents(self):
481 repo = self._repo
481 repo = self._repo
482 p1, p2 = repo.changelog.parentrevs(self._rev)
482 p1, p2 = repo.changelog.parentrevs(self._rev)
483 if p2 == nullrev:
483 if p2 == nullrev:
484 return [changectx(repo, p1)]
484 return [changectx(repo, p1)]
485 return [changectx(repo, p1), changectx(repo, p2)]
485 return [changectx(repo, p1), changectx(repo, p2)]
486
486
487 def changeset(self):
487 def changeset(self):
488 c = self._changeset
488 c = self._changeset
489 return (
489 return (
490 c.manifest,
490 c.manifest,
491 c.user,
491 c.user,
492 c.date,
492 c.date,
493 c.files,
493 c.files,
494 c.description,
494 c.description,
495 c.extra,
495 c.extra,
496 )
496 )
497 def manifestnode(self):
497 def manifestnode(self):
498 return self._changeset.manifest
498 return self._changeset.manifest
499
499
500 def user(self):
500 def user(self):
501 return self._changeset.user
501 return self._changeset.user
502 def date(self):
502 def date(self):
503 return self._changeset.date
503 return self._changeset.date
504 def files(self):
504 def files(self):
505 return self._changeset.files
505 return self._changeset.files
506 def description(self):
506 def description(self):
507 return self._changeset.description
507 return self._changeset.description
508 def branch(self):
508 def branch(self):
509 return encoding.tolocal(self._changeset.extra.get("branch"))
509 return encoding.tolocal(self._changeset.extra.get("branch"))
510 def closesbranch(self):
510 def closesbranch(self):
511 return 'close' in self._changeset.extra
511 return 'close' in self._changeset.extra
512 def extra(self):
512 def extra(self):
513 """Return a dict of extra information."""
513 """Return a dict of extra information."""
514 return self._changeset.extra
514 return self._changeset.extra
515 def tags(self):
515 def tags(self):
516 """Return a list of byte tag names"""
516 """Return a list of byte tag names"""
517 return self._repo.nodetags(self._node)
517 return self._repo.nodetags(self._node)
518 def bookmarks(self):
518 def bookmarks(self):
519 """Return a list of byte bookmark names."""
519 """Return a list of byte bookmark names."""
520 return self._repo.nodebookmarks(self._node)
520 return self._repo.nodebookmarks(self._node)
521 def phase(self):
521 def phase(self):
522 return self._repo._phasecache.phase(self._repo, self._rev)
522 return self._repo._phasecache.phase(self._repo, self._rev)
523 def hidden(self):
523 def hidden(self):
524 return self._rev in repoview.filterrevs(self._repo, 'visible')
524 return self._rev in repoview.filterrevs(self._repo, 'visible')
525
525
526 def isinmemory(self):
526 def isinmemory(self):
527 return False
527 return False
528
528
529 def children(self):
529 def children(self):
530 """return list of changectx contexts for each child changeset.
530 """return list of changectx contexts for each child changeset.
531
531
532 This returns only the immediate child changesets. Use descendants() to
532 This returns only the immediate child changesets. Use descendants() to
533 recursively walk children.
533 recursively walk children.
534 """
534 """
535 c = self._repo.changelog.children(self._node)
535 c = self._repo.changelog.children(self._node)
536 return [changectx(self._repo, x) for x in c]
536 return [changectx(self._repo, x) for x in c]
537
537
538 def ancestors(self):
538 def ancestors(self):
539 for a in self._repo.changelog.ancestors([self._rev]):
539 for a in self._repo.changelog.ancestors([self._rev]):
540 yield changectx(self._repo, a)
540 yield changectx(self._repo, a)
541
541
542 def descendants(self):
542 def descendants(self):
543 """Recursively yield all children of the changeset.
543 """Recursively yield all children of the changeset.
544
544
545 For just the immediate children, use children()
545 For just the immediate children, use children()
546 """
546 """
547 for d in self._repo.changelog.descendants([self._rev]):
547 for d in self._repo.changelog.descendants([self._rev]):
548 yield changectx(self._repo, d)
548 yield changectx(self._repo, d)
549
549
550 def filectx(self, path, fileid=None, filelog=None):
550 def filectx(self, path, fileid=None, filelog=None):
551 """get a file context from this changeset"""
551 """get a file context from this changeset"""
552 if fileid is None:
552 if fileid is None:
553 fileid = self.filenode(path)
553 fileid = self.filenode(path)
554 return filectx(self._repo, path, fileid=fileid,
554 return filectx(self._repo, path, fileid=fileid,
555 changectx=self, filelog=filelog)
555 changectx=self, filelog=filelog)
556
556
557 def ancestor(self, c2, warn=False):
557 def ancestor(self, c2, warn=False):
558 """return the "best" ancestor context of self and c2
558 """return the "best" ancestor context of self and c2
559
559
560 If there are multiple candidates, it will show a message and check
560 If there are multiple candidates, it will show a message and check
561 merge.preferancestor configuration before falling back to the
561 merge.preferancestor configuration before falling back to the
562 revlog ancestor."""
562 revlog ancestor."""
563 # deal with workingctxs
563 # deal with workingctxs
564 n2 = c2._node
564 n2 = c2._node
565 if n2 is None:
565 if n2 is None:
566 n2 = c2._parents[0]._node
566 n2 = c2._parents[0]._node
567 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
567 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
568 if not cahs:
568 if not cahs:
569 anc = nullid
569 anc = nullid
570 elif len(cahs) == 1:
570 elif len(cahs) == 1:
571 anc = cahs[0]
571 anc = cahs[0]
572 else:
572 else:
573 # experimental config: merge.preferancestor
573 # experimental config: merge.preferancestor
574 for r in self._repo.ui.configlist('merge', 'preferancestor'):
574 for r in self._repo.ui.configlist('merge', 'preferancestor'):
575 try:
575 try:
576 ctx = scmutil.revsymbol(self._repo, r)
576 ctx = scmutil.revsymbol(self._repo, r)
577 except error.RepoLookupError:
577 except error.RepoLookupError:
578 continue
578 continue
579 anc = ctx.node()
579 anc = ctx.node()
580 if anc in cahs:
580 if anc in cahs:
581 break
581 break
582 else:
582 else:
583 anc = self._repo.changelog.ancestor(self._node, n2)
583 anc = self._repo.changelog.ancestor(self._node, n2)
584 if warn:
584 if warn:
585 self._repo.ui.status(
585 self._repo.ui.status(
586 (_("note: using %s as ancestor of %s and %s\n") %
586 (_("note: using %s as ancestor of %s and %s\n") %
587 (short(anc), short(self._node), short(n2))) +
587 (short(anc), short(self._node), short(n2))) +
588 ''.join(_(" alternatively, use --config "
588 ''.join(_(" alternatively, use --config "
589 "merge.preferancestor=%s\n") %
589 "merge.preferancestor=%s\n") %
590 short(n) for n in sorted(cahs) if n != anc))
590 short(n) for n in sorted(cahs) if n != anc))
591 return changectx(self._repo, anc)
591 return changectx(self._repo, anc)
592
592
593 def descendant(self, other):
593 def descendant(self, other):
594 msg = (b'ctx.descendant(other) is deprecated, '
594 msg = (b'ctx.descendant(other) is deprecated, '
595 b'use ctx.isancestorof(other)')
595 b'use ctx.isancestorof(other)')
596 self._repo.ui.deprecwarn(msg, b'4.7')
596 self._repo.ui.deprecwarn(msg, b'4.7')
597 return self.isancestorof(other)
597 return self.isancestorof(other)
598
598
599 def isancestorof(self, other):
599 def isancestorof(self, other):
600 """True if this changeset is an ancestor of other"""
600 """True if this changeset is an ancestor of other"""
601 return self._repo.changelog.isancestorrev(self._rev, other._rev)
601 return self._repo.changelog.isancestorrev(self._rev, other._rev)
602
602
603 def walk(self, match):
603 def walk(self, match):
604 '''Generates matching file names.'''
604 '''Generates matching file names.'''
605
605
606 # Wrap match.bad method to have message with nodeid
606 # Wrap match.bad method to have message with nodeid
607 def bad(fn, msg):
607 def bad(fn, msg):
608 # The manifest doesn't know about subrepos, so don't complain about
608 # The manifest doesn't know about subrepos, so don't complain about
609 # paths into valid subrepos.
609 # paths into valid subrepos.
610 if any(fn == s or fn.startswith(s + '/')
610 if any(fn == s or fn.startswith(s + '/')
611 for s in self.substate):
611 for s in self.substate):
612 return
612 return
613 match.bad(fn, _('no such file in rev %s') % self)
613 match.bad(fn, _('no such file in rev %s') % self)
614
614
615 m = matchmod.badmatch(match, bad)
615 m = matchmod.badmatch(match, bad)
616 return self._manifest.walk(m)
616 return self._manifest.walk(m)
617
617
618 def matches(self, match):
618 def matches(self, match):
619 return self.walk(match)
619 return self.walk(match)
620
620
621 class basefilectx(object):
621 class basefilectx(object):
622 """A filecontext object represents the common logic for its children:
622 """A filecontext object represents the common logic for its children:
623 filectx: read-only access to a filerevision that is already present
623 filectx: read-only access to a filerevision that is already present
624 in the repo,
624 in the repo,
625 workingfilectx: a filecontext that represents files from the working
625 workingfilectx: a filecontext that represents files from the working
626 directory,
626 directory,
627 memfilectx: a filecontext that represents files in-memory,
627 memfilectx: a filecontext that represents files in-memory,
628 overlayfilectx: duplicate another filecontext with some fields overridden.
628 overlayfilectx: duplicate another filecontext with some fields overridden.
629 """
629 """
630 @propertycache
630 @propertycache
631 def _filelog(self):
631 def _filelog(self):
632 return self._repo.file(self._path)
632 return self._repo.file(self._path)
633
633
634 @propertycache
634 @propertycache
635 def _changeid(self):
635 def _changeid(self):
636 if r'_changeid' in self.__dict__:
636 if r'_changeid' in self.__dict__:
637 return self._changeid
637 return self._changeid
638 elif r'_changectx' in self.__dict__:
638 elif r'_changectx' in self.__dict__:
639 return self._changectx.rev()
639 return self._changectx.rev()
640 elif r'_descendantrev' in self.__dict__:
640 elif r'_descendantrev' in self.__dict__:
641 # this file context was created from a revision with a known
641 # this file context was created from a revision with a known
642 # descendant, we can (lazily) correct for linkrev aliases
642 # descendant, we can (lazily) correct for linkrev aliases
643 return self._adjustlinkrev(self._descendantrev)
643 return self._adjustlinkrev(self._descendantrev)
644 else:
644 else:
645 return self._filelog.linkrev(self._filerev)
645 return self._filelog.linkrev(self._filerev)
646
646
647 @propertycache
647 @propertycache
648 def _filenode(self):
648 def _filenode(self):
649 if r'_fileid' in self.__dict__:
649 if r'_fileid' in self.__dict__:
650 return self._filelog.lookup(self._fileid)
650 return self._filelog.lookup(self._fileid)
651 else:
651 else:
652 return self._changectx.filenode(self._path)
652 return self._changectx.filenode(self._path)
653
653
654 @propertycache
654 @propertycache
655 def _filerev(self):
655 def _filerev(self):
656 return self._filelog.rev(self._filenode)
656 return self._filelog.rev(self._filenode)
657
657
658 @propertycache
658 @propertycache
659 def _repopath(self):
659 def _repopath(self):
660 return self._path
660 return self._path
661
661
662 def __nonzero__(self):
662 def __nonzero__(self):
663 try:
663 try:
664 self._filenode
664 self._filenode
665 return True
665 return True
666 except error.LookupError:
666 except error.LookupError:
667 # file is missing
667 # file is missing
668 return False
668 return False
669
669
670 __bool__ = __nonzero__
670 __bool__ = __nonzero__
671
671
672 def __bytes__(self):
672 def __bytes__(self):
673 try:
673 try:
674 return "%s@%s" % (self.path(), self._changectx)
674 return "%s@%s" % (self.path(), self._changectx)
675 except error.LookupError:
675 except error.LookupError:
676 return "%s@???" % self.path()
676 return "%s@???" % self.path()
677
677
678 __str__ = encoding.strmethod(__bytes__)
678 __str__ = encoding.strmethod(__bytes__)
679
679
680 def __repr__(self):
680 def __repr__(self):
681 return r"<%s %s>" % (type(self).__name__, str(self))
681 return r"<%s %s>" % (type(self).__name__, str(self))
682
682
683 def __hash__(self):
683 def __hash__(self):
684 try:
684 try:
685 return hash((self._path, self._filenode))
685 return hash((self._path, self._filenode))
686 except AttributeError:
686 except AttributeError:
687 return id(self)
687 return id(self)
688
688
689 def __eq__(self, other):
689 def __eq__(self, other):
690 try:
690 try:
691 return (type(self) == type(other) and self._path == other._path
691 return (type(self) == type(other) and self._path == other._path
692 and self._filenode == other._filenode)
692 and self._filenode == other._filenode)
693 except AttributeError:
693 except AttributeError:
694 return False
694 return False
695
695
696 def __ne__(self, other):
696 def __ne__(self, other):
697 return not (self == other)
697 return not (self == other)
698
698
699 def filerev(self):
699 def filerev(self):
700 return self._filerev
700 return self._filerev
701 def filenode(self):
701 def filenode(self):
702 return self._filenode
702 return self._filenode
703 @propertycache
703 @propertycache
704 def _flags(self):
704 def _flags(self):
705 return self._changectx.flags(self._path)
705 return self._changectx.flags(self._path)
706 def flags(self):
706 def flags(self):
707 return self._flags
707 return self._flags
708 def filelog(self):
708 def filelog(self):
709 return self._filelog
709 return self._filelog
710 def rev(self):
710 def rev(self):
711 return self._changeid
711 return self._changeid
712 def linkrev(self):
712 def linkrev(self):
713 return self._filelog.linkrev(self._filerev)
713 return self._filelog.linkrev(self._filerev)
714 def node(self):
714 def node(self):
715 return self._changectx.node()
715 return self._changectx.node()
716 def hex(self):
716 def hex(self):
717 return self._changectx.hex()
717 return self._changectx.hex()
718 def user(self):
718 def user(self):
719 return self._changectx.user()
719 return self._changectx.user()
720 def date(self):
720 def date(self):
721 return self._changectx.date()
721 return self._changectx.date()
722 def files(self):
722 def files(self):
723 return self._changectx.files()
723 return self._changectx.files()
724 def description(self):
724 def description(self):
725 return self._changectx.description()
725 return self._changectx.description()
726 def branch(self):
726 def branch(self):
727 return self._changectx.branch()
727 return self._changectx.branch()
728 def extra(self):
728 def extra(self):
729 return self._changectx.extra()
729 return self._changectx.extra()
730 def phase(self):
730 def phase(self):
731 return self._changectx.phase()
731 return self._changectx.phase()
732 def phasestr(self):
732 def phasestr(self):
733 return self._changectx.phasestr()
733 return self._changectx.phasestr()
734 def obsolete(self):
734 def obsolete(self):
735 return self._changectx.obsolete()
735 return self._changectx.obsolete()
736 def instabilities(self):
736 def instabilities(self):
737 return self._changectx.instabilities()
737 return self._changectx.instabilities()
738 def manifest(self):
738 def manifest(self):
739 return self._changectx.manifest()
739 return self._changectx.manifest()
740 def changectx(self):
740 def changectx(self):
741 return self._changectx
741 return self._changectx
742 def renamed(self):
742 def renamed(self):
743 return self._copied
743 return self._copied
744 def repo(self):
744 def repo(self):
745 return self._repo
745 return self._repo
746 def size(self):
746 def size(self):
747 return len(self.data())
747 return len(self.data())
748
748
749 def path(self):
749 def path(self):
750 return self._path
750 return self._path
751
751
752 def isbinary(self):
752 def isbinary(self):
753 try:
753 try:
754 return stringutil.binary(self.data())
754 return stringutil.binary(self.data())
755 except IOError:
755 except IOError:
756 return False
756 return False
757 def isexec(self):
757 def isexec(self):
758 return 'x' in self.flags()
758 return 'x' in self.flags()
759 def islink(self):
759 def islink(self):
760 return 'l' in self.flags()
760 return 'l' in self.flags()
761
761
762 def isabsent(self):
762 def isabsent(self):
763 """whether this filectx represents a file not in self._changectx
763 """whether this filectx represents a file not in self._changectx
764
764
765 This is mainly for merge code to detect change/delete conflicts. This is
765 This is mainly for merge code to detect change/delete conflicts. This is
766 expected to be True for all subclasses of basectx."""
766 expected to be True for all subclasses of basectx."""
767 return False
767 return False
768
768
769 _customcmp = False
769 _customcmp = False
770 def cmp(self, fctx):
770 def cmp(self, fctx):
771 """compare with other file context
771 """compare with other file context
772
772
773 returns True if different than fctx.
773 returns True if different than fctx.
774 """
774 """
775 if fctx._customcmp:
775 if fctx._customcmp:
776 return fctx.cmp(self)
776 return fctx.cmp(self)
777
777
778 if (fctx._filenode is None
778 if (fctx._filenode is None
779 and (self._repo._encodefilterpats
779 and (self._repo._encodefilterpats
780 # if file data starts with '\1\n', empty metadata block is
780 # if file data starts with '\1\n', empty metadata block is
781 # prepended, which adds 4 bytes to filelog.size().
781 # prepended, which adds 4 bytes to filelog.size().
782 or self.size() - 4 == fctx.size())
782 or self.size() - 4 == fctx.size())
783 or self.size() == fctx.size()):
783 or self.size() == fctx.size()):
784 return self._filelog.cmp(self._filenode, fctx.data())
784 return self._filelog.cmp(self._filenode, fctx.data())
785
785
786 return True
786 return True
787
787
788 def _adjustlinkrev(self, srcrev, inclusive=False):
788 def _adjustlinkrev(self, srcrev, inclusive=False):
789 """return the first ancestor of <srcrev> introducing <fnode>
789 """return the first ancestor of <srcrev> introducing <fnode>
790
790
791 If the linkrev of the file revision does not point to an ancestor of
791 If the linkrev of the file revision does not point to an ancestor of
792 srcrev, we'll walk down the ancestors until we find one introducing
792 srcrev, we'll walk down the ancestors until we find one introducing
793 this file revision.
793 this file revision.
794
794
795 :srcrev: the changeset revision we search ancestors from
795 :srcrev: the changeset revision we search ancestors from
796 :inclusive: if true, the src revision will also be checked
796 :inclusive: if true, the src revision will also be checked
797 """
797 """
798 repo = self._repo
798 repo = self._repo
799 cl = repo.unfiltered().changelog
799 cl = repo.unfiltered().changelog
800 mfl = repo.manifestlog
800 mfl = repo.manifestlog
801 # fetch the linkrev
801 # fetch the linkrev
802 lkr = self.linkrev()
802 lkr = self.linkrev()
803 # hack to reuse ancestor computation when searching for renames
803 # hack to reuse ancestor computation when searching for renames
804 memberanc = getattr(self, '_ancestrycontext', None)
804 memberanc = getattr(self, '_ancestrycontext', None)
805 iteranc = None
805 iteranc = None
806 if srcrev is None:
806 if srcrev is None:
807 # wctx case, used by workingfilectx during mergecopy
807 # wctx case, used by workingfilectx during mergecopy
808 revs = [p.rev() for p in self._repo[None].parents()]
808 revs = [p.rev() for p in self._repo[None].parents()]
809 inclusive = True # we skipped the real (revless) source
809 inclusive = True # we skipped the real (revless) source
810 else:
810 else:
811 revs = [srcrev]
811 revs = [srcrev]
812 if memberanc is None:
812 if memberanc is None:
813 memberanc = iteranc = cl.ancestors(revs, lkr,
813 memberanc = iteranc = cl.ancestors(revs, lkr,
814 inclusive=inclusive)
814 inclusive=inclusive)
815 # check if this linkrev is an ancestor of srcrev
815 # check if this linkrev is an ancestor of srcrev
816 if lkr not in memberanc:
816 if lkr not in memberanc:
817 if iteranc is None:
817 if iteranc is None:
818 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
818 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
819 fnode = self._filenode
819 fnode = self._filenode
820 path = self._path
820 path = self._path
821 for a in iteranc:
821 for a in iteranc:
822 ac = cl.read(a) # get changeset data (we avoid object creation)
822 ac = cl.read(a) # get changeset data (we avoid object creation)
823 if path in ac[3]: # checking the 'files' field.
823 if path in ac[3]: # checking the 'files' field.
824 # The file has been touched, check if the content is
824 # The file has been touched, check if the content is
825 # similar to the one we search for.
825 # similar to the one we search for.
826 if fnode == mfl[ac[0]].readfast().get(path):
826 if fnode == mfl[ac[0]].readfast().get(path):
827 return a
827 return a
828 # In theory, we should never get out of that loop without a result.
828 # In theory, we should never get out of that loop without a result.
829 # But if manifest uses a buggy file revision (not children of the
829 # But if manifest uses a buggy file revision (not children of the
830 # one it replaces) we could. Such a buggy situation will likely
830 # one it replaces) we could. Such a buggy situation will likely
831 # result is crash somewhere else at to some point.
831 # result is crash somewhere else at to some point.
832 return lkr
832 return lkr
833
833
834 def introrev(self):
834 def introrev(self):
835 """return the rev of the changeset which introduced this file revision
835 """return the rev of the changeset which introduced this file revision
836
836
837 This method is different from linkrev because it take into account the
837 This method is different from linkrev because it take into account the
838 changeset the filectx was created from. It ensures the returned
838 changeset the filectx was created from. It ensures the returned
839 revision is one of its ancestors. This prevents bugs from
839 revision is one of its ancestors. This prevents bugs from
840 'linkrev-shadowing' when a file revision is used by multiple
840 'linkrev-shadowing' when a file revision is used by multiple
841 changesets.
841 changesets.
842 """
842 """
843 lkr = self.linkrev()
843 lkr = self.linkrev()
844 attrs = vars(self)
844 attrs = vars(self)
845 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
845 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
846 if noctx or self.rev() == lkr:
846 if noctx or self.rev() == lkr:
847 return self.linkrev()
847 return self.linkrev()
848 return self._adjustlinkrev(self.rev(), inclusive=True)
848 return self._adjustlinkrev(self.rev(), inclusive=True)
849
849
850 def introfilectx(self):
850 def introfilectx(self):
851 """Return filectx having identical contents, but pointing to the
851 """Return filectx having identical contents, but pointing to the
852 changeset revision where this filectx was introduced"""
852 changeset revision where this filectx was introduced"""
853 introrev = self.introrev()
853 introrev = self.introrev()
854 if self.rev() == introrev:
854 if self.rev() == introrev:
855 return self
855 return self
856 return self.filectx(self.filenode(), changeid=introrev)
856 return self.filectx(self.filenode(), changeid=introrev)
857
857
858 def _parentfilectx(self, path, fileid, filelog):
858 def _parentfilectx(self, path, fileid, filelog):
859 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
859 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
860 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
860 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
861 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
861 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
862 # If self is associated with a changeset (probably explicitly
862 # If self is associated with a changeset (probably explicitly
863 # fed), ensure the created filectx is associated with a
863 # fed), ensure the created filectx is associated with a
864 # changeset that is an ancestor of self.changectx.
864 # changeset that is an ancestor of self.changectx.
865 # This lets us later use _adjustlinkrev to get a correct link.
865 # This lets us later use _adjustlinkrev to get a correct link.
866 fctx._descendantrev = self.rev()
866 fctx._descendantrev = self.rev()
867 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
867 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
868 elif r'_descendantrev' in vars(self):
868 elif r'_descendantrev' in vars(self):
869 # Otherwise propagate _descendantrev if we have one associated.
869 # Otherwise propagate _descendantrev if we have one associated.
870 fctx._descendantrev = self._descendantrev
870 fctx._descendantrev = self._descendantrev
871 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
871 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
872 return fctx
872 return fctx
873
873
874 def parents(self):
874 def parents(self):
875 _path = self._path
875 _path = self._path
876 fl = self._filelog
876 fl = self._filelog
877 parents = self._filelog.parents(self._filenode)
877 parents = self._filelog.parents(self._filenode)
878 pl = [(_path, node, fl) for node in parents if node != nullid]
878 pl = [(_path, node, fl) for node in parents if node != nullid]
879
879
880 r = fl.renamed(self._filenode)
880 r = fl.renamed(self._filenode)
881 if r:
881 if r:
882 # - In the simple rename case, both parent are nullid, pl is empty.
882 # - In the simple rename case, both parent are nullid, pl is empty.
883 # - In case of merge, only one of the parent is null id and should
883 # - In case of merge, only one of the parent is null id and should
884 # be replaced with the rename information. This parent is -always-
884 # be replaced with the rename information. This parent is -always-
885 # the first one.
885 # the first one.
886 #
886 #
887 # As null id have always been filtered out in the previous list
887 # As null id have always been filtered out in the previous list
888 # comprehension, inserting to 0 will always result in "replacing
888 # comprehension, inserting to 0 will always result in "replacing
889 # first nullid parent with rename information.
889 # first nullid parent with rename information.
890 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
890 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
891
891
892 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
892 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
893
893
894 def p1(self):
894 def p1(self):
895 return self.parents()[0]
895 return self.parents()[0]
896
896
897 def p2(self):
897 def p2(self):
898 p = self.parents()
898 p = self.parents()
899 if len(p) == 2:
899 if len(p) == 2:
900 return p[1]
900 return p[1]
901 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
901 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
902
902
903 def annotate(self, follow=False, skiprevs=None, diffopts=None):
903 def annotate(self, follow=False, skiprevs=None, diffopts=None):
904 """Returns a list of annotateline objects for each line in the file
904 """Returns a list of annotateline objects for each line in the file
905
905
906 - line.fctx is the filectx of the node where that line was last changed
906 - line.fctx is the filectx of the node where that line was last changed
907 - line.lineno is the line number at the first appearance in the managed
907 - line.lineno is the line number at the first appearance in the managed
908 file
908 file
909 - line.text is the data on that line (including newline character)
909 - line.text is the data on that line (including newline character)
910 """
910 """
911 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
911 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
912
912
913 def parents(f):
913 def parents(f):
914 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
914 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
915 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
915 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
916 # from the topmost introrev (= srcrev) down to p.linkrev() if it
916 # from the topmost introrev (= srcrev) down to p.linkrev() if it
917 # isn't an ancestor of the srcrev.
917 # isn't an ancestor of the srcrev.
918 f._changeid
918 f._changeid
919 pl = f.parents()
919 pl = f.parents()
920
920
921 # Don't return renamed parents if we aren't following.
921 # Don't return renamed parents if we aren't following.
922 if not follow:
922 if not follow:
923 pl = [p for p in pl if p.path() == f.path()]
923 pl = [p for p in pl if p.path() == f.path()]
924
924
925 # renamed filectx won't have a filelog yet, so set it
925 # renamed filectx won't have a filelog yet, so set it
926 # from the cache to save time
926 # from the cache to save time
927 for p in pl:
927 for p in pl:
928 if not r'_filelog' in p.__dict__:
928 if not r'_filelog' in p.__dict__:
929 p._filelog = getlog(p.path())
929 p._filelog = getlog(p.path())
930
930
931 return pl
931 return pl
932
932
933 # use linkrev to find the first changeset where self appeared
933 # use linkrev to find the first changeset where self appeared
934 base = self.introfilectx()
934 base = self.introfilectx()
935 if getattr(base, '_ancestrycontext', None) is None:
935 if getattr(base, '_ancestrycontext', None) is None:
936 cl = self._repo.changelog
936 cl = self._repo.changelog
937 if base.rev() is None:
937 if base.rev() is None:
938 # wctx is not inclusive, but works because _ancestrycontext
938 # wctx is not inclusive, but works because _ancestrycontext
939 # is used to test filelog revisions
939 # is used to test filelog revisions
940 ac = cl.ancestors([p.rev() for p in base.parents()],
940 ac = cl.ancestors([p.rev() for p in base.parents()],
941 inclusive=True)
941 inclusive=True)
942 else:
942 else:
943 ac = cl.ancestors([base.rev()], inclusive=True)
943 ac = cl.ancestors([base.rev()], inclusive=True)
944 base._ancestrycontext = ac
944 base._ancestrycontext = ac
945
945
946 return dagop.annotate(base, parents, skiprevs=skiprevs,
946 return dagop.annotate(base, parents, skiprevs=skiprevs,
947 diffopts=diffopts)
947 diffopts=diffopts)
948
948
949 def ancestors(self, followfirst=False):
949 def ancestors(self, followfirst=False):
950 visit = {}
950 visit = {}
951 c = self
951 c = self
952 if followfirst:
952 if followfirst:
953 cut = 1
953 cut = 1
954 else:
954 else:
955 cut = None
955 cut = None
956
956
957 while True:
957 while True:
958 for parent in c.parents()[:cut]:
958 for parent in c.parents()[:cut]:
959 visit[(parent.linkrev(), parent.filenode())] = parent
959 visit[(parent.linkrev(), parent.filenode())] = parent
960 if not visit:
960 if not visit:
961 break
961 break
962 c = visit.pop(max(visit))
962 c = visit.pop(max(visit))
963 yield c
963 yield c
964
964
965 def decodeddata(self):
965 def decodeddata(self):
966 """Returns `data()` after running repository decoding filters.
966 """Returns `data()` after running repository decoding filters.
967
967
968 This is often equivalent to how the data would be expressed on disk.
968 This is often equivalent to how the data would be expressed on disk.
969 """
969 """
970 return self._repo.wwritedata(self.path(), self.data())
970 return self._repo.wwritedata(self.path(), self.data())
971
971
972 class filectx(basefilectx):
972 class filectx(basefilectx):
973 """A filecontext object makes access to data related to a particular
973 """A filecontext object makes access to data related to a particular
974 filerevision convenient."""
974 filerevision convenient."""
975 def __init__(self, repo, path, changeid=None, fileid=None,
975 def __init__(self, repo, path, changeid=None, fileid=None,
976 filelog=None, changectx=None):
976 filelog=None, changectx=None):
977 """changeid can be a changeset revision, node, or tag.
977 """changeid can be a changeset revision, node, or tag.
978 fileid can be a file revision or node."""
978 fileid can be a file revision or node."""
979 self._repo = repo
979 self._repo = repo
980 self._path = path
980 self._path = path
981
981
982 assert (changeid is not None
982 assert (changeid is not None
983 or fileid is not None
983 or fileid is not None
984 or changectx is not None), \
984 or changectx is not None), \
985 ("bad args: changeid=%r, fileid=%r, changectx=%r"
985 ("bad args: changeid=%r, fileid=%r, changectx=%r"
986 % (changeid, fileid, changectx))
986 % (changeid, fileid, changectx))
987
987
988 if filelog is not None:
988 if filelog is not None:
989 self._filelog = filelog
989 self._filelog = filelog
990
990
991 if changeid is not None:
991 if changeid is not None:
992 self._changeid = changeid
992 self._changeid = changeid
993 if changectx is not None:
993 if changectx is not None:
994 self._changectx = changectx
994 self._changectx = changectx
995 if fileid is not None:
995 if fileid is not None:
996 self._fileid = fileid
996 self._fileid = fileid
997
997
998 @propertycache
998 @propertycache
999 def _changectx(self):
999 def _changectx(self):
1000 try:
1000 try:
1001 return changectx(self._repo, self._changeid)
1001 return changectx(self._repo, self._changeid)
1002 except error.FilteredRepoLookupError:
1002 except error.FilteredRepoLookupError:
1003 # Linkrev may point to any revision in the repository. When the
1003 # Linkrev may point to any revision in the repository. When the
1004 # repository is filtered this may lead to `filectx` trying to build
1004 # repository is filtered this may lead to `filectx` trying to build
1005 # `changectx` for filtered revision. In such case we fallback to
1005 # `changectx` for filtered revision. In such case we fallback to
1006 # creating `changectx` on the unfiltered version of the reposition.
1006 # creating `changectx` on the unfiltered version of the reposition.
1007 # This fallback should not be an issue because `changectx` from
1007 # This fallback should not be an issue because `changectx` from
1008 # `filectx` are not used in complex operations that care about
1008 # `filectx` are not used in complex operations that care about
1009 # filtering.
1009 # filtering.
1010 #
1010 #
1011 # This fallback is a cheap and dirty fix that prevent several
1011 # This fallback is a cheap and dirty fix that prevent several
1012 # crashes. It does not ensure the behavior is correct. However the
1012 # crashes. It does not ensure the behavior is correct. However the
1013 # behavior was not correct before filtering either and "incorrect
1013 # behavior was not correct before filtering either and "incorrect
1014 # behavior" is seen as better as "crash"
1014 # behavior" is seen as better as "crash"
1015 #
1015 #
1016 # Linkrevs have several serious troubles with filtering that are
1016 # Linkrevs have several serious troubles with filtering that are
1017 # complicated to solve. Proper handling of the issue here should be
1017 # complicated to solve. Proper handling of the issue here should be
1018 # considered when solving linkrev issue are on the table.
1018 # considered when solving linkrev issue are on the table.
1019 return changectx(self._repo.unfiltered(), self._changeid)
1019 return changectx(self._repo.unfiltered(), self._changeid)
1020
1020
1021 def filectx(self, fileid, changeid=None):
1021 def filectx(self, fileid, changeid=None):
1022 '''opens an arbitrary revision of the file without
1022 '''opens an arbitrary revision of the file without
1023 opening a new filelog'''
1023 opening a new filelog'''
1024 return filectx(self._repo, self._path, fileid=fileid,
1024 return filectx(self._repo, self._path, fileid=fileid,
1025 filelog=self._filelog, changeid=changeid)
1025 filelog=self._filelog, changeid=changeid)
1026
1026
1027 def rawdata(self):
1027 def rawdata(self):
1028 return self._filelog.revision(self._filenode, raw=True)
1028 return self._filelog.revision(self._filenode, raw=True)
1029
1029
1030 def rawflags(self):
1030 def rawflags(self):
1031 """low-level revlog flags"""
1031 """low-level revlog flags"""
1032 return self._filelog.flags(self._filerev)
1032 return self._filelog.flags(self._filerev)
1033
1033
1034 def data(self):
1034 def data(self):
1035 try:
1035 try:
1036 return self._filelog.read(self._filenode)
1036 return self._filelog.read(self._filenode)
1037 except error.CensoredNodeError:
1037 except error.CensoredNodeError:
1038 if self._repo.ui.config("censor", "policy") == "ignore":
1038 if self._repo.ui.config("censor", "policy") == "ignore":
1039 return ""
1039 return ""
1040 raise error.Abort(_("censored node: %s") % short(self._filenode),
1040 raise error.Abort(_("censored node: %s") % short(self._filenode),
1041 hint=_("set censor.policy to ignore errors"))
1041 hint=_("set censor.policy to ignore errors"))
1042
1042
1043 def size(self):
1043 def size(self):
1044 return self._filelog.size(self._filerev)
1044 return self._filelog.size(self._filerev)
1045
1045
1046 @propertycache
1046 @propertycache
1047 def _copied(self):
1047 def _copied(self):
1048 """check if file was actually renamed in this changeset revision
1048 """check if file was actually renamed in this changeset revision
1049
1049
1050 If rename logged in file revision, we report copy for changeset only
1050 If rename logged in file revision, we report copy for changeset only
1051 if file revisions linkrev points back to the changeset in question
1051 if file revisions linkrev points back to the changeset in question
1052 or both changeset parents contain different file revisions.
1052 or both changeset parents contain different file revisions.
1053 """
1053 """
1054
1054
1055 renamed = self._filelog.renamed(self._filenode)
1055 renamed = self._filelog.renamed(self._filenode)
1056 if not renamed:
1056 if not renamed:
1057 return renamed
1057 return renamed
1058
1058
1059 if self.rev() == self.linkrev():
1059 if self.rev() == self.linkrev():
1060 return renamed
1060 return renamed
1061
1061
1062 name = self.path()
1062 name = self.path()
1063 fnode = self._filenode
1063 fnode = self._filenode
1064 for p in self._changectx.parents():
1064 for p in self._changectx.parents():
1065 try:
1065 try:
1066 if fnode == p.filenode(name):
1066 if fnode == p.filenode(name):
1067 return None
1067 return None
1068 except error.LookupError:
1068 except error.LookupError:
1069 pass
1069 pass
1070 return renamed
1070 return renamed
1071
1071
1072 def children(self):
1072 def children(self):
1073 # hard for renames
1073 # hard for renames
1074 c = self._filelog.children(self._filenode)
1074 c = self._filelog.children(self._filenode)
1075 return [filectx(self._repo, self._path, fileid=x,
1075 return [filectx(self._repo, self._path, fileid=x,
1076 filelog=self._filelog) for x in c]
1076 filelog=self._filelog) for x in c]
1077
1077
1078 class committablectx(basectx):
1078 class committablectx(basectx):
1079 """A committablectx object provides common functionality for a context that
1079 """A committablectx object provides common functionality for a context that
1080 wants the ability to commit, e.g. workingctx or memctx."""
1080 wants the ability to commit, e.g. workingctx or memctx."""
1081 def __init__(self, repo, text="", user=None, date=None, extra=None,
1081 def __init__(self, repo, text="", user=None, date=None, extra=None,
1082 changes=None):
1082 changes=None):
1083 super(committablectx, self).__init__(repo)
1083 super(committablectx, self).__init__(repo)
1084 self._rev = None
1084 self._rev = None
1085 self._node = None
1085 self._node = None
1086 self._text = text
1086 self._text = text
1087 if date:
1087 if date:
1088 self._date = dateutil.parsedate(date)
1088 self._date = dateutil.parsedate(date)
1089 if user:
1089 if user:
1090 self._user = user
1090 self._user = user
1091 if changes:
1091 if changes:
1092 self._status = changes
1092 self._status = changes
1093
1093
1094 self._extra = {}
1094 self._extra = {}
1095 if extra:
1095 if extra:
1096 self._extra = extra.copy()
1096 self._extra = extra.copy()
1097 if 'branch' not in self._extra:
1097 if 'branch' not in self._extra:
1098 try:
1098 try:
1099 branch = encoding.fromlocal(self._repo.dirstate.branch())
1099 branch = encoding.fromlocal(self._repo.dirstate.branch())
1100 except UnicodeDecodeError:
1100 except UnicodeDecodeError:
1101 raise error.Abort(_('branch name not in UTF-8!'))
1101 raise error.Abort(_('branch name not in UTF-8!'))
1102 self._extra['branch'] = branch
1102 self._extra['branch'] = branch
1103 if self._extra['branch'] == '':
1103 if self._extra['branch'] == '':
1104 self._extra['branch'] = 'default'
1104 self._extra['branch'] = 'default'
1105
1105
1106 def __bytes__(self):
1106 def __bytes__(self):
1107 return bytes(self._parents[0]) + "+"
1107 return bytes(self._parents[0]) + "+"
1108
1108
1109 __str__ = encoding.strmethod(__bytes__)
1109 __str__ = encoding.strmethod(__bytes__)
1110
1110
1111 def __nonzero__(self):
1111 def __nonzero__(self):
1112 return True
1112 return True
1113
1113
1114 __bool__ = __nonzero__
1114 __bool__ = __nonzero__
1115
1115
1116 def _buildflagfunc(self):
1116 def _buildflagfunc(self):
1117 # Create a fallback function for getting file flags when the
1117 # Create a fallback function for getting file flags when the
1118 # filesystem doesn't support them
1118 # filesystem doesn't support them
1119
1119
1120 copiesget = self._repo.dirstate.copies().get
1120 copiesget = self._repo.dirstate.copies().get
1121 parents = self.parents()
1121 parents = self.parents()
1122 if len(parents) < 2:
1122 if len(parents) < 2:
1123 # when we have one parent, it's easy: copy from parent
1123 # when we have one parent, it's easy: copy from parent
1124 man = parents[0].manifest()
1124 man = parents[0].manifest()
1125 def func(f):
1125 def func(f):
1126 f = copiesget(f, f)
1126 f = copiesget(f, f)
1127 return man.flags(f)
1127 return man.flags(f)
1128 else:
1128 else:
1129 # merges are tricky: we try to reconstruct the unstored
1129 # merges are tricky: we try to reconstruct the unstored
1130 # result from the merge (issue1802)
1130 # result from the merge (issue1802)
1131 p1, p2 = parents
1131 p1, p2 = parents
1132 pa = p1.ancestor(p2)
1132 pa = p1.ancestor(p2)
1133 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1133 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1134
1134
1135 def func(f):
1135 def func(f):
1136 f = copiesget(f, f) # may be wrong for merges with copies
1136 f = copiesget(f, f) # may be wrong for merges with copies
1137 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1137 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1138 if fl1 == fl2:
1138 if fl1 == fl2:
1139 return fl1
1139 return fl1
1140 if fl1 == fla:
1140 if fl1 == fla:
1141 return fl2
1141 return fl2
1142 if fl2 == fla:
1142 if fl2 == fla:
1143 return fl1
1143 return fl1
1144 return '' # punt for conflicts
1144 return '' # punt for conflicts
1145
1145
1146 return func
1146 return func
1147
1147
1148 @propertycache
1148 @propertycache
1149 def _flagfunc(self):
1149 def _flagfunc(self):
1150 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1150 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1151
1151
1152 @propertycache
1152 @propertycache
1153 def _status(self):
1153 def _status(self):
1154 return self._repo.status()
1154 return self._repo.status()
1155
1155
1156 @propertycache
1156 @propertycache
1157 def _user(self):
1157 def _user(self):
1158 return self._repo.ui.username()
1158 return self._repo.ui.username()
1159
1159
1160 @propertycache
1160 @propertycache
1161 def _date(self):
1161 def _date(self):
1162 ui = self._repo.ui
1162 ui = self._repo.ui
1163 date = ui.configdate('devel', 'default-date')
1163 date = ui.configdate('devel', 'default-date')
1164 if date is None:
1164 if date is None:
1165 date = dateutil.makedate()
1165 date = dateutil.makedate()
1166 return date
1166 return date
1167
1167
1168 def subrev(self, subpath):
1168 def subrev(self, subpath):
1169 return None
1169 return None
1170
1170
1171 def manifestnode(self):
1171 def manifestnode(self):
1172 return None
1172 return None
1173 def user(self):
1173 def user(self):
1174 return self._user or self._repo.ui.username()
1174 return self._user or self._repo.ui.username()
1175 def date(self):
1175 def date(self):
1176 return self._date
1176 return self._date
1177 def description(self):
1177 def description(self):
1178 return self._text
1178 return self._text
1179 def files(self):
1179 def files(self):
1180 return sorted(self._status.modified + self._status.added +
1180 return sorted(self._status.modified + self._status.added +
1181 self._status.removed)
1181 self._status.removed)
1182
1182
1183 def modified(self):
1183 def modified(self):
1184 return self._status.modified
1184 return self._status.modified
1185 def added(self):
1185 def added(self):
1186 return self._status.added
1186 return self._status.added
1187 def removed(self):
1187 def removed(self):
1188 return self._status.removed
1188 return self._status.removed
1189 def deleted(self):
1189 def deleted(self):
1190 return self._status.deleted
1190 return self._status.deleted
1191 def branch(self):
1191 def branch(self):
1192 return encoding.tolocal(self._extra['branch'])
1192 return encoding.tolocal(self._extra['branch'])
1193 def closesbranch(self):
1193 def closesbranch(self):
1194 return 'close' in self._extra
1194 return 'close' in self._extra
1195 def extra(self):
1195 def extra(self):
1196 return self._extra
1196 return self._extra
1197
1197
1198 def isinmemory(self):
1198 def isinmemory(self):
1199 return False
1199 return False
1200
1200
1201 def tags(self):
1201 def tags(self):
1202 return []
1202 return []
1203
1203
1204 def bookmarks(self):
1204 def bookmarks(self):
1205 b = []
1205 b = []
1206 for p in self.parents():
1206 for p in self.parents():
1207 b.extend(p.bookmarks())
1207 b.extend(p.bookmarks())
1208 return b
1208 return b
1209
1209
1210 def phase(self):
1210 def phase(self):
1211 phase = phases.draft # default phase to draft
1211 phase = phases.draft # default phase to draft
1212 for p in self.parents():
1212 for p in self.parents():
1213 phase = max(phase, p.phase())
1213 phase = max(phase, p.phase())
1214 return phase
1214 return phase
1215
1215
1216 def hidden(self):
1216 def hidden(self):
1217 return False
1217 return False
1218
1218
1219 def children(self):
1219 def children(self):
1220 return []
1220 return []
1221
1221
1222 def flags(self, path):
1222 def flags(self, path):
1223 if r'_manifest' in self.__dict__:
1223 if r'_manifest' in self.__dict__:
1224 try:
1224 try:
1225 return self._manifest.flags(path)
1225 return self._manifest.flags(path)
1226 except KeyError:
1226 except KeyError:
1227 return ''
1227 return ''
1228
1228
1229 try:
1229 try:
1230 return self._flagfunc(path)
1230 return self._flagfunc(path)
1231 except OSError:
1231 except OSError:
1232 return ''
1232 return ''
1233
1233
1234 def ancestor(self, c2):
1234 def ancestor(self, c2):
1235 """return the "best" ancestor context of self and c2"""
1235 """return the "best" ancestor context of self and c2"""
1236 return self._parents[0].ancestor(c2) # punt on two parents for now
1236 return self._parents[0].ancestor(c2) # punt on two parents for now
1237
1237
1238 def walk(self, match):
1238 def walk(self, match):
1239 '''Generates matching file names.'''
1239 '''Generates matching file names.'''
1240 return sorted(self._repo.dirstate.walk(match,
1240 return sorted(self._repo.dirstate.walk(match,
1241 subrepos=sorted(self.substate),
1241 subrepos=sorted(self.substate),
1242 unknown=True, ignored=False))
1242 unknown=True, ignored=False))
1243
1243
1244 def matches(self, match):
1244 def matches(self, match):
1245 ds = self._repo.dirstate
1245 ds = self._repo.dirstate
1246 return sorted(f for f in ds.matches(match) if ds[f] != 'r')
1246 return sorted(f for f in ds.matches(match) if ds[f] != 'r')
1247
1247
1248 def ancestors(self):
1248 def ancestors(self):
1249 for p in self._parents:
1249 for p in self._parents:
1250 yield p
1250 yield p
1251 for a in self._repo.changelog.ancestors(
1251 for a in self._repo.changelog.ancestors(
1252 [p.rev() for p in self._parents]):
1252 [p.rev() for p in self._parents]):
1253 yield changectx(self._repo, a)
1253 yield changectx(self._repo, a)
1254
1254
1255 def markcommitted(self, node):
1255 def markcommitted(self, node):
1256 """Perform post-commit cleanup necessary after committing this ctx
1256 """Perform post-commit cleanup necessary after committing this ctx
1257
1257
1258 Specifically, this updates backing stores this working context
1258 Specifically, this updates backing stores this working context
1259 wraps to reflect the fact that the changes reflected by this
1259 wraps to reflect the fact that the changes reflected by this
1260 workingctx have been committed. For example, it marks
1260 workingctx have been committed. For example, it marks
1261 modified and added files as normal in the dirstate.
1261 modified and added files as normal in the dirstate.
1262
1262
1263 """
1263 """
1264
1264
1265 with self._repo.dirstate.parentchange():
1265 with self._repo.dirstate.parentchange():
1266 for f in self.modified() + self.added():
1266 for f in self.modified() + self.added():
1267 self._repo.dirstate.normal(f)
1267 self._repo.dirstate.normal(f)
1268 for f in self.removed():
1268 for f in self.removed():
1269 self._repo.dirstate.drop(f)
1269 self._repo.dirstate.drop(f)
1270 self._repo.dirstate.setparents(node)
1270 self._repo.dirstate.setparents(node)
1271
1271
1272 # write changes out explicitly, because nesting wlock at
1272 # write changes out explicitly, because nesting wlock at
1273 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1273 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1274 # from immediately doing so for subsequent changing files
1274 # from immediately doing so for subsequent changing files
1275 self._repo.dirstate.write(self._repo.currenttransaction())
1275 self._repo.dirstate.write(self._repo.currenttransaction())
1276
1276
1277 def dirty(self, missing=False, merge=True, branch=True):
1277 def dirty(self, missing=False, merge=True, branch=True):
1278 return False
1278 return False
1279
1279
1280 class workingctx(committablectx):
1280 class workingctx(committablectx):
1281 """A workingctx object makes access to data related to
1281 """A workingctx object makes access to data related to
1282 the current working directory convenient.
1282 the current working directory convenient.
1283 date - any valid date string or (unixtime, offset), or None.
1283 date - any valid date string or (unixtime, offset), or None.
1284 user - username string, or None.
1284 user - username string, or None.
1285 extra - a dictionary of extra values, or None.
1285 extra - a dictionary of extra values, or None.
1286 changes - a list of file lists as returned by localrepo.status()
1286 changes - a list of file lists as returned by localrepo.status()
1287 or None to use the repository status.
1287 or None to use the repository status.
1288 """
1288 """
1289 def __init__(self, repo, text="", user=None, date=None, extra=None,
1289 def __init__(self, repo, text="", user=None, date=None, extra=None,
1290 changes=None):
1290 changes=None):
1291 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1291 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1292
1292
1293 def __iter__(self):
1293 def __iter__(self):
1294 d = self._repo.dirstate
1294 d = self._repo.dirstate
1295 for f in d:
1295 for f in d:
1296 if d[f] != 'r':
1296 if d[f] != 'r':
1297 yield f
1297 yield f
1298
1298
1299 def __contains__(self, key):
1299 def __contains__(self, key):
1300 return self._repo.dirstate[key] not in "?r"
1300 return self._repo.dirstate[key] not in "?r"
1301
1301
1302 def hex(self):
1302 def hex(self):
1303 return hex(wdirid)
1303 return hex(wdirid)
1304
1304
1305 @propertycache
1305 @propertycache
1306 def _parents(self):
1306 def _parents(self):
1307 p = self._repo.dirstate.parents()
1307 p = self._repo.dirstate.parents()
1308 if p[1] == nullid:
1308 if p[1] == nullid:
1309 p = p[:-1]
1309 p = p[:-1]
1310 return [changectx(self._repo, x) for x in p]
1310 return [changectx(self._repo, x) for x in p]
1311
1311
1312 def _fileinfo(self, path):
1312 def _fileinfo(self, path):
1313 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1313 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1314 self._manifest
1314 self._manifest
1315 return super(workingctx, self)._fileinfo(path)
1315 return super(workingctx, self)._fileinfo(path)
1316
1316
1317 def filectx(self, path, filelog=None):
1317 def filectx(self, path, filelog=None):
1318 """get a file context from the working directory"""
1318 """get a file context from the working directory"""
1319 return workingfilectx(self._repo, path, workingctx=self,
1319 return workingfilectx(self._repo, path, workingctx=self,
1320 filelog=filelog)
1320 filelog=filelog)
1321
1321
1322 def dirty(self, missing=False, merge=True, branch=True):
1322 def dirty(self, missing=False, merge=True, branch=True):
1323 "check whether a working directory is modified"
1323 "check whether a working directory is modified"
1324 # check subrepos first
1324 # check subrepos first
1325 for s in sorted(self.substate):
1325 for s in sorted(self.substate):
1326 if self.sub(s).dirty(missing=missing):
1326 if self.sub(s).dirty(missing=missing):
1327 return True
1327 return True
1328 # check current working dir
1328 # check current working dir
1329 return ((merge and self.p2()) or
1329 return ((merge and self.p2()) or
1330 (branch and self.branch() != self.p1().branch()) or
1330 (branch and self.branch() != self.p1().branch()) or
1331 self.modified() or self.added() or self.removed() or
1331 self.modified() or self.added() or self.removed() or
1332 (missing and self.deleted()))
1332 (missing and self.deleted()))
1333
1333
1334 def add(self, list, prefix=""):
1334 def add(self, list, prefix=""):
1335 with self._repo.wlock():
1335 with self._repo.wlock():
1336 ui, ds = self._repo.ui, self._repo.dirstate
1336 ui, ds = self._repo.ui, self._repo.dirstate
1337 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1337 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1338 rejected = []
1338 rejected = []
1339 lstat = self._repo.wvfs.lstat
1339 lstat = self._repo.wvfs.lstat
1340 for f in list:
1340 for f in list:
1341 # ds.pathto() returns an absolute file when this is invoked from
1341 # ds.pathto() returns an absolute file when this is invoked from
1342 # the keyword extension. That gets flagged as non-portable on
1342 # the keyword extension. That gets flagged as non-portable on
1343 # Windows, since it contains the drive letter and colon.
1343 # Windows, since it contains the drive letter and colon.
1344 scmutil.checkportable(ui, os.path.join(prefix, f))
1344 scmutil.checkportable(ui, os.path.join(prefix, f))
1345 try:
1345 try:
1346 st = lstat(f)
1346 st = lstat(f)
1347 except OSError:
1347 except OSError:
1348 ui.warn(_("%s does not exist!\n") % uipath(f))
1348 ui.warn(_("%s does not exist!\n") % uipath(f))
1349 rejected.append(f)
1349 rejected.append(f)
1350 continue
1350 continue
1351 limit = ui.configbytes('ui', 'large-file-limit')
1351 limit = ui.configbytes('ui', 'large-file-limit')
1352 if limit != 0 and st.st_size > limit:
1352 if limit != 0 and st.st_size > limit:
1353 ui.warn(_("%s: up to %d MB of RAM may be required "
1353 ui.warn(_("%s: up to %d MB of RAM may be required "
1354 "to manage this file\n"
1354 "to manage this file\n"
1355 "(use 'hg revert %s' to cancel the "
1355 "(use 'hg revert %s' to cancel the "
1356 "pending addition)\n")
1356 "pending addition)\n")
1357 % (f, 3 * st.st_size // 1000000, uipath(f)))
1357 % (f, 3 * st.st_size // 1000000, uipath(f)))
1358 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1358 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1359 ui.warn(_("%s not added: only files and symlinks "
1359 ui.warn(_("%s not added: only files and symlinks "
1360 "supported currently\n") % uipath(f))
1360 "supported currently\n") % uipath(f))
1361 rejected.append(f)
1361 rejected.append(f)
1362 elif ds[f] in 'amn':
1362 elif ds[f] in 'amn':
1363 ui.warn(_("%s already tracked!\n") % uipath(f))
1363 ui.warn(_("%s already tracked!\n") % uipath(f))
1364 elif ds[f] == 'r':
1364 elif ds[f] == 'r':
1365 ds.normallookup(f)
1365 ds.normallookup(f)
1366 else:
1366 else:
1367 ds.add(f)
1367 ds.add(f)
1368 return rejected
1368 return rejected
1369
1369
1370 def forget(self, files, prefix=""):
1370 def forget(self, files, prefix=""):
1371 with self._repo.wlock():
1371 with self._repo.wlock():
1372 ds = self._repo.dirstate
1372 ds = self._repo.dirstate
1373 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1373 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1374 rejected = []
1374 rejected = []
1375 for f in files:
1375 for f in files:
1376 if f not in self._repo.dirstate:
1376 if f not in self._repo.dirstate:
1377 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1377 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1378 rejected.append(f)
1378 rejected.append(f)
1379 elif self._repo.dirstate[f] != 'a':
1379 elif self._repo.dirstate[f] != 'a':
1380 self._repo.dirstate.remove(f)
1380 self._repo.dirstate.remove(f)
1381 else:
1381 else:
1382 self._repo.dirstate.drop(f)
1382 self._repo.dirstate.drop(f)
1383 return rejected
1383 return rejected
1384
1384
1385 def undelete(self, list):
1385 def undelete(self, list):
1386 pctxs = self.parents()
1386 pctxs = self.parents()
1387 with self._repo.wlock():
1387 with self._repo.wlock():
1388 ds = self._repo.dirstate
1388 ds = self._repo.dirstate
1389 for f in list:
1389 for f in list:
1390 if self._repo.dirstate[f] != 'r':
1390 if self._repo.dirstate[f] != 'r':
1391 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1391 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1392 else:
1392 else:
1393 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1393 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1394 t = fctx.data()
1394 t = fctx.data()
1395 self._repo.wwrite(f, t, fctx.flags())
1395 self._repo.wwrite(f, t, fctx.flags())
1396 self._repo.dirstate.normal(f)
1396 self._repo.dirstate.normal(f)
1397
1397
1398 def copy(self, source, dest):
1398 def copy(self, source, dest):
1399 try:
1399 try:
1400 st = self._repo.wvfs.lstat(dest)
1400 st = self._repo.wvfs.lstat(dest)
1401 except OSError as err:
1401 except OSError as err:
1402 if err.errno != errno.ENOENT:
1402 if err.errno != errno.ENOENT:
1403 raise
1403 raise
1404 self._repo.ui.warn(_("%s does not exist!\n")
1404 self._repo.ui.warn(_("%s does not exist!\n")
1405 % self._repo.dirstate.pathto(dest))
1405 % self._repo.dirstate.pathto(dest))
1406 return
1406 return
1407 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1407 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1408 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1408 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1409 "symbolic link\n")
1409 "symbolic link\n")
1410 % self._repo.dirstate.pathto(dest))
1410 % self._repo.dirstate.pathto(dest))
1411 else:
1411 else:
1412 with self._repo.wlock():
1412 with self._repo.wlock():
1413 if self._repo.dirstate[dest] in '?':
1413 if self._repo.dirstate[dest] in '?':
1414 self._repo.dirstate.add(dest)
1414 self._repo.dirstate.add(dest)
1415 elif self._repo.dirstate[dest] in 'r':
1415 elif self._repo.dirstate[dest] in 'r':
1416 self._repo.dirstate.normallookup(dest)
1416 self._repo.dirstate.normallookup(dest)
1417 self._repo.dirstate.copy(source, dest)
1417 self._repo.dirstate.copy(source, dest)
1418
1418
1419 def match(self, pats=None, include=None, exclude=None, default='glob',
1419 def match(self, pats=None, include=None, exclude=None, default='glob',
1420 listsubrepos=False, badfn=None):
1420 listsubrepos=False, badfn=None):
1421 r = self._repo
1421 r = self._repo
1422
1422
1423 # Only a case insensitive filesystem needs magic to translate user input
1423 # Only a case insensitive filesystem needs magic to translate user input
1424 # to actual case in the filesystem.
1424 # to actual case in the filesystem.
1425 icasefs = not util.fscasesensitive(r.root)
1425 icasefs = not util.fscasesensitive(r.root)
1426 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1426 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1427 default, auditor=r.auditor, ctx=self,
1427 default, auditor=r.auditor, ctx=self,
1428 listsubrepos=listsubrepos, badfn=badfn,
1428 listsubrepos=listsubrepos, badfn=badfn,
1429 icasefs=icasefs)
1429 icasefs=icasefs)
1430
1430
1431 def _filtersuspectsymlink(self, files):
1431 def _filtersuspectsymlink(self, files):
1432 if not files or self._repo.dirstate._checklink:
1432 if not files or self._repo.dirstate._checklink:
1433 return files
1433 return files
1434
1434
1435 # Symlink placeholders may get non-symlink-like contents
1435 # Symlink placeholders may get non-symlink-like contents
1436 # via user error or dereferencing by NFS or Samba servers,
1436 # via user error or dereferencing by NFS or Samba servers,
1437 # so we filter out any placeholders that don't look like a
1437 # so we filter out any placeholders that don't look like a
1438 # symlink
1438 # symlink
1439 sane = []
1439 sane = []
1440 for f in files:
1440 for f in files:
1441 if self.flags(f) == 'l':
1441 if self.flags(f) == 'l':
1442 d = self[f].data()
1442 d = self[f].data()
1443 if (d == '' or len(d) >= 1024 or '\n' in d
1443 if (d == '' or len(d) >= 1024 or '\n' in d
1444 or stringutil.binary(d)):
1444 or stringutil.binary(d)):
1445 self._repo.ui.debug('ignoring suspect symlink placeholder'
1445 self._repo.ui.debug('ignoring suspect symlink placeholder'
1446 ' "%s"\n' % f)
1446 ' "%s"\n' % f)
1447 continue
1447 continue
1448 sane.append(f)
1448 sane.append(f)
1449 return sane
1449 return sane
1450
1450
1451 def _checklookup(self, files):
1451 def _checklookup(self, files):
1452 # check for any possibly clean files
1452 # check for any possibly clean files
1453 if not files:
1453 if not files:
1454 return [], [], []
1454 return [], [], []
1455
1455
1456 modified = []
1456 modified = []
1457 deleted = []
1457 deleted = []
1458 fixup = []
1458 fixup = []
1459 pctx = self._parents[0]
1459 pctx = self._parents[0]
1460 # do a full compare of any files that might have changed
1460 # do a full compare of any files that might have changed
1461 for f in sorted(files):
1461 for f in sorted(files):
1462 try:
1462 try:
1463 # This will return True for a file that got replaced by a
1463 # This will return True for a file that got replaced by a
1464 # directory in the interim, but fixing that is pretty hard.
1464 # directory in the interim, but fixing that is pretty hard.
1465 if (f not in pctx or self.flags(f) != pctx.flags(f)
1465 if (f not in pctx or self.flags(f) != pctx.flags(f)
1466 or pctx[f].cmp(self[f])):
1466 or pctx[f].cmp(self[f])):
1467 modified.append(f)
1467 modified.append(f)
1468 else:
1468 else:
1469 fixup.append(f)
1469 fixup.append(f)
1470 except (IOError, OSError):
1470 except (IOError, OSError):
1471 # A file become inaccessible in between? Mark it as deleted,
1471 # A file become inaccessible in between? Mark it as deleted,
1472 # matching dirstate behavior (issue5584).
1472 # matching dirstate behavior (issue5584).
1473 # The dirstate has more complex behavior around whether a
1473 # The dirstate has more complex behavior around whether a
1474 # missing file matches a directory, etc, but we don't need to
1474 # missing file matches a directory, etc, but we don't need to
1475 # bother with that: if f has made it to this point, we're sure
1475 # bother with that: if f has made it to this point, we're sure
1476 # it's in the dirstate.
1476 # it's in the dirstate.
1477 deleted.append(f)
1477 deleted.append(f)
1478
1478
1479 return modified, deleted, fixup
1479 return modified, deleted, fixup
1480
1480
1481 def _poststatusfixup(self, status, fixup):
1481 def _poststatusfixup(self, status, fixup):
1482 """update dirstate for files that are actually clean"""
1482 """update dirstate for files that are actually clean"""
1483 poststatus = self._repo.postdsstatus()
1483 poststatus = self._repo.postdsstatus()
1484 if fixup or poststatus:
1484 if fixup or poststatus:
1485 try:
1485 try:
1486 oldid = self._repo.dirstate.identity()
1486 oldid = self._repo.dirstate.identity()
1487
1487
1488 # updating the dirstate is optional
1488 # updating the dirstate is optional
1489 # so we don't wait on the lock
1489 # so we don't wait on the lock
1490 # wlock can invalidate the dirstate, so cache normal _after_
1490 # wlock can invalidate the dirstate, so cache normal _after_
1491 # taking the lock
1491 # taking the lock
1492 with self._repo.wlock(False):
1492 with self._repo.wlock(False):
1493 if self._repo.dirstate.identity() == oldid:
1493 if self._repo.dirstate.identity() == oldid:
1494 if fixup:
1494 if fixup:
1495 normal = self._repo.dirstate.normal
1495 normal = self._repo.dirstate.normal
1496 for f in fixup:
1496 for f in fixup:
1497 normal(f)
1497 normal(f)
1498 # write changes out explicitly, because nesting
1498 # write changes out explicitly, because nesting
1499 # wlock at runtime may prevent 'wlock.release()'
1499 # wlock at runtime may prevent 'wlock.release()'
1500 # after this block from doing so for subsequent
1500 # after this block from doing so for subsequent
1501 # changing files
1501 # changing files
1502 tr = self._repo.currenttransaction()
1502 tr = self._repo.currenttransaction()
1503 self._repo.dirstate.write(tr)
1503 self._repo.dirstate.write(tr)
1504
1504
1505 if poststatus:
1505 if poststatus:
1506 for ps in poststatus:
1506 for ps in poststatus:
1507 ps(self, status)
1507 ps(self, status)
1508 else:
1508 else:
1509 # in this case, writing changes out breaks
1509 # in this case, writing changes out breaks
1510 # consistency, because .hg/dirstate was
1510 # consistency, because .hg/dirstate was
1511 # already changed simultaneously after last
1511 # already changed simultaneously after last
1512 # caching (see also issue5584 for detail)
1512 # caching (see also issue5584 for detail)
1513 self._repo.ui.debug('skip updating dirstate: '
1513 self._repo.ui.debug('skip updating dirstate: '
1514 'identity mismatch\n')
1514 'identity mismatch\n')
1515 except error.LockError:
1515 except error.LockError:
1516 pass
1516 pass
1517 finally:
1517 finally:
1518 # Even if the wlock couldn't be grabbed, clear out the list.
1518 # Even if the wlock couldn't be grabbed, clear out the list.
1519 self._repo.clearpostdsstatus()
1519 self._repo.clearpostdsstatus()
1520
1520
1521 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1521 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1522 '''Gets the status from the dirstate -- internal use only.'''
1522 '''Gets the status from the dirstate -- internal use only.'''
1523 subrepos = []
1523 subrepos = []
1524 if '.hgsub' in self:
1524 if '.hgsub' in self:
1525 subrepos = sorted(self.substate)
1525 subrepos = sorted(self.substate)
1526 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1526 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1527 clean=clean, unknown=unknown)
1527 clean=clean, unknown=unknown)
1528
1528
1529 # check for any possibly clean files
1529 # check for any possibly clean files
1530 fixup = []
1530 fixup = []
1531 if cmp:
1531 if cmp:
1532 modified2, deleted2, fixup = self._checklookup(cmp)
1532 modified2, deleted2, fixup = self._checklookup(cmp)
1533 s.modified.extend(modified2)
1533 s.modified.extend(modified2)
1534 s.deleted.extend(deleted2)
1534 s.deleted.extend(deleted2)
1535
1535
1536 if fixup and clean:
1536 if fixup and clean:
1537 s.clean.extend(fixup)
1537 s.clean.extend(fixup)
1538
1538
1539 self._poststatusfixup(s, fixup)
1539 self._poststatusfixup(s, fixup)
1540
1540
1541 if match.always():
1541 if match.always():
1542 # cache for performance
1542 # cache for performance
1543 if s.unknown or s.ignored or s.clean:
1543 if s.unknown or s.ignored or s.clean:
1544 # "_status" is cached with list*=False in the normal route
1544 # "_status" is cached with list*=False in the normal route
1545 self._status = scmutil.status(s.modified, s.added, s.removed,
1545 self._status = scmutil.status(s.modified, s.added, s.removed,
1546 s.deleted, [], [], [])
1546 s.deleted, [], [], [])
1547 else:
1547 else:
1548 self._status = s
1548 self._status = s
1549
1549
1550 return s
1550 return s
1551
1551
1552 @propertycache
1552 @propertycache
1553 def _manifest(self):
1553 def _manifest(self):
1554 """generate a manifest corresponding to the values in self._status
1554 """generate a manifest corresponding to the values in self._status
1555
1555
1556 This reuse the file nodeid from parent, but we use special node
1556 This reuse the file nodeid from parent, but we use special node
1557 identifiers for added and modified files. This is used by manifests
1557 identifiers for added and modified files. This is used by manifests
1558 merge to see that files are different and by update logic to avoid
1558 merge to see that files are different and by update logic to avoid
1559 deleting newly added files.
1559 deleting newly added files.
1560 """
1560 """
1561 return self._buildstatusmanifest(self._status)
1561 return self._buildstatusmanifest(self._status)
1562
1562
1563 def _buildstatusmanifest(self, status):
1563 def _buildstatusmanifest(self, status):
1564 """Builds a manifest that includes the given status results."""
1564 """Builds a manifest that includes the given status results."""
1565 parents = self.parents()
1565 parents = self.parents()
1566
1566
1567 man = parents[0].manifest().copy()
1567 man = parents[0].manifest().copy()
1568
1568
1569 ff = self._flagfunc
1569 ff = self._flagfunc
1570 for i, l in ((addednodeid, status.added),
1570 for i, l in ((addednodeid, status.added),
1571 (modifiednodeid, status.modified)):
1571 (modifiednodeid, status.modified)):
1572 for f in l:
1572 for f in l:
1573 man[f] = i
1573 man[f] = i
1574 try:
1574 try:
1575 man.setflag(f, ff(f))
1575 man.setflag(f, ff(f))
1576 except OSError:
1576 except OSError:
1577 pass
1577 pass
1578
1578
1579 for f in status.deleted + status.removed:
1579 for f in status.deleted + status.removed:
1580 if f in man:
1580 if f in man:
1581 del man[f]
1581 del man[f]
1582
1582
1583 return man
1583 return man
1584
1584
1585 def _buildstatus(self, other, s, match, listignored, listclean,
1585 def _buildstatus(self, other, s, match, listignored, listclean,
1586 listunknown):
1586 listunknown):
1587 """build a status with respect to another context
1587 """build a status with respect to another context
1588
1588
1589 This includes logic for maintaining the fast path of status when
1589 This includes logic for maintaining the fast path of status when
1590 comparing the working directory against its parent, which is to skip
1590 comparing the working directory against its parent, which is to skip
1591 building a new manifest if self (working directory) is not comparing
1591 building a new manifest if self (working directory) is not comparing
1592 against its parent (repo['.']).
1592 against its parent (repo['.']).
1593 """
1593 """
1594 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1594 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1595 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1595 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1596 # might have accidentally ended up with the entire contents of the file
1596 # might have accidentally ended up with the entire contents of the file
1597 # they are supposed to be linking to.
1597 # they are supposed to be linking to.
1598 s.modified[:] = self._filtersuspectsymlink(s.modified)
1598 s.modified[:] = self._filtersuspectsymlink(s.modified)
1599 if other != self._repo['.']:
1599 if other != self._repo['.']:
1600 s = super(workingctx, self)._buildstatus(other, s, match,
1600 s = super(workingctx, self)._buildstatus(other, s, match,
1601 listignored, listclean,
1601 listignored, listclean,
1602 listunknown)
1602 listunknown)
1603 return s
1603 return s
1604
1604
1605 def _matchstatus(self, other, match):
1605 def _matchstatus(self, other, match):
1606 """override the match method with a filter for directory patterns
1606 """override the match method with a filter for directory patterns
1607
1607
1608 We use inheritance to customize the match.bad method only in cases of
1608 We use inheritance to customize the match.bad method only in cases of
1609 workingctx since it belongs only to the working directory when
1609 workingctx since it belongs only to the working directory when
1610 comparing against the parent changeset.
1610 comparing against the parent changeset.
1611
1611
1612 If we aren't comparing against the working directory's parent, then we
1612 If we aren't comparing against the working directory's parent, then we
1613 just use the default match object sent to us.
1613 just use the default match object sent to us.
1614 """
1614 """
1615 if other != self._repo['.']:
1615 if other != self._repo['.']:
1616 def bad(f, msg):
1616 def bad(f, msg):
1617 # 'f' may be a directory pattern from 'match.files()',
1617 # 'f' may be a directory pattern from 'match.files()',
1618 # so 'f not in ctx1' is not enough
1618 # so 'f not in ctx1' is not enough
1619 if f not in other and not other.hasdir(f):
1619 if f not in other and not other.hasdir(f):
1620 self._repo.ui.warn('%s: %s\n' %
1620 self._repo.ui.warn('%s: %s\n' %
1621 (self._repo.dirstate.pathto(f), msg))
1621 (self._repo.dirstate.pathto(f), msg))
1622 match.bad = bad
1622 match.bad = bad
1623 return match
1623 return match
1624
1624
1625 def markcommitted(self, node):
1625 def markcommitted(self, node):
1626 super(workingctx, self).markcommitted(node)
1626 super(workingctx, self).markcommitted(node)
1627
1627
1628 sparse.aftercommit(self._repo, node)
1628 sparse.aftercommit(self._repo, node)
1629
1629
1630 class committablefilectx(basefilectx):
1630 class committablefilectx(basefilectx):
1631 """A committablefilectx provides common functionality for a file context
1631 """A committablefilectx provides common functionality for a file context
1632 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1632 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1633 def __init__(self, repo, path, filelog=None, ctx=None):
1633 def __init__(self, repo, path, filelog=None, ctx=None):
1634 self._repo = repo
1634 self._repo = repo
1635 self._path = path
1635 self._path = path
1636 self._changeid = None
1636 self._changeid = None
1637 self._filerev = self._filenode = None
1637 self._filerev = self._filenode = None
1638
1638
1639 if filelog is not None:
1639 if filelog is not None:
1640 self._filelog = filelog
1640 self._filelog = filelog
1641 if ctx:
1641 if ctx:
1642 self._changectx = ctx
1642 self._changectx = ctx
1643
1643
1644 def __nonzero__(self):
1644 def __nonzero__(self):
1645 return True
1645 return True
1646
1646
1647 __bool__ = __nonzero__
1647 __bool__ = __nonzero__
1648
1648
1649 def linkrev(self):
1649 def linkrev(self):
1650 # linked to self._changectx no matter if file is modified or not
1650 # linked to self._changectx no matter if file is modified or not
1651 return self.rev()
1651 return self.rev()
1652
1652
1653 def parents(self):
1653 def parents(self):
1654 '''return parent filectxs, following copies if necessary'''
1654 '''return parent filectxs, following copies if necessary'''
1655 def filenode(ctx, path):
1655 def filenode(ctx, path):
1656 return ctx._manifest.get(path, nullid)
1656 return ctx._manifest.get(path, nullid)
1657
1657
1658 path = self._path
1658 path = self._path
1659 fl = self._filelog
1659 fl = self._filelog
1660 pcl = self._changectx._parents
1660 pcl = self._changectx._parents
1661 renamed = self.renamed()
1661 renamed = self.renamed()
1662
1662
1663 if renamed:
1663 if renamed:
1664 pl = [renamed + (None,)]
1664 pl = [renamed + (None,)]
1665 else:
1665 else:
1666 pl = [(path, filenode(pcl[0], path), fl)]
1666 pl = [(path, filenode(pcl[0], path), fl)]
1667
1667
1668 for pc in pcl[1:]:
1668 for pc in pcl[1:]:
1669 pl.append((path, filenode(pc, path), fl))
1669 pl.append((path, filenode(pc, path), fl))
1670
1670
1671 return [self._parentfilectx(p, fileid=n, filelog=l)
1671 return [self._parentfilectx(p, fileid=n, filelog=l)
1672 for p, n, l in pl if n != nullid]
1672 for p, n, l in pl if n != nullid]
1673
1673
1674 def children(self):
1674 def children(self):
1675 return []
1675 return []
1676
1676
1677 class workingfilectx(committablefilectx):
1677 class workingfilectx(committablefilectx):
1678 """A workingfilectx object makes access to data related to a particular
1678 """A workingfilectx object makes access to data related to a particular
1679 file in the working directory convenient."""
1679 file in the working directory convenient."""
1680 def __init__(self, repo, path, filelog=None, workingctx=None):
1680 def __init__(self, repo, path, filelog=None, workingctx=None):
1681 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1681 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1682
1682
1683 @propertycache
1683 @propertycache
1684 def _changectx(self):
1684 def _changectx(self):
1685 return workingctx(self._repo)
1685 return workingctx(self._repo)
1686
1686
1687 def data(self):
1687 def data(self):
1688 return self._repo.wread(self._path)
1688 return self._repo.wread(self._path)
1689 def renamed(self):
1689 def renamed(self):
1690 rp = self._repo.dirstate.copied(self._path)
1690 rp = self._repo.dirstate.copied(self._path)
1691 if not rp:
1691 if not rp:
1692 return None
1692 return None
1693 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1693 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1694
1694
1695 def size(self):
1695 def size(self):
1696 return self._repo.wvfs.lstat(self._path).st_size
1696 return self._repo.wvfs.lstat(self._path).st_size
1697 def date(self):
1697 def date(self):
1698 t, tz = self._changectx.date()
1698 t, tz = self._changectx.date()
1699 try:
1699 try:
1700 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1700 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1701 except OSError as err:
1701 except OSError as err:
1702 if err.errno != errno.ENOENT:
1702 if err.errno != errno.ENOENT:
1703 raise
1703 raise
1704 return (t, tz)
1704 return (t, tz)
1705
1705
1706 def exists(self):
1706 def exists(self):
1707 return self._repo.wvfs.exists(self._path)
1707 return self._repo.wvfs.exists(self._path)
1708
1708
1709 def lexists(self):
1709 def lexists(self):
1710 return self._repo.wvfs.lexists(self._path)
1710 return self._repo.wvfs.lexists(self._path)
1711
1711
1712 def audit(self):
1712 def audit(self):
1713 return self._repo.wvfs.audit(self._path)
1713 return self._repo.wvfs.audit(self._path)
1714
1714
1715 def cmp(self, fctx):
1715 def cmp(self, fctx):
1716 """compare with other file context
1716 """compare with other file context
1717
1717
1718 returns True if different than fctx.
1718 returns True if different than fctx.
1719 """
1719 """
1720 # fctx should be a filectx (not a workingfilectx)
1720 # fctx should be a filectx (not a workingfilectx)
1721 # invert comparison to reuse the same code path
1721 # invert comparison to reuse the same code path
1722 return fctx.cmp(self)
1722 return fctx.cmp(self)
1723
1723
1724 def remove(self, ignoremissing=False):
1724 def remove(self, ignoremissing=False):
1725 """wraps unlink for a repo's working directory"""
1725 """wraps unlink for a repo's working directory"""
1726 rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
1726 rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
1727 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
1727 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
1728 rmdir=rmdir)
1728 rmdir=rmdir)
1729
1729
1730 def write(self, data, flags, backgroundclose=False, **kwargs):
1730 def write(self, data, flags, backgroundclose=False, **kwargs):
1731 """wraps repo.wwrite"""
1731 """wraps repo.wwrite"""
1732 self._repo.wwrite(self._path, data, flags,
1732 self._repo.wwrite(self._path, data, flags,
1733 backgroundclose=backgroundclose,
1733 backgroundclose=backgroundclose,
1734 **kwargs)
1734 **kwargs)
1735
1735
1736 def markcopied(self, src):
1736 def markcopied(self, src):
1737 """marks this file a copy of `src`"""
1737 """marks this file a copy of `src`"""
1738 if self._repo.dirstate[self._path] in "nma":
1738 if self._repo.dirstate[self._path] in "nma":
1739 self._repo.dirstate.copy(src, self._path)
1739 self._repo.dirstate.copy(src, self._path)
1740
1740
1741 def clearunknown(self):
1741 def clearunknown(self):
1742 """Removes conflicting items in the working directory so that
1742 """Removes conflicting items in the working directory so that
1743 ``write()`` can be called successfully.
1743 ``write()`` can be called successfully.
1744 """
1744 """
1745 wvfs = self._repo.wvfs
1745 wvfs = self._repo.wvfs
1746 f = self._path
1746 f = self._path
1747 wvfs.audit(f)
1747 wvfs.audit(f)
1748 if wvfs.isdir(f) and not wvfs.islink(f):
1749 wvfs.rmtree(f, forcibly=True)
1750 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1748 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1749 # remove files under the directory as they should already be
1750 # warned and backed up
1751 if wvfs.isdir(f) and not wvfs.islink(f):
1752 wvfs.rmtree(f, forcibly=True)
1751 for p in reversed(list(util.finddirs(f))):
1753 for p in reversed(list(util.finddirs(f))):
1752 if wvfs.isfileorlink(p):
1754 if wvfs.isfileorlink(p):
1753 wvfs.unlink(p)
1755 wvfs.unlink(p)
1754 break
1756 break
1757 else:
1758 # don't remove files if path conflicts are not processed
1759 if wvfs.isdir(f) and not wvfs.islink(f):
1760 wvfs.removedirs(f)
1755
1761
1756 def setflags(self, l, x):
1762 def setflags(self, l, x):
1757 self._repo.wvfs.setflags(self._path, l, x)
1763 self._repo.wvfs.setflags(self._path, l, x)
1758
1764
1759 class overlayworkingctx(committablectx):
1765 class overlayworkingctx(committablectx):
1760 """Wraps another mutable context with a write-back cache that can be
1766 """Wraps another mutable context with a write-back cache that can be
1761 converted into a commit context.
1767 converted into a commit context.
1762
1768
1763 self._cache[path] maps to a dict with keys: {
1769 self._cache[path] maps to a dict with keys: {
1764 'exists': bool?
1770 'exists': bool?
1765 'date': date?
1771 'date': date?
1766 'data': str?
1772 'data': str?
1767 'flags': str?
1773 'flags': str?
1768 'copied': str? (path or None)
1774 'copied': str? (path or None)
1769 }
1775 }
1770 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1776 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1771 is `False`, the file was deleted.
1777 is `False`, the file was deleted.
1772 """
1778 """
1773
1779
1774 def __init__(self, repo):
1780 def __init__(self, repo):
1775 super(overlayworkingctx, self).__init__(repo)
1781 super(overlayworkingctx, self).__init__(repo)
1776 self.clean()
1782 self.clean()
1777
1783
1778 def setbase(self, wrappedctx):
1784 def setbase(self, wrappedctx):
1779 self._wrappedctx = wrappedctx
1785 self._wrappedctx = wrappedctx
1780 self._parents = [wrappedctx]
1786 self._parents = [wrappedctx]
1781 # Drop old manifest cache as it is now out of date.
1787 # Drop old manifest cache as it is now out of date.
1782 # This is necessary when, e.g., rebasing several nodes with one
1788 # This is necessary when, e.g., rebasing several nodes with one
1783 # ``overlayworkingctx`` (e.g. with --collapse).
1789 # ``overlayworkingctx`` (e.g. with --collapse).
1784 util.clearcachedproperty(self, '_manifest')
1790 util.clearcachedproperty(self, '_manifest')
1785
1791
1786 def data(self, path):
1792 def data(self, path):
1787 if self.isdirty(path):
1793 if self.isdirty(path):
1788 if self._cache[path]['exists']:
1794 if self._cache[path]['exists']:
1789 if self._cache[path]['data']:
1795 if self._cache[path]['data']:
1790 return self._cache[path]['data']
1796 return self._cache[path]['data']
1791 else:
1797 else:
1792 # Must fallback here, too, because we only set flags.
1798 # Must fallback here, too, because we only set flags.
1793 return self._wrappedctx[path].data()
1799 return self._wrappedctx[path].data()
1794 else:
1800 else:
1795 raise error.ProgrammingError("No such file or directory: %s" %
1801 raise error.ProgrammingError("No such file or directory: %s" %
1796 path)
1802 path)
1797 else:
1803 else:
1798 return self._wrappedctx[path].data()
1804 return self._wrappedctx[path].data()
1799
1805
1800 @propertycache
1806 @propertycache
1801 def _manifest(self):
1807 def _manifest(self):
1802 parents = self.parents()
1808 parents = self.parents()
1803 man = parents[0].manifest().copy()
1809 man = parents[0].manifest().copy()
1804
1810
1805 flag = self._flagfunc
1811 flag = self._flagfunc
1806 for path in self.added():
1812 for path in self.added():
1807 man[path] = addednodeid
1813 man[path] = addednodeid
1808 man.setflag(path, flag(path))
1814 man.setflag(path, flag(path))
1809 for path in self.modified():
1815 for path in self.modified():
1810 man[path] = modifiednodeid
1816 man[path] = modifiednodeid
1811 man.setflag(path, flag(path))
1817 man.setflag(path, flag(path))
1812 for path in self.removed():
1818 for path in self.removed():
1813 del man[path]
1819 del man[path]
1814 return man
1820 return man
1815
1821
1816 @propertycache
1822 @propertycache
1817 def _flagfunc(self):
1823 def _flagfunc(self):
1818 def f(path):
1824 def f(path):
1819 return self._cache[path]['flags']
1825 return self._cache[path]['flags']
1820 return f
1826 return f
1821
1827
1822 def files(self):
1828 def files(self):
1823 return sorted(self.added() + self.modified() + self.removed())
1829 return sorted(self.added() + self.modified() + self.removed())
1824
1830
1825 def modified(self):
1831 def modified(self):
1826 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1832 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1827 self._existsinparent(f)]
1833 self._existsinparent(f)]
1828
1834
1829 def added(self):
1835 def added(self):
1830 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1836 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1831 not self._existsinparent(f)]
1837 not self._existsinparent(f)]
1832
1838
1833 def removed(self):
1839 def removed(self):
1834 return [f for f in self._cache.keys() if
1840 return [f for f in self._cache.keys() if
1835 not self._cache[f]['exists'] and self._existsinparent(f)]
1841 not self._cache[f]['exists'] and self._existsinparent(f)]
1836
1842
1837 def isinmemory(self):
1843 def isinmemory(self):
1838 return True
1844 return True
1839
1845
1840 def filedate(self, path):
1846 def filedate(self, path):
1841 if self.isdirty(path):
1847 if self.isdirty(path):
1842 return self._cache[path]['date']
1848 return self._cache[path]['date']
1843 else:
1849 else:
1844 return self._wrappedctx[path].date()
1850 return self._wrappedctx[path].date()
1845
1851
1846 def markcopied(self, path, origin):
1852 def markcopied(self, path, origin):
1847 if self.isdirty(path):
1853 if self.isdirty(path):
1848 self._cache[path]['copied'] = origin
1854 self._cache[path]['copied'] = origin
1849 else:
1855 else:
1850 raise error.ProgrammingError('markcopied() called on clean context')
1856 raise error.ProgrammingError('markcopied() called on clean context')
1851
1857
1852 def copydata(self, path):
1858 def copydata(self, path):
1853 if self.isdirty(path):
1859 if self.isdirty(path):
1854 return self._cache[path]['copied']
1860 return self._cache[path]['copied']
1855 else:
1861 else:
1856 raise error.ProgrammingError('copydata() called on clean context')
1862 raise error.ProgrammingError('copydata() called on clean context')
1857
1863
1858 def flags(self, path):
1864 def flags(self, path):
1859 if self.isdirty(path):
1865 if self.isdirty(path):
1860 if self._cache[path]['exists']:
1866 if self._cache[path]['exists']:
1861 return self._cache[path]['flags']
1867 return self._cache[path]['flags']
1862 else:
1868 else:
1863 raise error.ProgrammingError("No such file or directory: %s" %
1869 raise error.ProgrammingError("No such file or directory: %s" %
1864 path)
1870 path)
1865 else:
1871 else:
1866 return self._wrappedctx[path].flags()
1872 return self._wrappedctx[path].flags()
1867
1873
1868 def _existsinparent(self, path):
1874 def _existsinparent(self, path):
1869 try:
1875 try:
1870 # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
1876 # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
1871 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
1877 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
1872 # with an ``exists()`` function.
1878 # with an ``exists()`` function.
1873 self._wrappedctx[path]
1879 self._wrappedctx[path]
1874 return True
1880 return True
1875 except error.ManifestLookupError:
1881 except error.ManifestLookupError:
1876 return False
1882 return False
1877
1883
1878 def _auditconflicts(self, path):
1884 def _auditconflicts(self, path):
1879 """Replicates conflict checks done by wvfs.write().
1885 """Replicates conflict checks done by wvfs.write().
1880
1886
1881 Since we never write to the filesystem and never call `applyupdates` in
1887 Since we never write to the filesystem and never call `applyupdates` in
1882 IMM, we'll never check that a path is actually writable -- e.g., because
1888 IMM, we'll never check that a path is actually writable -- e.g., because
1883 it adds `a/foo`, but `a` is actually a file in the other commit.
1889 it adds `a/foo`, but `a` is actually a file in the other commit.
1884 """
1890 """
1885 def fail(path, component):
1891 def fail(path, component):
1886 # p1() is the base and we're receiving "writes" for p2()'s
1892 # p1() is the base and we're receiving "writes" for p2()'s
1887 # files.
1893 # files.
1888 if 'l' in self.p1()[component].flags():
1894 if 'l' in self.p1()[component].flags():
1889 raise error.Abort("error: %s conflicts with symlink %s "
1895 raise error.Abort("error: %s conflicts with symlink %s "
1890 "in %s." % (path, component,
1896 "in %s." % (path, component,
1891 self.p1().rev()))
1897 self.p1().rev()))
1892 else:
1898 else:
1893 raise error.Abort("error: '%s' conflicts with file '%s' in "
1899 raise error.Abort("error: '%s' conflicts with file '%s' in "
1894 "%s." % (path, component,
1900 "%s." % (path, component,
1895 self.p1().rev()))
1901 self.p1().rev()))
1896
1902
1897 # Test that each new directory to be created to write this path from p2
1903 # Test that each new directory to be created to write this path from p2
1898 # is not a file in p1.
1904 # is not a file in p1.
1899 components = path.split('/')
1905 components = path.split('/')
1900 for i in xrange(len(components)):
1906 for i in xrange(len(components)):
1901 component = "/".join(components[0:i])
1907 component = "/".join(components[0:i])
1902 if component in self.p1():
1908 if component in self.p1():
1903 fail(path, component)
1909 fail(path, component)
1904
1910
1905 # Test the other direction -- that this path from p2 isn't a directory
1911 # Test the other direction -- that this path from p2 isn't a directory
1906 # in p1 (test that p1 doesn't have any paths matching `path/*`).
1912 # in p1 (test that p1 doesn't have any paths matching `path/*`).
1907 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
1913 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
1908 matches = self.p1().manifest().matches(match)
1914 matches = self.p1().manifest().matches(match)
1909 if len(matches) > 0:
1915 if len(matches) > 0:
1910 if len(matches) == 1 and matches.keys()[0] == path:
1916 if len(matches) == 1 and matches.keys()[0] == path:
1911 return
1917 return
1912 raise error.Abort("error: file '%s' cannot be written because "
1918 raise error.Abort("error: file '%s' cannot be written because "
1913 " '%s/' is a folder in %s (containing %d "
1919 " '%s/' is a folder in %s (containing %d "
1914 "entries: %s)"
1920 "entries: %s)"
1915 % (path, path, self.p1(), len(matches),
1921 % (path, path, self.p1(), len(matches),
1916 ', '.join(matches.keys())))
1922 ', '.join(matches.keys())))
1917
1923
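A standalone sketch of the two checks performed above, using plain sets of tracked paths in place of real manifests (the helper name and the sample paths are made up for illustration):

def wouldconflict(path, p1files):
    # every proper prefix of the new path must not be a file in p1
    components = path.split('/')
    for i in range(1, len(components)):
        prefix = '/'.join(components[:i])
        if prefix in p1files:
            return 'file %s blocks directory %s' % (prefix, path)
    # the new path itself must not already be a directory in p1
    if any(f.startswith(path + '/') for f in p1files):
        return 'directory %s/ blocks file %s' % (path, path)
    return None

assert wouldconflict('a/foo', {'a'}) == 'file a blocks directory a/foo'
assert wouldconflict('a', {'a/foo'}) == 'directory a/ blocks file a'
assert wouldconflict('b', {'a', 'a/foo'}) is None
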
1918 def write(self, path, data, flags='', **kwargs):
1924 def write(self, path, data, flags='', **kwargs):
1919 if data is None:
1925 if data is None:
1920 raise error.ProgrammingError("data must be non-None")
1926 raise error.ProgrammingError("data must be non-None")
1921 self._auditconflicts(path)
1927 self._auditconflicts(path)
1922 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
1928 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
1923 flags=flags)
1929 flags=flags)
1924
1930
1925 def setflags(self, path, l, x):
1931 def setflags(self, path, l, x):
1926 self._markdirty(path, exists=True, date=dateutil.makedate(),
1932 self._markdirty(path, exists=True, date=dateutil.makedate(),
1927 flags=(l and 'l' or '') + (x and 'x' or ''))
1933 flags=(l and 'l' or '') + (x and 'x' or ''))
1928
1934
1929 def remove(self, path):
1935 def remove(self, path):
1930 self._markdirty(path, exists=False)
1936 self._markdirty(path, exists=False)
1931
1937
1932 def exists(self, path):
1938 def exists(self, path):
1933 """exists behaves like `lexists`, but needs to follow symlinks and
1939 """exists behaves like `lexists`, but needs to follow symlinks and
1934 return False if they are broken.
1940 return False if they are broken.
1935 """
1941 """
1936 if self.isdirty(path):
1942 if self.isdirty(path):
1937 # If this path exists and is a symlink, "follow" it by calling
1943 # If this path exists and is a symlink, "follow" it by calling
1938 # exists on the destination path.
1944 # exists on the destination path.
1939 if (self._cache[path]['exists'] and
1945 if (self._cache[path]['exists'] and
1940 'l' in self._cache[path]['flags']):
1946 'l' in self._cache[path]['flags']):
1941 return self.exists(self._cache[path]['data'].strip())
1947 return self.exists(self._cache[path]['data'].strip())
1942 else:
1948 else:
1943 return self._cache[path]['exists']
1949 return self._cache[path]['exists']
1944
1950
1945 return self._existsinparent(path)
1951 return self._existsinparent(path)
1946
1952
1947 def lexists(self, path):
1953 def lexists(self, path):
1948 """lexists returns True if the path exists"""
1954 """lexists returns True if the path exists"""
1949 if self.isdirty(path):
1955 if self.isdirty(path):
1950 return self._cache[path]['exists']
1956 return self._cache[path]['exists']
1951
1957
1952 return self._existsinparent(path)
1958 return self._existsinparent(path)
1953
1959
1954 def size(self, path):
1960 def size(self, path):
1955 if self.isdirty(path):
1961 if self.isdirty(path):
1956 if self._cache[path]['exists']:
1962 if self._cache[path]['exists']:
1957 return len(self._cache[path]['data'])
1963 return len(self._cache[path]['data'])
1958 else:
1964 else:
1959 raise error.ProgrammingError("No such file or directory: %s" %
1965 raise error.ProgrammingError("No such file or directory: %s" %
1960 path)
1966 path)
1961 return self._wrappedctx[path].size()
1967 return self._wrappedctx[path].size()
1962
1968
1963 def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
1969 def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
1964 user=None, editor=None):
1970 user=None, editor=None):
1965 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
1971 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
1966 committed.
1972 committed.
1967
1973
1968 ``text`` is the commit message.
1974 ``text`` is the commit message.
1969 ``parents`` (optional) are rev numbers.
1975 ``parents`` (optional) are rev numbers.
1970 """
1976 """
1971 # Default parents to the wrapped context's parents if not passed.
1977 # Default parents to the wrapped context's parents if not passed.
1972 if parents is None:
1978 if parents is None:
1973 parents = self._wrappedctx.parents()
1979 parents = self._wrappedctx.parents()
1974 if len(parents) == 1:
1980 if len(parents) == 1:
1975 parents = (parents[0], None)
1981 parents = (parents[0], None)
1976
1982
1977 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
1983 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
1978 if parents[1] is None:
1984 if parents[1] is None:
1979 parents = (self._repo[parents[0]], None)
1985 parents = (self._repo[parents[0]], None)
1980 else:
1986 else:
1981 parents = (self._repo[parents[0]], self._repo[parents[1]])
1987 parents = (self._repo[parents[0]], self._repo[parents[1]])
1982
1988
1983 files = self._cache.keys()
1989 files = self._cache.keys()
1984 def getfile(repo, memctx, path):
1990 def getfile(repo, memctx, path):
1985 if self._cache[path]['exists']:
1991 if self._cache[path]['exists']:
1986 return memfilectx(repo, memctx, path,
1992 return memfilectx(repo, memctx, path,
1987 self._cache[path]['data'],
1993 self._cache[path]['data'],
1988 'l' in self._cache[path]['flags'],
1994 'l' in self._cache[path]['flags'],
1989 'x' in self._cache[path]['flags'],
1995 'x' in self._cache[path]['flags'],
1990 self._cache[path]['copied'])
1996 self._cache[path]['copied'])
1991 else:
1997 else:
1992 # Returning None, but including the path in `files`, is
1998 # Returning None, but including the path in `files`, is
1993 # necessary for memctx to register a deletion.
1999 # necessary for memctx to register a deletion.
1994 return None
2000 return None
1995 return memctx(self._repo, parents, text, files, getfile, date=date,
2001 return memctx(self._repo, parents, text, files, getfile, date=date,
1996 extra=extra, user=user, branch=branch, editor=editor)
2002 extra=extra, user=user, branch=branch, editor=editor)
1997
2003
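A hypothetical end-to-end use of this class, roughly the shape of what an in-memory rebase does (the helper and file names are made up; error handling is omitted):

from mercurial import context

def inmemorycommit(repo, basectx, text):
    wctx = context.overlayworkingctx(repo)
    wctx.setbase(basectx)                            # overlay on top of basectx
    wctx.write(b'hello.txt', b'hello from memory\n') # stage a file change
    wctx.remove(b'obsolete.txt')                     # assuming it is tracked in basectx
    if wctx.isempty():                               # nothing actually changed
        return None
    mctx = wctx.tomemctx(text, parents=(basectx.rev(), None))
    return repo.commitctx(mctx)                      # node of the new changeset
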
1998 def isdirty(self, path):
2004 def isdirty(self, path):
1999 return path in self._cache
2005 return path in self._cache
2000
2006
2001 def isempty(self):
2007 def isempty(self):
2002 # We need to discard any keys that are actually clean before the empty
2008 # We need to discard any keys that are actually clean before the empty
2003 # commit check.
2009 # commit check.
2004 self._compact()
2010 self._compact()
2005 return len(self._cache) == 0
2011 return len(self._cache) == 0
2006
2012
2007 def clean(self):
2013 def clean(self):
2008 self._cache = {}
2014 self._cache = {}
2009
2015
2010 def _compact(self):
2016 def _compact(self):
2011 """Removes keys from the cache that are actually clean, by comparing
2017 """Removes keys from the cache that are actually clean, by comparing
2012 them with the underlying context.
2018 them with the underlying context.
2013
2019
2014 This can occur during the merge process, e.g. by passing --tool :local
2020 This can occur during the merge process, e.g. by passing --tool :local
2015 to resolve a conflict.
2021 to resolve a conflict.
2016 """
2022 """
2017 keys = []
2023 keys = []
2018 for path in self._cache.keys():
2024 for path in self._cache.keys():
2019 cache = self._cache[path]
2025 cache = self._cache[path]
2020 try:
2026 try:
2021 underlying = self._wrappedctx[path]
2027 underlying = self._wrappedctx[path]
2022 if (underlying.data() == cache['data'] and
2028 if (underlying.data() == cache['data'] and
2023 underlying.flags() == cache['flags']):
2029 underlying.flags() == cache['flags']):
2024 keys.append(path)
2030 keys.append(path)
2025 except error.ManifestLookupError:
2031 except error.ManifestLookupError:
2026 # Path not in the underlying manifest (created).
2032 # Path not in the underlying manifest (created).
2027 continue
2033 continue
2028
2034
2029 for path in keys:
2035 for path in keys:
2030 del self._cache[path]
2036 del self._cache[path]
2031 return keys
2037 return keys
2032
2038
2033 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2039 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2034 self._cache[path] = {
2040 self._cache[path] = {
2035 'exists': exists,
2041 'exists': exists,
2036 'data': data,
2042 'data': data,
2037 'date': date,
2043 'date': date,
2038 'flags': flags,
2044 'flags': flags,
2039 'copied': None,
2045 'copied': None,
2040 }
2046 }
2041
2047
2042 def filectx(self, path, filelog=None):
2048 def filectx(self, path, filelog=None):
2043 return overlayworkingfilectx(self._repo, path, parent=self,
2049 return overlayworkingfilectx(self._repo, path, parent=self,
2044 filelog=filelog)
2050 filelog=filelog)
2045
2051
2046 class overlayworkingfilectx(committablefilectx):
2052 class overlayworkingfilectx(committablefilectx):
2047 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2053 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2048 cache, which can be flushed through later by calling ``flush()``."""
2054 cache, which can be flushed through later by calling ``flush()``."""
2049
2055
2050 def __init__(self, repo, path, filelog=None, parent=None):
2056 def __init__(self, repo, path, filelog=None, parent=None):
2051 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2057 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2052 parent)
2058 parent)
2053 self._repo = repo
2059 self._repo = repo
2054 self._parent = parent
2060 self._parent = parent
2055 self._path = path
2061 self._path = path
2056
2062
2057 def cmp(self, fctx):
2063 def cmp(self, fctx):
2058 return self.data() != fctx.data()
2064 return self.data() != fctx.data()
2059
2065
2060 def changectx(self):
2066 def changectx(self):
2061 return self._parent
2067 return self._parent
2062
2068
2063 def data(self):
2069 def data(self):
2064 return self._parent.data(self._path)
2070 return self._parent.data(self._path)
2065
2071
2066 def date(self):
2072 def date(self):
2067 return self._parent.filedate(self._path)
2073 return self._parent.filedate(self._path)
2068
2074
2069 def exists(self):
2075 def exists(self):
2070 return self.lexists()
2076 return self.lexists()
2071
2077
2072 def lexists(self):
2078 def lexists(self):
2073 return self._parent.exists(self._path)
2079 return self._parent.exists(self._path)
2074
2080
2075 def renamed(self):
2081 def renamed(self):
2076 path = self._parent.copydata(self._path)
2082 path = self._parent.copydata(self._path)
2077 if not path:
2083 if not path:
2078 return None
2084 return None
2079 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2085 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2080
2086
2081 def size(self):
2087 def size(self):
2082 return self._parent.size(self._path)
2088 return self._parent.size(self._path)
2083
2089
2084 def markcopied(self, origin):
2090 def markcopied(self, origin):
2085 self._parent.markcopied(self._path, origin)
2091 self._parent.markcopied(self._path, origin)
2086
2092
2087 def audit(self):
2093 def audit(self):
2088 pass
2094 pass
2089
2095
2090 def flags(self):
2096 def flags(self):
2091 return self._parent.flags(self._path)
2097 return self._parent.flags(self._path)
2092
2098
2093 def setflags(self, islink, isexec):
2099 def setflags(self, islink, isexec):
2094 return self._parent.setflags(self._path, islink, isexec)
2100 return self._parent.setflags(self._path, islink, isexec)
2095
2101
2096 def write(self, data, flags, backgroundclose=False, **kwargs):
2102 def write(self, data, flags, backgroundclose=False, **kwargs):
2097 return self._parent.write(self._path, data, flags, **kwargs)
2103 return self._parent.write(self._path, data, flags, **kwargs)
2098
2104
2099 def remove(self, ignoremissing=False):
2105 def remove(self, ignoremissing=False):
2100 return self._parent.remove(self._path)
2106 return self._parent.remove(self._path)
2101
2107
2102 def clearunknown(self):
2108 def clearunknown(self):
2103 pass
2109 pass
2104
2110
2105 class workingcommitctx(workingctx):
2111 class workingcommitctx(workingctx):
2106 """A workingcommitctx object makes access to data related to
2112 """A workingcommitctx object makes access to data related to
2107 the revision being committed convenient.
2113 the revision being committed convenient.
2108
2114
2109 This hides changes in the working directory, if they aren't
2115 This hides changes in the working directory, if they aren't
2110 committed in this context.
2116 committed in this context.
2111 """
2117 """
2112 def __init__(self, repo, changes,
2118 def __init__(self, repo, changes,
2113 text="", user=None, date=None, extra=None):
2119 text="", user=None, date=None, extra=None):
2114 super(workingctx, self).__init__(repo, text, user, date, extra,
2120 super(workingctx, self).__init__(repo, text, user, date, extra,
2115 changes)
2121 changes)
2116
2122
2117 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2123 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2118 """Return matched files only in ``self._status``
2124 """Return matched files only in ``self._status``
2119
2125
2120 Uncommitted files appear "clean" via this context, even if
2126 Uncommitted files appear "clean" via this context, even if
2121 they aren't actually so in the working directory.
2127 they aren't actually so in the working directory.
2122 """
2128 """
2123 if clean:
2129 if clean:
2124 clean = [f for f in self._manifest if f not in self._changedset]
2130 clean = [f for f in self._manifest if f not in self._changedset]
2125 else:
2131 else:
2126 clean = []
2132 clean = []
2127 return scmutil.status([f for f in self._status.modified if match(f)],
2133 return scmutil.status([f for f in self._status.modified if match(f)],
2128 [f for f in self._status.added if match(f)],
2134 [f for f in self._status.added if match(f)],
2129 [f for f in self._status.removed if match(f)],
2135 [f for f in self._status.removed if match(f)],
2130 [], [], [], clean)
2136 [], [], [], clean)
2131
2137
2132 @propertycache
2138 @propertycache
2133 def _changedset(self):
2139 def _changedset(self):
2134 """Return the set of files changed in this context
2140 """Return the set of files changed in this context
2135 """
2141 """
2136 changed = set(self._status.modified)
2142 changed = set(self._status.modified)
2137 changed.update(self._status.added)
2143 changed.update(self._status.added)
2138 changed.update(self._status.removed)
2144 changed.update(self._status.removed)
2139 return changed
2145 return changed
2140
2146
2141 def makecachingfilectxfn(func):
2147 def makecachingfilectxfn(func):
2142 """Create a filectxfn that caches based on the path.
2148 """Create a filectxfn that caches based on the path.
2143
2149
2144 We can't use util.cachefunc because it uses all arguments as the cache
2150 We can't use util.cachefunc because it uses all arguments as the cache
2145 key and this creates a cycle since the arguments include the repo and
2151 key and this creates a cycle since the arguments include the repo and
2146 memctx.
2152 memctx.
2147 """
2153 """
2148 cache = {}
2154 cache = {}
2149
2155
2150 def getfilectx(repo, memctx, path):
2156 def getfilectx(repo, memctx, path):
2151 if path not in cache:
2157 if path not in cache:
2152 cache[path] = func(repo, memctx, path)
2158 cache[path] = func(repo, memctx, path)
2153 return cache[path]
2159 return cache[path]
2154
2160
2155 return getfilectx
2161 return getfilectx
2156
2162
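A tiny illustration of the memoization above, using a stand-in callback (everything here is made up for the example): the wrapped function runs at most once per path.

calls = []

def slowfilectxfn(repo, memctx, path):
    calls.append(path)
    return None

cachedfn = makecachingfilectxfn(slowfilectxfn)
cachedfn(None, None, 'a')     # first lookup: slowfilectxfn runs
cachedfn(None, None, 'a')     # second lookup: served from the cache
cachedfn(None, None, 'b')
assert calls == ['a', 'b']
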
2157 def memfilefromctx(ctx):
2163 def memfilefromctx(ctx):
2158 """Given a context return a memfilectx for ctx[path]
2164 """Given a context return a memfilectx for ctx[path]
2159
2165
2160 This is a convenience method for building a memctx based on another
2166 This is a convenience method for building a memctx based on another
2161 context.
2167 context.
2162 """
2168 """
2163 def getfilectx(repo, memctx, path):
2169 def getfilectx(repo, memctx, path):
2164 fctx = ctx[path]
2170 fctx = ctx[path]
2165 # this is weird but apparently we only keep track of one parent
2171 # this is weird but apparently we only keep track of one parent
2166 # (why not only store that instead of a tuple?)
2172 # (why not only store that instead of a tuple?)
2167 copied = fctx.renamed()
2173 copied = fctx.renamed()
2168 if copied:
2174 if copied:
2169 copied = copied[0]
2175 copied = copied[0]
2170 return memfilectx(repo, memctx, path, fctx.data(),
2176 return memfilectx(repo, memctx, path, fctx.data(),
2171 islink=fctx.islink(), isexec=fctx.isexec(),
2177 islink=fctx.islink(), isexec=fctx.isexec(),
2172 copied=copied)
2178 copied=copied)
2173
2179
2174 return getfilectx
2180 return getfilectx
2175
2181
2176 def memfilefrompatch(patchstore):
2182 def memfilefrompatch(patchstore):
2177 """Given a patch (e.g. patchstore object) return a memfilectx
2183 """Given a patch (e.g. patchstore object) return a memfilectx
2178
2184
2179 This is a convenience method for building a memctx based on a patchstore.
2185 This is a convenience method for building a memctx based on a patchstore.
2180 """
2186 """
2181 def getfilectx(repo, memctx, path):
2187 def getfilectx(repo, memctx, path):
2182 data, mode, copied = patchstore.getfile(path)
2188 data, mode, copied = patchstore.getfile(path)
2183 if data is None:
2189 if data is None:
2184 return None
2190 return None
2185 islink, isexec = mode
2191 islink, isexec = mode
2186 return memfilectx(repo, memctx, path, data, islink=islink,
2192 return memfilectx(repo, memctx, path, data, islink=islink,
2187 isexec=isexec, copied=copied)
2193 isexec=isexec, copied=copied)
2188
2194
2189 return getfilectx
2195 return getfilectx
2190
2196
2191 class memctx(committablectx):
2197 class memctx(committablectx):
2192 """Use memctx to perform in-memory commits via localrepo.commitctx().
2198 """Use memctx to perform in-memory commits via localrepo.commitctx().
2193
2199
2194 Revision information is supplied at initialization time, while
2200 Revision information is supplied at initialization time, while
2195 related file data is made available through a callback
2201 related file data is made available through a callback
2196 mechanism. 'repo' is the current localrepo, 'parents' is a
2202 mechanism. 'repo' is the current localrepo, 'parents' is a
2197 sequence of two parent revision identifiers (pass None for every
2203 sequence of two parent revision identifiers (pass None for every
2198 missing parent), 'text' is the commit message and 'files' lists
2204 missing parent), 'text' is the commit message and 'files' lists
2199 names of files touched by the revision (normalized and relative to
2205 names of files touched by the revision (normalized and relative to
2200 repository root).
2206 repository root).
2201
2207
2202 filectxfn(repo, memctx, path) is a callable receiving the
2208 filectxfn(repo, memctx, path) is a callable receiving the
2203 repository, the current memctx object and the normalized path of
2209 repository, the current memctx object and the normalized path of
2204 requested file, relative to repository root. It is fired by the
2210 requested file, relative to repository root. It is fired by the
2205 commit function for every file in 'files', but calls order is
2211 commit function for every file in 'files', but calls order is
2206 undefined. If the file is available in the revision being
2212 undefined. If the file is available in the revision being
2207 committed (updated or added), filectxfn returns a memfilectx
2213 committed (updated or added), filectxfn returns a memfilectx
2208 object. If the file was removed, filectxfn returns None for recent
2214 object. If the file was removed, filectxfn returns None for recent
2209 Mercurial. Moved files are represented by marking the source file
2215 Mercurial. Moved files are represented by marking the source file
2210 removed and the new file added with copy information (see
2216 removed and the new file added with copy information (see
2211 memfilectx).
2217 memfilectx).
2212
2218
2213 user receives the committer name and defaults to current
2219 user receives the committer name and defaults to current
2214 repository username, date is the commit date in any format
2220 repository username, date is the commit date in any format
2215 supported by dateutil.parsedate() and defaults to current date, extra
2221 supported by dateutil.parsedate() and defaults to current date, extra
2216 is a dictionary of metadata or is left empty.
2222 is a dictionary of metadata or is left empty.
2217 """
2223 """
2218
2224
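A hedged sketch of the callback mechanism described above: commit a single in-memory file on top of the working directory parent (the file name, data and user are illustrative only):

from mercurial import context

def commitgreeting(repo):
    def filectxfn(repo, memctx, path):
        if path == b'greeting.txt':
            return context.memfilectx(repo, memctx, path, b'hello\n',
                                      islink=False, isexec=False, copied=None)
        return None                     # anything else is treated as removed

    ctx = context.memctx(repo, (repo['.'].node(), None), b'add greeting',
                         [b'greeting.txt'], filectxfn,
                         user=b'Example <example@example.com>')
    return repo.commitctx(ctx)          # node of the new commit
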
2219 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2225 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2220 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2226 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2221 # this field to determine what to do in filectxfn.
2227 # this field to determine what to do in filectxfn.
2222 _returnnoneformissingfiles = True
2228 _returnnoneformissingfiles = True
2223
2229
2224 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2230 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2225 date=None, extra=None, branch=None, editor=False):
2231 date=None, extra=None, branch=None, editor=False):
2226 super(memctx, self).__init__(repo, text, user, date, extra)
2232 super(memctx, self).__init__(repo, text, user, date, extra)
2227 self._rev = None
2233 self._rev = None
2228 self._node = None
2234 self._node = None
2229 parents = [(p or nullid) for p in parents]
2235 parents = [(p or nullid) for p in parents]
2230 p1, p2 = parents
2236 p1, p2 = parents
2231 self._parents = [self._repo[p] for p in (p1, p2)]
2237 self._parents = [self._repo[p] for p in (p1, p2)]
2232 files = sorted(set(files))
2238 files = sorted(set(files))
2233 self._files = files
2239 self._files = files
2234 if branch is not None:
2240 if branch is not None:
2235 self._extra['branch'] = encoding.fromlocal(branch)
2241 self._extra['branch'] = encoding.fromlocal(branch)
2236 self.substate = {}
2242 self.substate = {}
2237
2243
2238 if isinstance(filectxfn, patch.filestore):
2244 if isinstance(filectxfn, patch.filestore):
2239 filectxfn = memfilefrompatch(filectxfn)
2245 filectxfn = memfilefrompatch(filectxfn)
2240 elif not callable(filectxfn):
2246 elif not callable(filectxfn):
2241 # if store is not callable, wrap it in a function
2247 # if store is not callable, wrap it in a function
2242 filectxfn = memfilefromctx(filectxfn)
2248 filectxfn = memfilefromctx(filectxfn)
2243
2249
2244 # memoizing increases performance for e.g. vcs convert scenarios.
2250 # memoizing increases performance for e.g. vcs convert scenarios.
2245 self._filectxfn = makecachingfilectxfn(filectxfn)
2251 self._filectxfn = makecachingfilectxfn(filectxfn)
2246
2252
2247 if editor:
2253 if editor:
2248 self._text = editor(self._repo, self, [])
2254 self._text = editor(self._repo, self, [])
2249 self._repo.savecommitmessage(self._text)
2255 self._repo.savecommitmessage(self._text)
2250
2256
2251 def filectx(self, path, filelog=None):
2257 def filectx(self, path, filelog=None):
2252 """get a file context from the working directory
2258 """get a file context from the working directory
2253
2259
2254 Returns None if file doesn't exist and should be removed."""
2260 Returns None if file doesn't exist and should be removed."""
2255 return self._filectxfn(self._repo, self, path)
2261 return self._filectxfn(self._repo, self, path)
2256
2262
2257 def commit(self):
2263 def commit(self):
2258 """commit context to the repo"""
2264 """commit context to the repo"""
2259 return self._repo.commitctx(self)
2265 return self._repo.commitctx(self)
2260
2266
2261 @propertycache
2267 @propertycache
2262 def _manifest(self):
2268 def _manifest(self):
2263 """generate a manifest based on the return values of filectxfn"""
2269 """generate a manifest based on the return values of filectxfn"""
2264
2270
2265 # keep this simple for now; just worry about p1
2271 # keep this simple for now; just worry about p1
2266 pctx = self._parents[0]
2272 pctx = self._parents[0]
2267 man = pctx.manifest().copy()
2273 man = pctx.manifest().copy()
2268
2274
2269 for f in self._status.modified:
2275 for f in self._status.modified:
2270 p1node = nullid
2276 p1node = nullid
2271 p2node = nullid
2277 p2node = nullid
2272 p = pctx[f].parents() # if file isn't in pctx, check p2?
2278 p = pctx[f].parents() # if file isn't in pctx, check p2?
2273 if len(p) > 0:
2279 if len(p) > 0:
2274 p1node = p[0].filenode()
2280 p1node = p[0].filenode()
2275 if len(p) > 1:
2281 if len(p) > 1:
2276 p2node = p[1].filenode()
2282 p2node = p[1].filenode()
2277 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2283 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2278
2284
2279 for f in self._status.added:
2285 for f in self._status.added:
2280 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2286 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2281
2287
2282 for f in self._status.removed:
2288 for f in self._status.removed:
2283 if f in man:
2289 if f in man:
2284 del man[f]
2290 del man[f]
2285
2291
2286 return man
2292 return man
2287
2293
2288 @propertycache
2294 @propertycache
2289 def _status(self):
2295 def _status(self):
2290 """Calculate exact status from ``files`` specified at construction
2296 """Calculate exact status from ``files`` specified at construction
2291 """
2297 """
2292 man1 = self.p1().manifest()
2298 man1 = self.p1().manifest()
2293 p2 = self._parents[1]
2299 p2 = self._parents[1]
2294 # "1 < len(self._parents)" can't be used for checking
2300 # "1 < len(self._parents)" can't be used for checking
2295 # existence of the 2nd parent, because "memctx._parents" is
2301 # existence of the 2nd parent, because "memctx._parents" is
2296 # explicitly initialized by the list, of which length is 2.
2302 # explicitly initialized by the list, of which length is 2.
2297 if p2.node() != nullid:
2303 if p2.node() != nullid:
2298 man2 = p2.manifest()
2304 man2 = p2.manifest()
2299 managing = lambda f: f in man1 or f in man2
2305 managing = lambda f: f in man1 or f in man2
2300 else:
2306 else:
2301 managing = lambda f: f in man1
2307 managing = lambda f: f in man1
2302
2308
2303 modified, added, removed = [], [], []
2309 modified, added, removed = [], [], []
2304 for f in self._files:
2310 for f in self._files:
2305 if not managing(f):
2311 if not managing(f):
2306 added.append(f)
2312 added.append(f)
2307 elif self[f]:
2313 elif self[f]:
2308 modified.append(f)
2314 modified.append(f)
2309 else:
2315 else:
2310 removed.append(f)
2316 removed.append(f)
2311
2317
2312 return scmutil.status(modified, added, removed, [], [], [], [])
2318 return scmutil.status(modified, added, removed, [], [], [], [])
2313
2319
2314 class memfilectx(committablefilectx):
2320 class memfilectx(committablefilectx):
2315 """memfilectx represents an in-memory file to commit.
2321 """memfilectx represents an in-memory file to commit.
2316
2322
2317 See memctx and committablefilectx for more details.
2323 See memctx and committablefilectx for more details.
2318 """
2324 """
2319 def __init__(self, repo, changectx, path, data, islink=False,
2325 def __init__(self, repo, changectx, path, data, islink=False,
2320 isexec=False, copied=None):
2326 isexec=False, copied=None):
2321 """
2327 """
2322 path is the normalized file path relative to repository root.
2328 path is the normalized file path relative to repository root.
2323 data is the file content as a string.
2329 data is the file content as a string.
2324 islink is True if the file is a symbolic link.
2330 islink is True if the file is a symbolic link.
2325 isexec is True if the file is executable.
2331 isexec is True if the file is executable.
2326 copied is the source file path if current file was copied in the
2332 copied is the source file path if current file was copied in the
2327 revision being committed, or None."""
2333 revision being committed, or None."""
2328 super(memfilectx, self).__init__(repo, path, None, changectx)
2334 super(memfilectx, self).__init__(repo, path, None, changectx)
2329 self._data = data
2335 self._data = data
2330 if islink:
2336 if islink:
2331 self._flags = 'l'
2337 self._flags = 'l'
2332 elif isexec:
2338 elif isexec:
2333 self._flags = 'x'
2339 self._flags = 'x'
2334 else:
2340 else:
2335 self._flags = ''
2341 self._flags = ''
2336 self._copied = None
2342 self._copied = None
2337 if copied:
2343 if copied:
2338 self._copied = (copied, nullid)
2344 self._copied = (copied, nullid)
2339
2345
2340 def data(self):
2346 def data(self):
2341 return self._data
2347 return self._data
2342
2348
2343 def remove(self, ignoremissing=False):
2349 def remove(self, ignoremissing=False):
2344 """wraps unlink for a repo's working directory"""
2350 """wraps unlink for a repo's working directory"""
2345 # need to figure out what to do here
2351 # need to figure out what to do here
2346 del self._changectx[self._path]
2352 del self._changectx[self._path]
2347
2353
2348 def write(self, data, flags, **kwargs):
2354 def write(self, data, flags, **kwargs):
2349 """wraps repo.wwrite"""
2355 """wraps repo.wwrite"""
2350 self._data = data
2356 self._data = data
2351
2357
2352 class overlayfilectx(committablefilectx):
2358 class overlayfilectx(committablefilectx):
2353 """Like memfilectx but take an original filectx and optional parameters to
2359 """Like memfilectx but take an original filectx and optional parameters to
2354 override parts of it. This is useful when fctx.data() is expensive (i.e.
2360 override parts of it. This is useful when fctx.data() is expensive (i.e.
2355 flag processor is expensive) and raw data, flags, and filenode could be
2361 flag processor is expensive) and raw data, flags, and filenode could be
2356 reused (e.g. rebase or a mode-only amend of a REVIDX_EXTSTORED file).
2362 reused (e.g. rebase or a mode-only amend of a REVIDX_EXTSTORED file).
2357 """
2363 """
2358
2364
2359 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2365 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2360 copied=None, ctx=None):
2366 copied=None, ctx=None):
2361 """originalfctx: filecontext to duplicate
2367 """originalfctx: filecontext to duplicate
2362
2368
2363 datafunc: None or a function to override data (file content). It is a
2369 datafunc: None or a function to override data (file content). It is a
2364 function so that the data can be computed lazily. path, flags, copied, ctx: None or an overridden value
2370 function so that the data can be computed lazily. path, flags, copied, ctx: None or an overridden value
2365
2371
2366 copied could be (path, rev), or False. copied could also be just path,
2372 copied could be (path, rev), or False. copied could also be just path,
2367 and will be converted to (path, nullid). This simplifies some callers.
2373 and will be converted to (path, nullid). This simplifies some callers.
2368 """
2374 """
2369
2375
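A short hedged example of the reuse described above: keep the (possibly expensive) data of an existing filectx and override only its flags, e.g. to mark it executable (the helper name is made up):

from mercurial import context

def asexecutable(fctx):
    # data, copied and ctx stay untouched, so raw data and filenode can be reused
    return context.overlayfilectx(fctx, flags=b'x')

# e.g. asexecutable(repo['.'][b'script.sh'])
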
2370 if path is None:
2376 if path is None:
2371 path = originalfctx.path()
2377 path = originalfctx.path()
2372 if ctx is None:
2378 if ctx is None:
2373 ctx = originalfctx.changectx()
2379 ctx = originalfctx.changectx()
2374 ctxmatch = lambda: True
2380 ctxmatch = lambda: True
2375 else:
2381 else:
2376 ctxmatch = lambda: ctx == originalfctx.changectx()
2382 ctxmatch = lambda: ctx == originalfctx.changectx()
2377
2383
2378 repo = originalfctx.repo()
2384 repo = originalfctx.repo()
2379 flog = originalfctx.filelog()
2385 flog = originalfctx.filelog()
2380 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2386 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2381
2387
2382 if copied is None:
2388 if copied is None:
2383 copied = originalfctx.renamed()
2389 copied = originalfctx.renamed()
2384 copiedmatch = lambda: True
2390 copiedmatch = lambda: True
2385 else:
2391 else:
2386 if copied and not isinstance(copied, tuple):
2392 if copied and not isinstance(copied, tuple):
2387 # repo._filecommit will recalculate copyrev so nullid is okay
2393 # repo._filecommit will recalculate copyrev so nullid is okay
2388 copied = (copied, nullid)
2394 copied = (copied, nullid)
2389 copiedmatch = lambda: copied == originalfctx.renamed()
2395 copiedmatch = lambda: copied == originalfctx.renamed()
2390
2396
2391 # When data, copied (could affect data), ctx (could affect filelog
2397 # When data, copied (could affect data), ctx (could affect filelog
2392 # parents) are not overridden, rawdata, rawflags, and filenode may be
2398 # parents) are not overridden, rawdata, rawflags, and filenode may be
2393 # reused (repo._filecommit should double check filelog parents).
2399 # reused (repo._filecommit should double check filelog parents).
2394 #
2400 #
2395 # path, flags are not hashed in filelog (but in manifestlog) so they do
2401 # path, flags are not hashed in filelog (but in manifestlog) so they do
2396 # not affect reusability here.
2402 # not affect reusability here.
2397 #
2403 #
2398 # If ctx or copied is overridden to the same value as in originalfctx,
2404 # If ctx or copied is overridden to the same value as in originalfctx,
2399 # it is still considered reusable. originalfctx.renamed() may be a bit
2405 # it is still considered reusable. originalfctx.renamed() may be a bit
2400 # expensive so it's not called unless necessary. Assuming datafunc is
2406 # expensive so it's not called unless necessary. Assuming datafunc is
2401 # always expensive, do not call it for this "reusable" test.
2407 # always expensive, do not call it for this "reusable" test.
2402 reusable = datafunc is None and ctxmatch() and copiedmatch()
2408 reusable = datafunc is None and ctxmatch() and copiedmatch()
2403
2409
2404 if datafunc is None:
2410 if datafunc is None:
2405 datafunc = originalfctx.data
2411 datafunc = originalfctx.data
2406 if flags is None:
2412 if flags is None:
2407 flags = originalfctx.flags()
2413 flags = originalfctx.flags()
2408
2414
2409 self._datafunc = datafunc
2415 self._datafunc = datafunc
2410 self._flags = flags
2416 self._flags = flags
2411 self._copied = copied
2417 self._copied = copied
2412
2418
2413 if reusable:
2419 if reusable:
2414 # copy extra fields from originalfctx
2420 # copy extra fields from originalfctx
2415 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2421 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2416 for attr_ in attrs:
2422 for attr_ in attrs:
2417 if util.safehasattr(originalfctx, attr_):
2423 if util.safehasattr(originalfctx, attr_):
2418 setattr(self, attr_, getattr(originalfctx, attr_))
2424 setattr(self, attr_, getattr(originalfctx, attr_))
2419
2425
2420 def data(self):
2426 def data(self):
2421 return self._datafunc()
2427 return self._datafunc()
2422
2428
2423 class metadataonlyctx(committablectx):
2429 class metadataonlyctx(committablectx):
2424 """Like memctx but it's reusing the manifest of different commit.
2430 """Like memctx but it's reusing the manifest of different commit.
2425 Intended to be used by lightweight operations that are creating
2431 Intended to be used by lightweight operations that are creating
2426 metadata-only changes.
2432 metadata-only changes.
2427
2433
2428 Revision information is supplied at initialization time. 'repo' is the
2434 Revision information is supplied at initialization time. 'repo' is the
2429 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2435 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2430 'parents' is a sequence of two parent revision identifiers (pass None for
2436 'parents' is a sequence of two parent revision identifiers (pass None for
2431 every missing parent), 'text' is the commit message.
2437 every missing parent), 'text' is the commit message.
2432
2438
2433 user receives the committer name and defaults to current repository
2439 user receives the committer name and defaults to current repository
2434 username, date is the commit date in any format supported by
2440 username, date is the commit date in any format supported by
2435 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2441 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2436 metadata or is left empty.
2442 metadata or is left empty.
2437 """
2443 """
2438 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2444 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2439 date=None, extra=None, editor=False):
2445 date=None, extra=None, editor=False):
2440 if text is None:
2446 if text is None:
2441 text = originalctx.description()
2447 text = originalctx.description()
2442 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2448 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2443 self._rev = None
2449 self._rev = None
2444 self._node = None
2450 self._node = None
2445 self._originalctx = originalctx
2451 self._originalctx = originalctx
2446 self._manifestnode = originalctx.manifestnode()
2452 self._manifestnode = originalctx.manifestnode()
2447 if parents is None:
2453 if parents is None:
2448 parents = originalctx.parents()
2454 parents = originalctx.parents()
2449 else:
2455 else:
2450 parents = [repo[p] for p in parents if p is not None]
2456 parents = [repo[p] for p in parents if p is not None]
2451 parents = parents[:]
2457 parents = parents[:]
2452 while len(parents) < 2:
2458 while len(parents) < 2:
2453 parents.append(repo[nullid])
2459 parents.append(repo[nullid])
2454 p1, p2 = self._parents = parents
2460 p1, p2 = self._parents = parents
2455
2461
2456 # sanity check to ensure that the reused manifest parents are
2462 # sanity check to ensure that the reused manifest parents are
2457 # manifests of our commit parents
2463 # manifests of our commit parents
2458 mp1, mp2 = self.manifestctx().parents
2464 mp1, mp2 = self.manifestctx().parents
2459 if p1 != nullid and p1.manifestnode() != mp1:
2465 if p1 != nullid and p1.manifestnode() != mp1:
2460 raise RuntimeError('can\'t reuse the manifest: '
2466 raise RuntimeError('can\'t reuse the manifest: '
2461 'its p1 doesn\'t match the new ctx p1')
2467 'its p1 doesn\'t match the new ctx p1')
2462 if p2 != nullid and p2.manifestnode() != mp2:
2468 if p2 != nullid and p2.manifestnode() != mp2:
2463 raise RuntimeError('can\'t reuse the manifest: '
2469 raise RuntimeError('can\'t reuse the manifest: '
2464 'its p2 doesn\'t match the new ctx p2')
2470 'its p2 doesn\'t match the new ctx p2')
2465
2471
2466 self._files = originalctx.files()
2472 self._files = originalctx.files()
2467 self.substate = {}
2473 self.substate = {}
2468
2474
2469 if editor:
2475 if editor:
2470 self._text = editor(self._repo, self, [])
2476 self._text = editor(self._repo, self, [])
2471 self._repo.savecommitmessage(self._text)
2477 self._repo.savecommitmessage(self._text)
2472
2478
2473 def manifestnode(self):
2479 def manifestnode(self):
2474 return self._manifestnode
2480 return self._manifestnode
2475
2481
2476 @property
2482 @property
2477 def _manifestctx(self):
2483 def _manifestctx(self):
2478 return self._repo.manifestlog[self._manifestnode]
2484 return self._repo.manifestlog[self._manifestnode]
2479
2485
2480 def filectx(self, path, filelog=None):
2486 def filectx(self, path, filelog=None):
2481 return self._originalctx.filectx(path, filelog=filelog)
2487 return self._originalctx.filectx(path, filelog=filelog)
2482
2488
2483 def commit(self):
2489 def commit(self):
2484 """commit context to the repo"""
2490 """commit context to the repo"""
2485 return self._repo.commitctx(self)
2491 return self._repo.commitctx(self)
2486
2492
2487 @property
2493 @property
2488 def _manifest(self):
2494 def _manifest(self):
2489 return self._originalctx.manifest()
2495 return self._originalctx.manifest()
2490
2496
2491 @propertycache
2497 @propertycache
2492 def _status(self):
2498 def _status(self):
2493 """Calculate exact status from ``files`` specified in the ``origctx``
2499 """Calculate exact status from ``files`` specified in the ``origctx``
2494 and parents manifests.
2500 and parents manifests.
2495 """
2501 """
2496 man1 = self.p1().manifest()
2502 man1 = self.p1().manifest()
2497 p2 = self._parents[1]
2503 p2 = self._parents[1]
2498 # "1 < len(self._parents)" can't be used for checking
2504 # "1 < len(self._parents)" can't be used for checking
2499 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2505 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2500 # explicitly initialized by the list, of which length is 2.
2506 # explicitly initialized by the list, of which length is 2.
2501 if p2.node() != nullid:
2507 if p2.node() != nullid:
2502 man2 = p2.manifest()
2508 man2 = p2.manifest()
2503 managing = lambda f: f in man1 or f in man2
2509 managing = lambda f: f in man1 or f in man2
2504 else:
2510 else:
2505 managing = lambda f: f in man1
2511 managing = lambda f: f in man1
2506
2512
2507 modified, added, removed = [], [], []
2513 modified, added, removed = [], [], []
2508 for f in self._files:
2514 for f in self._files:
2509 if not managing(f):
2515 if not managing(f):
2510 added.append(f)
2516 added.append(f)
2511 elif f in self:
2517 elif f in self:
2512 modified.append(f)
2518 modified.append(f)
2513 else:
2519 else:
2514 removed.append(f)
2520 removed.append(f)
2515
2521
2516 return scmutil.status(modified, added, removed, [], [], [], [])
2522 return scmutil.status(modified, added, removed, [], [], [], [])
2517
2523
2518 class arbitraryfilectx(object):
2524 class arbitraryfilectx(object):
2519 """Allows you to use filectx-like functions on a file in an arbitrary
2525 """Allows you to use filectx-like functions on a file in an arbitrary
2520 location on disk, possibly not in the working directory.
2526 location on disk, possibly not in the working directory.
2521 """
2527 """
2522 def __init__(self, path, repo=None):
2528 def __init__(self, path, repo=None):
2523 # Repo is optional because contrib/simplemerge uses this class.
2529 # Repo is optional because contrib/simplemerge uses this class.
2524 self._repo = repo
2530 self._repo = repo
2525 self._path = path
2531 self._path = path
2526
2532
2527 def cmp(self, fctx):
2533 def cmp(self, fctx):
2528 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2534 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2529 # path if either side is a symlink.
2535 # path if either side is a symlink.
2530 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2536 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2531 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2537 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2532 # Add a fast-path for merge if both sides are disk-backed.
2538 # Add a fast-path for merge if both sides are disk-backed.
2533 # Note that filecmp uses the opposite return values (True if same)
2539 # Note that filecmp uses the opposite return values (True if same)
2534 # from our cmp functions (True if different).
2540 # from our cmp functions (True if different).
2535 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2541 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2536 return self.data() != fctx.data()
2542 return self.data() != fctx.data()
2537
2543
2538 def path(self):
2544 def path(self):
2539 return self._path
2545 return self._path
2540
2546
2541 def flags(self):
2547 def flags(self):
2542 return ''
2548 return ''
2543
2549
2544 def data(self):
2550 def data(self):
2545 return util.readfile(self._path)
2551 return util.readfile(self._path)
2546
2552
2547 def decodeddata(self):
2553 def decodeddata(self):
2548 with open(self._path, "rb") as f:
2554 with open(self._path, "rb") as f:
2549 return f.read()
2555 return f.read()
2550
2556
2551 def remove(self):
2557 def remove(self):
2552 util.unlink(self._path)
2558 util.unlink(self._path)
2553
2559
2554 def write(self, data, flags, **kwargs):
2560 def write(self, data, flags, **kwargs):
2555 assert not flags
2561 assert not flags
2556 with open(self._path, "w") as f:
2562 with open(self._path, "w") as f:
2557 f.write(data)
2563 f.write(data)
@@ -1,430 +1,431 b''
1 $ cat <<EOF > merge
1 $ cat <<EOF > merge
2 > from __future__ import print_function
2 > from __future__ import print_function
3 > import sys, os
3 > import sys, os
4 >
4 >
5 > try:
5 > try:
6 > import msvcrt
6 > import msvcrt
7 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
7 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
8 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
8 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
9 > except ImportError:
9 > except ImportError:
10 > pass
10 > pass
11 >
11 >
12 > print("merging for", os.path.basename(sys.argv[1]))
12 > print("merging for", os.path.basename(sys.argv[1]))
13 > EOF
13 > EOF
14 $ HGMERGE="$PYTHON ../merge"; export HGMERGE
14 $ HGMERGE="$PYTHON ../merge"; export HGMERGE
15
15
16 $ hg init t
16 $ hg init t
17 $ cd t
17 $ cd t
18 $ echo This is file a1 > a
18 $ echo This is file a1 > a
19 $ hg add a
19 $ hg add a
20 $ hg commit -m "commit #0"
20 $ hg commit -m "commit #0"
21 $ echo This is file b1 > b
21 $ echo This is file b1 > b
22 $ hg add b
22 $ hg add b
23 $ hg commit -m "commit #1"
23 $ hg commit -m "commit #1"
24
24
25 $ hg update 0
25 $ hg update 0
26 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
26 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
27
27
28 Test interrupted updates by having a non-empty dir with the same name as one
28 Test interrupted updates by having a non-empty dir with the same name as one
29 of the files in a commit we're updating to
29 of the files in a commit we're updating to
30
30
31 $ mkdir b && touch b/nonempty
31 $ mkdir b && touch b/nonempty
32 $ hg up
32 $ hg up
33 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
33 abort: Directory not empty: '$TESTTMP/t/b'
34 [255]
34 $ hg ci
35 $ hg ci
35 nothing changed
36 abort: last update was interrupted
36 [1]
37 (use 'hg update' to get a consistent checkout)
38 [255]
37 $ hg sum
39 $ hg sum
38 parent: 1:b8bb4a988f25 tip
40 parent: 0:538afb845929
39 commit #1
41 commit #0
40 branch: default
42 branch: default
41 commit: (clean)
43 commit: 1 unknown (interrupted update)
42 update: (current)
44 update: 1 new changesets (update)
43 phases: 2 draft
45 phases: 2 draft
44
46
45 The untracked file has to be removed by hand so the interrupted update can complete.
47 $ rm b/nonempty
46 $ rm b/nonempty
47
48
48 $ hg up
49 $ hg up
49 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 $ hg sum
51 $ hg sum
51 parent: 1:b8bb4a988f25 tip
52 parent: 1:b8bb4a988f25 tip
52 commit #1
53 commit #1
53 branch: default
54 branch: default
54 commit: (clean)
55 commit: (clean)
55 update: (current)
56 update: (current)
56 phases: 2 draft
57 phases: 2 draft
57
58
58 Prepare a basic merge
59 Prepare a basic merge
59
60
60 $ hg up 0
61 $ hg up 0
61 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
62 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
62 $ echo This is file c1 > c
63 $ echo This is file c1 > c
63 $ hg add c
64 $ hg add c
64 $ hg commit -m "commit #2"
65 $ hg commit -m "commit #2"
65 created new head
66 created new head
66 $ echo This is file b1 > b
67 $ echo This is file b1 > b
67 no merges expected
68 no merges expected
68 $ hg merge -P 1
69 $ hg merge -P 1
69 changeset: 1:b8bb4a988f25
70 changeset: 1:b8bb4a988f25
70 user: test
71 user: test
71 date: Thu Jan 01 00:00:00 1970 +0000
72 date: Thu Jan 01 00:00:00 1970 +0000
72 summary: commit #1
73 summary: commit #1
73
74
74 $ hg merge 1
75 $ hg merge 1
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 (branch merge, don't forget to commit)
77 (branch merge, don't forget to commit)
77 $ hg diff --nodates
78 $ hg diff --nodates
78 diff -r 49035e18a8e6 b
79 diff -r 49035e18a8e6 b
79 --- /dev/null
80 --- /dev/null
80 +++ b/b
81 +++ b/b
81 @@ -0,0 +1,1 @@
82 @@ -0,0 +1,1 @@
82 +This is file b1
83 +This is file b1
83 $ hg status
84 $ hg status
84 M b
85 M b
85 $ cd ..; rm -r t
86 $ cd ..; rm -r t
86
87
87 $ hg init t
88 $ hg init t
88 $ cd t
89 $ cd t
89 $ echo This is file a1 > a
90 $ echo This is file a1 > a
90 $ hg add a
91 $ hg add a
91 $ hg commit -m "commit #0"
92 $ hg commit -m "commit #0"
92 $ echo This is file b1 > b
93 $ echo This is file b1 > b
93 $ hg add b
94 $ hg add b
94 $ hg commit -m "commit #1"
95 $ hg commit -m "commit #1"
95
96
96 $ hg update 0
97 $ hg update 0
97 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
98 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
98 $ echo This is file c1 > c
99 $ echo This is file c1 > c
99 $ hg add c
100 $ hg add c
100 $ hg commit -m "commit #2"
101 $ hg commit -m "commit #2"
101 created new head
102 created new head
102 $ echo This is file b2 > b
103 $ echo This is file b2 > b
103 merge should fail
104 merge should fail
104 $ hg merge 1
105 $ hg merge 1
105 b: untracked file differs
106 b: untracked file differs
106 abort: untracked files in working directory differ from files in requested revision
107 abort: untracked files in working directory differ from files in requested revision
107 [255]
108 [255]
108
109
109 #if symlink
110 #if symlink
110 symlinks to directories should be treated as regular files (issue5027)
111 symlinks to directories should be treated as regular files (issue5027)
111 $ rm b
112 $ rm b
112 $ ln -s 'This is file b2' b
113 $ ln -s 'This is file b2' b
113 $ hg merge 1
114 $ hg merge 1
114 b: untracked file differs
115 b: untracked file differs
115 abort: untracked files in working directory differ from files in requested revision
116 abort: untracked files in working directory differ from files in requested revision
116 [255]
117 [255]
117 symlinks shouldn't be followed
118 symlinks shouldn't be followed
118 $ rm b
119 $ rm b
119 $ echo This is file b1 > .hg/b
120 $ echo This is file b1 > .hg/b
120 $ ln -s .hg/b b
121 $ ln -s .hg/b b
121 $ hg merge 1
122 $ hg merge 1
122 b: untracked file differs
123 b: untracked file differs
123 abort: untracked files in working directory differ from files in requested revision
124 abort: untracked files in working directory differ from files in requested revision
124 [255]
125 [255]
125
126
126 $ rm b
127 $ rm b
127 $ echo This is file b2 > b
128 $ echo This is file b2 > b
128 #endif
129 #endif
129
130
130 bad config
131 bad config
131 $ hg merge 1 --config merge.checkunknown=x
132 $ hg merge 1 --config merge.checkunknown=x
132 abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
133 abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
133 [255]
134 [255]
134 this merge should fail
135 this merge should fail
135 $ hg merge 1 --config merge.checkunknown=abort
136 $ hg merge 1 --config merge.checkunknown=abort
136 b: untracked file differs
137 b: untracked file differs
137 abort: untracked files in working directory differ from files in requested revision
138 abort: untracked files in working directory differ from files in requested revision
138 [255]
139 [255]
139
140
140 this merge should warn
141 this merge should warn
141 $ hg merge 1 --config merge.checkunknown=warn
142 $ hg merge 1 --config merge.checkunknown=warn
142 b: replacing untracked file
143 b: replacing untracked file
143 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
144 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
144 (branch merge, don't forget to commit)
145 (branch merge, don't forget to commit)
145 $ cat b.orig
146 $ cat b.orig
146 This is file b2
147 This is file b2
147 $ hg up --clean 2
148 $ hg up --clean 2
148 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
149 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
149 $ mv b.orig b
150 $ mv b.orig b
150
151
151 this merge should silently ignore
152 this merge should silently ignore
152 $ cat b
153 $ cat b
153 This is file b2
154 This is file b2
154 $ hg merge 1 --config merge.checkunknown=ignore
155 $ hg merge 1 --config merge.checkunknown=ignore
155 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
156 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
156 (branch merge, don't forget to commit)
157 (branch merge, don't forget to commit)
157
158
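The three behaviours exercised above (abort, warn, and ignore) come from the merge.checkunknown option, which can live in an hgrc file just as well as being passed with --config; merge.checkignored, covered by the next block, accepts the same values. A minimal hgrc sketch, with values chosen only for illustration:

  [merge]
  checkunknown = warn
  checkignored = abort
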
158 merge.checkignored
159 merge.checkignored
159 $ hg up --clean 1
160 $ hg up --clean 1
160 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
161 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
161 $ cat >> .hgignore << EOF
162 $ cat >> .hgignore << EOF
162 > remoteignored
163 > remoteignored
163 > EOF
164 > EOF
164 $ echo This is file localignored3 > localignored
165 $ echo This is file localignored3 > localignored
165 $ echo This is file remoteignored3 > remoteignored
166 $ echo This is file remoteignored3 > remoteignored
166 $ hg add .hgignore localignored remoteignored
167 $ hg add .hgignore localignored remoteignored
167 $ hg commit -m "commit #3"
168 $ hg commit -m "commit #3"
168
169
169 $ hg up 2
170 $ hg up 2
170 1 files updated, 0 files merged, 4 files removed, 0 files unresolved
171 1 files updated, 0 files merged, 4 files removed, 0 files unresolved
171 $ cat >> .hgignore << EOF
172 $ cat >> .hgignore << EOF
172 > localignored
173 > localignored
173 > EOF
174 > EOF
174 $ hg add .hgignore
175 $ hg add .hgignore
175 $ hg commit -m "commit #4"
176 $ hg commit -m "commit #4"
176
177
177 remote .hgignore shouldn't be used for determining whether a file is ignored
178 remote .hgignore shouldn't be used for determining whether a file is ignored
178 $ echo This is file remoteignored4 > remoteignored
179 $ echo This is file remoteignored4 > remoteignored
179 $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort
180 $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort
180 remoteignored: untracked file differs
181 remoteignored: untracked file differs
181 abort: untracked files in working directory differ from files in requested revision
182 abort: untracked files in working directory differ from files in requested revision
182 [255]
183 [255]
183 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
184 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
184 merging .hgignore
185 merging .hgignore
185 merging for .hgignore
186 merging for .hgignore
186 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
187 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
187 (branch merge, don't forget to commit)
188 (branch merge, don't forget to commit)
188 $ cat remoteignored
189 $ cat remoteignored
189 This is file remoteignored3
190 This is file remoteignored3
190 $ cat remoteignored.orig
191 $ cat remoteignored.orig
191 This is file remoteignored4
192 This is file remoteignored4
192 $ rm remoteignored.orig
193 $ rm remoteignored.orig
193
194
194 local .hgignore should be used for that
195 local .hgignore should be used for that
195 $ hg up --clean 4
196 $ hg up --clean 4
196 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
197 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
197 $ echo This is file localignored4 > localignored
198 $ echo This is file localignored4 > localignored
198 also test other conflicting files to see that we output the full set of warnings
199 also test other conflicting files to see that we output the full set of warnings
199 $ echo This is file b2 > b
200 $ echo This is file b2 > b
200 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=abort
201 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=abort
201 b: untracked file differs
202 b: untracked file differs
202 localignored: untracked file differs
203 localignored: untracked file differs
203 abort: untracked files in working directory differ from files in requested revision
204 abort: untracked files in working directory differ from files in requested revision
204 [255]
205 [255]
205 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
206 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
206 localignored: untracked file differs
207 localignored: untracked file differs
207 abort: untracked files in working directory differ from files in requested revision
208 abort: untracked files in working directory differ from files in requested revision
208 [255]
209 [255]
209 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort
210 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort
210 b: untracked file differs
211 b: untracked file differs
211 abort: untracked files in working directory differ from files in requested revision
212 abort: untracked files in working directory differ from files in requested revision
212 [255]
213 [255]
213 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn
214 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn
214 b: replacing untracked file
215 b: replacing untracked file
215 localignored: replacing untracked file
216 localignored: replacing untracked file
216 merging .hgignore
217 merging .hgignore
217 merging for .hgignore
218 merging for .hgignore
218 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
219 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
219 (branch merge, don't forget to commit)
220 (branch merge, don't forget to commit)
220 $ cat localignored
221 $ cat localignored
221 This is file localignored3
222 This is file localignored3
222 $ cat localignored.orig
223 $ cat localignored.orig
223 This is file localignored4
224 This is file localignored4
224 $ rm localignored.orig
225 $ rm localignored.orig
225
226
226 $ cat b.orig
227 $ cat b.orig
227 This is file b2
228 This is file b2
228 $ hg up --clean 2
229 $ hg up --clean 2
229 0 files updated, 0 files merged, 4 files removed, 0 files unresolved
230 0 files updated, 0 files merged, 4 files removed, 0 files unresolved
230 $ mv b.orig b
231 $ mv b.orig b
231
232
232 this merge of b should work
233 this merge of b should work
233 $ cat b
234 $ cat b
234 This is file b2
235 This is file b2
235 $ hg merge -f 1
236 $ hg merge -f 1
236 merging b
237 merging b
237 merging for b
238 merging for b
238 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
239 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
239 (branch merge, don't forget to commit)
240 (branch merge, don't forget to commit)
240 $ hg diff --nodates
241 $ hg diff --nodates
241 diff -r 49035e18a8e6 b
242 diff -r 49035e18a8e6 b
242 --- /dev/null
243 --- /dev/null
243 +++ b/b
244 +++ b/b
244 @@ -0,0 +1,1 @@
245 @@ -0,0 +1,1 @@
245 +This is file b2
246 +This is file b2
246 $ hg status
247 $ hg status
247 M b
248 M b
248 $ cd ..; rm -r t
249 $ cd ..; rm -r t
249
250
250 $ hg init t
251 $ hg init t
251 $ cd t
252 $ cd t
252 $ echo This is file a1 > a
253 $ echo This is file a1 > a
253 $ hg add a
254 $ hg add a
254 $ hg commit -m "commit #0"
255 $ hg commit -m "commit #0"
255 $ echo This is file b1 > b
256 $ echo This is file b1 > b
256 $ hg add b
257 $ hg add b
257 $ hg commit -m "commit #1"
258 $ hg commit -m "commit #1"
258 $ echo This is file b22 > b
259 $ echo This is file b22 > b
259 $ hg commit -m "commit #2"
260 $ hg commit -m "commit #2"
260 $ hg update 1
261 $ hg update 1
261 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
262 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
262 $ echo This is file c1 > c
263 $ echo This is file c1 > c
263 $ hg add c
264 $ hg add c
264 $ hg commit -m "commit #3"
265 $ hg commit -m "commit #3"
265 created new head
266 created new head
266
267
267 Contents of b should be "This is file b1"
268 Contents of b should be "This is file b1"
268 $ cat b
269 $ cat b
269 This is file b1
270 This is file b1
270
271
271 $ echo This is file b22 > b
272 $ echo This is file b22 > b
272 merge fails
273 merge fails
273 $ hg merge 2
274 $ hg merge 2
274 abort: uncommitted changes
275 abort: uncommitted changes
275 (use 'hg status' to list changes)
276 (use 'hg status' to list changes)
276 [255]
277 [255]
277 merge expected!
278 merge expected!
278 $ hg merge -f 2
279 $ hg merge -f 2
279 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
280 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
280 (branch merge, don't forget to commit)
281 (branch merge, don't forget to commit)
281 $ hg diff --nodates
282 $ hg diff --nodates
282 diff -r 85de557015a8 b
283 diff -r 85de557015a8 b
283 --- a/b
284 --- a/b
284 +++ b/b
285 +++ b/b
285 @@ -1,1 +1,1 @@
286 @@ -1,1 +1,1 @@
286 -This is file b1
287 -This is file b1
287 +This is file b22
288 +This is file b22
288 $ hg status
289 $ hg status
289 M b
290 M b
290 $ cd ..; rm -r t
291 $ cd ..; rm -r t
291
292
292 $ hg init t
293 $ hg init t
293 $ cd t
294 $ cd t
294 $ echo This is file a1 > a
295 $ echo This is file a1 > a
295 $ hg add a
296 $ hg add a
296 $ hg commit -m "commit #0"
297 $ hg commit -m "commit #0"
297 $ echo This is file b1 > b
298 $ echo This is file b1 > b
298 $ hg add b
299 $ hg add b
299 $ hg commit -m "commit #1"
300 $ hg commit -m "commit #1"
300 $ echo This is file b22 > b
301 $ echo This is file b22 > b
301 $ hg commit -m "commit #2"
302 $ hg commit -m "commit #2"
302 $ hg update 1
303 $ hg update 1
303 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
304 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
304 $ echo This is file c1 > c
305 $ echo This is file c1 > c
305 $ hg add c
306 $ hg add c
306 $ hg commit -m "commit #3"
307 $ hg commit -m "commit #3"
307 created new head
308 created new head
308 $ echo This is file b33 > b
309 $ echo This is file b33 > b
309 merge of b should fail
310 merge of b should fail
310 $ hg merge 2
311 $ hg merge 2
311 abort: uncommitted changes
312 abort: uncommitted changes
312 (use 'hg status' to list changes)
313 (use 'hg status' to list changes)
313 [255]
314 [255]
314 merge of b expected
315 merge of b expected
315 $ hg merge -f 2
316 $ hg merge -f 2
316 merging b
317 merging b
317 merging for b
318 merging for b
318 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
319 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
319 (branch merge, don't forget to commit)
320 (branch merge, don't forget to commit)
320 $ hg diff --nodates
321 $ hg diff --nodates
321 diff -r 85de557015a8 b
322 diff -r 85de557015a8 b
322 --- a/b
323 --- a/b
323 +++ b/b
324 +++ b/b
324 @@ -1,1 +1,1 @@
325 @@ -1,1 +1,1 @@
325 -This is file b1
326 -This is file b1
326 +This is file b33
327 +This is file b33
327 $ hg status
328 $ hg status
328 M b
329 M b
329
330
330 Test for issue2364
331 Test for issue2364
331
332
332 $ hg up -qC .
333 $ hg up -qC .
333 $ hg rm b
334 $ hg rm b
334 $ hg ci -md
335 $ hg ci -md
335 $ hg revert -r -2 b
336 $ hg revert -r -2 b
336 $ hg up -q -- -2
337 $ hg up -q -- -2
337
338
338 Test that updated files are treated as "modified", when
339 Test that updated files are treated as "modified", when
339 'merge.update()' is aborted before 'merge.recordupdates()' (= parents
340 'merge.update()' is aborted before 'merge.recordupdates()' (= parents
340 aren't changed), even if the files' mode, size, and timestamp
341 aren't changed), even if the files' mode, size, and timestamp
341 are unchanged on the filesystem (see also issue4583).
342 are unchanged on the filesystem (see also issue4583).
342
343
343 $ cat > $TESTTMP/abort.py <<EOF
344 $ cat > $TESTTMP/abort.py <<EOF
344 > from __future__ import absolute_import
345 > from __future__ import absolute_import
345 > # emulate aborting before "recordupdates()". in this case, files
346 > # emulate aborting before "recordupdates()". in this case, files
346 > # are changed without updating dirstate
347 > # are changed without updating dirstate
347 > from mercurial import (
348 > from mercurial import (
348 > error,
349 > error,
349 > extensions,
350 > extensions,
350 > merge,
351 > merge,
351 > )
352 > )
352 > def applyupdates(orig, *args, **kwargs):
353 > def applyupdates(orig, *args, **kwargs):
353 > orig(*args, **kwargs)
354 > orig(*args, **kwargs)
354 > raise error.Abort('intentional aborting')
355 > raise error.Abort('intentional aborting')
355 > def extsetup(ui):
356 > def extsetup(ui):
356 > extensions.wrapfunction(merge, "applyupdates", applyupdates)
357 > extensions.wrapfunction(merge, "applyupdates", applyupdates)
357 > EOF
358 > EOF
358
359
359 $ cat >> .hg/hgrc <<EOF
360 $ cat >> .hg/hgrc <<EOF
360 > [fakedirstatewritetime]
361 > [fakedirstatewritetime]
361 > # emulate invoking dirstate.write() via repo.status()
362 > # emulate invoking dirstate.write() via repo.status()
362 > # at 2000-01-01 00:00
363 > # at 2000-01-01 00:00
363 > fakenow = 200001010000
364 > fakenow = 200001010000
364 > EOF
365 > EOF
365
366
366 (file gotten from other revision)
367 (file gotten from other revision)
367
368
368 $ hg update -q -C 2
369 $ hg update -q -C 2
369 $ echo 'THIS IS FILE B5' > b
370 $ echo 'THIS IS FILE B5' > b
370 $ hg commit -m 'commit #5'
371 $ hg commit -m 'commit #5'
371
372
372 $ hg update -q -C 3
373 $ hg update -q -C 3
373 $ cat b
374 $ cat b
374 This is file b1
375 This is file b1
375 $ touch -t 200001010000 b
376 $ touch -t 200001010000 b
376 $ hg debugrebuildstate
377 $ hg debugrebuildstate
377
378
378 $ cat >> .hg/hgrc <<EOF
379 $ cat >> .hg/hgrc <<EOF
379 > [extensions]
380 > [extensions]
380 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
381 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
381 > abort = $TESTTMP/abort.py
382 > abort = $TESTTMP/abort.py
382 > EOF
383 > EOF
383 $ hg merge 5
384 $ hg merge 5
384 abort: intentional aborting
385 abort: intentional aborting
385 [255]
386 [255]
386 $ cat >> .hg/hgrc <<EOF
387 $ cat >> .hg/hgrc <<EOF
387 > [extensions]
388 > [extensions]
388 > fakedirstatewritetime = !
389 > fakedirstatewritetime = !
389 > abort = !
390 > abort = !
390 > EOF
391 > EOF
391
392
392 $ cat b
393 $ cat b
393 THIS IS FILE B5
394 THIS IS FILE B5
394 $ touch -t 200001010000 b
395 $ touch -t 200001010000 b
395 $ hg status -A b
396 $ hg status -A b
396 M b
397 M b
397
398
398 (file merged from other revision)
399 (file merged from other revision)
399
400
400 $ hg update -q -C 3
401 $ hg update -q -C 3
401 $ echo 'this is file b6' > b
402 $ echo 'this is file b6' > b
402 $ hg commit -m 'commit #6'
403 $ hg commit -m 'commit #6'
403 created new head
404 created new head
404
405
405 $ cat b
406 $ cat b
406 this is file b6
407 this is file b6
407 $ touch -t 200001010000 b
408 $ touch -t 200001010000 b
408 $ hg debugrebuildstate
409 $ hg debugrebuildstate
409
410
410 $ cat >> .hg/hgrc <<EOF
411 $ cat >> .hg/hgrc <<EOF
411 > [extensions]
412 > [extensions]
412 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
413 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
413 > abort = $TESTTMP/abort.py
414 > abort = $TESTTMP/abort.py
414 > EOF
415 > EOF
415 $ hg merge --tool internal:other 5
416 $ hg merge --tool internal:other 5
416 abort: intentional aborting
417 abort: intentional aborting
417 [255]
418 [255]
418 $ cat >> .hg/hgrc <<EOF
419 $ cat >> .hg/hgrc <<EOF
419 > [extensions]
420 > [extensions]
420 > fakedirstatewritetime = !
421 > fakedirstatewritetime = !
421 > abort = !
422 > abort = !
422 > EOF
423 > EOF
423
424
424 $ cat b
425 $ cat b
425 THIS IS FILE B5
426 THIS IS FILE B5
426 $ touch -t 200001010000 b
427 $ touch -t 200001010000 b
427 $ hg status -A b
428 $ hg status -A b
428 M b
429 M b
429
430
430 $ cd ..
431 $ cd ..
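A note on the `--tool internal:other` invocations above: internal:other is the built-in merge tool that resolves every file to the other (merged-in) revision's version. The same tool can be selected through configuration instead of per invocation; a minimal hgrc sketch, illustrative only and not part of the change under review:

  [ui]
  merge = internal:other
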
@@ -1,89 +1,92 @@
1 Test update logic when there are renames or weird same-name cases between dirs
1 Test update logic when there are renames or weird same-name cases between dirs
2 and files
2 and files
3
3
4 Update with local changes across a file rename
4 Update with local changes across a file rename
5
5
6 $ hg init r1 && cd r1
6 $ hg init r1 && cd r1
7
7
8 $ echo a > a
8 $ echo a > a
9 $ hg add a
9 $ hg add a
10 $ hg ci -m a
10 $ hg ci -m a
11
11
12 $ hg mv a b
12 $ hg mv a b
13 $ hg ci -m rename
13 $ hg ci -m rename
14
14
15 $ echo b > b
15 $ echo b > b
16 $ hg ci -m change
16 $ hg ci -m change
17
17
18 $ hg up -q 0
18 $ hg up -q 0
19
19
20 $ echo c > a
20 $ echo c > a
21
21
22 $ hg up
22 $ hg up
23 merging a and b to b
23 merging a and b to b
24 warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
24 warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
25 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
25 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
26 use 'hg resolve' to retry unresolved file merges
26 use 'hg resolve' to retry unresolved file merges
27 [1]
27 [1]
28
28
29 Test update when local untracked directory exists with the same name as a
29 Test update when local untracked directory exists with the same name as a
30 tracked file in a commit we are updating to
30 tracked file in a commit we are updating to
31 $ hg init r2 && cd r2
31 $ hg init r2 && cd r2
32 $ echo root > root && hg ci -Am root # rev 0
32 $ echo root > root && hg ci -Am root # rev 0
33 adding root
33 adding root
34 $ echo text > name && hg ci -Am "name is a file" # rev 1
34 $ echo text > name && hg ci -Am "name is a file" # rev 1
35 adding name
35 adding name
36 $ hg up 0
36 $ hg up 0
37 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
37 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
38 $ mkdir name
38 $ mkdir name
39 $ hg up 1
39 $ hg up 1
40 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
41
41
42 Test update when local untracked directory exists with some files in it and has
42 Test update when local untracked directory exists with some files in it and has
43 the same name as a tracked file in a commit we are updating to. In the future this
43 the same name as a tracked file in a commit we are updating to. In the future this
44 should be updated to give a friendlier error message, but for now we should just
44 should be updated to give a friendlier error message, but for now we should just
45 make sure that this does not erase untracked data
45 make sure that this does not erase untracked data
46 $ hg up 0
46 $ hg up 0
47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
48 $ mkdir name
48 $ mkdir name
49 $ echo text > name/file
49 $ echo text > name/file
50 $ hg st
50 $ hg st
51 ? name/file
51 ? name/file
52 $ hg up 1
52 $ hg up 1
53 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
53 abort: Directory not empty: '$TESTTMP/r1/r2/name'
54 [255]
55 $ cat name/file
56 text
54 $ cd ..
57 $ cd ..
55
58
56 #if symlink
59 #if symlink
57
60
58 Test update when two commits have symlinks that point to different folders
61 Test update when two commits have symlinks that point to different folders
59 $ hg init r3 && cd r3
62 $ hg init r3 && cd r3
60 $ echo root > root && hg ci -Am root
63 $ echo root > root && hg ci -Am root
61 adding root
64 adding root
62 $ mkdir folder1 && mkdir folder2
65 $ mkdir folder1 && mkdir folder2
63 $ ln -s folder1 folder
66 $ ln -s folder1 folder
64 $ hg ci -Am "symlink to folder1"
67 $ hg ci -Am "symlink to folder1"
65 adding folder
68 adding folder
66 $ rm folder
69 $ rm folder
67 $ ln -s folder2 folder
70 $ ln -s folder2 folder
68 $ hg ci -Am "symlink to folder2"
71 $ hg ci -Am "symlink to folder2"
69 $ hg up 1
72 $ hg up 1
70 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
73 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 $ cd ..
74 $ cd ..
72
75
73 #endif
76 #endif
74
77
75 #if rmcwd
78 #if rmcwd
76
79
77 Test that warning is printed if cwd is deleted during update
80 Test that warning is printed if cwd is deleted during update
78 $ hg init r4 && cd r4
81 $ hg init r4 && cd r4
79 $ mkdir dir
82 $ mkdir dir
80 $ cd dir
83 $ cd dir
81 $ echo a > a
84 $ echo a > a
82 $ echo b > b
85 $ echo b > b
83 $ hg add a b
86 $ hg add a b
84 $ hg ci -m "file and dir"
87 $ hg ci -m "file and dir"
85 $ hg up -q null
88 $ hg up -q null
86 current directory was removed
89 current directory was removed
87 (consider changing to repo root: $TESTTMP/r1/r4)
90 (consider changing to repo root: $TESTTMP/r1/r4)
88
91
89 #endif
92 #endif