##// END OF EJS Templates
arbitraryfilecontext: skip the cmp fast path if any side is a symlink...
Phil Cohen -
r34836:14c87708 default
parent child Browse files
Show More
@@ -0,0 +1,57 b''
1 Setup:
2 $ cat > eval.py <<EOF
3 > from __future__ import absolute_import
4 > import filecmp
5 > from mercurial import commands, context, registrar
6 > cmdtable = {}
7 > command = registrar.command(cmdtable)
8 > @command(b'eval', [], 'hg eval CMD')
9 > def eval_(ui, repo, *cmds, **opts):
10 > cmd = " ".join(cmds)
11 > res = str(eval(cmd, globals(), locals()))
12 > ui.warn("%s" % res)
13 > EOF
14
15 $ echo "[extensions]" >> $HGRCPATH
16 $ echo "eval=`pwd`/eval.py" >> $HGRCPATH
17
18 Arbitraryfilectx.cmp does not follow symlinks:
19 $ mkdir case1
20 $ cd case1
21 $ hg init
22 $ printf "A" > real_A
23 $ printf "foo" > A
24 $ printf "foo" > B
25 $ ln -s A sym_A
26 $ hg add .
27 adding A
28 adding B
29 adding real_A
30 adding sym_A
31 $ hg commit -m "base"
32
33 These files are different and should return True (different):
34 (Note that filecmp.cmp's return semantics are inverted from ours, so we invert
35 for simplicity):
36 $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['real_A'])"
37 True (no-eol)
38 $ hg eval "not filecmp.cmp('A', 'real_A')"
39 True (no-eol)
40
41 These files are identical and should return False (same):
42 $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['A'])"
43 False (no-eol)
44 $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['B'])"
45 False (no-eol)
46 $ hg eval "not filecmp.cmp('A', 'B')"
47 False (no-eol)
48
49 This comparison should also return False, since A and sym_A are substantially
50 the same in the eyes of ``filectx.cmp``, which looks at data only.
51 $ hg eval "context.arbitraryfilectx('real_A', repo).cmp(repo[None]['sym_A'])"
52 False (no-eol)
53
54 A naive use of filecmp on those two would wrongly return True, since it follows
55 the symlink to "A", which has different contents.
56 $ hg eval "not filecmp.cmp('real_A', 'sym_A')"
57 True (no-eol)
@@ -1,2598 +1,2602 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirnodes,
26 wdirnodes,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .thirdparty import (
29 from .thirdparty import (
30 attr,
30 attr,
31 )
31 )
32 from . import (
32 from . import (
33 encoding,
33 encoding,
34 error,
34 error,
35 fileset,
35 fileset,
36 match as matchmod,
36 match as matchmod,
37 mdiff,
37 mdiff,
38 obsolete as obsmod,
38 obsolete as obsmod,
39 patch,
39 patch,
40 pathutil,
40 pathutil,
41 phases,
41 phases,
42 pycompat,
42 pycompat,
43 repoview,
43 repoview,
44 revlog,
44 revlog,
45 scmutil,
45 scmutil,
46 sparse,
46 sparse,
47 subrepo,
47 subrepo,
48 util,
48 util,
49 )
49 )
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
53 nonascii = re.compile(r'[^\x21-\x7f]').search
53 nonascii = re.compile(r'[^\x21-\x7f]').search
54
54
55 class basectx(object):
55 class basectx(object):
56 """A basectx object represents the common logic for its children:
56 """A basectx object represents the common logic for its children:
57 changectx: read-only context that is already present in the repo,
57 changectx: read-only context that is already present in the repo,
58 workingctx: a context that represents the working directory and can
58 workingctx: a context that represents the working directory and can
59 be committed,
59 be committed,
60 memctx: a context that represents changes in-memory and can also
60 memctx: a context that represents changes in-memory and can also
61 be committed."""
61 be committed."""
62 def __new__(cls, repo, changeid='', *args, **kwargs):
62 def __new__(cls, repo, changeid='', *args, **kwargs):
63 if isinstance(changeid, basectx):
63 if isinstance(changeid, basectx):
64 return changeid
64 return changeid
65
65
66 o = super(basectx, cls).__new__(cls)
66 o = super(basectx, cls).__new__(cls)
67
67
68 o._repo = repo
68 o._repo = repo
69 o._rev = nullrev
69 o._rev = nullrev
70 o._node = nullid
70 o._node = nullid
71
71
72 return o
72 return o
73
73
74 def __bytes__(self):
74 def __bytes__(self):
75 return short(self.node())
75 return short(self.node())
76
76
77 __str__ = encoding.strmethod(__bytes__)
77 __str__ = encoding.strmethod(__bytes__)
78
78
79 def __int__(self):
79 def __int__(self):
80 return self.rev()
80 return self.rev()
81
81
82 def __repr__(self):
82 def __repr__(self):
83 return r"<%s %s>" % (type(self).__name__, str(self))
83 return r"<%s %s>" % (type(self).__name__, str(self))
84
84
85 def __eq__(self, other):
85 def __eq__(self, other):
86 try:
86 try:
87 return type(self) == type(other) and self._rev == other._rev
87 return type(self) == type(other) and self._rev == other._rev
88 except AttributeError:
88 except AttributeError:
89 return False
89 return False
90
90
91 def __ne__(self, other):
91 def __ne__(self, other):
92 return not (self == other)
92 return not (self == other)
93
93
94 def __contains__(self, key):
94 def __contains__(self, key):
95 return key in self._manifest
95 return key in self._manifest
96
96
97 def __getitem__(self, key):
97 def __getitem__(self, key):
98 return self.filectx(key)
98 return self.filectx(key)
99
99
100 def __iter__(self):
100 def __iter__(self):
101 return iter(self._manifest)
101 return iter(self._manifest)
102
102
103 def _buildstatusmanifest(self, status):
103 def _buildstatusmanifest(self, status):
104 """Builds a manifest that includes the given status results, if this is
104 """Builds a manifest that includes the given status results, if this is
105 a working copy context. For non-working copy contexts, it just returns
105 a working copy context. For non-working copy contexts, it just returns
106 the normal manifest."""
106 the normal manifest."""
107 return self.manifest()
107 return self.manifest()
108
108
109 def _matchstatus(self, other, match):
109 def _matchstatus(self, other, match):
110 """This internal method provides a way for child objects to override the
110 """This internal method provides a way for child objects to override the
111 match operator.
111 match operator.
112 """
112 """
113 return match
113 return match
114
114
115 def _buildstatus(self, other, s, match, listignored, listclean,
115 def _buildstatus(self, other, s, match, listignored, listclean,
116 listunknown):
116 listunknown):
117 """build a status with respect to another context"""
117 """build a status with respect to another context"""
118 # Load earliest manifest first for caching reasons. More specifically,
118 # Load earliest manifest first for caching reasons. More specifically,
119 # if you have revisions 1000 and 1001, 1001 is probably stored as a
119 # if you have revisions 1000 and 1001, 1001 is probably stored as a
120 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
120 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
121 # 1000 and cache it so that when you read 1001, we just need to apply a
121 # 1000 and cache it so that when you read 1001, we just need to apply a
122 # delta to what's in the cache. So that's one full reconstruction + one
122 # delta to what's in the cache. So that's one full reconstruction + one
123 # delta application.
123 # delta application.
124 mf2 = None
124 mf2 = None
125 if self.rev() is not None and self.rev() < other.rev():
125 if self.rev() is not None and self.rev() < other.rev():
126 mf2 = self._buildstatusmanifest(s)
126 mf2 = self._buildstatusmanifest(s)
127 mf1 = other._buildstatusmanifest(s)
127 mf1 = other._buildstatusmanifest(s)
128 if mf2 is None:
128 if mf2 is None:
129 mf2 = self._buildstatusmanifest(s)
129 mf2 = self._buildstatusmanifest(s)
130
130
131 modified, added = [], []
131 modified, added = [], []
132 removed = []
132 removed = []
133 clean = []
133 clean = []
134 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
134 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
135 deletedset = set(deleted)
135 deletedset = set(deleted)
136 d = mf1.diff(mf2, match=match, clean=listclean)
136 d = mf1.diff(mf2, match=match, clean=listclean)
137 for fn, value in d.iteritems():
137 for fn, value in d.iteritems():
138 if fn in deletedset:
138 if fn in deletedset:
139 continue
139 continue
140 if value is None:
140 if value is None:
141 clean.append(fn)
141 clean.append(fn)
142 continue
142 continue
143 (node1, flag1), (node2, flag2) = value
143 (node1, flag1), (node2, flag2) = value
144 if node1 is None:
144 if node1 is None:
145 added.append(fn)
145 added.append(fn)
146 elif node2 is None:
146 elif node2 is None:
147 removed.append(fn)
147 removed.append(fn)
148 elif flag1 != flag2:
148 elif flag1 != flag2:
149 modified.append(fn)
149 modified.append(fn)
150 elif node2 not in wdirnodes:
150 elif node2 not in wdirnodes:
151 # When comparing files between two commits, we save time by
151 # When comparing files between two commits, we save time by
152 # not comparing the file contents when the nodeids differ.
152 # not comparing the file contents when the nodeids differ.
153 # Note that this means we incorrectly report a reverted change
153 # Note that this means we incorrectly report a reverted change
154 # to a file as a modification.
154 # to a file as a modification.
155 modified.append(fn)
155 modified.append(fn)
156 elif self[fn].cmp(other[fn]):
156 elif self[fn].cmp(other[fn]):
157 modified.append(fn)
157 modified.append(fn)
158 else:
158 else:
159 clean.append(fn)
159 clean.append(fn)
160
160
161 if removed:
161 if removed:
162 # need to filter files if they are already reported as removed
162 # need to filter files if they are already reported as removed
163 unknown = [fn for fn in unknown if fn not in mf1 and
163 unknown = [fn for fn in unknown if fn not in mf1 and
164 (not match or match(fn))]
164 (not match or match(fn))]
165 ignored = [fn for fn in ignored if fn not in mf1 and
165 ignored = [fn for fn in ignored if fn not in mf1 and
166 (not match or match(fn))]
166 (not match or match(fn))]
167 # if they're deleted, don't report them as removed
167 # if they're deleted, don't report them as removed
168 removed = [fn for fn in removed if fn not in deletedset]
168 removed = [fn for fn in removed if fn not in deletedset]
169
169
170 return scmutil.status(modified, added, removed, deleted, unknown,
170 return scmutil.status(modified, added, removed, deleted, unknown,
171 ignored, clean)
171 ignored, clean)
172
172
173 @propertycache
173 @propertycache
174 def substate(self):
174 def substate(self):
175 return subrepo.state(self, self._repo.ui)
175 return subrepo.state(self, self._repo.ui)
176
176
177 def subrev(self, subpath):
177 def subrev(self, subpath):
178 return self.substate[subpath][1]
178 return self.substate[subpath][1]
179
179
180 def rev(self):
180 def rev(self):
181 return self._rev
181 return self._rev
182 def node(self):
182 def node(self):
183 return self._node
183 return self._node
184 def hex(self):
184 def hex(self):
185 return hex(self.node())
185 return hex(self.node())
186 def manifest(self):
186 def manifest(self):
187 return self._manifest
187 return self._manifest
188 def manifestctx(self):
188 def manifestctx(self):
189 return self._manifestctx
189 return self._manifestctx
190 def repo(self):
190 def repo(self):
191 return self._repo
191 return self._repo
192 def phasestr(self):
192 def phasestr(self):
193 return phases.phasenames[self.phase()]
193 return phases.phasenames[self.phase()]
194 def mutable(self):
194 def mutable(self):
195 return self.phase() > phases.public
195 return self.phase() > phases.public
196
196
197 def getfileset(self, expr):
197 def getfileset(self, expr):
198 return fileset.getfileset(self, expr)
198 return fileset.getfileset(self, expr)
199
199
200 def obsolete(self):
200 def obsolete(self):
201 """True if the changeset is obsolete"""
201 """True if the changeset is obsolete"""
202 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
202 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
203
203
204 def extinct(self):
204 def extinct(self):
205 """True if the changeset is extinct"""
205 """True if the changeset is extinct"""
206 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
206 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
207
207
208 def unstable(self):
208 def unstable(self):
209 msg = ("'context.unstable' is deprecated, "
209 msg = ("'context.unstable' is deprecated, "
210 "use 'context.orphan'")
210 "use 'context.orphan'")
211 self._repo.ui.deprecwarn(msg, '4.4')
211 self._repo.ui.deprecwarn(msg, '4.4')
212 return self.orphan()
212 return self.orphan()
213
213
214 def orphan(self):
214 def orphan(self):
215 """True if the changeset is not obsolete but it's ancestor are"""
215 """True if the changeset is not obsolete but it's ancestor are"""
216 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
216 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
217
217
218 def bumped(self):
218 def bumped(self):
219 msg = ("'context.bumped' is deprecated, "
219 msg = ("'context.bumped' is deprecated, "
220 "use 'context.phasedivergent'")
220 "use 'context.phasedivergent'")
221 self._repo.ui.deprecwarn(msg, '4.4')
221 self._repo.ui.deprecwarn(msg, '4.4')
222 return self.phasedivergent()
222 return self.phasedivergent()
223
223
224 def phasedivergent(self):
224 def phasedivergent(self):
225 """True if the changeset try to be a successor of a public changeset
225 """True if the changeset try to be a successor of a public changeset
226
226
227 Only non-public and non-obsolete changesets may be bumped.
227 Only non-public and non-obsolete changesets may be bumped.
228 """
228 """
229 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
229 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
230
230
231 def divergent(self):
231 def divergent(self):
232 msg = ("'context.divergent' is deprecated, "
232 msg = ("'context.divergent' is deprecated, "
233 "use 'context.contentdivergent'")
233 "use 'context.contentdivergent'")
234 self._repo.ui.deprecwarn(msg, '4.4')
234 self._repo.ui.deprecwarn(msg, '4.4')
235 return self.contentdivergent()
235 return self.contentdivergent()
236
236
237 def contentdivergent(self):
237 def contentdivergent(self):
238 """Is a successors of a changeset with multiple possible successors set
238 """Is a successors of a changeset with multiple possible successors set
239
239
240 Only non-public and non-obsolete changesets may be divergent.
240 Only non-public and non-obsolete changesets may be divergent.
241 """
241 """
242 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
242 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
243
243
244 def troubled(self):
244 def troubled(self):
245 msg = ("'context.troubled' is deprecated, "
245 msg = ("'context.troubled' is deprecated, "
246 "use 'context.isunstable'")
246 "use 'context.isunstable'")
247 self._repo.ui.deprecwarn(msg, '4.4')
247 self._repo.ui.deprecwarn(msg, '4.4')
248 return self.isunstable()
248 return self.isunstable()
249
249
250 def isunstable(self):
250 def isunstable(self):
251 """True if the changeset is either unstable, bumped or divergent"""
251 """True if the changeset is either unstable, bumped or divergent"""
252 return self.orphan() or self.phasedivergent() or self.contentdivergent()
252 return self.orphan() or self.phasedivergent() or self.contentdivergent()
253
253
254 def troubles(self):
254 def troubles(self):
255 """Keep the old version around in order to avoid breaking extensions
255 """Keep the old version around in order to avoid breaking extensions
256 about different return values.
256 about different return values.
257 """
257 """
258 msg = ("'context.troubles' is deprecated, "
258 msg = ("'context.troubles' is deprecated, "
259 "use 'context.instabilities'")
259 "use 'context.instabilities'")
260 self._repo.ui.deprecwarn(msg, '4.4')
260 self._repo.ui.deprecwarn(msg, '4.4')
261
261
262 troubles = []
262 troubles = []
263 if self.orphan():
263 if self.orphan():
264 troubles.append('orphan')
264 troubles.append('orphan')
265 if self.phasedivergent():
265 if self.phasedivergent():
266 troubles.append('bumped')
266 troubles.append('bumped')
267 if self.contentdivergent():
267 if self.contentdivergent():
268 troubles.append('divergent')
268 troubles.append('divergent')
269 return troubles
269 return troubles
270
270
271 def instabilities(self):
271 def instabilities(self):
272 """return the list of instabilities affecting this changeset.
272 """return the list of instabilities affecting this changeset.
273
273
274 Instabilities are returned as strings. possible values are:
274 Instabilities are returned as strings. possible values are:
275 - orphan,
275 - orphan,
276 - phase-divergent,
276 - phase-divergent,
277 - content-divergent.
277 - content-divergent.
278 """
278 """
279 instabilities = []
279 instabilities = []
280 if self.orphan():
280 if self.orphan():
281 instabilities.append('orphan')
281 instabilities.append('orphan')
282 if self.phasedivergent():
282 if self.phasedivergent():
283 instabilities.append('phase-divergent')
283 instabilities.append('phase-divergent')
284 if self.contentdivergent():
284 if self.contentdivergent():
285 instabilities.append('content-divergent')
285 instabilities.append('content-divergent')
286 return instabilities
286 return instabilities
287
287
288 def parents(self):
288 def parents(self):
289 """return contexts for each parent changeset"""
289 """return contexts for each parent changeset"""
290 return self._parents
290 return self._parents
291
291
292 def p1(self):
292 def p1(self):
293 return self._parents[0]
293 return self._parents[0]
294
294
295 def p2(self):
295 def p2(self):
296 parents = self._parents
296 parents = self._parents
297 if len(parents) == 2:
297 if len(parents) == 2:
298 return parents[1]
298 return parents[1]
299 return changectx(self._repo, nullrev)
299 return changectx(self._repo, nullrev)
300
300
301 def _fileinfo(self, path):
301 def _fileinfo(self, path):
302 if r'_manifest' in self.__dict__:
302 if r'_manifest' in self.__dict__:
303 try:
303 try:
304 return self._manifest[path], self._manifest.flags(path)
304 return self._manifest[path], self._manifest.flags(path)
305 except KeyError:
305 except KeyError:
306 raise error.ManifestLookupError(self._node, path,
306 raise error.ManifestLookupError(self._node, path,
307 _('not found in manifest'))
307 _('not found in manifest'))
308 if r'_manifestdelta' in self.__dict__ or path in self.files():
308 if r'_manifestdelta' in self.__dict__ or path in self.files():
309 if path in self._manifestdelta:
309 if path in self._manifestdelta:
310 return (self._manifestdelta[path],
310 return (self._manifestdelta[path],
311 self._manifestdelta.flags(path))
311 self._manifestdelta.flags(path))
312 mfl = self._repo.manifestlog
312 mfl = self._repo.manifestlog
313 try:
313 try:
314 node, flag = mfl[self._changeset.manifest].find(path)
314 node, flag = mfl[self._changeset.manifest].find(path)
315 except KeyError:
315 except KeyError:
316 raise error.ManifestLookupError(self._node, path,
316 raise error.ManifestLookupError(self._node, path,
317 _('not found in manifest'))
317 _('not found in manifest'))
318
318
319 return node, flag
319 return node, flag
320
320
321 def filenode(self, path):
321 def filenode(self, path):
322 return self._fileinfo(path)[0]
322 return self._fileinfo(path)[0]
323
323
324 def flags(self, path):
324 def flags(self, path):
325 try:
325 try:
326 return self._fileinfo(path)[1]
326 return self._fileinfo(path)[1]
327 except error.LookupError:
327 except error.LookupError:
328 return ''
328 return ''
329
329
330 def sub(self, path, allowcreate=True):
330 def sub(self, path, allowcreate=True):
331 '''return a subrepo for the stored revision of path, never wdir()'''
331 '''return a subrepo for the stored revision of path, never wdir()'''
332 return subrepo.subrepo(self, path, allowcreate=allowcreate)
332 return subrepo.subrepo(self, path, allowcreate=allowcreate)
333
333
334 def nullsub(self, path, pctx):
334 def nullsub(self, path, pctx):
335 return subrepo.nullsubrepo(self, path, pctx)
335 return subrepo.nullsubrepo(self, path, pctx)
336
336
337 def workingsub(self, path):
337 def workingsub(self, path):
338 '''return a subrepo for the stored revision, or wdir if this is a wdir
338 '''return a subrepo for the stored revision, or wdir if this is a wdir
339 context.
339 context.
340 '''
340 '''
341 return subrepo.subrepo(self, path, allowwdir=True)
341 return subrepo.subrepo(self, path, allowwdir=True)
342
342
343 def match(self, pats=None, include=None, exclude=None, default='glob',
343 def match(self, pats=None, include=None, exclude=None, default='glob',
344 listsubrepos=False, badfn=None):
344 listsubrepos=False, badfn=None):
345 r = self._repo
345 r = self._repo
346 return matchmod.match(r.root, r.getcwd(), pats,
346 return matchmod.match(r.root, r.getcwd(), pats,
347 include, exclude, default,
347 include, exclude, default,
348 auditor=r.nofsauditor, ctx=self,
348 auditor=r.nofsauditor, ctx=self,
349 listsubrepos=listsubrepos, badfn=badfn)
349 listsubrepos=listsubrepos, badfn=badfn)
350
350
351 def diff(self, ctx2=None, match=None, **opts):
351 def diff(self, ctx2=None, match=None, **opts):
352 """Returns a diff generator for the given contexts and matcher"""
352 """Returns a diff generator for the given contexts and matcher"""
353 if ctx2 is None:
353 if ctx2 is None:
354 ctx2 = self.p1()
354 ctx2 = self.p1()
355 if ctx2 is not None:
355 if ctx2 is not None:
356 ctx2 = self._repo[ctx2]
356 ctx2 = self._repo[ctx2]
357 diffopts = patch.diffopts(self._repo.ui, opts)
357 diffopts = patch.diffopts(self._repo.ui, opts)
358 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
358 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
359
359
360 def dirs(self):
360 def dirs(self):
361 return self._manifest.dirs()
361 return self._manifest.dirs()
362
362
363 def hasdir(self, dir):
363 def hasdir(self, dir):
364 return self._manifest.hasdir(dir)
364 return self._manifest.hasdir(dir)
365
365
366 def status(self, other=None, match=None, listignored=False,
366 def status(self, other=None, match=None, listignored=False,
367 listclean=False, listunknown=False, listsubrepos=False):
367 listclean=False, listunknown=False, listsubrepos=False):
368 """return status of files between two nodes or node and working
368 """return status of files between two nodes or node and working
369 directory.
369 directory.
370
370
371 If other is None, compare this node with working directory.
371 If other is None, compare this node with working directory.
372
372
373 returns (modified, added, removed, deleted, unknown, ignored, clean)
373 returns (modified, added, removed, deleted, unknown, ignored, clean)
374 """
374 """
375
375
376 ctx1 = self
376 ctx1 = self
377 ctx2 = self._repo[other]
377 ctx2 = self._repo[other]
378
378
379 # This next code block is, admittedly, fragile logic that tests for
379 # This next code block is, admittedly, fragile logic that tests for
380 # reversing the contexts and wouldn't need to exist if it weren't for
380 # reversing the contexts and wouldn't need to exist if it weren't for
381 # the fast (and common) code path of comparing the working directory
381 # the fast (and common) code path of comparing the working directory
382 # with its first parent.
382 # with its first parent.
383 #
383 #
384 # What we're aiming for here is the ability to call:
384 # What we're aiming for here is the ability to call:
385 #
385 #
386 # workingctx.status(parentctx)
386 # workingctx.status(parentctx)
387 #
387 #
388 # If we always built the manifest for each context and compared those,
388 # If we always built the manifest for each context and compared those,
389 # then we'd be done. But the special case of the above call means we
389 # then we'd be done. But the special case of the above call means we
390 # just copy the manifest of the parent.
390 # just copy the manifest of the parent.
391 reversed = False
391 reversed = False
392 if (not isinstance(ctx1, changectx)
392 if (not isinstance(ctx1, changectx)
393 and isinstance(ctx2, changectx)):
393 and isinstance(ctx2, changectx)):
394 reversed = True
394 reversed = True
395 ctx1, ctx2 = ctx2, ctx1
395 ctx1, ctx2 = ctx2, ctx1
396
396
397 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
397 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
398 match = ctx2._matchstatus(ctx1, match)
398 match = ctx2._matchstatus(ctx1, match)
399 r = scmutil.status([], [], [], [], [], [], [])
399 r = scmutil.status([], [], [], [], [], [], [])
400 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
400 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
401 listunknown)
401 listunknown)
402
402
403 if reversed:
403 if reversed:
404 # Reverse added and removed. Clear deleted, unknown and ignored as
404 # Reverse added and removed. Clear deleted, unknown and ignored as
405 # these make no sense to reverse.
405 # these make no sense to reverse.
406 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
406 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
407 r.clean)
407 r.clean)
408
408
409 if listsubrepos:
409 if listsubrepos:
410 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
410 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
411 try:
411 try:
412 rev2 = ctx2.subrev(subpath)
412 rev2 = ctx2.subrev(subpath)
413 except KeyError:
413 except KeyError:
414 # A subrepo that existed in node1 was deleted between
414 # A subrepo that existed in node1 was deleted between
415 # node1 and node2 (inclusive). Thus, ctx2's substate
415 # node1 and node2 (inclusive). Thus, ctx2's substate
416 # won't contain that subpath. The best we can do ignore it.
416 # won't contain that subpath. The best we can do ignore it.
417 rev2 = None
417 rev2 = None
418 submatch = matchmod.subdirmatcher(subpath, match)
418 submatch = matchmod.subdirmatcher(subpath, match)
419 s = sub.status(rev2, match=submatch, ignored=listignored,
419 s = sub.status(rev2, match=submatch, ignored=listignored,
420 clean=listclean, unknown=listunknown,
420 clean=listclean, unknown=listunknown,
421 listsubrepos=True)
421 listsubrepos=True)
422 for rfiles, sfiles in zip(r, s):
422 for rfiles, sfiles in zip(r, s):
423 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
423 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
424
424
425 for l in r:
425 for l in r:
426 l.sort()
426 l.sort()
427
427
428 return r
428 return r
429
429
430 def _filterederror(repo, changeid):
430 def _filterederror(repo, changeid):
431 """build an exception to be raised about a filtered changeid
431 """build an exception to be raised about a filtered changeid
432
432
433 This is extracted in a function to help extensions (eg: evolve) to
433 This is extracted in a function to help extensions (eg: evolve) to
434 experiment with various message variants."""
434 experiment with various message variants."""
435 if repo.filtername.startswith('visible'):
435 if repo.filtername.startswith('visible'):
436 msg = _("hidden revision '%s'") % changeid
436 msg = _("hidden revision '%s'") % changeid
437 hint = _('use --hidden to access hidden revisions')
437 hint = _('use --hidden to access hidden revisions')
438 return error.FilteredRepoLookupError(msg, hint=hint)
438 return error.FilteredRepoLookupError(msg, hint=hint)
439 msg = _("filtered revision '%s' (not in '%s' subset)")
439 msg = _("filtered revision '%s' (not in '%s' subset)")
440 msg %= (changeid, repo.filtername)
440 msg %= (changeid, repo.filtername)
441 return error.FilteredRepoLookupError(msg)
441 return error.FilteredRepoLookupError(msg)
442
442
443 class changectx(basectx):
443 class changectx(basectx):
444 """A changecontext object makes access to data related to a particular
444 """A changecontext object makes access to data related to a particular
445 changeset convenient. It represents a read-only context already present in
445 changeset convenient. It represents a read-only context already present in
446 the repo."""
446 the repo."""
447 def __init__(self, repo, changeid=''):
447 def __init__(self, repo, changeid=''):
448 """changeid is a revision number, node, or tag"""
448 """changeid is a revision number, node, or tag"""
449
449
450 # since basectx.__new__ already took care of copying the object, we
450 # since basectx.__new__ already took care of copying the object, we
451 # don't need to do anything in __init__, so we just exit here
451 # don't need to do anything in __init__, so we just exit here
452 if isinstance(changeid, basectx):
452 if isinstance(changeid, basectx):
453 return
453 return
454
454
455 if changeid == '':
455 if changeid == '':
456 changeid = '.'
456 changeid = '.'
457 self._repo = repo
457 self._repo = repo
458
458
459 try:
459 try:
460 if isinstance(changeid, int):
460 if isinstance(changeid, int):
461 self._node = repo.changelog.node(changeid)
461 self._node = repo.changelog.node(changeid)
462 self._rev = changeid
462 self._rev = changeid
463 return
463 return
464 if not pycompat.ispy3 and isinstance(changeid, long):
464 if not pycompat.ispy3 and isinstance(changeid, long):
465 changeid = str(changeid)
465 changeid = str(changeid)
466 if changeid == 'null':
466 if changeid == 'null':
467 self._node = nullid
467 self._node = nullid
468 self._rev = nullrev
468 self._rev = nullrev
469 return
469 return
470 if changeid == 'tip':
470 if changeid == 'tip':
471 self._node = repo.changelog.tip()
471 self._node = repo.changelog.tip()
472 self._rev = repo.changelog.rev(self._node)
472 self._rev = repo.changelog.rev(self._node)
473 return
473 return
474 if changeid == '.' or changeid == repo.dirstate.p1():
474 if changeid == '.' or changeid == repo.dirstate.p1():
475 # this is a hack to delay/avoid loading obsmarkers
475 # this is a hack to delay/avoid loading obsmarkers
476 # when we know that '.' won't be hidden
476 # when we know that '.' won't be hidden
477 self._node = repo.dirstate.p1()
477 self._node = repo.dirstate.p1()
478 self._rev = repo.unfiltered().changelog.rev(self._node)
478 self._rev = repo.unfiltered().changelog.rev(self._node)
479 return
479 return
480 if len(changeid) == 20:
480 if len(changeid) == 20:
481 try:
481 try:
482 self._node = changeid
482 self._node = changeid
483 self._rev = repo.changelog.rev(changeid)
483 self._rev = repo.changelog.rev(changeid)
484 return
484 return
485 except error.FilteredRepoLookupError:
485 except error.FilteredRepoLookupError:
486 raise
486 raise
487 except LookupError:
487 except LookupError:
488 pass
488 pass
489
489
490 try:
490 try:
491 r = int(changeid)
491 r = int(changeid)
492 if '%d' % r != changeid:
492 if '%d' % r != changeid:
493 raise ValueError
493 raise ValueError
494 l = len(repo.changelog)
494 l = len(repo.changelog)
495 if r < 0:
495 if r < 0:
496 r += l
496 r += l
497 if r < 0 or r >= l and r != wdirrev:
497 if r < 0 or r >= l and r != wdirrev:
498 raise ValueError
498 raise ValueError
499 self._rev = r
499 self._rev = r
500 self._node = repo.changelog.node(r)
500 self._node = repo.changelog.node(r)
501 return
501 return
502 except error.FilteredIndexError:
502 except error.FilteredIndexError:
503 raise
503 raise
504 except (ValueError, OverflowError, IndexError):
504 except (ValueError, OverflowError, IndexError):
505 pass
505 pass
506
506
507 if len(changeid) == 40:
507 if len(changeid) == 40:
508 try:
508 try:
509 self._node = bin(changeid)
509 self._node = bin(changeid)
510 self._rev = repo.changelog.rev(self._node)
510 self._rev = repo.changelog.rev(self._node)
511 return
511 return
512 except error.FilteredLookupError:
512 except error.FilteredLookupError:
513 raise
513 raise
514 except (TypeError, LookupError):
514 except (TypeError, LookupError):
515 pass
515 pass
516
516
517 # lookup bookmarks through the name interface
517 # lookup bookmarks through the name interface
518 try:
518 try:
519 self._node = repo.names.singlenode(repo, changeid)
519 self._node = repo.names.singlenode(repo, changeid)
520 self._rev = repo.changelog.rev(self._node)
520 self._rev = repo.changelog.rev(self._node)
521 return
521 return
522 except KeyError:
522 except KeyError:
523 pass
523 pass
524 except error.FilteredRepoLookupError:
524 except error.FilteredRepoLookupError:
525 raise
525 raise
526 except error.RepoLookupError:
526 except error.RepoLookupError:
527 pass
527 pass
528
528
529 self._node = repo.unfiltered().changelog._partialmatch(changeid)
529 self._node = repo.unfiltered().changelog._partialmatch(changeid)
530 if self._node is not None:
530 if self._node is not None:
531 self._rev = repo.changelog.rev(self._node)
531 self._rev = repo.changelog.rev(self._node)
532 return
532 return
533
533
534 # lookup failed
534 # lookup failed
535 # check if it might have come from damaged dirstate
535 # check if it might have come from damaged dirstate
536 #
536 #
537 # XXX we could avoid the unfiltered if we had a recognizable
537 # XXX we could avoid the unfiltered if we had a recognizable
538 # exception for filtered changeset access
538 # exception for filtered changeset access
539 if changeid in repo.unfiltered().dirstate.parents():
539 if changeid in repo.unfiltered().dirstate.parents():
540 msg = _("working directory has unknown parent '%s'!")
540 msg = _("working directory has unknown parent '%s'!")
541 raise error.Abort(msg % short(changeid))
541 raise error.Abort(msg % short(changeid))
542 try:
542 try:
543 if len(changeid) == 20 and nonascii(changeid):
543 if len(changeid) == 20 and nonascii(changeid):
544 changeid = hex(changeid)
544 changeid = hex(changeid)
545 except TypeError:
545 except TypeError:
546 pass
546 pass
547 except (error.FilteredIndexError, error.FilteredLookupError,
547 except (error.FilteredIndexError, error.FilteredLookupError,
548 error.FilteredRepoLookupError):
548 error.FilteredRepoLookupError):
549 raise _filterederror(repo, changeid)
549 raise _filterederror(repo, changeid)
550 except IndexError:
550 except IndexError:
551 pass
551 pass
552 raise error.RepoLookupError(
552 raise error.RepoLookupError(
553 _("unknown revision '%s'") % changeid)
553 _("unknown revision '%s'") % changeid)
554
554
    def __hash__(self):
        # Hash on the revision number once it is resolved; while the
        # context is still unresolved, fall back to object identity.
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __nonzero__(self):
        # A changectx is falsy only for the null revision.
        return self._rev != nullrev

    # py3 spelling of the truthiness hook
    __bool__ = __nonzero__
565
565
    @propertycache
    def _changeset(self):
        # Raw changelog entry for this revision (lazily loaded, then cached).
        return self._repo.changelog.changelogrevision(self.rev())

    @propertycache
    def _manifest(self):
        # Fully parsed manifest for this changeset (lazily loaded, cached).
        return self._manifestctx.read()

    @property
    def _manifestctx(self):
        # Manifest context; deliberately a plain property (not cached) so the
        # manifestlog remains the single authority for manifest objects.
        return self._repo.manifestlog[self._changeset.manifest]

    @propertycache
    def _manifestdelta(self):
        # Delta of the manifest against its parent; cheaper than a full
        # read when callers only need the changed entries.
        return self._manifestctx.readdelta()
581
581
582 @propertycache
582 @propertycache
583 def _parents(self):
583 def _parents(self):
584 repo = self._repo
584 repo = self._repo
585 p1, p2 = repo.changelog.parentrevs(self._rev)
585 p1, p2 = repo.changelog.parentrevs(self._rev)
586 if p2 == nullrev:
586 if p2 == nullrev:
587 return [changectx(repo, p1)]
587 return [changectx(repo, p1)]
588 return [changectx(repo, p1), changectx(repo, p2)]
588 return [changectx(repo, p1), changectx(repo, p2)]
589
589
590 def changeset(self):
590 def changeset(self):
591 c = self._changeset
591 c = self._changeset
592 return (
592 return (
593 c.manifest,
593 c.manifest,
594 c.user,
594 c.user,
595 c.date,
595 c.date,
596 c.files,
596 c.files,
597 c.description,
597 c.description,
598 c.extra,
598 c.extra,
599 )
599 )
600 def manifestnode(self):
600 def manifestnode(self):
601 return self._changeset.manifest
601 return self._changeset.manifest
602
602
    # Thin accessors delegating to the parsed changelog entry.
    def user(self):
        return self._changeset.user
    def date(self):
        return self._changeset.date
    def files(self):
        return self._changeset.files
    def description(self):
        return self._changeset.description
    def branch(self):
        # Branch names live in the changeset 'extra' dict; convert from the
        # internal UTF-8 form to the local encoding.
        return encoding.tolocal(self._changeset.extra.get("branch"))
    def closesbranch(self):
        # A branch-closing commit records a 'close' key in extra.
        return 'close' in self._changeset.extra
    def extra(self):
        return self._changeset.extra
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        # Hidden == filtered out of the 'visible' repoview.
        return self._rev in repoview.filterrevs(self._repo, 'visible')

    def isinmemory(self):
        # Committed changesets are always backed by on-disk storage.
        return False
628
628
629 def children(self):
629 def children(self):
630 """return contexts for each child changeset"""
630 """return contexts for each child changeset"""
631 c = self._repo.changelog.children(self._node)
631 c = self._repo.changelog.children(self._node)
632 return [changectx(self._repo, x) for x in c]
632 return [changectx(self._repo, x) for x in c]
633
633
634 def ancestors(self):
634 def ancestors(self):
635 for a in self._repo.changelog.ancestors([self._rev]):
635 for a in self._repo.changelog.ancestors([self._rev]):
636 yield changectx(self._repo, a)
636 yield changectx(self._repo, a)
637
637
638 def descendants(self):
638 def descendants(self):
639 for d in self._repo.changelog.descendants([self._rev]):
639 for d in self._repo.changelog.descendants([self._rev]):
640 yield changectx(self._repo, d)
640 yield changectx(self._repo, d)
641
641
642 def filectx(self, path, fileid=None, filelog=None):
642 def filectx(self, path, fileid=None, filelog=None):
643 """get a file context from this changeset"""
643 """get a file context from this changeset"""
644 if fileid is None:
644 if fileid is None:
645 fileid = self.filenode(path)
645 fileid = self.filenode(path)
646 return filectx(self._repo, path, fileid=fileid,
646 return filectx(self._repo, path, fileid=fileid,
647 changectx=self, filelog=filelog)
647 changectx=self, filelog=filelog)
648
648
    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor.

        :c2: the other changectx (may be a workingctx)
        :warn: if True, tell the user which ancestor was picked when the
               choice was ambiguous
        """
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            # a workingctx has no node of its own; use its first parent
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist('merge', 'preferancestor'):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    # unknown revision in the config; try the next one
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                # no configured preference matched; let the revlog decide
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)

    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)
688
688
    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        # A committed context has no unknown/ignored files, so matching
        # reduces to a plain manifest walk.
        return self.walk(match)
706
706
707 class basefilectx(object):
707 class basefilectx(object):
708 """A filecontext object represents the common logic for its children:
708 """A filecontext object represents the common logic for its children:
709 filectx: read-only access to a filerevision that is already present
709 filectx: read-only access to a filerevision that is already present
710 in the repo,
710 in the repo,
711 workingfilectx: a filecontext that represents files from the working
711 workingfilectx: a filecontext that represents files from the working
712 directory,
712 directory,
713 memfilectx: a filecontext that represents files in-memory,
713 memfilectx: a filecontext that represents files in-memory,
714 overlayfilectx: duplicate another filecontext with some fields overridden.
714 overlayfilectx: duplicate another filecontext with some fields overridden.
715 """
715 """
    @propertycache
    def _filelog(self):
        # Filelog (revlog) holding this file's revision history.
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if r'_changeid' in self.__dict__:
            # explicitly set by whoever created this filectx
            return self._changeid
        elif r'_changectx' in self.__dict__:
            # derive from the associated changeset
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            # last resort: the raw linkrev, which may be an alias
            return self._filelog.linkrev(self._filerev)
732
732
    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            # resolve the supplied file identifier through the filelog
            return self._filelog.lookup(self._fileid)
        else:
            # otherwise look it up via the associated changeset's manifest
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        # Revision number of this file revision within its filelog.
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        # Path relative to the repository root; identical to _path here,
        # subclasses may differ.
        return self._path
747
747
    def __nonzero__(self):
        # A filectx is truthy when the file exists in its changeset.
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            # the changeset could not be resolved
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))
768
768
    def __hash__(self):
        # Identity is (path, file node); fall back to object identity while
        # those are not yet resolvable, keeping __hash__ total.
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            # other lacks the compared attributes; treat as unequal
            return False

    def __ne__(self, other):
        return not (self == other)
784
784
    # Thin accessors: file-level state first, then delegation to the
    # associated changectx.
    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        # Manifest flags for this path ('l' symlink, 'x' executable).
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        # Raw linkrev from the filelog; may be an alias — see introrev().
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        # NOTE(review): _copied is set elsewhere (not in this view);
        # presumably holds rename/copy-source info — confirm in subclasses.
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        # length of the full file data; subclasses may override with a
        # cheaper implementation
        return len(self.data())

    def path(self):
        return self._path
833
833
    def isbinary(self):
        # Whether the file content looks like binary data; unreadable
        # data is treated as non-binary rather than raising.
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        # executable bit recorded in the manifest flags
        return 'x' in self.flags()
    def islink(self):
        # symlink flag recorded in the manifest flags
        return 'l' in self.flags()
843
843
    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        # NOTE(review): despite the docstring's wording, the base
        # implementation returns False; only absent-file subclasses
        # override this to True — confirm intent of the docstring.
        return False
850
850
    # Subclasses that implement their own comparison semantics set this
    # True so that cmp() below defers to them.
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            # the other side knows better how to compare; let it drive
            return fctx.cmp(self)

        # Fast path: only read and compare the actual data when the sizes
        # are compatible; otherwise the contents must differ.
        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True
869
869
    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                # get changeset data (we avoid object creation)
                ac = cl.read(a)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a
            # result. But if the manifest uses a buggy file revision (not a
            # child of the one it replaces) we could. Such a buggy situation
            # will likely result in a crash somewhere else at some point.
        return lkr
915
915
    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            # no associated changeset, or the linkrev already matches it:
            # the raw linkrev is trustworthy as-is
            return self.linkrev()
        # otherwise walk ancestors to find the true introducing revision
        return self._adjustlinkrev(self.rev(), inclusive=True)
931
931
    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx
947
947
    def parents(self):
        """Return the parent filectxs, substituting rename source info."""
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        # null parents are dropped up front; see the rename note below
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            # be replaced with the rename information. This parent is -always-
            # the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
967
967
968 def p1(self):
968 def p1(self):
969 return self.parents()[0]
969 return self.parents()[0]
970
970
971 def p2(self):
971 def p2(self):
972 p = self.parents()
972 p = self.parents()
973 if len(p) == 2:
973 if len(p) == 2:
974 return p[1]
974 return p[1]
975 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
975 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
976
976
977 def annotate(self, follow=False, linenumber=False, skiprevs=None,
977 def annotate(self, follow=False, linenumber=False, skiprevs=None,
978 diffopts=None):
978 diffopts=None):
979 '''returns a list of tuples of ((ctx, number), line) for each line
979 '''returns a list of tuples of ((ctx, number), line) for each line
980 in the file, where ctx is the filectx of the node where
980 in the file, where ctx is the filectx of the node where
981 that line was last changed; if linenumber parameter is true, number is
981 that line was last changed; if linenumber parameter is true, number is
982 the line number at the first appearance in the managed file, otherwise,
982 the line number at the first appearance in the managed file, otherwise,
983 number has a fixed value of False.
983 number has a fixed value of False.
984 '''
984 '''
985
985
986 def lines(text):
986 def lines(text):
987 if text.endswith("\n"):
987 if text.endswith("\n"):
988 return text.count("\n")
988 return text.count("\n")
989 return text.count("\n") + int(bool(text))
989 return text.count("\n") + int(bool(text))
990
990
991 if linenumber:
991 if linenumber:
992 def decorate(text, rev):
992 def decorate(text, rev):
993 return ([annotateline(fctx=rev, lineno=i)
993 return ([annotateline(fctx=rev, lineno=i)
994 for i in xrange(1, lines(text) + 1)], text)
994 for i in xrange(1, lines(text) + 1)], text)
995 else:
995 else:
996 def decorate(text, rev):
996 def decorate(text, rev):
997 return ([annotateline(fctx=rev)] * lines(text), text)
997 return ([annotateline(fctx=rev)] * lines(text), text)
998
998
999 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
999 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1000
1000
1001 def parents(f):
1001 def parents(f):
1002 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1002 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1003 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1003 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1004 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1004 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1005 # isn't an ancestor of the srcrev.
1005 # isn't an ancestor of the srcrev.
1006 f._changeid
1006 f._changeid
1007 pl = f.parents()
1007 pl = f.parents()
1008
1008
1009 # Don't return renamed parents if we aren't following.
1009 # Don't return renamed parents if we aren't following.
1010 if not follow:
1010 if not follow:
1011 pl = [p for p in pl if p.path() == f.path()]
1011 pl = [p for p in pl if p.path() == f.path()]
1012
1012
1013 # renamed filectx won't have a filelog yet, so set it
1013 # renamed filectx won't have a filelog yet, so set it
1014 # from the cache to save time
1014 # from the cache to save time
1015 for p in pl:
1015 for p in pl:
1016 if not '_filelog' in p.__dict__:
1016 if not '_filelog' in p.__dict__:
1017 p._filelog = getlog(p.path())
1017 p._filelog = getlog(p.path())
1018
1018
1019 return pl
1019 return pl
1020
1020
1021 # use linkrev to find the first changeset where self appeared
1021 # use linkrev to find the first changeset where self appeared
1022 base = self
1022 base = self
1023 introrev = self.introrev()
1023 introrev = self.introrev()
1024 if self.rev() != introrev:
1024 if self.rev() != introrev:
1025 base = self.filectx(self.filenode(), changeid=introrev)
1025 base = self.filectx(self.filenode(), changeid=introrev)
1026 if getattr(base, '_ancestrycontext', None) is None:
1026 if getattr(base, '_ancestrycontext', None) is None:
1027 cl = self._repo.changelog
1027 cl = self._repo.changelog
1028 if introrev is None:
1028 if introrev is None:
1029 # wctx is not inclusive, but works because _ancestrycontext
1029 # wctx is not inclusive, but works because _ancestrycontext
1030 # is used to test filelog revisions
1030 # is used to test filelog revisions
1031 ac = cl.ancestors([p.rev() for p in base.parents()],
1031 ac = cl.ancestors([p.rev() for p in base.parents()],
1032 inclusive=True)
1032 inclusive=True)
1033 else:
1033 else:
1034 ac = cl.ancestors([introrev], inclusive=True)
1034 ac = cl.ancestors([introrev], inclusive=True)
1035 base._ancestrycontext = ac
1035 base._ancestrycontext = ac
1036
1036
1037 # This algorithm would prefer to be recursive, but Python is a
1037 # This algorithm would prefer to be recursive, but Python is a
1038 # bit recursion-hostile. Instead we do an iterative
1038 # bit recursion-hostile. Instead we do an iterative
1039 # depth-first search.
1039 # depth-first search.
1040
1040
1041 # 1st DFS pre-calculates pcache and needed
1041 # 1st DFS pre-calculates pcache and needed
1042 visit = [base]
1042 visit = [base]
1043 pcache = {}
1043 pcache = {}
1044 needed = {base: 1}
1044 needed = {base: 1}
1045 while visit:
1045 while visit:
1046 f = visit.pop()
1046 f = visit.pop()
1047 if f in pcache:
1047 if f in pcache:
1048 continue
1048 continue
1049 pl = parents(f)
1049 pl = parents(f)
1050 pcache[f] = pl
1050 pcache[f] = pl
1051 for p in pl:
1051 for p in pl:
1052 needed[p] = needed.get(p, 0) + 1
1052 needed[p] = needed.get(p, 0) + 1
1053 if p not in pcache:
1053 if p not in pcache:
1054 visit.append(p)
1054 visit.append(p)
1055
1055
1056 # 2nd DFS does the actual annotate
1056 # 2nd DFS does the actual annotate
1057 visit[:] = [base]
1057 visit[:] = [base]
1058 hist = {}
1058 hist = {}
1059 while visit:
1059 while visit:
1060 f = visit[-1]
1060 f = visit[-1]
1061 if f in hist:
1061 if f in hist:
1062 visit.pop()
1062 visit.pop()
1063 continue
1063 continue
1064
1064
1065 ready = True
1065 ready = True
1066 pl = pcache[f]
1066 pl = pcache[f]
1067 for p in pl:
1067 for p in pl:
1068 if p not in hist:
1068 if p not in hist:
1069 ready = False
1069 ready = False
1070 visit.append(p)
1070 visit.append(p)
1071 if ready:
1071 if ready:
1072 visit.pop()
1072 visit.pop()
1073 curr = decorate(f.data(), f)
1073 curr = decorate(f.data(), f)
1074 skipchild = False
1074 skipchild = False
1075 if skiprevs is not None:
1075 if skiprevs is not None:
1076 skipchild = f._changeid in skiprevs
1076 skipchild = f._changeid in skiprevs
1077 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1077 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1078 diffopts)
1078 diffopts)
1079 for p in pl:
1079 for p in pl:
1080 if needed[p] == 1:
1080 if needed[p] == 1:
1081 del hist[p]
1081 del hist[p]
1082 del needed[p]
1082 del needed[p]
1083 else:
1083 else:
1084 needed[p] -= 1
1084 needed[p] -= 1
1085
1085
1086 hist[f] = curr
1086 hist[f] = curr
1087 del pcache[f]
1087 del pcache[f]
1088
1088
1089 return zip(hist[base][0], hist[base][1].splitlines(True))
1089 return zip(hist[base][0], hist[base][1].splitlines(True))
1090
1090
1091 def ancestors(self, followfirst=False):
1091 def ancestors(self, followfirst=False):
1092 visit = {}
1092 visit = {}
1093 c = self
1093 c = self
1094 if followfirst:
1094 if followfirst:
1095 cut = 1
1095 cut = 1
1096 else:
1096 else:
1097 cut = None
1097 cut = None
1098
1098
1099 while True:
1099 while True:
1100 for parent in c.parents()[:cut]:
1100 for parent in c.parents()[:cut]:
1101 visit[(parent.linkrev(), parent.filenode())] = parent
1101 visit[(parent.linkrev(), parent.filenode())] = parent
1102 if not visit:
1102 if not visit:
1103 break
1103 break
1104 c = visit.pop(max(visit))
1104 c = visit.pop(max(visit))
1105 yield c
1105 yield c
1106
1106
1107 def decodeddata(self):
1107 def decodeddata(self):
1108 """Returns `data()` after running repository decoding filters.
1108 """Returns `data()` after running repository decoding filters.
1109
1109
1110 This is often equivalent to how the data would be expressed on disk.
1110 This is often equivalent to how the data would be expressed on disk.
1111 """
1111 """
1112 return self._repo.wwritedata(self.path(), self.data())
1112 return self._repo.wwritedata(self.path(), self.data())
1113
1113
@attr.s(slots=True, frozen=True)
class annotateline(object):
    """Annotation record for a single line of a file.

    Immutable value object used by annotate to tag each line of text with
    the file context that introduced it.
    """
    # filectx of the revision that last changed this line.
    fctx = attr.ib()
    # Line number at the line's first appearance, or False when line
    # numbers were not requested (see the linenumber flag in annotate).
    lineno = attr.ib(default=False)
    # Whether this annotation was the result of a skip-annotate.
    skip = attr.ib(default=False)
1120
1120
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    Each of `parents` and `child` is an (annotations, text) pair where
    annotations is a list of annotateline objects, one per line of text --
    the shape produced by annotate's decorate().  `child` is modified in
    place and also returned.

    See test-annotate.py for unit tests.
    '''
    # Diff each parent's text against the child's; '=' blocks mark line
    # ranges the child shares verbatim with that parent.
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        #   This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        #   diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    for bk in xrange(b1, b2):
                        # Only reassign lines still blamed on the child;
                        # earlier passes (or the other parent) may already
                        # have claimed this line.
                        if child[0][bk].fctx == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = attr.evolve(parent[0][ak], skip=True)
                else:
                    # Parent hunk is shorter than the child's: defer to the
                    # last-resort pass below, which may repeat the last line.
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk].fctx == childfctx:
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = attr.evolve(parent[0][ak], skip=True)
    return child
1181
1181
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # At least one way of locating the revision must be provided; the
        # remaining attributes are filled in lazily by propertycaches on
        # the base class.
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        """low-level revlog data for this file revision (raw=True)"""
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        """file content for this revision, as stored in the filelog"""
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            # A censored node is readable only when censor.policy allows
            # ignoring it; otherwise abort with a hint.
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        """size of this file revision as reported by the filelog"""
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        # If either parent already has this exact file node, this revision
        # did not introduce the rename, so report no copy.
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        """filectxs for the filelog children of this file revision"""
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1287
1287
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        # Values not supplied here are computed lazily by the propertycache
        # members below (_date, _user, _status).
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        # Uncommitted contexts render as their first parent plus '+'.
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                # Agreeing parents win; otherwise the side that changed
                # relative to the ancestor wins.
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        # devel.default-date allows tests to pin "now" to a fixed value.
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        # An uncommitted context carries the union of its parents' bookmarks.
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        # Fast path: if a manifest has already been materialized, trust it
        # rather than consulting the dirstate/filesystem.
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # Yield the direct parents first, then every changelog ancestor.
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
1488
1488
1489 class workingctx(committablectx):
1489 class workingctx(committablectx):
1490 """A workingctx object makes access to data related to
1490 """A workingctx object makes access to data related to
1491 the current working directory convenient.
1491 the current working directory convenient.
1492 date - any valid date string or (unixtime, offset), or None.
1492 date - any valid date string or (unixtime, offset), or None.
1493 user - username string, or None.
1493 user - username string, or None.
1494 extra - a dictionary of extra values, or None.
1494 extra - a dictionary of extra values, or None.
1495 changes - a list of file lists as returned by localrepo.status()
1495 changes - a list of file lists as returned by localrepo.status()
1496 or None to use the repository status.
1496 or None to use the repository status.
1497 """
1497 """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        # Pure delegation: all state handling lives in committablectx.
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1501
1501
1502 def __iter__(self):
1502 def __iter__(self):
1503 d = self._repo.dirstate
1503 d = self._repo.dirstate
1504 for f in d:
1504 for f in d:
1505 if d[f] != 'r':
1505 if d[f] != 'r':
1506 yield f
1506 yield f
1507
1507
1508 def __contains__(self, key):
1508 def __contains__(self, key):
1509 return self._repo.dirstate[key] not in "?r"
1509 return self._repo.dirstate[key] not in "?r"
1510
1510
    def hex(self):
        # The working directory is identified by the sentinel wdirid node.
        return hex(wdirid)
1513
1513
    @propertycache
    def _parents(self):
        """Changectxs for the working directory parents.

        Drops a null second parent so the list has one entry outside of a
        merge, two during one.
        """
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]
1520
1520
    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)
1525
1525
    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir; the merge/branch/missing flags let
        # callers opt out of counting an in-progress merge, a branch switch,
        # or deleted (missing-on-disk) files as "dirty".
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))
1537
1537
1538 def add(self, list, prefix=""):
1538 def add(self, list, prefix=""):
1539 with self._repo.wlock():
1539 with self._repo.wlock():
1540 ui, ds = self._repo.ui, self._repo.dirstate
1540 ui, ds = self._repo.ui, self._repo.dirstate
1541 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1541 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1542 rejected = []
1542 rejected = []
1543 lstat = self._repo.wvfs.lstat
1543 lstat = self._repo.wvfs.lstat
1544 for f in list:
1544 for f in list:
1545 # ds.pathto() returns an absolute file when this is invoked from
1545 # ds.pathto() returns an absolute file when this is invoked from
1546 # the keyword extension. That gets flagged as non-portable on
1546 # the keyword extension. That gets flagged as non-portable on
1547 # Windows, since it contains the drive letter and colon.
1547 # Windows, since it contains the drive letter and colon.
1548 scmutil.checkportable(ui, os.path.join(prefix, f))
1548 scmutil.checkportable(ui, os.path.join(prefix, f))
1549 try:
1549 try:
1550 st = lstat(f)
1550 st = lstat(f)
1551 except OSError:
1551 except OSError:
1552 ui.warn(_("%s does not exist!\n") % uipath(f))
1552 ui.warn(_("%s does not exist!\n") % uipath(f))
1553 rejected.append(f)
1553 rejected.append(f)
1554 continue
1554 continue
1555 if st.st_size > 10000000:
1555 if st.st_size > 10000000:
1556 ui.warn(_("%s: up to %d MB of RAM may be required "
1556 ui.warn(_("%s: up to %d MB of RAM may be required "
1557 "to manage this file\n"
1557 "to manage this file\n"
1558 "(use 'hg revert %s' to cancel the "
1558 "(use 'hg revert %s' to cancel the "
1559 "pending addition)\n")
1559 "pending addition)\n")
1560 % (f, 3 * st.st_size // 1000000, uipath(f)))
1560 % (f, 3 * st.st_size // 1000000, uipath(f)))
1561 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1561 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1562 ui.warn(_("%s not added: only files and symlinks "
1562 ui.warn(_("%s not added: only files and symlinks "
1563 "supported currently\n") % uipath(f))
1563 "supported currently\n") % uipath(f))
1564 rejected.append(f)
1564 rejected.append(f)
1565 elif ds[f] in 'amn':
1565 elif ds[f] in 'amn':
1566 ui.warn(_("%s already tracked!\n") % uipath(f))
1566 ui.warn(_("%s already tracked!\n") % uipath(f))
1567 elif ds[f] == 'r':
1567 elif ds[f] == 'r':
1568 ds.normallookup(f)
1568 ds.normallookup(f)
1569 else:
1569 else:
1570 ds.add(f)
1570 ds.add(f)
1571 return rejected
1571 return rejected
1572
1572
1573 def forget(self, files, prefix=""):
1573 def forget(self, files, prefix=""):
1574 with self._repo.wlock():
1574 with self._repo.wlock():
1575 ds = self._repo.dirstate
1575 ds = self._repo.dirstate
1576 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1576 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1577 rejected = []
1577 rejected = []
1578 for f in files:
1578 for f in files:
1579 if f not in self._repo.dirstate:
1579 if f not in self._repo.dirstate:
1580 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1580 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1581 rejected.append(f)
1581 rejected.append(f)
1582 elif self._repo.dirstate[f] != 'a':
1582 elif self._repo.dirstate[f] != 'a':
1583 self._repo.dirstate.remove(f)
1583 self._repo.dirstate.remove(f)
1584 else:
1584 else:
1585 self._repo.dirstate.drop(f)
1585 self._repo.dirstate.drop(f)
1586 return rejected
1586 return rejected
1587
1587
1588 def undelete(self, list):
1588 def undelete(self, list):
1589 pctxs = self.parents()
1589 pctxs = self.parents()
1590 with self._repo.wlock():
1590 with self._repo.wlock():
1591 ds = self._repo.dirstate
1591 ds = self._repo.dirstate
1592 for f in list:
1592 for f in list:
1593 if self._repo.dirstate[f] != 'r':
1593 if self._repo.dirstate[f] != 'r':
1594 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1594 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1595 else:
1595 else:
1596 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1596 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1597 t = fctx.data()
1597 t = fctx.data()
1598 self._repo.wwrite(f, t, fctx.flags())
1598 self._repo.wwrite(f, t, fctx.flags())
1599 self._repo.dirstate.normal(f)
1599 self._repo.dirstate.normal(f)
1600
1600
1601 def copy(self, source, dest):
1601 def copy(self, source, dest):
1602 try:
1602 try:
1603 st = self._repo.wvfs.lstat(dest)
1603 st = self._repo.wvfs.lstat(dest)
1604 except OSError as err:
1604 except OSError as err:
1605 if err.errno != errno.ENOENT:
1605 if err.errno != errno.ENOENT:
1606 raise
1606 raise
1607 self._repo.ui.warn(_("%s does not exist!\n")
1607 self._repo.ui.warn(_("%s does not exist!\n")
1608 % self._repo.dirstate.pathto(dest))
1608 % self._repo.dirstate.pathto(dest))
1609 return
1609 return
1610 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1610 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1611 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1611 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1612 "symbolic link\n")
1612 "symbolic link\n")
1613 % self._repo.dirstate.pathto(dest))
1613 % self._repo.dirstate.pathto(dest))
1614 else:
1614 else:
1615 with self._repo.wlock():
1615 with self._repo.wlock():
1616 if self._repo.dirstate[dest] in '?':
1616 if self._repo.dirstate[dest] in '?':
1617 self._repo.dirstate.add(dest)
1617 self._repo.dirstate.add(dest)
1618 elif self._repo.dirstate[dest] in 'r':
1618 elif self._repo.dirstate[dest] in 'r':
1619 self._repo.dirstate.normallookup(dest)
1619 self._repo.dirstate.normallookup(dest)
1620 self._repo.dirstate.copy(source, dest)
1620 self._repo.dirstate.copy(source, dest)
1621
1621
1622 def match(self, pats=None, include=None, exclude=None, default='glob',
1622 def match(self, pats=None, include=None, exclude=None, default='glob',
1623 listsubrepos=False, badfn=None):
1623 listsubrepos=False, badfn=None):
1624 r = self._repo
1624 r = self._repo
1625
1625
1626 # Only a case insensitive filesystem needs magic to translate user input
1626 # Only a case insensitive filesystem needs magic to translate user input
1627 # to actual case in the filesystem.
1627 # to actual case in the filesystem.
1628 icasefs = not util.fscasesensitive(r.root)
1628 icasefs = not util.fscasesensitive(r.root)
1629 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1629 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1630 default, auditor=r.auditor, ctx=self,
1630 default, auditor=r.auditor, ctx=self,
1631 listsubrepos=listsubrepos, badfn=badfn,
1631 listsubrepos=listsubrepos, badfn=badfn,
1632 icasefs=icasefs)
1632 icasefs=icasefs)
1633
1633
1634 def flushall(self):
1634 def flushall(self):
1635 pass # For overlayworkingfilectx compatibility.
1635 pass # For overlayworkingfilectx compatibility.
1636
1636
1637 def _filtersuspectsymlink(self, files):
1637 def _filtersuspectsymlink(self, files):
1638 if not files or self._repo.dirstate._checklink:
1638 if not files or self._repo.dirstate._checklink:
1639 return files
1639 return files
1640
1640
1641 # Symlink placeholders may get non-symlink-like contents
1641 # Symlink placeholders may get non-symlink-like contents
1642 # via user error or dereferencing by NFS or Samba servers,
1642 # via user error or dereferencing by NFS or Samba servers,
1643 # so we filter out any placeholders that don't look like a
1643 # so we filter out any placeholders that don't look like a
1644 # symlink
1644 # symlink
1645 sane = []
1645 sane = []
1646 for f in files:
1646 for f in files:
1647 if self.flags(f) == 'l':
1647 if self.flags(f) == 'l':
1648 d = self[f].data()
1648 d = self[f].data()
1649 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1649 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1650 self._repo.ui.debug('ignoring suspect symlink placeholder'
1650 self._repo.ui.debug('ignoring suspect symlink placeholder'
1651 ' "%s"\n' % f)
1651 ' "%s"\n' % f)
1652 continue
1652 continue
1653 sane.append(f)
1653 sane.append(f)
1654 return sane
1654 return sane
1655
1655
1656 def _checklookup(self, files):
1656 def _checklookup(self, files):
1657 # check for any possibly clean files
1657 # check for any possibly clean files
1658 if not files:
1658 if not files:
1659 return [], [], []
1659 return [], [], []
1660
1660
1661 modified = []
1661 modified = []
1662 deleted = []
1662 deleted = []
1663 fixup = []
1663 fixup = []
1664 pctx = self._parents[0]
1664 pctx = self._parents[0]
1665 # do a full compare of any files that might have changed
1665 # do a full compare of any files that might have changed
1666 for f in sorted(files):
1666 for f in sorted(files):
1667 try:
1667 try:
1668 # This will return True for a file that got replaced by a
1668 # This will return True for a file that got replaced by a
1669 # directory in the interim, but fixing that is pretty hard.
1669 # directory in the interim, but fixing that is pretty hard.
1670 if (f not in pctx or self.flags(f) != pctx.flags(f)
1670 if (f not in pctx or self.flags(f) != pctx.flags(f)
1671 or pctx[f].cmp(self[f])):
1671 or pctx[f].cmp(self[f])):
1672 modified.append(f)
1672 modified.append(f)
1673 else:
1673 else:
1674 fixup.append(f)
1674 fixup.append(f)
1675 except (IOError, OSError):
1675 except (IOError, OSError):
1676 # A file become inaccessible in between? Mark it as deleted,
1676 # A file become inaccessible in between? Mark it as deleted,
1677 # matching dirstate behavior (issue5584).
1677 # matching dirstate behavior (issue5584).
1678 # The dirstate has more complex behavior around whether a
1678 # The dirstate has more complex behavior around whether a
1679 # missing file matches a directory, etc, but we don't need to
1679 # missing file matches a directory, etc, but we don't need to
1680 # bother with that: if f has made it to this point, we're sure
1680 # bother with that: if f has made it to this point, we're sure
1681 # it's in the dirstate.
1681 # it's in the dirstate.
1682 deleted.append(f)
1682 deleted.append(f)
1683
1683
1684 return modified, deleted, fixup
1684 return modified, deleted, fixup
1685
1685
1686 def _poststatusfixup(self, status, fixup):
1686 def _poststatusfixup(self, status, fixup):
1687 """update dirstate for files that are actually clean"""
1687 """update dirstate for files that are actually clean"""
1688 poststatus = self._repo.postdsstatus()
1688 poststatus = self._repo.postdsstatus()
1689 if fixup or poststatus:
1689 if fixup or poststatus:
1690 try:
1690 try:
1691 oldid = self._repo.dirstate.identity()
1691 oldid = self._repo.dirstate.identity()
1692
1692
1693 # updating the dirstate is optional
1693 # updating the dirstate is optional
1694 # so we don't wait on the lock
1694 # so we don't wait on the lock
1695 # wlock can invalidate the dirstate, so cache normal _after_
1695 # wlock can invalidate the dirstate, so cache normal _after_
1696 # taking the lock
1696 # taking the lock
1697 with self._repo.wlock(False):
1697 with self._repo.wlock(False):
1698 if self._repo.dirstate.identity() == oldid:
1698 if self._repo.dirstate.identity() == oldid:
1699 if fixup:
1699 if fixup:
1700 normal = self._repo.dirstate.normal
1700 normal = self._repo.dirstate.normal
1701 for f in fixup:
1701 for f in fixup:
1702 normal(f)
1702 normal(f)
1703 # write changes out explicitly, because nesting
1703 # write changes out explicitly, because nesting
1704 # wlock at runtime may prevent 'wlock.release()'
1704 # wlock at runtime may prevent 'wlock.release()'
1705 # after this block from doing so for subsequent
1705 # after this block from doing so for subsequent
1706 # changing files
1706 # changing files
1707 tr = self._repo.currenttransaction()
1707 tr = self._repo.currenttransaction()
1708 self._repo.dirstate.write(tr)
1708 self._repo.dirstate.write(tr)
1709
1709
1710 if poststatus:
1710 if poststatus:
1711 for ps in poststatus:
1711 for ps in poststatus:
1712 ps(self, status)
1712 ps(self, status)
1713 else:
1713 else:
1714 # in this case, writing changes out breaks
1714 # in this case, writing changes out breaks
1715 # consistency, because .hg/dirstate was
1715 # consistency, because .hg/dirstate was
1716 # already changed simultaneously after last
1716 # already changed simultaneously after last
1717 # caching (see also issue5584 for detail)
1717 # caching (see also issue5584 for detail)
1718 self._repo.ui.debug('skip updating dirstate: '
1718 self._repo.ui.debug('skip updating dirstate: '
1719 'identity mismatch\n')
1719 'identity mismatch\n')
1720 except error.LockError:
1720 except error.LockError:
1721 pass
1721 pass
1722 finally:
1722 finally:
1723 # Even if the wlock couldn't be grabbed, clear out the list.
1723 # Even if the wlock couldn't be grabbed, clear out the list.
1724 self._repo.clearpostdsstatus()
1724 self._repo.clearpostdsstatus()
1725
1725
1726 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1726 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1727 '''Gets the status from the dirstate -- internal use only.'''
1727 '''Gets the status from the dirstate -- internal use only.'''
1728 subrepos = []
1728 subrepos = []
1729 if '.hgsub' in self:
1729 if '.hgsub' in self:
1730 subrepos = sorted(self.substate)
1730 subrepos = sorted(self.substate)
1731 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1731 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1732 clean=clean, unknown=unknown)
1732 clean=clean, unknown=unknown)
1733
1733
1734 # check for any possibly clean files
1734 # check for any possibly clean files
1735 fixup = []
1735 fixup = []
1736 if cmp:
1736 if cmp:
1737 modified2, deleted2, fixup = self._checklookup(cmp)
1737 modified2, deleted2, fixup = self._checklookup(cmp)
1738 s.modified.extend(modified2)
1738 s.modified.extend(modified2)
1739 s.deleted.extend(deleted2)
1739 s.deleted.extend(deleted2)
1740
1740
1741 if fixup and clean:
1741 if fixup and clean:
1742 s.clean.extend(fixup)
1742 s.clean.extend(fixup)
1743
1743
1744 self._poststatusfixup(s, fixup)
1744 self._poststatusfixup(s, fixup)
1745
1745
1746 if match.always():
1746 if match.always():
1747 # cache for performance
1747 # cache for performance
1748 if s.unknown or s.ignored or s.clean:
1748 if s.unknown or s.ignored or s.clean:
1749 # "_status" is cached with list*=False in the normal route
1749 # "_status" is cached with list*=False in the normal route
1750 self._status = scmutil.status(s.modified, s.added, s.removed,
1750 self._status = scmutil.status(s.modified, s.added, s.removed,
1751 s.deleted, [], [], [])
1751 s.deleted, [], [], [])
1752 else:
1752 else:
1753 self._status = s
1753 self._status = s
1754
1754
1755 return s
1755 return s
1756
1756
1757 @propertycache
1757 @propertycache
1758 def _manifest(self):
1758 def _manifest(self):
1759 """generate a manifest corresponding to the values in self._status
1759 """generate a manifest corresponding to the values in self._status
1760
1760
1761 This reuse the file nodeid from parent, but we use special node
1761 This reuse the file nodeid from parent, but we use special node
1762 identifiers for added and modified files. This is used by manifests
1762 identifiers for added and modified files. This is used by manifests
1763 merge to see that files are different and by update logic to avoid
1763 merge to see that files are different and by update logic to avoid
1764 deleting newly added files.
1764 deleting newly added files.
1765 """
1765 """
1766 return self._buildstatusmanifest(self._status)
1766 return self._buildstatusmanifest(self._status)
1767
1767
1768 def _buildstatusmanifest(self, status):
1768 def _buildstatusmanifest(self, status):
1769 """Builds a manifest that includes the given status results."""
1769 """Builds a manifest that includes the given status results."""
1770 parents = self.parents()
1770 parents = self.parents()
1771
1771
1772 man = parents[0].manifest().copy()
1772 man = parents[0].manifest().copy()
1773
1773
1774 ff = self._flagfunc
1774 ff = self._flagfunc
1775 for i, l in ((addednodeid, status.added),
1775 for i, l in ((addednodeid, status.added),
1776 (modifiednodeid, status.modified)):
1776 (modifiednodeid, status.modified)):
1777 for f in l:
1777 for f in l:
1778 man[f] = i
1778 man[f] = i
1779 try:
1779 try:
1780 man.setflag(f, ff(f))
1780 man.setflag(f, ff(f))
1781 except OSError:
1781 except OSError:
1782 pass
1782 pass
1783
1783
1784 for f in status.deleted + status.removed:
1784 for f in status.deleted + status.removed:
1785 if f in man:
1785 if f in man:
1786 del man[f]
1786 del man[f]
1787
1787
1788 return man
1788 return man
1789
1789
1790 def _buildstatus(self, other, s, match, listignored, listclean,
1790 def _buildstatus(self, other, s, match, listignored, listclean,
1791 listunknown):
1791 listunknown):
1792 """build a status with respect to another context
1792 """build a status with respect to another context
1793
1793
1794 This includes logic for maintaining the fast path of status when
1794 This includes logic for maintaining the fast path of status when
1795 comparing the working directory against its parent, which is to skip
1795 comparing the working directory against its parent, which is to skip
1796 building a new manifest if self (working directory) is not comparing
1796 building a new manifest if self (working directory) is not comparing
1797 against its parent (repo['.']).
1797 against its parent (repo['.']).
1798 """
1798 """
1799 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1799 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1800 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1800 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1801 # might have accidentally ended up with the entire contents of the file
1801 # might have accidentally ended up with the entire contents of the file
1802 # they are supposed to be linking to.
1802 # they are supposed to be linking to.
1803 s.modified[:] = self._filtersuspectsymlink(s.modified)
1803 s.modified[:] = self._filtersuspectsymlink(s.modified)
1804 if other != self._repo['.']:
1804 if other != self._repo['.']:
1805 s = super(workingctx, self)._buildstatus(other, s, match,
1805 s = super(workingctx, self)._buildstatus(other, s, match,
1806 listignored, listclean,
1806 listignored, listclean,
1807 listunknown)
1807 listunknown)
1808 return s
1808 return s
1809
1809
1810 def _matchstatus(self, other, match):
1810 def _matchstatus(self, other, match):
1811 """override the match method with a filter for directory patterns
1811 """override the match method with a filter for directory patterns
1812
1812
1813 We use inheritance to customize the match.bad method only in cases of
1813 We use inheritance to customize the match.bad method only in cases of
1814 workingctx since it belongs only to the working directory when
1814 workingctx since it belongs only to the working directory when
1815 comparing against the parent changeset.
1815 comparing against the parent changeset.
1816
1816
1817 If we aren't comparing against the working directory's parent, then we
1817 If we aren't comparing against the working directory's parent, then we
1818 just use the default match object sent to us.
1818 just use the default match object sent to us.
1819 """
1819 """
1820 if other != self._repo['.']:
1820 if other != self._repo['.']:
1821 def bad(f, msg):
1821 def bad(f, msg):
1822 # 'f' may be a directory pattern from 'match.files()',
1822 # 'f' may be a directory pattern from 'match.files()',
1823 # so 'f not in ctx1' is not enough
1823 # so 'f not in ctx1' is not enough
1824 if f not in other and not other.hasdir(f):
1824 if f not in other and not other.hasdir(f):
1825 self._repo.ui.warn('%s: %s\n' %
1825 self._repo.ui.warn('%s: %s\n' %
1826 (self._repo.dirstate.pathto(f), msg))
1826 (self._repo.dirstate.pathto(f), msg))
1827 match.bad = bad
1827 match.bad = bad
1828 return match
1828 return match
1829
1829
1830 def markcommitted(self, node):
1830 def markcommitted(self, node):
1831 super(workingctx, self).markcommitted(node)
1831 super(workingctx, self).markcommitted(node)
1832
1832
1833 sparse.aftercommit(self._repo, node)
1833 sparse.aftercommit(self._repo, node)
1834
1834
1835 class committablefilectx(basefilectx):
1835 class committablefilectx(basefilectx):
1836 """A committablefilectx provides common functionality for a file context
1836 """A committablefilectx provides common functionality for a file context
1837 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1837 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1838 def __init__(self, repo, path, filelog=None, ctx=None):
1838 def __init__(self, repo, path, filelog=None, ctx=None):
1839 self._repo = repo
1839 self._repo = repo
1840 self._path = path
1840 self._path = path
1841 self._changeid = None
1841 self._changeid = None
1842 self._filerev = self._filenode = None
1842 self._filerev = self._filenode = None
1843
1843
1844 if filelog is not None:
1844 if filelog is not None:
1845 self._filelog = filelog
1845 self._filelog = filelog
1846 if ctx:
1846 if ctx:
1847 self._changectx = ctx
1847 self._changectx = ctx
1848
1848
1849 def __nonzero__(self):
1849 def __nonzero__(self):
1850 return True
1850 return True
1851
1851
1852 __bool__ = __nonzero__
1852 __bool__ = __nonzero__
1853
1853
1854 def linkrev(self):
1854 def linkrev(self):
1855 # linked to self._changectx no matter if file is modified or not
1855 # linked to self._changectx no matter if file is modified or not
1856 return self.rev()
1856 return self.rev()
1857
1857
1858 def parents(self):
1858 def parents(self):
1859 '''return parent filectxs, following copies if necessary'''
1859 '''return parent filectxs, following copies if necessary'''
1860 def filenode(ctx, path):
1860 def filenode(ctx, path):
1861 return ctx._manifest.get(path, nullid)
1861 return ctx._manifest.get(path, nullid)
1862
1862
1863 path = self._path
1863 path = self._path
1864 fl = self._filelog
1864 fl = self._filelog
1865 pcl = self._changectx._parents
1865 pcl = self._changectx._parents
1866 renamed = self.renamed()
1866 renamed = self.renamed()
1867
1867
1868 if renamed:
1868 if renamed:
1869 pl = [renamed + (None,)]
1869 pl = [renamed + (None,)]
1870 else:
1870 else:
1871 pl = [(path, filenode(pcl[0], path), fl)]
1871 pl = [(path, filenode(pcl[0], path), fl)]
1872
1872
1873 for pc in pcl[1:]:
1873 for pc in pcl[1:]:
1874 pl.append((path, filenode(pc, path), fl))
1874 pl.append((path, filenode(pc, path), fl))
1875
1875
1876 return [self._parentfilectx(p, fileid=n, filelog=l)
1876 return [self._parentfilectx(p, fileid=n, filelog=l)
1877 for p, n, l in pl if n != nullid]
1877 for p, n, l in pl if n != nullid]
1878
1878
1879 def children(self):
1879 def children(self):
1880 return []
1880 return []
1881
1881
1882 class workingfilectx(committablefilectx):
1882 class workingfilectx(committablefilectx):
1883 """A workingfilectx object makes access to data related to a particular
1883 """A workingfilectx object makes access to data related to a particular
1884 file in the working directory convenient."""
1884 file in the working directory convenient."""
1885 def __init__(self, repo, path, filelog=None, workingctx=None):
1885 def __init__(self, repo, path, filelog=None, workingctx=None):
1886 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1886 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1887
1887
1888 @propertycache
1888 @propertycache
1889 def _changectx(self):
1889 def _changectx(self):
1890 return workingctx(self._repo)
1890 return workingctx(self._repo)
1891
1891
1892 def data(self):
1892 def data(self):
1893 return self._repo.wread(self._path)
1893 return self._repo.wread(self._path)
1894 def renamed(self):
1894 def renamed(self):
1895 rp = self._repo.dirstate.copied(self._path)
1895 rp = self._repo.dirstate.copied(self._path)
1896 if not rp:
1896 if not rp:
1897 return None
1897 return None
1898 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1898 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1899
1899
1900 def size(self):
1900 def size(self):
1901 return self._repo.wvfs.lstat(self._path).st_size
1901 return self._repo.wvfs.lstat(self._path).st_size
1902 def date(self):
1902 def date(self):
1903 t, tz = self._changectx.date()
1903 t, tz = self._changectx.date()
1904 try:
1904 try:
1905 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1905 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1906 except OSError as err:
1906 except OSError as err:
1907 if err.errno != errno.ENOENT:
1907 if err.errno != errno.ENOENT:
1908 raise
1908 raise
1909 return (t, tz)
1909 return (t, tz)
1910
1910
1911 def exists(self):
1911 def exists(self):
1912 return self._repo.wvfs.exists(self._path)
1912 return self._repo.wvfs.exists(self._path)
1913
1913
1914 def lexists(self):
1914 def lexists(self):
1915 return self._repo.wvfs.lexists(self._path)
1915 return self._repo.wvfs.lexists(self._path)
1916
1916
1917 def audit(self):
1917 def audit(self):
1918 return self._repo.wvfs.audit(self._path)
1918 return self._repo.wvfs.audit(self._path)
1919
1919
1920 def cmp(self, fctx):
1920 def cmp(self, fctx):
1921 """compare with other file context
1921 """compare with other file context
1922
1922
1923 returns True if different than fctx.
1923 returns True if different than fctx.
1924 """
1924 """
1925 # fctx should be a filectx (not a workingfilectx)
1925 # fctx should be a filectx (not a workingfilectx)
1926 # invert comparison to reuse the same code path
1926 # invert comparison to reuse the same code path
1927 return fctx.cmp(self)
1927 return fctx.cmp(self)
1928
1928
1929 def remove(self, ignoremissing=False):
1929 def remove(self, ignoremissing=False):
1930 """wraps unlink for a repo's working directory"""
1930 """wraps unlink for a repo's working directory"""
1931 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1931 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1932
1932
1933 def write(self, data, flags, backgroundclose=False):
1933 def write(self, data, flags, backgroundclose=False):
1934 """wraps repo.wwrite"""
1934 """wraps repo.wwrite"""
1935 self._repo.wwrite(self._path, data, flags,
1935 self._repo.wwrite(self._path, data, flags,
1936 backgroundclose=backgroundclose)
1936 backgroundclose=backgroundclose)
1937
1937
1938 def markcopied(self, src):
1938 def markcopied(self, src):
1939 """marks this file a copy of `src`"""
1939 """marks this file a copy of `src`"""
1940 if self._repo.dirstate[self._path] in "nma":
1940 if self._repo.dirstate[self._path] in "nma":
1941 self._repo.dirstate.copy(src, self._path)
1941 self._repo.dirstate.copy(src, self._path)
1942
1942
1943 def clearunknown(self):
1943 def clearunknown(self):
1944 """Removes conflicting items in the working directory so that
1944 """Removes conflicting items in the working directory so that
1945 ``write()`` can be called successfully.
1945 ``write()`` can be called successfully.
1946 """
1946 """
1947 wvfs = self._repo.wvfs
1947 wvfs = self._repo.wvfs
1948 f = self._path
1948 f = self._path
1949 wvfs.audit(f)
1949 wvfs.audit(f)
1950 if wvfs.isdir(f) and not wvfs.islink(f):
1950 if wvfs.isdir(f) and not wvfs.islink(f):
1951 wvfs.rmtree(f, forcibly=True)
1951 wvfs.rmtree(f, forcibly=True)
1952 for p in reversed(list(util.finddirs(f))):
1952 for p in reversed(list(util.finddirs(f))):
1953 if wvfs.isfileorlink(p):
1953 if wvfs.isfileorlink(p):
1954 wvfs.unlink(p)
1954 wvfs.unlink(p)
1955 break
1955 break
1956
1956
1957 def setflags(self, l, x):
1957 def setflags(self, l, x):
1958 self._repo.wvfs.setflags(self._path, l, x)
1958 self._repo.wvfs.setflags(self._path, l, x)
1959
1959
1960 class overlayworkingctx(workingctx):
1960 class overlayworkingctx(workingctx):
1961 """Wraps another mutable context with a write-back cache that can be flushed
1961 """Wraps another mutable context with a write-back cache that can be flushed
1962 at a later time.
1962 at a later time.
1963
1963
1964 self._cache[path] maps to a dict with keys: {
1964 self._cache[path] maps to a dict with keys: {
1965 'exists': bool?
1965 'exists': bool?
1966 'date': date?
1966 'date': date?
1967 'data': str?
1967 'data': str?
1968 'flags': str?
1968 'flags': str?
1969 }
1969 }
1970 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1970 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1971 is `False`, the file was deleted.
1971 is `False`, the file was deleted.
1972 """
1972 """
1973
1973
1974 def __init__(self, repo, wrappedctx):
1974 def __init__(self, repo, wrappedctx):
1975 super(overlayworkingctx, self).__init__(repo)
1975 super(overlayworkingctx, self).__init__(repo)
1976 self._repo = repo
1976 self._repo = repo
1977 self._wrappedctx = wrappedctx
1977 self._wrappedctx = wrappedctx
1978 self._clean()
1978 self._clean()
1979
1979
1980 def data(self, path):
1980 def data(self, path):
1981 if self.isdirty(path):
1981 if self.isdirty(path):
1982 if self._cache[path]['exists']:
1982 if self._cache[path]['exists']:
1983 if self._cache[path]['data']:
1983 if self._cache[path]['data']:
1984 return self._cache[path]['data']
1984 return self._cache[path]['data']
1985 else:
1985 else:
1986 # Must fallback here, too, because we only set flags.
1986 # Must fallback here, too, because we only set flags.
1987 return self._wrappedctx[path].data()
1987 return self._wrappedctx[path].data()
1988 else:
1988 else:
1989 raise error.ProgrammingError("No such file or directory: %s" %
1989 raise error.ProgrammingError("No such file or directory: %s" %
1990 self._path)
1990 self._path)
1991 else:
1991 else:
1992 return self._wrappedctx[path].data()
1992 return self._wrappedctx[path].data()
1993
1993
1994 def isinmemory(self):
1994 def isinmemory(self):
1995 return True
1995 return True
1996
1996
1997 def filedate(self, path):
1997 def filedate(self, path):
1998 if self.isdirty(path):
1998 if self.isdirty(path):
1999 return self._cache[path]['date']
1999 return self._cache[path]['date']
2000 else:
2000 else:
2001 return self._wrappedctx[path].date()
2001 return self._wrappedctx[path].date()
2002
2002
2003 def flags(self, path):
2003 def flags(self, path):
2004 if self.isdirty(path):
2004 if self.isdirty(path):
2005 if self._cache[path]['exists']:
2005 if self._cache[path]['exists']:
2006 return self._cache[path]['flags']
2006 return self._cache[path]['flags']
2007 else:
2007 else:
2008 raise error.ProgrammingError("No such file or directory: %s" %
2008 raise error.ProgrammingError("No such file or directory: %s" %
2009 self._path)
2009 self._path)
2010 else:
2010 else:
2011 return self._wrappedctx[path].flags()
2011 return self._wrappedctx[path].flags()
2012
2012
2013 def write(self, path, data, flags=''):
2013 def write(self, path, data, flags=''):
2014 if data is None:
2014 if data is None:
2015 raise error.ProgrammingError("data must be non-None")
2015 raise error.ProgrammingError("data must be non-None")
2016 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2016 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2017 flags=flags)
2017 flags=flags)
2018
2018
2019 def setflags(self, path, l, x):
2019 def setflags(self, path, l, x):
2020 self._markdirty(path, exists=True, date=util.makedate(),
2020 self._markdirty(path, exists=True, date=util.makedate(),
2021 flags=(l and 'l' or '') + (x and 'x' or ''))
2021 flags=(l and 'l' or '') + (x and 'x' or ''))
2022
2022
2023 def remove(self, path):
2023 def remove(self, path):
2024 self._markdirty(path, exists=False)
2024 self._markdirty(path, exists=False)
2025
2025
    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (self._cache[path]['exists'] and
                'l' in self._cache[path]['flags']):
                # For an in-memory symlink, 'data' holds the link target.
                # NOTE(review): a cyclic chain of cached symlinks would
                # recurse here without bound -- confirm callers cannot
                # create such a cycle. Also, the target is resolved
                # relative to the repo root, not the link's directory --
                # presumably intentional; verify.
                return self.exists(self._cache[path]['data'].strip())
            else:
                return self._cache[path]['exists']
        # Clean paths defer to the wrapped (on-disk) context.
        return self._wrappedctx[path].exists()
2039
2039
2040 def lexists(self, path):
2040 def lexists(self, path):
2041 """lexists returns True if the path exists"""
2041 """lexists returns True if the path exists"""
2042 if self.isdirty(path):
2042 if self.isdirty(path):
2043 return self._cache[path]['exists']
2043 return self._cache[path]['exists']
2044 return self._wrappedctx[path].lexists()
2044 return self._wrappedctx[path].lexists()
2045
2045
2046 def size(self, path):
2046 def size(self, path):
2047 if self.isdirty(path):
2047 if self.isdirty(path):
2048 if self._cache[path]['exists']:
2048 if self._cache[path]['exists']:
2049 return len(self._cache[path]['data'])
2049 return len(self._cache[path]['data'])
2050 else:
2050 else:
2051 raise error.ProgrammingError("No such file or directory: %s" %
2051 raise error.ProgrammingError("No such file or directory: %s" %
2052 self._path)
2052 self._path)
2053 return self._wrappedctx[path].size()
2053 return self._wrappedctx[path].size()
2054
2054
    def flushall(self):
        # Replay every cached change onto the wrapped context, in the exact
        # order the writes originally happened, then reset the cache.
        for path in self._writeorder:
            entry = self._cache[path]
            if entry['exists']:
                self._wrappedctx[path].clearunknown()
                if entry['data'] is not None:
                    # Content write; flags must have been recorded alongside.
                    if entry['flags'] is None:
                        raise error.ProgrammingError('data set but not flags')
                    self._wrappedctx[path].write(
                        entry['data'],
                        entry['flags'])
                else:
                    # Flags-only change (e.g. chmod or symlink toggle).
                    self._wrappedctx[path].setflags(
                        'l' in entry['flags'],
                        'x' in entry['flags'])
            else:
                # NOTE(review): workingfilectx.remove() takes
                # ``ignoremissing`` as its first parameter; passing ``path``
                # here makes it truthy (best-effort removal). Presumably
                # intentional, but confirm it should not be ``remove()``.
                self._wrappedctx[path].remove(path)
        self._clean()
2073
2073
2074 def isdirty(self, path):
2074 def isdirty(self, path):
2075 return path in self._cache
2075 return path in self._cache
2076
2076
2077 def _clean(self):
2077 def _clean(self):
2078 self._cache = {}
2078 self._cache = {}
2079 self._writeorder = []
2079 self._writeorder = []
2080
2080
2081 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2081 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2082 if path not in self._cache:
2082 if path not in self._cache:
2083 self._writeorder.append(path)
2083 self._writeorder.append(path)
2084
2084
2085 self._cache[path] = {
2085 self._cache[path] = {
2086 'exists': exists,
2086 'exists': exists,
2087 'data': data,
2087 'data': data,
2088 'date': date,
2088 'date': date,
2089 'flags': flags,
2089 'flags': flags,
2090 }
2090 }
2091
2091
2092 def filectx(self, path, filelog=None):
2092 def filectx(self, path, filelog=None):
2093 return overlayworkingfilectx(self._repo, path, parent=self,
2093 return overlayworkingfilectx(self._repo, path, parent=self,
2094 filelog=filelog)
2094 filelog=filelog)
2095
2095
class overlayworkingfilectx(workingfilectx):
    """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``."""

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        # ``parent`` is the overlay (ctx-level) object; nearly every
        # operation below delegates to it, keyed by ``self._path``.
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        # Compare by content only; flags are deliberately not consulted here.
        return self.data() != fctx.data()

    def ctx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        # NOTE(review): this delegates to the parent's ``exists()``, which
        # follows symlinks -- presumably it should use the parent's
        # lexists-like behavior instead; confirm.
        return self._parent.exists(self._path)

    def renamed(self):
        # Copies are currently tracked in the dirstate as before. Straight copy
        # from workingfilectx.
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        return self._parent.size(self._path)

    def audit(self):
        # No filesystem path auditing is needed for purely in-memory files.
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False):
        # ``backgroundclose`` is accepted for interface compatibility only;
        # the in-memory parent has no file handles to close.
        return self._parent.write(self._path, data, flags)

    def remove(self, ignoremissing=False):
        # ``ignoremissing`` is accepted for interface compatibility only.
        return self._parent.remove(self._path)
2150
2150
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # Deliberately call ``super(workingctx, ...)`` (not
        # ``super(workingcommitctx, ...)``): this bypasses
        # workingctx.__init__ so the precomputed ``changes`` status is used
        # directly instead of being recomputed from the dirstate.
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            # Everything in the manifest that is not part of this commit
            # is reported as clean.
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed
2186
2186
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        # EAFP: a cache hit is the common case once a path has been seen.
        try:
            return cache[path]
        except KeyError:
            result = cache[path] = func(repo, memctx, path)
            return result

    return getfilectx
2202
2202
def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        # renamed() yields a tuple or None; only the source path is kept.
        # (Apparently only one copy parent is ever tracked -- why not store
        # just that instead of a tuple?)
        renameinfo = fctx.renamed()
        copied = renameinfo[0] if renameinfo else renameinfo
        return memfilectx(repo, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copied, memctx=memctx)

    return getfilectx
2221
2221
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        if data is None:
            # The patch deletes this file.
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data, islink=islink,
                          isexec=isexec, copied=copied,
                          memctx=memctx)

    return getfilectx
2237
2237
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn return None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # Missing parents (None) are normalized to the null revision.
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # Added files have no filelog parents yet.
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                # Not in either parent manifest: a newly added file.
                added.append(f)
            elif self[f]:
                # filectxfn returned a truthy filectx: the file is present.
                modified.append(f)
            else:
                # filectxfn returned None/falsy: the file was removed.
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2360
2360
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            # nullid placeholder; the real copy revision is recomputed at
            # commit time.
            self._copied = (copied, nullid)

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        # NOTE(review): committing contexts do not obviously support
        # ``del ctx[path]`` -- confirm this code path is ever exercised.
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        # NOTE(review): ``flags`` is accepted but ignored here; presumably
        # flag changes are applied elsewhere -- confirm.
        self._data = data
2393
2393
class overlayfilectx(committablefilectx):
    """Like memfilectx but take an original filectx and optional parameters to
    override parts of it. This is useful when fctx.data() is expensive (i.e.
    flag processor is expensive) and raw data, flags, and filenode could be
    reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function to override data (file content). It is a
        function to be lazy. path, flags, copied, ctx: None or overridden value

        copied could be (path, rev), or False. copied could also be just path,
        and will be converted to (path, nullid). This simplifies some callers.
        """

        # Each override parameter defaults to the original fctx's value; the
        # ``*match`` lambdas lazily record whether the override actually
        # differs, which feeds the "reusable" decision below.
        if path is None:
            path = originalfctx.path()
        if ctx is None:
            ctx = originalfctx.changectx()
            ctxmatch = lambda: True
        else:
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # When data, copied (could affect data), ctx (could affect filelog
        # parents) are not overridden, rawdata, rawflags, and filenode may be
        # reused (repo._filecommit should double check filelog parents).
        #
        # path, flags are not hashed in filelog (but in manifestlog) so they do
        # not affect reusable here.
        #
        # If ctx or copied is overridden to a same value with originalfctx,
        # still consider it's reusable. originalfctx.renamed() may be a bit
        # expensive so it's not called unless necessary. Assuming datafunc is
        # always expensive, do not call it for this "reusable" test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # copy extra fields from originalfctx
            attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
            for attr_ in attrs:
                if util.safehasattr(originalfctx, attr_):
                    setattr(self, attr_, getattr(originalfctx, attr_))

    def data(self):
        # Lazily evaluated; ``_datafunc`` defaults to ``originalfctx.data``.
        return self._datafunc()
2464
2464
class metadataonlyctx(committablectx):
    """Like memctx but it's reusing the manifest of a different commit.

    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'ctx' is the original revision whose manifest we're
    reusing, 'parents' is a sequence of two parent revision identifiers (pass
    None for every missing parent), 'text' is the commit message.

    user receives the committer name and defaults to current repository
    username, date is the commit date in any format supported by
    util.parsedate() and defaults to current date, extra is a dictionary of
    metadata or is left empty.
    """
    def __new__(cls, repo, originalctx, *args, **kwargs):
        # Extra positional/keyword arguments are consumed by __init__;
        # __new__ only forwards repo to the superclass.
        return super(metadataonlyctx, cls).__new__(cls, repo)

    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
                 date=None, extra=None, editor=False):
        if text is None:
            # Default to reusing the original commit message as well.
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        # Reuse the manifest node of the original revision verbatim.
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        # Pad with null revisions so self._parents always has exactly
        # two entries (relied upon by _status below).
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        """Return the node id of the manifest reused from the original ctx."""
        return self._manifestnode

    @property
    def _manifestctx(self):
        # Look the reused manifest up through the repo's manifest log.
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        # File contents are unchanged, so delegate to the original revision.
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                # Tracked by neither parent manifest: newly added here.
                added.append(f)
            elif f in self:
                modified.append(f)
            else:
                # Tracked by a parent but absent from this ctx: removed.
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2562
2562
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        """Return True if this file differs from ``fctx``, False otherwise.

        Note: the sense is inverted relative to filecmp.cmp(), which
        returns True when the files are the *same*.
        """
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        # Arbitrary on-disk files carry no exec/symlink flag information.
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        # On-disk bytes need no filter decoding; read them verbatim.
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags):
        assert not flags
        # Open in binary mode: file contents are bytes and the read paths
        # (data()/decodeddata()) are binary; text mode would translate
        # newlines on Windows and reject bytes on Python 3.
        with open(self._path, "wb") as f:
            f.write(data)
General Comments 0
You need to be logged in to leave comments. Login now